Deep Learning
[Book cover: Deep Learning, John D. Kelleher, The MIT Press Essential Knowledge series]
In [1]:
import numpy as np
In [2]:
input_data = np.array([2, 3])
node_weights = {'node_0': np.array([1, 1]),
                'node_1': np.array([-1, 1]),
                'output_node': np.array([2, -1])}
In [3]:
# Calculate each hidden node's value as the weighted sum of the inputs
value_1 = (input_data * node_weights['node_0']).sum()
value_2 = (input_data * node_weights['node_1']).sum()
In [4]:
print(value_1)
print(value_2)
In [5]:
hidden_layer = np.array([value_1, value_2])
print(hidden_layer)
output = (hidden_layer * node_weights['output_node']).sum()
print('Total transactions:', output)
In [6]:
def relu(x):
    # Rectified linear unit: pass positive values through, clip negatives to 0
    return max(0, x)

value_1_output = relu(value_1)
value_2_output = relu(value_2)
In [7]:
hidden_layer = np.array([value_1_output, value_2_output])
output = (hidden_layer * node_weights['output_node']).sum()
print(output)
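The cells above hard-code each node; the same forward pass can be wrapped in a small helper. A minimal sketch (not part of the original notebook), reusing input_data, node_weights, and relu from above:
In [ ]:
def forward_pass(input_data, weights, activation=relu):
    # Hidden-node values: weighted sums of the inputs, passed through ReLU
    node_0 = activation((input_data * weights['node_0']).sum())
    node_1 = activation((input_data * weights['node_1']).sum())
    hidden_layer = np.array([node_0, node_1])
    # Output: weighted sum of the hidden-node values
    return (hidden_layer * weights['output_node']).sum()

print(forward_pass(input_data, node_weights))  # 9, matching the cell above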
In [48]:
input_data = np.array([1,3,5])
weights = np.array([0,2,1])
target = 4
In [49]:
preds = (weights * input_data).sum()
print(preds)
In [50]:
error = preds - target
print(error)
# Slope of the squared error (preds - target)**2 with respect to each weight
slope = 2 * input_data * error
print(slope)
In [51]:
learning_rate = 0.01
# The learning rate scales each weight update so the loss decreases steadily;
# in other words, the prediction moves closer to the target value.
weight_update = weights - learning_rate * slope
new_preds = (weight_update * input_data).sum()
new_error = new_preds - target
print(new_error)
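A single update shrinks the error from 7 to about 2.1; repeating the step drives it toward zero. A quick sketch (the loop and the name w are mine, not from the notebook):
In [ ]:
# Repeat the gradient-descent update and watch the error shrink toward zero
w = weights.copy()
for step in range(10):
    preds = (w * input_data).sum()
    error = preds - target
    slope = 2 * input_data * error
    w = w - learning_rate * slope
    print('step %d, error: %f' % (step, error))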
In [8]:
import numpy as np
import pandas as pd
wages = pd.read_csv('/Users/shivampandey/Downloads/python files/datasets/hourly_wages.csv')
wages.head()
Out[8]:
In [12]:
from keras.models import Sequential
from keras.layers import Dense
In [13]:
wages_target = wages['wage_per_hour']
In [14]:
wages = wages.drop(['wage_per_hour'], axis='columns')
In [15]:
n_cols = wages.shape[1]  # number of predictor columns (features)
In [16]:
model = Sequential()
model.add(Dense(50, activation='relu', input_shape=(n_cols,)))
model.add(Dense(32, activation='relu'))
model.add(Dense(1))
In [17]:
model.compile(optimizer='adam', loss='mean_squared_error')
# Verify that the model contains information from compiling
print("Loss function: " + model.loss)
In [18]:
model.fit(wages, wages_target)
Out[18]:
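Once fit, the model can score new rows with the same columns. A small sketch (assumed usage, not in the original notebook) that predicts on the first five training rows just to show the call:
In [ ]:
# Predict hourly wages for the first five rows; real use would pass unseen data
wage_preds = model.predict(wages.iloc[:5].values)
print(wage_preds)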
In [19]:
titanic = pd.read_csv('/Users/shivampandey/Downloads/python files/datasets/titanic_all_numeric.csv')
titanic.head()
Out[19]:
In [30]:
# Predictor matrix: every column except the 'survived' target
# (.as_matrix() was removed from pandas; .values is the replacement)
predictions = titanic.drop(['survived'], axis=1).values
In [38]:
n_cols = predictions.shape[1]
n_cols
Out[38]:
In [39]:
from keras.utils import to_categorical
target = to_categorical(titanic.survived)
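to_categorical one-hot encodes the 0/1 labels into one column per class. A quick illustration with made-up labels:
In [ ]:
# Made-up labels: 0 becomes [1, 0] and 1 becomes [0, 1]
print(to_categorical(np.array([0, 1, 1])))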
In [40]:
model = Sequential()
# hidden layer
model.add(Dense(50, activation='relu', input_shape=(n_cols,)))
In [41]:
# The output layer has a separate node for each possible outcome,
# so softmax is used as its activation
model.add(Dense(2, activation='softmax'))
In [45]:
model.compile(optimizer='sgd', loss='categorical_crossentropy')
In [43]:
model.fit(predictions, target)
Out[43]:
In [ ]:
# Specify, compile, and fit the model
model = Sequential()
model.add(Dense(32, activation='relu', input_shape=(n_cols,)))
model.add(Dense(2, activation='softmax'))
model.compile(optimizer='sgd',
              loss='categorical_crossentropy',
              metrics=['accuracy'])
model.fit(predictions, target)
# Calculate predictions on pred_data, assumed here to be new passenger rows
# with the same feature columns as the training data
predictions = model.predict(pred_data)
# Predicted probability of survival: second column of the softmax output
predicted_prob_true = predictions[:, 1]
print(predicted_prob_true)
In [46]:
from keras.optimizers import SGD
In [61]:
def get_new_model():
    model = Sequential()
    model.add(Dense(32, activation='relu', input_shape=(n_cols,)))
    model.add(Dense(2, activation='softmax'))
    return model
In [62]:
lr_list = [0.000001, 0.01, 0.1]
for lr in lr_list:
    print('\n\nTesting model with learning rate: %f\n' % lr)
    # Build a fresh, unfitted model for each learning rate
    model = get_new_model()
    # SGD optimizer with the specified learning rate: sgd_optimizer
    sgd_optimizer = SGD(lr=lr)
    model.compile(optimizer=sgd_optimizer, loss='categorical_crossentropy')
    model.fit(predictions, target)
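Reading the fit logs works, but capturing each run's final loss makes the comparison explicit. A sketch under the same setup (final_losses is my name, not the notebook's):
In [ ]:
# Record each learning rate's final training loss for an explicit comparison
final_losses = {}
for lr in lr_list:
    model = get_new_model()
    model.compile(optimizer=SGD(lr=lr), loss='categorical_crossentropy')
    history = model.fit(predictions, target, verbose=False)
    final_losses[lr] = history.history['loss'][-1]
print(final_losses)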
In [84]:
# Evaluating the model on a validation dataset
predictions = titanic.drop(['survived'], axis=1)
n_cols = predictions.shape[1]
model = Sequential()
model.add(Dense(100, activation='relu', input_shape=(n_cols,)))
model.add(Dense(100, activation='relu'))
model.add(Dense(2, activation='softmax'))
In [85]:
model.compile(optimizer='adam', loss='categorical_crossentropy')
# validation_split=0.3 holds out the last 30% of rows and reports their loss
# after each epoch
model.fit(predictions, target, validation_split=0.3)
Out[85]:
In [86]:
from keras.callbacks import EarlyStopping
early_stopping = EarlyStopping(patience=2)
# patience=2: training stops automatically if the validation loss fails to
# improve for 2 consecutive epochs
model.fit(predictions, target, validation_split=0.3, epochs=30, callbacks=[early_stopping])
Out[86]:
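EarlyStopping monitors val_loss by default; the monitor can be set explicitly, and Keras 2.2.3 and later can also roll the model back to its best epoch. A hedged sketch (restore_best_weights is unavailable in older Keras, and early_stopping_best is my name):
In [ ]:
# Stop on stalled validation loss and restore the best weights seen
# (restore_best_weights requires Keras >= 2.2.3)
early_stopping_best = EarlyStopping(monitor='val_loss', patience=2,
                                    restore_best_weights=True)
model.fit(predictions, target, validation_split=0.3, epochs=30,
          callbacks=[early_stopping_best])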
In [87]:
model_1 = Sequential()
model_1.add(Dense(10, activation='relu', input_shape=(n_cols,)))
model_1.add(Dense(10, activation='relu'))
model_1.add(Dense(2, activation='softmax'))
In [89]:
history = model.fit(predictions, target, validation_split=0.3, epochs=30,
                    callbacks=[early_stopping], verbose=False)
model_1.compile(optimizer='adam', loss='categorical_crossentropy')
history_1 = model_1.fit(predictions, target, validation_split=0.3, epochs=30,
                        callbacks=[early_stopping], verbose=False)
Out[89]:
In [ ]:
import matplotlib.pyplot as plt
# fit returns a History object; its .history dict holds the per-epoch metrics
plt.plot(history.history['val_loss'], 'r', history_1.history['val_loss'], 'b')
plt.xlabel('Epochs')
plt.ylabel('Validation loss')
plt.show()