I followed a tutorial on evaluating a neural network model with cross-validation, using the following code:
# Multiclass Classification with the Iris Flowers Dataset
import numpy
import pandas
from keras.models import Sequential
from keras.layers import Dense
from keras.wrappers.scikit_learn import KerasClassifier
from keras.utils import np_utils
from sklearn.model_selection import cross_val_score
from sklearn.model_selection import KFold
from sklearn.preprocessing import LabelEncoder
from sklearn.pipeline import Pipeline
# fix random seed for reproducibility
seed = 7
numpy.random.seed(seed)
# load dataset
dataframe = pandas.read_csv("/content/drive/My Drive/iris.data", header=None)
dataset = dataframe.values
X = dataset[:,0:4].astype(float)
Y = dataset[:,4]
# encode class values as integers
encoder = LabelEncoder()
encoder.fit(Y)
encoded_Y = encoder.transform(Y)
# convert integers to dummy variables (i.e. one hot encoded)
dummy_y = np_utils.to_categorical(encoded_Y)
# define baseline model
def baseline_model():
    # create model
    model = Sequential()
    model.add(Dense(4, input_dim=4, activation="relu", kernel_initializer="normal"))
    model.add(Dense(3, activation="sigmoid", kernel_initializer="normal"))
    # Compile model
    model.compile(loss='categorical_crossentropy', optimizer='adam', metrics=['accuracy'])
    return model
estimator = KerasClassifier(build_fn=baseline_model, nb_epoch=200, batch_size=5, verbose=0)
kfold = KFold(n_splits=10, shuffle=True, random_state=seed)
results = cross_val_score(estimator, X, dummy_y, cv=kfold)
print("Accuracy: %.2f%% (%.2f%%)" % (results.mean()*100, results.std()*100))
The tutorial reports an accuracy of around 95.33% (4.27%),
but I get roughly Accuracy: 34.00% (13.15%)
across several attempts. The model code seems to be exactly the same. I downloaded the data from here as instructed. What could be going wrong?
Replace this:
model.add(Dense(4, input_dim=4, activation="relu", kernel_initializer="normal"))
With this:
model.add(Dense(16, activation="relu"))
model.add(Dense(32, activation="relu"))
Then change your output layer to:
model.add(Dense(3, activation="softmax", kernel_initializer="normal"))
Your hidden layers were tiny, and your output activation was wrong. For 3 or more mutually exclusive classes, it should be softmax.
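As a quick illustration of the difference (a minimal NumPy sketch, not part of the tutorial): softmax normalizes the outputs of the three units into a probability distribution that sums to 1, while independent sigmoid units do not.
import numpy
logits = numpy.array([2.0, 1.0, 0.1])                    # example raw outputs for the 3 classes
softmax = numpy.exp(logits) / numpy.exp(logits).sum()    # normalized across the classes
sigmoid = 1.0 / (1.0 + numpy.exp(-logits))               # each unit squashed independently
print(softmax, softmax.sum())   # sums to 1.0, so the outputs behave like class probabilities
print(sigmoid, sigmoid.sum())   # each value is in (0, 1), but the sum is not 1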
FULL working code:
import numpy
from keras.models import Sequential
from keras.layers import Dense
from keras.wrappers.scikit_learn import KerasClassifier
from keras.utils import np_utils
from sklearn.model_selection import cross_val_score
from sklearn.model_selection import KFold
from sklearn.preprocessing import MinMaxScaler
seed = 7
numpy.random.seed(seed)
from sklearn.datasets import load_iris
X, encoded_Y = load_iris(return_X_y=True)
mms = MinMaxScaler()
X = mms.fit_transform(X)
dummy_y = np_utils.to_categorical(encoded_Y)
def baseline_model():
    model = Sequential()
    model.add(Dense(4, input_dim=4, activation="relu", kernel_initializer="normal"))
    model.add(Dense(8, activation="relu", kernel_initializer="normal"))
    model.add(Dense(3, activation="softmax", kernel_initializer="normal"))
    model.compile(loss='categorical_crossentropy', optimizer='adam', metrics=['accuracy'])
    return model
estimator = KerasClassifier(build_fn=baseline_model, epochs=200, verbose=0)
kfold = KFold(n_splits=10, shuffle=True, random_state=seed)
results = cross_val_score(estimator, X, dummy_y, cv=kfold)
print(results)
Out[5]:
array([0.60000002, 0.93333334, 1. , 0.66666669, 0.80000001,
1. , 1. , 0.93333334, 0.80000001, 0.86666667])
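If you want the same summary as the tutorial, you can reuse the print line from your question; the folds above average out to roughly 86% mean accuracy:
print("Accuracy: %.2f%% (%.2f%%)" % (results.mean()*100, results.std()*100))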