I'm using the birds dataset from http://www-cvr.ai.uiuc.edu/ponce_grp/data/.
Here is the code:
import os
import glob  # file search
import numpy as np
from PIL import Image  # needed for Image.open in the loaders below
from pandas import Series, DataFrame
from six.moves import range
from keras.preprocessing.image import ImageDataGenerator
from keras.models import Sequential
from keras.layers.core import Dense, Dropout, Activation, Flatten
from keras.layers.advanced_activations import PReLU
from keras.layers.convolutional import Convolution2D, MaxPooling2D, ZeroPadding2D
from keras.optimizers import SGD, Adadelta, Adagrad
from keras.utils import np_utils, generic_utils
def load_data():
    # 540 training images, 150x150 RGB, six bird classes
    data = np.empty((540, 150, 150, 3), dtype="float32")
    label = np.empty((540,))
    imgs = os.listdir(r"C:\Users\user\Desktop\birds\train")
    classes = ["egr", "man", "owl", "puf", "tou", "wod"]
    for i in range(len(imgs)):
        for cls, key in enumerate(classes):
            if key in imgs[i]:
                img = Image.open("C:/Users/user/Desktop/birds/train/" + imgs[i])
                arr = np.asarray(img, dtype="float32")
                # ndarray.resize truncates or zero-pads the raw pixel buffer to
                # (150, 150, 3); it does not rescale the image
                arr.resize((150, 150, 3))
                data[i, :, :, :] = arr
                label[i] = cls
                break
        else:
            # filename matches none of the six prefixes: stop reading
            break
    return data, label
train_data,train_labels = load_data()
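To double-check the loader I also print a few basics (just a quick sanity check, not part of the training script itself):
print(train_data.shape, train_data.dtype)            # expect (540, 150, 150, 3) float32
print(train_data.min(), train_data.max())            # raw pixel values, typically 0..255
print(np.unique(train_labels, return_counts=True))   # how many images fell into each class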
def load_data1():
    # 60 test images, same layout as the training loader above
    data = np.empty((60, 150, 150, 3), dtype="float32")
    label = np.empty((60,))
    imgs = os.listdir(r"C:\Users\user\Desktop\birds\test")
    classes = ["egr", "man", "owl", "puf", "tou", "wod"]
    for i in range(len(imgs)):
        for cls, key in enumerate(classes):
            if key in imgs[i]:
                img = Image.open("C:/Users/user/Desktop/birds/test/" + imgs[i])
                arr = np.asarray(img, dtype="float32")
                arr.resize((150, 150, 3))
                data[i, :, :, :] = arr
                label[i] = cls
                break
        else:
            break
    return data, label
datatest,labeltest = load_data1()
y_train = np_utils.to_categorical(train_labels, num_classes=6)
# to_categorical turns an integer label into a one-hot row, e.g. 2 -> [0. 0. 1. 0. 0. 0.]
y_test = np_utils.to_categorical(labeltest, num_classes=6)
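As a quick check that the one-hot encoding is what I expect (a throwaway snippet using the names defined above):
print(np_utils.to_categorical([2], num_classes=6))   # [[0. 0. 1. 0. 0. 0.]]
print(y_train.shape)                                 # (540, 6)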
model = Sequential()
model.add(Convolution2D(32, (3, 3),input_shape=(150, 150,3)))
model.add(Activation('relu'))
model.add(MaxPooling2D(pool_size=(2, 2)))
model.add(Convolution2D(64, (3, 3)))
model.add(Activation('relu'))
model.add(MaxPooling2D(pool_size=(2, 2)))
model.add(Convolution2D(64, (3, 3)))
model.add(Activation('relu'))
model.add(MaxPooling2D(pool_size=(2, 2)))
model.add(Flatten())
model.add(Dense(64))
model.add(Activation('relu'))
model.add(Dropout(0.5))
model.add(Dense(6))
model.add(Activation('softmax'))
model.compile(loss='categorical_crossentropy',
              optimizer='rmsprop',
              metrics=['accuracy'])
model.fit(train_data, y_train,
          nb_epoch=50, batch_size=20,
          validation_data=(datatest, y_test))
The output I get looks like this, and it seems strange to me:
Epoch 1/50
540/540 [==============================] - 21s 40ms/step - loss: 2.0500 - acc: 0.1630 - val_loss: 1.9806 - val_acc: 0.1667
Epoch 2/50
540/540 [==============================] - 21s 39ms/step - loss: 1.9121 - acc: 0.1574 - val_loss: 1.8918 - val_acc: 0.1667
Epoch 3/50
540/540 [==============================] - 21s 39ms/step - loss: 1.8998 - acc: 0.1704 - val_loss: 1.8457 - val_acc: 0.1667
Epoch 4/50
540/540 [==============================] - 21s 39ms/step - loss: 1.8353 - acc: 0.1704 - val_loss: 1.8256 - val_acc: 0.1667
Epoch 5/50
540/540 [==============================] - 21s 38ms/step - loss: 1.8449 - acc: 0.1574 - val_loss: 1.8175 - val_acc: 0.1667
Epoch 6/50
540/540 [==============================] - 21s 38ms/step - loss: 1.8065 - acc: 0.1796 - val_loss: 1.8098 - val_acc: 0.1667
Epoch 7/50
540/540 [==============================] - 21s 39ms/step - loss: 1.8136 - acc: 0.1778 - val_loss: 1.8035 - val_acc: 0.1667
Epoch 8/50
540/540 [==============================] - 22s 41ms/step - loss: 1.8419 - acc: 0.1852 - val_loss: 1.7981 - val_acc: 0.1667
Epoch 9/50
540/540 [==============================] - 21s 39ms/step - loss: 1.8219 - acc: 0.1685 - val_loss: 1.7956 - val_acc: 0.1667
Epoch 10/50
540/540 [==============================] - 22s 40ms/step - loss: 1.7954 - acc: 0.1889 - val_loss: 1.7936 - val_acc: 0.1667
Epoch 11/50
540/540 [==============================] - 22s 40ms/step - loss: 1.8192 - acc: 0.1741 - val_loss: 1.7926 - val_acc: 0.1667
Epoch 12/50
540/540 [==============================] - 21s 40ms/step - loss: 1.8244 - acc: 0.1741 - val_loss: 1.7921 - val_acc: 0.1667
Epoch 13/50
540/540 [==============================] - 23s 42ms/step - loss: 1.8233 - acc: 0.1815 - val_loss: 1.7919 - val_acc: 0.1667
Epoch 14/50
540/540 [==============================] - 22s 40ms/step - loss: 1.7947 - acc: 0.1889 - val_loss: 1.7919 - val_acc: 0.1667
Epoch 15/50
540/540 [==============================] - 21s 39ms/step - loss: 1.7991 - acc: 0.2019 - val_loss: 1.7919 - val_acc: 0.1667
Epoch 16/50
540/540 [==============================] - 21s 39ms/step - loss: 1.8107 - acc: 0.2037 - val_loss: 1.7923 - val_acc: 0.1667
Epoch 17/50
540/540 [==============================] - 21s 40ms/step - loss: 1.8087 - acc: 0.1815 - val_loss: 1.7945 - val_acc: 0.1500
Epoch 18/50
540/540 [==============================] - 21s 39ms/step - loss: 1.8248 - acc: 0.2019 - val_loss: 1.7977 - val_acc: 0.1667
Epoch 19/50
540/540 [==============================] - 21s 39ms/step - loss: 1.8012 - acc: 0.1611 - val_loss: 1.7968 - val_acc: 0.1667
Epoch 20/50
540/540 [==============================] - 21s 39ms/step - loss: 1.8415 - acc: 0.1796 - val_loss: 1.7933 - val_acc: 0.1667
Epoch 21/50
540/540 [==============================] - 21s 39ms/step - loss: 1.8390 - acc: 0.1778 - val_loss: 1.8040 - val_acc: 0.1667
Epoch 22/50
540/540 [==============================] - 21s 39ms/step - loss: 1.8363 - acc: 0.1778 - val_loss: 1.8007 - val_acc: 0.1667
Epoch 23/50
540/540 [==============================] - 21s 39ms/step - loss: 1.8053 - acc: 0.1630 - val_loss: 1.7787 - val_acc: 0.1500
Epoch 24/50
540/540 [==============================] - 21s 39ms/step - loss: 1.8064 - acc: 0.1704 - val_loss: 1.7940 - val_acc: 0.1667
Epoch 25/50
540/540 [==============================] - 21s 39ms/step - loss: 1.8006 - acc: 0.1481 - val_loss: 1.7979 - val_acc: 0.1667
Epoch 26/50
540/540 [==============================] - 21s 39ms/step - loss: 1.8004 - acc: 0.1630 - val_loss: 1.7972 - val_acc: 0.1667
Epoch 27/50
540/540 [==============================] - 21s 39ms/step - loss: 1.8358 - acc: 0.1870 - val_loss: 1.7968 - val_acc: 0.1667
Epoch 28/50
540/540 [==============================] - 21s 39ms/step - loss: 1.7930 - acc: 0.1759 - val_loss: 1.7969 - val_acc: 0.1667
Epoch 29/50
540/540 [==============================] - 23s 42ms/step - loss: 1.8084 - acc: 0.1648 - val_loss: 1.7971 - val_acc: 0.1667
The main issue is that val_acc stays at 0.1667 the whole time.
That doesn't seem right to me. Has anyone run into this kind of problem?
Thanks for any answers.
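One thing I wonder about, though I haven't verified it: 0.1667 is exactly 1/6, i.e. chance level for six classes, so maybe the network only ever predicts a single class. My loader feeds raw 0-255 pixels and uses ndarray.resize, which only reshapes the buffer rather than rescaling the image, so perhaps the preprocessing is the problem. A minimal sketch of what I mean by rescaling and normalizing (load_image is a hypothetical helper, not part of the script above):
def load_image(path):
    # hypothetical helper: resize with PIL (interpolates) instead of ndarray.resize
    img = Image.open(path).convert("RGB").resize((150, 150))
    # scale pixels to [0, 1] so the network does not see raw 0-255 values
    return np.asarray(img, dtype="float32") / 255.0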