import sys
import numpy as np
from keras.datasets import mnist  # Keras is only used here to download/load MNIST

print("licheng:" + "20" + '\n')
np.random.seed(1)
(x_train, y_train), (x_test, y_test) = mnist.load_data()  # downloads MNIST on the first run
# keep the first 1,000 images; flatten each 28x28 image to 784 pixels and scale to [0, 1]
images, labels = (x_train[0:1000].reshape(1000, 28 * 28) / 255, y_train[0:1000])
'''
Exploratory prints, kept for reference:
print("x_train[0:1000]:" + str(x_train[0:1000]))
print(str(x_train[0:1000].reshape(1000, 28 * 28)))  # the flattened pixel matrix
print("images:" + str(images))  # a 1000 x 784 matrix of pixel values
print("labels:" + str(labels))  # digits 0-9, in no particular order
'''
one_hot_labels = np.zeros((len(labels), 10))  # 1000 rows x 10 columns of zeros
for i, l in enumerate(labels):
    one_hot_labels[i][l] = 1  # set the column matching the digit to 1
labels = one_hot_labels

test_images = x_test.reshape(len(x_test), 28 * 28) / 255  # same scaling as the training set
test_labels = np.zeros((len(y_test), 10))
for i, l in enumerate(y_test):
    test_labels[i][l] = 1
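# The loops above can be checked against a one-line vectorized equivalent:
# indexing an identity matrix with the label vector produces the same one-hot rows.
assert np.array_equal(labels, np.eye(10)[y_train[0:1000]])
assert np.array_equal(test_labels, np.eye(10)[y_test])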
def tanh(x):
    return np.tanh(x)

def tanh2deriv(output):
    # derivative of tanh written in terms of its output: 1 - tanh(x)**2
    return 1 - (output ** 2)

def softmax(x):
    temp = np.exp(x)
    return temp / np.sum(temp, axis=1, keepdims=True)
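# Note: np.exp can overflow for large inputs. A numerically stable variant
# (a sketch, not used below) subtracts the row-wise max first, which does not
# change the result:
def softmax_stable(x):
    shifted = x - np.max(x, axis=1, keepdims=True)
    exps = np.exp(shifted)
    return exps / np.sum(exps, axis=1, keepdims=True)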
# relu = lambda x: (x >= 0) * x   # leftovers from the earlier fully-connected version
# relu2deriv = lambda x: x >= 0
alpha, iterations = (2, 300)
pixels_per_image, num_labels = (784, 10)
batch_size = 128
input_rows = 28
input_cols = 28
kernel_rows = 3
kernel_cols = 3
num_kernels = 16
# each image yields (28-3) * (28-3) = 625 patch positions, 16 kernel outputs per patch
hidden_size = ((input_rows - kernel_rows) * (input_cols - kernel_cols)) * num_kernels
kernels = 0.02 * np.random.random((kernel_rows * kernel_cols, num_kernels)) - 0.01  # (9, 16), values in [-0.01, 0.01)
weight_1_2 = 0.2 * np.random.random((hidden_size, num_labels)) - 0.1  # (10000, 10), values in [-0.1, 0.1)
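# Sanity check of the sizes chosen above:
assert hidden_size == 625 * num_kernels == 10000
assert kernels.shape == (9, 16) and weight_1_2.shape == (10000, 10)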
def get_image_section(layer, row_from, row_to, col_from, col_to):
    # slice the same (row, col) window out of every image in the batch, adding an
    # axis so the windows can later be concatenated along axis 1
    section = layer[:, row_from:row_to, col_from:col_to]
    return section.reshape(-1, 1, row_to - row_from, col_to - col_from)
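# Quick shape check of get_image_section on toy data (a sketch; _demo is a
# made-up name): for a batch of 2 images of size 5x5, one 3x3 window comes
# back with shape (batch, 1, 3, 3).
_demo = np.arange(2 * 5 * 5).reshape(2, 5, 5)
assert get_image_section(_demo, 0, 3, 0, 3).shape == (2, 1, 3, 3)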
for j in range(iterations):  # 300 epochs over the 1,000 training images
    error, correct_cnt = (0.0, 0)
    for i in range(int(len(images) / batch_size)):  # one pass per mini-batch
        batch_start, batch_end = (i * batch_size, (i + 1) * batch_size)
        layer_0 = images[batch_start:batch_end]
        # reshape the flat 784-pixel rows back to (batch, 28, 28) for patch extraction
        layer_0 = layer_0.reshape(layer_0.shape[0], 28, 28)
        sects = list()
        # slide a 3x3 window over every image; 25 * 25 = 625 positions per image
        for row_start in range(layer_0.shape[1] - kernel_rows):
            for col_start in range(layer_0.shape[2] - kernel_cols):
                sect = get_image_section(layer_0,
                                         row_start,
                                         row_start + kernel_rows,
                                         col_start,
                                         col_start + kernel_cols)
                sects.append(sect)
        expanded_input = np.concatenate(sects, axis=1)  # (batch, 625, 3, 3)
#print("expanded_input" + str(" ") + str(expanded_input))
es = expanded_input.shape #输出为:es (1, 625, 3, 3)
#print("es" + str(" ") + str(es))
#print("es[0]" + str(" ") + str(es[0]))
#print("es[1]" + str(" ") + str(es[1]))
flattened_input = expanded_input.reshape(es[0]*es[1],-1) #输出为:flattened_input.shape (625, 9)
#print("flattened_input.shape" + str(" ") + str(np.shape(flattened_input)))
kernel_output = flattened_input.dot(kernels)#输出为:kernel_output.shape (625, 16)
#print("kernel_output" + str(" ") + str(kernel_output))
#print("kernel_output.shape" + str(" ") + str(np.shape(kernel_output)))
#print("layer_0:"+str(layer_0))
#layer_1 = relu(np.dot(layer_0,weight_0_1))#对二层神经网络的数据进行rule处理。小于0的数字都为0,大于0的数字都是本身。
layer_1 = tanh(kernel_output.reshape(es[0],-1))
#print("layer_1.shape" + str(" ") + str(np.shape(layer_1)))#layer_1.shape (1, 10000)
dropout_mask = np.random.randint(2,size=layer_1.shape)
layer_1 *= dropout_mask*2
#layer_2 = np.dot(layer_1,weight_1_2)#将第二层神经网络的值和第二层的权重加权和得到输出数据。
layer_2 = softmax(np.dot(layer_1,weight_1_2))
        for k in range(batch_size):
            # count a prediction as correct when the highest output matches the label
            labelset = labels[batch_start + k:batch_start + k + 1]
            _inc = int(np.argmax(layer_2[k:k + 1]) == np.argmax(labelset))
            correct_cnt += _inc
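        # The per-sample loop above could be replaced by one vectorized line (a sketch):
        # correct_cnt += int(np.sum(np.argmax(layer_2, axis=1) ==
        #                           np.argmax(labels[batch_start:batch_end], axis=1)))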
        # backprop: (labels - layer_2) is the negative gradient of cross-entropy
        # w.r.t. the softmax input, here also scaled down by the batch size
        layer_2_delta = (labels[batch_start:batch_end] - layer_2) \
                        / (batch_size * layer_2.shape[0])
        layer_1_delta = layer_2_delta.dot(weight_1_2.T) * tanh2deriv(layer_1)
        layer_1_delta *= dropout_mask  # only units that survived dropout get a gradient
        weight_1_2 += alpha * layer_1.T.dot(layer_2_delta)
        l1d_reshape = layer_1_delta.reshape(kernel_output.shape)
        k_update = flattened_input.T.dot(l1d_reshape)
        # note the '-' here, opposite in sign to the weight_1_2 update above; the run
        # still converges (see the log), but it may explain the unstable accuracy curve
        kernels -= alpha * k_update
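        # Shape walk-through of the updates above (with batch_size = 128):
        #   layer_2_delta: (128, 10), layer_1_delta: (128, 10000)
        #   weight_1_2 update: (10000, 128) @ (128, 10) -> (10000, 10)
        #   k_update: (9, 128*625) @ (128*625, 16) -> (9, 16), matching kernels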
    # after each epoch, measure accuracy on the full, held-out test set
    test_correct_cnt = 0
    for i in range(len(test_images)):  # one test image at a time (no dropout at test time)
        layer_0 = test_images[i:i + 1]
        layer_0 = layer_0.reshape(layer_0.shape[0], 28, 28)
        sects = list()
        for row_start in range(layer_0.shape[1] - kernel_rows):
            for col_start in range(layer_0.shape[2] - kernel_cols):
                sect = get_image_section(layer_0,
                                         row_start,
                                         row_start + kernel_rows,
                                         col_start,
                                         col_start + kernel_cols)
                sects.append(sect)
        expanded_input = np.concatenate(sects, axis=1)
        es = expanded_input.shape
        flattened_input = expanded_input.reshape(es[0] * es[1], -1)
        kernel_output = flattened_input.dot(kernels)
        layer_1 = tanh(kernel_output.reshape(es[0], -1))
        # softmax is monotonic, so argmax of the raw logits gives the same prediction
        layer_2 = np.dot(layer_1, weight_1_2)
        test_correct_cnt += int(np.argmax(layer_2) == np.argmax(test_labels[i:i + 1]))
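    # Evaluation could also be batched: run the patch extraction once over all
    # 10,000 test images and compare argmax along axis 1, e.g. (a sketch, with a
    # hypothetical layer_2_all of shape (10000, 10)):
    # test_correct_cnt = int(np.sum(np.argmax(layer_2_all, axis=1) ==
    #                               np.argmax(test_labels, axis=1)))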
    if (j % 1 == 0):  # report after every epoch
        print("\n" + "j" + str(j))
        sys.stdout.write("test-acc:" + str(test_correct_cnt / float(len(test_images))) +
                         "train-acc:" + str(correct_cnt / float(len(images))))
        print()
# Training log from one 300-epoch run:
'''
licheng:20
j0
test-acc:0.0288train-acc:0.055
j1
test-acc:0.0273train-acc:0.037
j2
test-acc:0.028train-acc:0.037
j3
test-acc:0.0292train-acc:0.04
j4
test-acc:0.0339train-acc:0.046
j5
test-acc:0.0478train-acc:0.068
j6
test-acc:0.0758train-acc:0.083
j7
test-acc:0.1316train-acc:0.096
j8
test-acc:0.2138train-acc:0.127
j9
test-acc:0.2942train-acc:0.148
j10
test-acc:0.3563train-acc:0.181
j11
test-acc:0.4023train-acc:0.209
j12
test-acc:0.4359train-acc:0.238
j13
test-acc:0.4472train-acc:0.286
j14
test-acc:0.4389train-acc:0.274
j15
test-acc:0.3951train-acc:0.257
j16
test-acc:0.2222train-acc:0.243
j17
test-acc:0.0613train-acc:0.112
j18
test-acc:0.0266train-acc:0.035
j19
test-acc:0.0127train-acc:0.026
j20
test-acc:0.0133train-acc:0.022
j21
test-acc:0.0185train-acc:0.038
j22
test-acc:0.0363train-acc:0.038
j23
test-acc:0.0929train-acc:0.067
j24
test-acc:0.1994train-acc:0.081
j25
test-acc:0.3085train-acc:0.154
j26
test-acc:0.4275train-acc:0.204
j27
test-acc:0.5324train-acc:0.256
j28
test-acc:0.5917train-acc:0.305
j29
test-acc:0.6323train-acc:0.341
j30
test-acc:0.6607train-acc:0.426
j31
test-acc:0.6815train-acc:0.439
j32
test-acc:0.7048train-acc:0.462
j33
test-acc:0.717train-acc:0.484
j34
test-acc:0.7313train-acc:0.505
j35
test-acc:0.7355train-acc:0.53
j36
test-acc:0.7417train-acc:0.548
j37
test-acc:0.747train-acc:0.534
j38
test-acc:0.7492train-acc:0.55
j39
test-acc:0.7459train-acc:0.562
j40
test-acc:0.7352train-acc:0.54
j41
test-acc:0.708train-acc:0.496
j42
test-acc:0.6486train-acc:0.456
j43
test-acc:0.5212train-acc:0.353
j44
test-acc:0.3312train-acc:0.234
j45
test-acc:0.2055train-acc:0.174
j46
test-acc:0.2162train-acc:0.136
j47
test-acc:0.2694train-acc:0.171
j48
test-acc:0.3255train-acc:0.172
j49
test-acc:0.361train-acc:0.186
j50
test-acc:0.4221train-acc:0.21
j51
test-acc:0.5172train-acc:0.223
j52
test-acc:0.6008train-acc:0.262
j53
test-acc:0.6478train-acc:0.308
j54
test-acc:0.6763train-acc:0.363
j55
test-acc:0.696train-acc:0.402
j56
test-acc:0.7079train-acc:0.434
j57
test-acc:0.7209train-acc:0.441
j58
test-acc:0.7304train-acc:0.475
j59
test-acc:0.7358train-acc:0.475
j60
test-acc:0.7405train-acc:0.525
j61
test-acc:0.7499train-acc:0.517
j62
test-acc:0.7534train-acc:0.517
j63
test-acc:0.7608train-acc:0.538
j64
test-acc:0.7646train-acc:0.554
j65
test-acc:0.7726train-acc:0.57
j66
test-acc:0.779train-acc:0.586
j67
test-acc:0.7854train-acc:0.595
j68
test-acc:0.7853train-acc:0.591
j69
test-acc:0.7927train-acc:0.605
j70
test-acc:0.7975train-acc:0.64
j71
test-acc:0.8013train-acc:0.621
j72
test-acc:0.8028train-acc:0.626
j73
test-acc:0.8095train-acc:0.631
j74
test-acc:0.8099train-acc:0.638
j75
test-acc:0.8157train-acc:0.661
j76
test-acc:0.8155train-acc:0.639
j77
test-acc:0.8183train-acc:0.65
j78
test-acc:0.8217train-acc:0.67
j79
test-acc:0.8247train-acc:0.675
j80
test-acc:0.8237train-acc:0.666
j81
test-acc:0.8269train-acc:0.673
j82
test-acc:0.8273train-acc:0.704
j83
test-acc:0.8313train-acc:0.674
j84
test-acc:0.8293train-acc:0.686
j85
test-acc:0.8333train-acc:0.699
j86
test-acc:0.8358train-acc:0.694
j87
test-acc:0.8375train-acc:0.704
j88
test-acc:0.837train-acc:0.697
j89
test-acc:0.8398train-acc:0.704
j90
test-acc:0.8396train-acc:0.687
j91
test-acc:0.8436train-acc:0.705
j92
test-acc:0.8436train-acc:0.711
j93
test-acc:0.8447train-acc:0.721
j94
test-acc:0.845train-acc:0.719
j95
test-acc:0.8471train-acc:0.724
j96
test-acc:0.8478train-acc:0.726
j97
test-acc:0.848train-acc:0.718
j98
test-acc:0.8495train-acc:0.719
j99
test-acc:0.85train-acc:0.73
j100
test-acc:0.8513train-acc:0.737
j101
test-acc:0.8504train-acc:0.73
j102
test-acc:0.8506train-acc:0.717
j103
test-acc:0.8528train-acc:0.74
j104
test-acc:0.8531train-acc:0.733
j105
test-acc:0.8538train-acc:0.73
j106
test-acc:0.8568train-acc:0.721
j107
test-acc:0.857train-acc:0.75
j108
test-acc:0.8558train-acc:0.731
j109
test-acc:0.8578train-acc:0.744
j110
test-acc:0.8589train-acc:0.754
j111
test-acc:0.8578train-acc:0.732
j112
test-acc:0.8583train-acc:0.747
j113
test-acc:0.859train-acc:0.747
j114
test-acc:0.8597train-acc:0.751
j115
test-acc:0.8602train-acc:0.74
j116
test-acc:0.8601train-acc:0.753
j117
test-acc:0.8588train-acc:0.746
j118
test-acc:0.8611train-acc:0.741
j119
test-acc:0.8616train-acc:0.731
j120
test-acc:0.8632train-acc:0.753
j121
test-acc:0.8611train-acc:0.743
j122
test-acc:0.8629train-acc:0.752
j123
test-acc:0.8647train-acc:0.76
j124
test-acc:0.8651train-acc:0.766
j125
test-acc:0.8659train-acc:0.752
j126
test-acc:0.868train-acc:0.756
j127
test-acc:0.8649train-acc:0.767
j128
test-acc:0.8661train-acc:0.747
j129
test-acc:0.8669train-acc:0.753
j130
test-acc:0.8695train-acc:0.753
j131
test-acc:0.8691train-acc:0.76
j132
test-acc:0.866train-acc:0.756
j133
test-acc:0.8668train-acc:0.769
j134
test-acc:0.8691train-acc:0.77
j135
test-acc:0.8681train-acc:0.757
j136
test-acc:0.8702train-acc:0.77
j137
test-acc:0.8705train-acc:0.77
j138
test-acc:0.8685train-acc:0.768
j139
test-acc:0.8664train-acc:0.774
j140
test-acc:0.8668train-acc:0.756
j141
test-acc:0.8704train-acc:0.783
j142
test-acc:0.8702train-acc:0.775
j143
test-acc:0.8728train-acc:0.769
j144
test-acc:0.8725train-acc:0.776
j145
test-acc:0.8721train-acc:0.772
j146
test-acc:0.8717train-acc:0.765
j147
test-acc:0.8747train-acc:0.777
j148
test-acc:0.8746train-acc:0.77
j149
test-acc:0.8735train-acc:0.778
j150
test-acc:0.8733train-acc:0.785
j151
test-acc:0.8732train-acc:0.76
j152
test-acc:0.8724train-acc:0.779
j153
test-acc:0.8755train-acc:0.772
j154
test-acc:0.8728train-acc:0.773
j155
test-acc:0.8755train-acc:0.784
j156
test-acc:0.8731train-acc:0.774
j157
test-acc:0.8743train-acc:0.782
j158
test-acc:0.8762train-acc:0.772
j159
test-acc:0.8755train-acc:0.79
j160
test-acc:0.8751train-acc:0.774
j161
test-acc:0.8749train-acc:0.782
j162
test-acc:0.8744train-acc:0.78
j163
test-acc:0.8766train-acc:0.782
j164
test-acc:0.874train-acc:0.796
j165
test-acc:0.8754train-acc:0.798
j166
test-acc:0.8766train-acc:0.794
j167
test-acc:0.8747train-acc:0.784
j168
test-acc:0.8768train-acc:0.796
j169
test-acc:0.8757train-acc:0.789
j170
test-acc:0.8767train-acc:0.79
j171
test-acc:0.8732train-acc:0.791
j172
test-acc:0.8766train-acc:0.797
j173
test-acc:0.8773train-acc:0.789
j174
test-acc:0.8778train-acc:0.781
j175
test-acc:0.8758train-acc:0.799
j176
test-acc:0.8774train-acc:0.785
j177
test-acc:0.8766train-acc:0.796
j178
test-acc:0.8784train-acc:0.803
j179
test-acc:0.8788train-acc:0.794
j180
test-acc:0.8779train-acc:0.794
j181
test-acc:0.8779train-acc:0.8
j182
test-acc:0.8786train-acc:0.791
j183
test-acc:0.8778train-acc:0.787
j184
test-acc:0.8768train-acc:0.781
j185
test-acc:0.8765train-acc:0.786
j186
test-acc:0.8764train-acc:0.793
j187
test-acc:0.8788train-acc:0.796
j188
test-acc:0.8792train-acc:0.789
j189
test-acc:0.8764train-acc:0.79
j190
test-acc:0.8774train-acc:0.787
j191
test-acc:0.8766train-acc:0.782
j192
test-acc:0.8802train-acc:0.798
j193
test-acc:0.8783train-acc:0.789
j194
test-acc:0.8797train-acc:0.785
j195
test-acc:0.8792train-acc:0.807
j196
test-acc:0.878train-acc:0.796
j197
test-acc:0.8785train-acc:0.801
j198
test-acc:0.8777train-acc:0.81
j199
test-acc:0.8772train-acc:0.784
j200
test-acc:0.8777train-acc:0.792
j201
test-acc:0.8784train-acc:0.794
j202
test-acc:0.8788train-acc:0.795
j203
test-acc:0.8802train-acc:0.781
j204
test-acc:0.8798train-acc:0.804
j205
test-acc:0.878train-acc:0.779
j206
test-acc:0.8788train-acc:0.792
j207
test-acc:0.8763train-acc:0.793
j208
test-acc:0.8794train-acc:0.792
j209
test-acc:0.8798train-acc:0.803
j210
test-acc:0.8788train-acc:0.804
j211
test-acc:0.8792train-acc:0.797
j212
test-acc:0.8764train-acc:0.791
j213
test-acc:0.88train-acc:0.801
j214
test-acc:0.8812train-acc:0.799
j215
test-acc:0.8806train-acc:0.79
j216
test-acc:0.88train-acc:0.8
j217
test-acc:0.8804train-acc:0.802
j218
test-acc:0.8786train-acc:0.807
j219
test-acc:0.8819train-acc:0.797
j220
test-acc:0.8795train-acc:0.799
j221
test-acc:0.8789train-acc:0.815
j222
test-acc:0.879train-acc:0.816
j223
test-acc:0.8793train-acc:0.809
j224
test-acc:0.8814train-acc:0.795
j225
test-acc:0.8796train-acc:0.799
j226
test-acc:0.8805train-acc:0.806
j227
test-acc:0.8803train-acc:0.808
j228
test-acc:0.8782train-acc:0.801
j229
test-acc:0.8803train-acc:0.814
j230
test-acc:0.8808train-acc:0.8
j231
test-acc:0.8808train-acc:0.798
j232
test-acc:0.8808train-acc:0.82
j233
test-acc:0.8794train-acc:0.794
j234
test-acc:0.8809train-acc:0.806
j235
test-acc:0.8807train-acc:0.808
j236
test-acc:0.8789train-acc:0.802
j237
test-acc:0.8796train-acc:0.81
j238
test-acc:0.8768train-acc:0.805
j239
test-acc:0.8781train-acc:0.792
j240
test-acc:0.8786train-acc:0.809
j241
test-acc:0.8761train-acc:0.802
j242
test-acc:0.8775train-acc:0.811
j243
test-acc:0.8806train-acc:0.814
j244
test-acc:0.8795train-acc:0.804
j245
test-acc:0.8787train-acc:0.801
j246
test-acc:0.8776train-acc:0.795
j247
test-acc:0.8785train-acc:0.808
j248
test-acc:0.8788train-acc:0.803
j249
test-acc:0.8776train-acc:0.813
j250
test-acc:0.879train-acc:0.808
j251
test-acc:0.8788train-acc:0.803
j252
test-acc:0.8791train-acc:0.812
j253
test-acc:0.8793train-acc:0.804
j254
test-acc:0.8779train-acc:0.815
j255
test-acc:0.8798train-acc:0.811
j256
test-acc:0.8798train-acc:0.806
j257
test-acc:0.8801train-acc:0.803
j258
test-acc:0.8779train-acc:0.795
j259
test-acc:0.8799train-acc:0.803
j260
test-acc:0.8801train-acc:0.805
j261
test-acc:0.8788train-acc:0.807
j262
test-acc:0.8786train-acc:0.804
j263
test-acc:0.8792train-acc:0.806
j264
test-acc:0.8779train-acc:0.796
j265
test-acc:0.8785train-acc:0.821
j266
test-acc:0.8794train-acc:0.81
j267
test-acc:0.8784train-acc:0.816
j268
test-acc:0.8777train-acc:0.812
j269
test-acc:0.8792train-acc:0.812
j270
test-acc:0.8779train-acc:0.813
j271
test-acc:0.8782train-acc:0.82
j272
test-acc:0.8791train-acc:0.821
j273
test-acc:0.878train-acc:0.823
j274
test-acc:0.8788train-acc:0.816
j275
test-acc:0.8794train-acc:0.82
j276
test-acc:0.8779train-acc:0.829
j277
test-acc:0.8794train-acc:0.809
j278
test-acc:0.8751train-acc:0.806
j279
test-acc:0.8796train-acc:0.813
j280
test-acc:0.88train-acc:0.816
j281
test-acc:0.8797train-acc:0.819
j282
test-acc:0.8805train-acc:0.809
j283
test-acc:0.8804train-acc:0.811
j284
test-acc:0.8779train-acc:0.808
j285
test-acc:0.8818train-acc:0.82
j286
test-acc:0.8791train-acc:0.822
j287
test-acc:0.8792train-acc:0.817
j288
test-acc:0.877train-acc:0.814
j289
test-acc:0.8785train-acc:0.807
j290
test-acc:0.8781train-acc:0.817
j291
test-acc:0.8795train-acc:0.82
j292
test-acc:0.8803train-acc:0.824
j293
test-acc:0.8779train-acc:0.812
j294
test-acc:0.8784train-acc:0.816
j295
test-acc:0.8771train-acc:0.817
j296
test-acc:0.877train-acc:0.826
j297
test-acc:0.8775train-acc:0.816
j298
test-acc:0.8774train-acc:0.804
j299
test-acc:0.8775train-acc:0.814
'''
Judging from the log, the final result (test accuracy around 0.88) is about the same as what our previous program achieved, and the accuracy curve is clearly unstable early in training.
So there is plenty of room for optimization, and much of that work has already been done by others; as we continue studying, we will dig deeper into other, better-established algorithms.