train_dnn_seq.py

# -*- coding: utf-8 -*-
import random

import numpy as np
from tensorflow.keras.models import Sequential
from tensorflow.keras.layers import Dense
from tensorflow.keras.utils import to_categorical


def read_data(path):
    """Load a dataset file where each line is a literal "(features, label)" tuple."""
    with open(path) as f:
        lines = f.readlines()
    # Randomly sample 10% of the lines to keep training fast.
    lines = random.sample(lines, int(len(lines) / 10))
    lines = [eval(line.strip()) for line in lines]
    X, Y = zip(*lines)
    X = np.array(X)
    X = 1.0 * X / 256           # scale raw pixel values into [0, 1)
    X = X.reshape(-1, 28 * 28)  # flatten 28x28 images into 784-dim vectors
    Y = np.array(Y)
    Y = to_categorical(Y, num_classes)
    return X, Y


num_classes = 11  # 10 digit classes plus one "reject" class

train_x, train_y = read_data("train_data")
train_x_10, train_y_10 = read_data("train_data_10")  # reject data
train_x = np.concatenate((train_x, train_x_10))
train_y = np.concatenate((train_y, train_y_10))

# Fully connected network: 784 -> 256 -> 128 -> 128 -> num_classes
model = Sequential()
model.add(Dense(units=256, input_dim=28 * 28, activation='relu', use_bias=True))
model.add(Dense(units=128, activation='relu', use_bias=True))
model.add(Dense(units=128, activation='relu', use_bias=True))
model.add(Dense(units=num_classes, activation='softmax', use_bias=True))

# Compile and train the model
model.compile(loss='categorical_crossentropy', optimizer='adam', metrics=['accuracy'])
model.fit(train_x, train_y, batch_size=50, epochs=10, shuffle=True)
model.save("model")
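
# --- Optional sanity check (a minimal sketch, not part of the original script) ---
# Assumes the model saved by model.save("model") above can be reloaded with
# tensorflow.keras.models.load_model; it predicts on a few training samples
# just to verify that the saved artifact loads and produces class indices.
from tensorflow.keras.models import load_model

reloaded = load_model("model")
sample_preds = reloaded.predict(train_x[:5])
print("predicted classes:", np.argmax(sample_preds, axis=1))
print("true classes:     ", np.argmax(train_y[:5], axis=1))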