
DNN image classification

yufeng0528 4 years ago
commit
5f616b877b

+ 42 - 0
dnn_image/convert_img.py

@@ -0,0 +1,42 @@
+# -*- encoding:utf-8 -*-
+import numpy as np
+from PIL import Image
+import random
+
+
+def read_data(path):
+	with open(path) as f:
+		lines = f.readlines()
+	lines = random.sample(lines, int(len(lines) / 1000))  # keep a 0.1% sample, enough for spot-checking images
+	lines = [eval(line.strip()) for line in lines]
+	X, Y = zip(*lines)
+	X = np.array(X)
+	X = X.reshape(-1, 28 * 28)
+	Y = np.array(Y)
+	return X, Y
+
+
+def plot(x, width, height, path):
+	img = [[0 for _ in range(0, width)] for _ in range(0, height)]
+	for i in range(0, height):
+		for j in range(0, width):
+			img[i][j] = x[i * width + j]  # row-major index into the flattened image
+	img = np.array(img).astype('uint8')
+	new_im = Image.fromarray(img)
+	new_im.save(path)
+
+
+train_x, train_y = read_data("train_data")
+train_x_10, train_y_10 = read_data("train_data_10")
+train_x = np.concatenate((train_x, train_x_10))
+train_y = np.concatenate((train_y, train_y_10))
+count = 0
+
+
+for i in range(0, len(train_x)):
+	print(count)
+	path = "img/{}-{}.png".format(train_y[i], count)  # file name: <label>-<index>.png
+	print(path)
+	count += 1
+	plot(train_x[i], 28, 28, path)
+

+ 15 - 0
dnn_image/generate_random_data.py

@@ -0,0 +1,15 @@
+# -*- encoding:utf-8 -*-
+import numpy as np
+import random
+def generate():
+	X = [[random.randint(0, 255) for i in range(0, 28)] for j in range(0, 28)]  # random 28x28 image
+	X = [[0 if s < 128 else s for s in ss] for ss in X]  # zero out pixels below 128
+	return str([X, 10])  # label 10 marks these noise images as the reject class
+train_num = 6000
+test_num = 1000
+with open("train_data_10", "w") as f:
+	train_data = [generate() for _ in range(0, train_num)]
+	f.writelines("\n".join(train_data))
+with open("test_data_10", "w") as f:
+	test_data = [generate() for _ in range(0, test_num)]
+	f.writelines("\n".join(test_data))

+ 26 - 0
dnn_image/get_data.py

@@ -0,0 +1,26 @@
+# -*- encoding:utf-8 -*-
+import numpy as np
+
+
+def load_data(path='mnist.npz'):
+    f = np.load(path)  # local MNIST archive, same layout as the one Keras ships
+    x_train, y_train = f['x_train'], f['y_train']
+    x_test, y_test = f['x_test'], f['y_test']
+    print(len(y_train))
+    print(len(y_test))
+    f.close()
+    return (x_train, y_train), (x_test, y_test)
+
+
+print("start reading data")
+(train_X, train_y), (test_X, test_y) = load_data()
+print("done reading")
+
+train_data = zip(train_X, train_y)
+train_data = [str([x.tolist(), y]) for [x, y] in train_data]  # serialize each (image, label) pair as a Python literal
+test_data = zip(test_X, test_y)
+test_data = [str([x.tolist(), y]) for [x, y] in test_data]
+with open("train_data", "w") as f:
+	f.writelines("\n".join(train_data))
+with open("test_data", "w") as f:
+	f.writelines("\n".join(test_data))
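
Note: load_data reads a local mnist.npz with the x_train/y_train/x_test/y_test keys used by the Keras-distributed archive. If the file is not already on disk, roughly the same arrays can be obtained through the Keras dataset helper; a minimal sketch (assumes a standard Keras install with network access), not part of this commit:

# sketch: fetch MNIST through Keras instead of a pre-downloaded mnist.npz
from keras.datasets import mnist
(train_X, train_y), (test_X, test_y) = mnist.load_data()  # downloads and caches the archive on first use
print(train_X.shape, test_X.shape)  # (60000, 28, 28) (10000, 28, 28)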

+ 30 - 0
dnn_image/predict.py

@@ -0,0 +1,30 @@
+from keras.models import load_model
+import numpy as np
+from keras.utils import np_utils
+
+
+def read_data(path):
+	with open(path) as f:
+		lines = f.readlines()[0:3]  # only the first three samples are needed for a quick check
+	lines = [eval(line.strip()) for line in lines]
+	X, Y = zip(*lines)
+	X = np.array(X)
+	X = 1.0 * X / 256  # scale pixel values to [0, 1), matching the training preprocessing
+	X = X.reshape(-1, 28 * 28)
+	Y = np.array(Y)
+	Y = np_utils.to_categorical(Y, num_classes)
+	return X, Y
+
+
+num_classes = 11
+X, Y = read_data("test_data")
+model = load_model('model')
+results = model.predict(X)  # per-sample probabilities over the 11 classes
+
+for result, y in zip(results, Y):
+	print(result)
+	print(y)
+
+
+
+
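
predict.py prints the raw softmax vectors next to the one-hot targets. To turn those into class decisions, an argmax comparison could be appended after the predict call; a small sketch (not part of this commit) reusing the results and Y arrays from above:

# sketch: map probabilities and one-hot targets back to class indices
predicted = np.argmax(results, axis=1)
actual = np.argmax(Y, axis=1)
for p, a in zip(predicted, actual):
	print("predicted {}, actual {}".format(p, a))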

+ 47 - 0
dnn_image/train_dnn_seq.py

@@ -0,0 +1,47 @@
+# -*- encoding:utf-8 -*-
+from tensorflow.keras.optimizers import SGD, Adam
+from keras.models import Sequential
+from keras.layers import Dense
+from keras.layers import Dropout
+from keras.layers import Flatten
+from keras.utils import np_utils
+from keras import backend as K
+import numpy as np
+import random
+
+
+
+def read_data(path):
+	with open(path) as f:
+		lines = f.readlines()
+	lines = random.sample(lines, int(len(lines) / 10))  # train on a 10% sample to keep the run short
+	lines = [eval(line.strip()) for line in lines]
+	X, Y = zip(*lines)
+	X = np.array(X)
+	X = 1.0 * X / 256  # scale pixel values to [0, 1)
+	X = X.reshape(-1, 28 * 28)  # flatten each image to a 784-dimensional vector
+	Y = np.array(Y)
+	Y = np_utils.to_categorical(Y, num_classes)  # one-hot encode the labels
+	return X, Y
+
+
+num_classes = 11  # digits 0-9 plus the reject class (label 10)
+train_x, train_y = read_data("train_data")
+train_x_10, train_y_10 = read_data("train_data_10")  # reject data
+train_x = np.concatenate((train_x, train_x_10))
+train_y = np.concatenate((train_y, train_y_10))
+
+# plain fully connected network on the flattened 28*28 input
+model = Sequential()
+model.add(Dense(units=256, input_dim=28*28, activation='relu', use_bias=True))
+model.add(Dense(units=128, activation='relu', use_bias=True))
+model.add(Dense(units=128, activation='relu', use_bias=True))
+model.add(Dense(units=num_classes, use_bias=True, activation='softmax'))
+# Compile model
+model.compile(loss='categorical_crossentropy', optimizer="adam", metrics=['accuracy'])
+
+model.fit(train_x, train_y, batch_size=50, epochs=10, shuffle=True)
+model.save("model")
+
+
+
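
train_dnn_seq.py fits on a 10% sample and saves the model, but never scores it on held-out data. A follow-up evaluation on the files written by get_data.py and generate_random_data.py might look like this (a sketch appended after model.save, assuming test_data and test_data_10 exist):

# sketch: score the trained model on the held-out sets (not part of this commit)
test_x, test_y = read_data("test_data")
test_x_10, test_y_10 = read_data("test_data_10")
print(model.evaluate(test_x, test_y, batch_size=50))        # [loss, accuracy] on real digits
print(model.evaluate(test_x_10, test_y_10, batch_size=50))  # [loss, accuracy] on the reject class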