from keras.datasets import imdb
from keras import layers
from keras import models
from keras import optimizers
import matplotlib.pyplot as plt
import numpy as np


def vectorize_data(x, dim=10000):
    """Multi-hot encode sequences of word indices.

    Parameters
    ----------
    x : sequence of sequences of int
        Each element is a list of word indices, all assumed < dim
        (guaranteed here by imdb.load_data(num_words=10000)).
    dim : int
        Vocabulary size, i.e. the width of the output matrix.

    Returns
    -------
    numpy.ndarray
        Float array of shape (len(x), dim) with 1.0 at every index
        present in the corresponding sequence, 0.0 elsewhere.
    """
    res = np.zeros((len(x), dim))
    for i, indices in enumerate(x):
        # Fancy indexing sets all word positions of this review at once.
        res[i, indices] = 1
    return res


def main():
    """Train a small dense network for IMDB binary sentiment classification.

    Loads the top-10000-word IMDB dataset, multi-hot encodes the reviews,
    trains a 16-16-1 dense network with rmsprop/binary crossentropy for
    4 epochs, then evaluates and prints test accuracy.
    """
    (train_data, train_labels), (test_data, test_labels) = imdb.load_data(num_words=10000)
    x_train = vectorize_data(train_data)
    # Labels are 0/1 ints; cast to float32 for the sigmoid output / BCE loss.
    y_train = np.asarray(train_labels).astype('float32')
    x_test = vectorize_data(test_data)
    y_test = np.asarray(test_labels).astype('float32')

    network = models.Sequential()
    network.add(layers.Dense(16, activation='relu', input_shape=(10000,)))
    network.add(layers.Dense(16, activation='relu'))
    network.add(layers.Dense(1, activation='sigmoid'))

    network.compile(optimizer='rmsprop', loss='binary_crossentropy', metrics=['accuracy'])
    # 4 epochs: the book's experiment shows this model overfits beyond that.
    network.fit(x_train, y_train, batch_size=512, epochs=4)

    loss, acc = network.evaluate(x_test, y_test)
    print('acc == ', acc)


if __name__ == "__main__":
    main()
Original article: https://www.cnblogs.com/rising-sun/p/11618859.html
Timestamp: 2024-11-06 07:19:18