Getting Started with Neural Networks (Movie Review Classification: a Binary Classification Problem)

Anonymous (unverified), posted 2019-12-03 00:05:01

The IMDB Dataset

# Load the IMDB dataset, keeping only the 10,000 most frequent words
from keras.datasets import imdb
import numpy as np

(train_data, train_labels), (test_data, test_labels) = imdb.load_data(num_words=10000)
print(train_data[0])
print(train_labels[0])
print(max([max(sequence) for sequence in train_data]))

# Decode a review back into words (word indices are offset by 3 because
# 0, 1 and 2 are reserved for "padding", "start of sequence" and "unknown")
word_index = imdb.get_word_index()
reverse_word_index = dict(
    [(value, key) for (key, value) in word_index.items()])
decoded_review = ' '.join([reverse_word_index.get(i - 3, '?') for i in train_data[0]])
print(decoded_review)

# Encode the integer sequences into a binary (multi-hot) matrix
def vectorize_sequences(sequences, dimension=10000):
    results = np.zeros((len(sequences), dimension))
    for i, sequence in enumerate(sequences):
        results[i, sequence] = 1
    return results

x_train = vectorize_sequences(train_data)
x_test = vectorize_sequences(test_data)
print(x_train[0])

# Vectorize the labels as well
y_train = np.asarray(train_labels).astype('float32')
y_test = np.asarray(test_labels).astype('float32')

#### Model definition ####
from keras import models
from keras import layers

model = models.Sequential()
model.add(layers.Dense(16, activation='relu', input_shape=(10000,)))
model.add(layers.Dense(16, activation='relu'))
model.add(layers.Dense(1, activation='sigmoid'))

#### Compiling the model ####
model.compile(optimizer='rmsprop', loss='binary_crossentropy', metrics=['accuracy'])

#### Configuring the optimizer ####
from keras import optimizers
model.compile(optimizer=optimizers.RMSprop(lr=0.001), loss='binary_crossentropy', metrics=['accuracy'])

#### Using custom losses and metrics ####
from keras import losses
from keras import metrics
model.compile(optimizer=optimizers.RMSprop(lr=0.001),
              loss=losses.binary_crossentropy,
              metrics=[metrics.binary_accuracy])

#### Setting aside a validation set ####
x_val = x_train[:10000]
partial_x_train = x_train[10000:]
y_val = y_train[:10000]
partial_y_train = y_train[10000:]

#### Training the model ####
model.compile(optimizer='rmsprop', loss='binary_crossentropy', metrics=['acc'])
history = model.fit(partial_x_train, partial_y_train,
                    epochs=20, batch_size=512,
                    validation_data=(x_val, y_val))
history_dict = history.history
print(history_dict.keys())

#### Plotting the training and validation loss ####
import matplotlib.pyplot as plt

loss_values = history_dict['loss']
val_loss_values = history_dict['val_loss']
epochs = range(1, len(loss_values) + 1)
plt.plot(epochs, loss_values, 'bo', label='Training loss')       # 'bo' means blue dots
plt.plot(epochs, val_loss_values, 'b', label='Validation loss')  # 'b' means a solid blue line
plt.title('Training and validation loss')
plt.xlabel('Epochs')
plt.ylabel('Loss')
plt.legend()
plt.show()

#### Plotting the training and validation accuracy ####
plt.clf()   # clear the previous figure
acc = history_dict['acc']
val_acc = history_dict['val_acc']
plt.plot(epochs, acc, 'bo', label='Training acc')
plt.plot(epochs, val_acc, 'b', label='Validation acc')
plt.title('Training and validation accuracy')
plt.xlabel('Epochs')
plt.ylabel('Accuracy')
plt.legend()
plt.show()
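With this setup the validation loss usually stops improving after a few epochs while the training loss keeps falling, which is a sign of overfitting. A natural follow-up, sketched below, is to retrain a fresh network for fewer epochs and then evaluate it on the test set and inspect its predictions. This sketch is not part of the original post: the cut-off of 4 epochs is an assumption based on where the validation curves typically turn, not a value given above.

# Assumed follow-up sketch: retrain from scratch for fewer epochs,
# then evaluate on the held-out test data and look at predictions.
model = models.Sequential()
model.add(layers.Dense(16, activation='relu', input_shape=(10000,)))
model.add(layers.Dense(16, activation='relu'))
model.add(layers.Dense(1, activation='sigmoid'))
model.compile(optimizer='rmsprop', loss='binary_crossentropy', metrics=['acc'])

# 4 epochs is an assumed stopping point chosen from the validation curves
model.fit(x_train, y_train, epochs=4, batch_size=512)

# evaluate() returns [test_loss, test_accuracy]
results = model.evaluate(x_test, y_test)
print(results)

# predict() returns, for each review, the probability that it is positive
print(model.predict(x_test))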