Download link for the complete code: https://download.csdn.net/download/lxiao428/11029921
Most write-ups on the MNIST dataset load it by downloading it through the library, and that is what I used to do as well. Today, however, the remote server closed the connection, and since I already have the MNIST files locally, I wanted a way to feed the local training and test data into my training code. A long search online turned up nothing, so this is the approach I eventually pieced together.
# Load the MNIST dataset
from keras.datasets import mnist
import gzip
import os
import numpy
local_file = "F:\python\DeepLearning"
#(train_images, train_labels),(test_images, test_labels) = mnist.load_data()
TRAIN_IMAGES = 'train-images-idx3-ubyte.gz' # file name of the training-set images
TRAIN_LABELS = 'train-labels-idx1-ubyte.gz' # file name of the training-set labels
TEST_IMAGES = 't10k-images-idx3-ubyte.gz' # file name of the test-set images
TEST_LABELS = 't10k-labels-idx1-ubyte.gz' # file name of the test-set labels
# The local loading is done mainly by two functions, extract_images(filename) and
# extract_labels(filename, one_hot=False); their full implementation ships with the
# code download linked at the top.
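# A minimal sketch of what the two functions could look like, assuming the four files are
# the standard gzipped MNIST IDX files (big-endian 32-bit header fields followed by raw
# uint8 data). The _read32 helper exists only for this sketch:
def _read32(bytestream):
    # IDX headers store 32-bit integers in big-endian byte order
    dt = numpy.dtype(numpy.uint32).newbyteorder('>')
    return int(numpy.frombuffer(bytestream.read(4), dtype=dt)[0])

def extract_images(filename):
    # Returns a uint8 array of shape (num_images, rows, cols)
    with gzip.open(filename, 'rb') as bytestream:
        magic = _read32(bytestream)
        if magic != 2051:
            raise ValueError('Invalid magic number %d in image file %s' % (magic, filename))
        num_images = _read32(bytestream)
        rows = _read32(bytestream)
        cols = _read32(bytestream)
        buf = bytestream.read(rows * cols * num_images)
        return numpy.frombuffer(buf, dtype=numpy.uint8).reshape(num_images, rows, cols)

def extract_labels(filename, one_hot=False):
    # Returns a uint8 vector of labels; one-hot encoding is handled later with
    # to_categorical, so the one_hot flag is accepted but ignored in this sketch
    with gzip.open(filename, 'rb') as bytestream:
        magic = _read32(bytestream)
        if magic != 2049:
            raise ValueError('Invalid magic number %d in label file %s' % (magic, filename))
        num_items = _read32(bytestream)
        buf = bytestream.read(num_items)
        return numpy.frombuffer(buf, dtype=numpy.uint8)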
train_images = extract_images(os.path.join(local_file,TRAIN_IMAGES))
train_labels = extract_labels(os.path.join(local_file,TRAIN_LABELS))
test_images = extract_images(os.path.join(local_file,TEST_IMAGES))
test_labels = extract_labels(os.path.join(local_file,TEST_LABELS))
# Network architecture
'''
The core building block of a neural network is the layer, a data-processing module
that can be thought of as a filter for data.
'''
from keras import models
from keras import layers
network = models.Sequential()
network.add(layers.Dense(512, activation='relu',input_shape=(28*28,)))
network.add(layers.Dense(10, activation='softmax'))
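# Optionally, the resulting layer stack and parameter counts can be inspected with:
network.summary()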
# Compilation step
'''
To train the network, three more things have to be chosen as part of the compilation step:
(1) a loss function (loss): measures how well the network performs on the training data;
(2) an optimizer (optimizer): the mechanism that updates the network based on the training data and the loss function;
(3) metrics to monitor during training and testing (metrics): here, accuracy.
'''
network.compile(optimizer='rmsprop',
                loss='categorical_crossentropy',
                metrics=['accuracy'])
# Data preprocessing: flatten each 28x28 image into a vector of 784 values and scale
# the uint8 pixel values from [0, 255] to float32 values in [0, 1]
train_images = train_images.reshape((60000, 28*28))
train_images = train_images.astype('float32')/255
test_images = test_images.reshape((10000, 28*28))
test_images = test_images.astype('float32')/255
# Prepare the labels (one-hot encoding)
from keras.utils import to_categorical
train_labels = to_categorical(train_labels)
test_labels = to_categorical(test_labels)
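# Illustration: to_categorical turns an integer class label into a length-10 one-hot
# vector, the target format that categorical_crossentropy expects; e.g.
# to_categorical([3], num_classes=10) gives [[0, 0, 0, 1, 0, 0, 0, 0, 0, 0]].
# After the two calls above the label arrays are therefore 2-D:
print('train_labels shape:', train_labels.shape)  # expected (60000, 10)
print('test_labels shape:', test_labels.shape)    # expected (10000, 10)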
# Train the network
network.fit(train_images, train_labels, epochs = 5, batch_size = 256)
# Evaluate performance on the test set
test_loss, test_acc = network.evaluate(test_images, test_labels)
print('test_acc:', test_acc)
print('test_loss:', test_loss)
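# As a quick sanity check, the trained network can also classify a single test image;
# predict() returns the 10 class probabilities and argmax recovers the digit:
predictions = network.predict(test_images[:1])
print('predicted digit:', predictions[0].argmax())
print('true digit:', test_labels[0].argmax())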
Source: CSDN
Author: lxiao428
Link: https://blog.csdn.net/lxiao428/article/details/83020066