Hands-on Deep Learning (2): Multilayer Perceptron (from Scratch)


Author: Tyan. Blog: noahsnail.com | CSDN | 简书
Note: these are my notes from Mu Li's course *Dive into Deep Learning*!
# Import MXNet
import mxnet as mx

# Fix the random seed so the results are reproducible
mx.random.seed(2)

from mxnet import gluon
from mxnet import ndarray as nd
from mxnet import autograd
from mxnet import image
from utils import load_data_fashion_mnist, accuracy, evaluate_accuracy, SGD
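
The helpers load_data_fashion_mnist, accuracy, evaluate_accuracy, and SGD come from the course's utils.py, which is not shown here. For reference, SGD there is presumably plain mini-batch gradient descent, along these lines (a sketch, assuming the utils.py from the course repository):

# Sketch of the SGD helper assumed to live in utils.py:
# update each parameter in place with one gradient step
def SGD(params, lr):
    for param in params:
        param[:] = param - lr * param.grad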

Getting the Data

# Batch size for each training iteration
batch_size = 256

# Load the Fashion-MNIST training and test data iterators
train_data, test_data = load_data_fashion_mnist(batch_size)
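
It is worth peeking at one mini-batch to confirm the shapes (a quick check; the exact layout depends on how load_data_fashion_mnist transforms the images):

# Inspect one mini-batch; Fashion-MNIST images are 28 x 28 with one channel
for data, label in train_data:
    print(data.shape, label.shape)  # e.g. (256, 28, 28, 1) (256,)
    break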

Multilayer Perceptron

# Number of inputs: each image is 28 x 28 pixels
num_inputs = 28 * 28
# Number of outputs: Fashion-MNIST has 10 classes
num_outputs = 10

# Number of hidden units
num_hidden = 256

# Standard deviation for the random weight initialization
weight_scale = 0.01

# Weights and bias of the hidden layer
W1 = nd.random_normal(shape=(num_inputs, num_hidden), scale=weight_scale)
b1 = nd.zeros(num_hidden)

# Weights and bias of the output layer
W2 = nd.random_normal(shape=(num_hidden, num_outputs), scale=weight_scale)
b2 = nd.zeros(num_outputs)

# Collect all model parameters
params = [W1, b1, W2, b2]

# Attach a gradient buffer to each parameter so autograd can store gradients
for param in params:
    param.attach_grad()
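
attach_grad allocates a zero-initialized gradient buffer with the same shape as each parameter; a quick check:

# Each parameter now has a matching gradient buffer
print(W1.shape, W1.grad.shape)  # (784, 256) (784, 256)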

Activation Function

# Define the ReLU activation: relu(x) = max(x, 0)
def relu(X):
    return nd.maximum(X, 0)
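
A quick sanity check on a small tensor (the values are just illustrative):

# Negative entries are clipped to zero, non-negative entries pass through
print(relu(nd.array([-2.0, 0.0, 3.5])))  # [0. 0. 3.5]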

Defining the Model

def net(X):
    # Flatten each input image into a vector of length num_inputs
    X = X.reshape((-1, num_inputs))
    # Hidden layer: affine transform followed by ReLU
    h1 = relu(nd.dot(X, W1) + b1)
    # Output layer: affine transform producing the class scores (logits)
    output = nd.dot(h1, W2) + b2
    return output
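
A dummy batch confirms the model's output shape: each image is flattened to 784 values and mapped to 10 class scores (a sketch with a random batch of 4):

# Four fake images in, four rows of 10 logits out
X = nd.random_normal(shape=(4, 1, 28, 28))
print(net(X).shape)  # (4, 10)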

Softmax and Cross-Entropy Loss

# Use Gluon's built-in loss, which fuses softmax and cross-entropy for numerical stability
softmax_cross_entropy = gluon.loss.SoftmaxCrossEntropyLoss()
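
Computing softmax and then taking the log in two separate steps can overflow or underflow for large logits, which is why the fused Gluon loss is preferred. Conceptually it computes the following (a naive sketch for intuition only, not a numerically stable implementation):

# Naive equivalent: negative log of the softmax probability of the true class
# loss_i = -log( softmax(output)[i, label[i]] )
def naive_softmax_cross_entropy(output, label):
    probs = nd.softmax(output)
    picked = nd.pick(probs, label)  # probability assigned to the true class
    return -nd.log(picked)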

Training

# Number of training epochs
epochs = 5

# Learning rate for gradient descent
learning_rate = 0.1

# Training loop
for epoch in range(epochs):
    # Cumulative training loss for the epoch
    train_loss = 0.0
    # Cumulative training accuracy for the epoch
    train_acc = 0.0
    # Iterate over mini-batches
    for data, label in train_data:
        # Record the forward pass so autograd can compute gradients
        with autograd.record():
            # Forward pass through the network
            output = net(data)
            # Loss for this mini-batch
            loss = softmax_cross_entropy(output, label)
        # Backpropagate to fill the parameter gradients
        loss.backward()
        # Update parameters; loss.backward() sums gradients over the batch,
        # so divide the learning rate by batch_size to take an average step
        SGD(params, learning_rate/batch_size)
        # Accumulate the mean batch loss
        train_loss += nd.mean(loss).asscalar()
        # Accumulate the batch accuracy
        train_acc += accuracy(output, label)

    # Evaluate accuracy on the test set after each epoch
    test_acc = evaluate_accuracy(test_data, net)

    print("Epoch %d. Loss: %f, Train acc %f, Test acc %f" % (
        epoch, train_loss / len(train_data), train_acc / len(train_data), test_acc))
Epoch 0. Loss: 1.042064, Train acc 0.630976, Test acc 0.776142
Epoch 1. Loss: 0.601578, Train acc 0.788862, Test acc 0.815204
Epoch 2. Loss: 0.525148, Train acc 0.816556, Test acc 0.835136
Epoch 3. Loss: 0.486619, Train acc 0.829427, Test acc 0.833033
Epoch 4. Loss: 0.459395, Train acc 0.836104, Test acc 0.835136