  • Implementing a simple CNN with Gluon

    export PATH="/home/hxj/anaconda3/bin:$PATH"

    source activate gluon

    jupyter notebook
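
    # Optional sanity check (not in the original post): inside the activated
    # "gluon" environment, confirm that MXNet imports and print its version;
    # any 1.x release with the Gluon API should run the code below.
    python -c "import mxnet as mx; print(mx.__version__)"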

    from __future__ import print_function
    import mxnet as mx
    import numpy as np
    from mxnet import nd, autograd, gluon
    ctx = mx.cpu()
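    # A GPU build of MXNet can run the same script by swapping the context,
    # e.g. ctx = mx.gpu(0); everything below only references ctx.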
    mx.random.seed(1)
    
    batch_size = 100
    num_inputs = 784
    num_outputs = 10
    def transform(data, label):
        return nd.transpose(data.astype(np.float32), (2,0,1))/255, label.astype(np.float32)
    train_data = gluon.data.DataLoader(gluon.data.vision.FashionMNIST(train=True, transform=transform),
                                          batch_size, shuffle=True)
    test_data = gluon.data.DataLoader(gluon.data.vision.FashionMNIST(train=False, transform=transform),
                                         batch_size, shuffle=False)
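
    # Quick check (not in the original post): peek at one batch. After the
    # transpose in transform(), images come back as float32 tensors of shape
    # (batch_size, 1, 28, 28) with labels of shape (batch_size,).
    for data, label in train_data:
        print(data.shape, label.shape)   # expected: (100, 1, 28, 28) (100,)
        break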
    num_fc = 512
    net = gluon.nn.Sequential()
    with net.name_scope():
        net.add(gluon.nn.Conv2D(channels=20, kernel_size=5, activation='relu'))
        net.add(gluon.nn.MaxPool2D(pool_size=2, strides=2))
        net.add(gluon.nn.Conv2D(channels=50, kernel_size=5, activation='relu'))
        net.add(gluon.nn.MaxPool2D(pool_size=2, strides=2))
        # The Flatten layer collapses all axes except the first (batch) axis into one.
        net.add(gluon.nn.Flatten())
        net.add(gluon.nn.Dense(num_fc, activation="relu"))
        net.add(gluon.nn.Dense(num_outputs))
    net.collect_params().initialize(mx.init.Xavier(magnitude=2.24), ctx=ctx)
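
    # Optional shape check (not in the original post): Gluon infers layer input
    # shapes on the first forward pass, so one dummy batch confirms the network
    # wires up (28x28 -> conv 24x24 -> pool 12x12 -> conv 8x8 -> pool 4x4 -> dense).
    print(net(nd.ones((1, 1, 28, 28), ctx=ctx)).shape)   # expected: (1, 10)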
    softmax_cross_entropy = gluon.loss.SoftmaxCrossEntropyLoss()
    trainer = gluon.Trainer(net.collect_params(), 'sgd', {'learning_rate': .1})
    def evaluate_accuracy(data_iterator, net):
        acc = mx.metric.Accuracy()
        for i, (data, label) in enumerate(data_iterator):
            data = data.as_in_context(ctx)
            label = label.as_in_context(ctx)
            output = net(data)
            predictions = nd.argmax(output, axis=1)
            acc.update(preds=predictions, labels=label)
        return acc.get()[1]
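
    # Untrained, the network should score near chance (about 0.10) on the
    # 10-class test set; this one-off check is a cheap way to confirm the whole
    # evaluation pipeline is wired correctly.
    print(evaluate_accuracy(test_data, net))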
    epochs = 5
    smoothing_constant = .01
    
    for e in range(epochs):
        for i, (data, label) in enumerate(train_data):
            data = data.as_in_context(ctx)
            label = label.as_in_context(ctx)
            with autograd.record():
                output = net(data)
                loss = softmax_cross_entropy(output, label)
            loss.backward()
            trainer.step(data.shape[0])
    
            ##########################
            #  Keep a moving average of the losses
            ##########################
            curr_loss = nd.mean(loss).asscalar()
            moving_loss = (curr_loss if ((i == 0) and (e == 0))
                           else (1 - smoothing_constant) * moving_loss + smoothing_constant * curr_loss)
    
        test_accuracy = evaluate_accuracy(test_data, net)
        train_accuracy = evaluate_accuracy(train_data, net)
        print("Epoch %s. Loss: %s, Train_acc %s, Test_acc %s" % (e, moving_loss, train_accuracy, test_accuracy))
  • Original post: https://www.cnblogs.com/hxjbc/p/7905862.html