  • Recognizing handwritten digits with a bidirectional LSTM

    Code:

    #!/usr/bin/env python
    # -*- coding: utf-8 -*-
    
    """
     @file bi_lstm.py
    """
    
    import sys
    
    import tensorflow as tf
    import numpy as np
    from tensorflow.examples.tutorials.mnist import input_data
    
    # Load MNIST with one-hot labels (TF 1.x tutorial helper).
    mnist = input_data.read_data_sets("data/", one_hot=True)
    
    # Training hyperparameters.
    learning_rate = 0.01
    max_samples = 400000  # total number of training samples to consume
    batch_size = 128
    display_step = 10     # log progress every `display_step` batches
    
    # Each 28x28 image is fed as a sequence of 28 time steps with 28 pixels each.
    n_input = 28    # features per time step (image width)
    n_steps = 28    # number of time steps (image height)
    n_hidden = 256  # hidden units per LSTM direction
    n_classes = 10  # digits 0-9
    
    # Placeholders: a batch of step sequences and the matching one-hot labels.
    x = tf.placeholder("float", [None, n_steps, n_input])
    y = tf.placeholder("float", [None, n_classes])
    
    # Output projection; forward and backward outputs are concatenated, hence 2 * n_hidden.
    weights = tf.Variable(tf.random_normal([2 * n_hidden, n_classes]))
    biases = tf.Variable(tf.random_normal([n_classes]))
    
    def BiRNN(x, weights, biases):
        # Turn [batch, n_steps, n_input] into a length-n_steps list of
        # [batch, n_input] tensors, as the static RNN API expects.
        x = tf.transpose(x, [1, 0, 2])
        x = tf.reshape(x, [-1, n_input])
        x = tf.split(x, n_steps)
    
        # One LSTM cell per direction.
        lstm_fw_cell = tf.contrib.rnn.BasicLSTMCell(n_hidden, forget_bias=1.0)
        lstm_bw_cell = tf.contrib.rnn.BasicLSTMCell(n_hidden, forget_bias=1.0)
    
        # Each output is the concatenation of the forward and backward states;
        # classify from the output of the last time step.
        output, _, _ = tf.contrib.rnn.static_bidirectional_rnn(lstm_fw_cell, lstm_bw_cell, x, dtype=tf.float32)
        return tf.matmul(output[-1], weights) + biases
    
    pred = BiRNN(x, weights, biases)
    
    # Softmax cross-entropy loss, minimized with Adam.
    cost = tf.reduce_mean(tf.nn.softmax_cross_entropy_with_logits(logits=pred, labels=y))
    optimizer = tf.train.AdamOptimizer(learning_rate=learning_rate).minimize(cost)
    
    # Accuracy: fraction of examples whose predicted class matches the label.
    correct_pred = tf.equal(tf.argmax(pred, 1), tf.argmax(y, 1))
    accuracy = tf.reduce_mean(tf.cast(correct_pred, tf.float32))
    
    init = tf.global_variables_initializer()
    
    with tf.Session() as sess:
        sess.run(init)
        step = 1
        print("start.")
        # Train until max_samples examples have been consumed.
        while step * batch_size < max_samples:
            batch_x, batch_y = mnist.train.next_batch(batch_size)
            # Reshape flat 784-pixel images into 28 steps of 28 pixels.
            batch_x = batch_x.reshape((batch_size, n_steps, n_input))
            sess.run(optimizer, feed_dict={x: batch_x, y: batch_y})
            if step % display_step == 0:
                acc = sess.run(accuracy, feed_dict={x: batch_x, y: batch_y})
                loss = sess.run(cost, feed_dict={x: batch_x, y: batch_y})
                print("Iter " + str(step * batch_size) + ", Minibatch Loss= " + "{:.6f}".format(loss) +
                        ", Training Accuracy= "+ "{:.5f}".format(acc))
            step += 1
        print("Optimization Finished!")
    
        # Evaluate on the first 10,000 test images.
        test_len = 10000
        test_data = mnist.test.images[:test_len].reshape((-1, n_steps, n_input))
        test_label = mnist.test.labels[:test_len]
        print("Testing Accuracy:", sess.run(accuracy, feed_dict={x: test_data, y: test_label}))
    
    if __name__ == '__main__':
        pass
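    Note: the script above relies on tf.contrib and the TF 1.x session API, both of which were removed in TensorFlow 2.x. The following is a minimal sketch of the same bidirectional-LSTM classifier using the Keras API; the layer sizes and learning rate mirror the script, while the number of epochs is an assumption rather than a port of the original training schedule.

    # Minimal TF 2.x / Keras sketch (assumption: TensorFlow 2.x installed).
    import tensorflow as tf

    # Built-in MNIST loader replaces the removed tutorials module.
    (x_train, y_train), (x_test, y_test) = tf.keras.datasets.mnist.load_data()
    x_train = x_train.astype("float32") / 255.0  # (60000, 28, 28): 28 steps of 28 pixels
    x_test = x_test.astype("float32") / 255.0

    model = tf.keras.Sequential([
        tf.keras.layers.Input(shape=(28, 28)),
        # Runs one LSTM forward and one backward and concatenates the outputs,
        # giving 2 * 256 features per image.
        tf.keras.layers.Bidirectional(tf.keras.layers.LSTM(256)),
        tf.keras.layers.Dense(10),  # logits, like matmul(output[-1], weights) + biases
    ])

    model.compile(
        optimizer=tf.keras.optimizers.Adam(learning_rate=0.01),
        loss=tf.keras.losses.SparseCategoricalCrossentropy(from_logits=True),
        metrics=["accuracy"],
    )

    model.fit(x_train, y_train, batch_size=128, epochs=5)  # epochs=5 is an assumption
    print(model.evaluate(x_test, y_test))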

    Output:

    Future major versions of TensorFlow will allow gradients to flow
    into the labels input on backprop by default.
    
    See @{tf.nn.softmax_cross_entropy_with_logits_v2}.
    
    start.
    Iter 1280, Minibatch Loss= 1.983782, Training Accuracy= 0.39062
    Iter 2560, Minibatch Loss= 1.385502, Training Accuracy= 0.48438
    Iter 3840, Minibatch Loss= 0.807503, Training Accuracy= 0.71094
    Iter 5120, Minibatch Loss= 0.625227, Training Accuracy= 0.82031
    Iter 6400, Minibatch Loss= 0.573644, Training Accuracy= 0.83594
    Iter 7680, Minibatch Loss= 0.484294, Training Accuracy= 0.84375
    Iter 8960, Minibatch Loss= 0.332071, Training Accuracy= 0.90625
    Iter 10240, Minibatch Loss= 0.320463, Training Accuracy= 0.89844
    Iter 11520, Minibatch Loss= 0.232943, Training Accuracy= 0.92969
    Iter 12800, Minibatch Loss= 0.302845, Training Accuracy= 0.88281
    Iter 14080, Minibatch Loss= 0.259136, Training Accuracy= 0.92188
    Iter 15360, Minibatch Loss= 0.336947, Training Accuracy= 0.89062
    Iter 16640, Minibatch Loss= 0.123919, Training Accuracy= 0.96094
    Iter 17920, Minibatch Loss= 0.122037, Training Accuracy= 0.96875
    Iter 19200, Minibatch Loss= 0.188062, Training Accuracy= 0.93750
    Iter 20480, Minibatch Loss= 0.142499, Training Accuracy= 0.92969
    Iter 21760, Minibatch Loss= 0.141500, Training Accuracy= 0.94531
    Iter 23040, Minibatch Loss= 0.089325, Training Accuracy= 0.96875
    Iter 24320, Minibatch Loss= 0.103748, Training Accuracy= 0.98438
    Iter 25600, Minibatch Loss= 0.234301, Training Accuracy= 0.92969
    Iter 26880, Minibatch Loss= 0.176208, Training Accuracy= 0.94531
    Iter 28160, Minibatch Loss= 0.196887, Training Accuracy= 0.95312
    Iter 29440, Minibatch Loss= 0.128842, Training Accuracy= 0.94531
    Iter 30720, Minibatch Loss= 0.263330, Training Accuracy= 0.94531
    Iter 32000, Minibatch Loss= 0.118437, Training Accuracy= 0.96875
    Iter 33280, Minibatch Loss= 0.125101, Training Accuracy= 0.94531
    Iter 34560, Minibatch Loss= 0.055070, Training Accuracy= 0.97656
    Iter 35840, Minibatch Loss= 0.145751, Training Accuracy= 0.96094
    Iter 37120, Minibatch Loss= 0.114613, Training Accuracy= 0.95312
    Iter 38400, Minibatch Loss= 0.097740, Training Accuracy= 0.98438
    Iter 39680, Minibatch Loss= 0.156779, Training Accuracy= 0.94531
    Iter 40960, Minibatch Loss= 0.061766, Training Accuracy= 0.97656
    Iter 42240, Minibatch Loss= 0.127621, Training Accuracy= 0.94531
    Iter 43520, Minibatch Loss= 0.209864, Training Accuracy= 0.92188
    Iter 44800, Minibatch Loss= 0.202891, Training Accuracy= 0.96094
    Iter 46080, Minibatch Loss= 0.106149, Training Accuracy= 0.96875
    Iter 47360, Minibatch Loss= 0.067649, Training Accuracy= 0.96094
    Iter 48640, Minibatch Loss= 0.060257, Training Accuracy= 0.98438
    Iter 49920, Minibatch Loss= 0.097187, Training Accuracy= 0.96875
    Iter 51200, Minibatch Loss= 0.054108, Training Accuracy= 0.98438
    Iter 52480, Minibatch Loss= 0.045095, Training Accuracy= 0.99219
    Iter 53760, Minibatch Loss= 0.037128, Training Accuracy= 0.99219
    Iter 55040, Minibatch Loss= 0.079761, Training Accuracy= 0.96875
    Iter 56320, Minibatch Loss= 0.027518, Training Accuracy= 1.00000
    Iter 57600, Minibatch Loss= 0.042745, Training Accuracy= 0.98438
    Iter 58880, Minibatch Loss= 0.092336, Training Accuracy= 0.96094
    Iter 60160, Minibatch Loss= 0.032620, Training Accuracy= 0.98438
    Iter 61440, Minibatch Loss= 0.033767, Training Accuracy= 0.99219
    Iter 62720, Minibatch Loss= 0.020563, Training Accuracy= 0.99219
    Iter 64000, Minibatch Loss= 0.145859, Training Accuracy= 0.96875
    Iter 65280, Minibatch Loss= 0.064511, Training Accuracy= 0.99219
    Iter 66560, Minibatch Loss= 0.049629, Training Accuracy= 0.98438
    Iter 67840, Minibatch Loss= 0.152412, Training Accuracy= 0.96875
    Iter 69120, Minibatch Loss= 0.072656, Training Accuracy= 0.98438
    Iter 70400, Minibatch Loss= 0.074452, Training Accuracy= 0.98438
    Iter 71680, Minibatch Loss= 0.026587, Training Accuracy= 0.99219
    Iter 72960, Minibatch Loss= 0.019888, Training Accuracy= 0.99219
    Iter 74240, Minibatch Loss= 0.061217, Training Accuracy= 0.98438
    Iter 75520, Minibatch Loss= 0.066838, Training Accuracy= 0.98438
    Iter 76800, Minibatch Loss= 0.060543, Training Accuracy= 0.99219
    Iter 78080, Minibatch Loss= 0.028488, Training Accuracy= 1.00000
    Iter 79360, Minibatch Loss= 0.035900, Training Accuracy= 0.98438
    Iter 80640, Minibatch Loss= 0.020374, Training Accuracy= 0.99219
    Iter 81920, Minibatch Loss= 0.098179, Training Accuracy= 0.97656
    Iter 83200, Minibatch Loss= 0.111376, Training Accuracy= 0.96875
    Iter 84480, Minibatch Loss= 0.100566, Training Accuracy= 0.96875
    Iter 85760, Minibatch Loss= 0.035282, Training Accuracy= 0.99219
    Iter 87040, Minibatch Loss= 0.077972, Training Accuracy= 0.98438
    Iter 88320, Minibatch Loss= 0.028642, Training Accuracy= 0.99219
    Iter 89600, Minibatch Loss= 0.134011, Training Accuracy= 0.96094
    Iter 90880, Minibatch Loss= 0.033377, Training Accuracy= 0.98438
    Iter 92160, Minibatch Loss= 0.097937, Training Accuracy= 0.96875
    Iter 93440, Minibatch Loss= 0.101677, Training Accuracy= 0.96875
    Iter 94720, Minibatch Loss= 0.078925, Training Accuracy= 0.98438
    Iter 96000, Minibatch Loss= 0.086982, Training Accuracy= 0.97656
    Iter 97280, Minibatch Loss= 0.037116, Training Accuracy= 0.99219
    Iter 98560, Minibatch Loss= 0.063905, Training Accuracy= 0.97656
    Iter 99840, Minibatch Loss= 0.059786, Training Accuracy= 0.98438
    Iter 101120, Minibatch Loss= 0.040024, Training Accuracy= 0.99219
    Iter 102400, Minibatch Loss= 0.026571, Training Accuracy= 0.99219
    Iter 103680, Minibatch Loss= 0.047032, Training Accuracy= 0.99219
    Iter 104960, Minibatch Loss= 0.102036, Training Accuracy= 0.97656
    Iter 106240, Minibatch Loss= 0.038912, Training Accuracy= 0.99219
    Iter 107520, Minibatch Loss= 0.082798, Training Accuracy= 0.96875
    Iter 108800, Minibatch Loss= 0.034525, Training Accuracy= 0.99219
    Iter 110080, Minibatch Loss= 0.061711, Training Accuracy= 0.99219
    Iter 111360, Minibatch Loss= 0.082786, Training Accuracy= 0.96875
    Iter 112640, Minibatch Loss= 0.080744, Training Accuracy= 0.98438
    Iter 113920, Minibatch Loss= 0.033039, Training Accuracy= 0.98438
    Iter 115200, Minibatch Loss= 0.055088, Training Accuracy= 0.98438
    Iter 116480, Minibatch Loss= 0.052973, Training Accuracy= 0.98438
    Iter 117760, Minibatch Loss= 0.026259, Training Accuracy= 0.99219
    Iter 119040, Minibatch Loss= 0.051832, Training Accuracy= 0.98438
    Iter 120320, Minibatch Loss= 0.036418, Training Accuracy= 0.99219
    Iter 121600, Minibatch Loss= 0.080739, Training Accuracy= 0.97656
    Iter 122880, Minibatch Loss= 0.038841, Training Accuracy= 0.98438
    Iter 124160, Minibatch Loss= 0.053453, Training Accuracy= 0.98438
    Iter 125440, Minibatch Loss= 0.089335, Training Accuracy= 0.97656
    Iter 126720, Minibatch Loss= 0.036742, Training Accuracy= 0.99219
    Iter 128000, Minibatch Loss= 0.019495, Training Accuracy= 0.99219
    Iter 129280, Minibatch Loss= 0.057399, Training Accuracy= 0.97656
    Iter 130560, Minibatch Loss= 0.028992, Training Accuracy= 0.99219
    Iter 131840, Minibatch Loss= 0.020546, Training Accuracy= 0.99219
    Iter 133120, Minibatch Loss= 0.076788, Training Accuracy= 0.99219
    Iter 134400, Minibatch Loss= 0.021873, Training Accuracy= 0.99219
    Iter 135680, Minibatch Loss= 0.015666, Training Accuracy= 0.99219
    Iter 136960, Minibatch Loss= 0.066758, Training Accuracy= 0.97656
    Iter 138240, Minibatch Loss= 0.072352, Training Accuracy= 0.97656
    Iter 139520, Minibatch Loss= 0.047164, Training Accuracy= 0.98438
    Iter 140800, Minibatch Loss= 0.042393, Training Accuracy= 0.99219
    Iter 142080, Minibatch Loss= 0.027188, Training Accuracy= 0.99219
    Iter 143360, Minibatch Loss= 0.033916, Training Accuracy= 0.99219
    Iter 144640, Minibatch Loss= 0.013945, Training Accuracy= 1.00000
    Iter 145920, Minibatch Loss= 0.014870, Training Accuracy= 1.00000
    Iter 147200, Minibatch Loss= 0.044671, Training Accuracy= 0.98438
    Iter 148480, Minibatch Loss= 0.024688, Training Accuracy= 0.99219
    Iter 149760, Minibatch Loss= 0.029531, Training Accuracy= 0.98438
    Iter 151040, Minibatch Loss= 0.005914, Training Accuracy= 1.00000
    Iter 152320, Minibatch Loss= 0.037701, Training Accuracy= 0.98438
    Iter 153600, Minibatch Loss= 0.026516, Training Accuracy= 0.99219
    Iter 154880, Minibatch Loss= 0.071220, Training Accuracy= 0.99219
    Iter 156160, Minibatch Loss= 0.055919, Training Accuracy= 0.98438
    Iter 157440, Minibatch Loss= 0.012041, Training Accuracy= 1.00000
    Iter 158720, Minibatch Loss= 0.056929, Training Accuracy= 0.97656
    Iter 160000, Minibatch Loss= 0.043089, Training Accuracy= 0.97656
    Iter 161280, Minibatch Loss= 0.020197, Training Accuracy= 1.00000
    Iter 162560, Minibatch Loss= 0.029051, Training Accuracy= 0.99219
    Iter 163840, Minibatch Loss= 0.014294, Training Accuracy= 1.00000
    Iter 165120, Minibatch Loss= 0.076162, Training Accuracy= 0.98438
    Iter 166400, Minibatch Loss= 0.028846, Training Accuracy= 0.99219
    Iter 167680, Minibatch Loss= 0.005558, Training Accuracy= 1.00000
    Iter 168960, Minibatch Loss= 0.010750, Training Accuracy= 1.00000
    Iter 170240, Minibatch Loss= 0.018523, Training Accuracy= 0.99219
    Iter 171520, Minibatch Loss= 0.032109, Training Accuracy= 0.99219
    Iter 172800, Minibatch Loss= 0.039989, Training Accuracy= 0.98438
    Iter 174080, Minibatch Loss= 0.054401, Training Accuracy= 0.97656
    Iter 175360, Minibatch Loss= 0.034869, Training Accuracy= 0.98438
    Iter 176640, Minibatch Loss= 0.037800, Training Accuracy= 0.98438
    Iter 177920, Minibatch Loss= 0.043919, Training Accuracy= 0.98438
    Iter 179200, Minibatch Loss= 0.022500, Training Accuracy= 1.00000
    Iter 180480, Minibatch Loss= 0.016076, Training Accuracy= 0.99219
    Iter 181760, Minibatch Loss= 0.025380, Training Accuracy= 0.99219
    Iter 183040, Minibatch Loss= 0.009514, Training Accuracy= 1.00000
    Iter 184320, Minibatch Loss= 0.040801, Training Accuracy= 0.99219
    Iter 185600, Minibatch Loss= 0.002740, Training Accuracy= 1.00000
    Iter 186880, Minibatch Loss= 0.023875, Training Accuracy= 1.00000
    Iter 188160, Minibatch Loss= 0.017125, Training Accuracy= 0.99219
    Iter 189440, Minibatch Loss= 0.005367, Training Accuracy= 1.00000
    Iter 190720, Minibatch Loss= 0.035036, Training Accuracy= 0.98438
    Iter 192000, Minibatch Loss= 0.035269, Training Accuracy= 0.99219
    Iter 193280, Minibatch Loss= 0.013947, Training Accuracy= 1.00000
    Iter 194560, Minibatch Loss= 0.017312, Training Accuracy= 1.00000
    Iter 195840, Minibatch Loss= 0.025173, Training Accuracy= 0.99219
    Iter 197120, Minibatch Loss= 0.017414, Training Accuracy= 1.00000
    Iter 198400, Minibatch Loss= 0.015304, Training Accuracy= 1.00000
    Iter 199680, Minibatch Loss= 0.017820, Training Accuracy= 0.99219
    Iter 200960, Minibatch Loss= 0.009961, Training Accuracy= 1.00000
    Iter 202240, Minibatch Loss= 0.006432, Training Accuracy= 1.00000
    Iter 203520, Minibatch Loss= 0.024251, Training Accuracy= 0.99219
    Iter 204800, Minibatch Loss= 0.055171, Training Accuracy= 0.98438
    Iter 206080, Minibatch Loss= 0.042602, Training Accuracy= 0.98438
    Iter 207360, Minibatch Loss= 0.072541, Training Accuracy= 0.98438
    Iter 208640, Minibatch Loss= 0.031647, Training Accuracy= 0.99219
    Iter 209920, Minibatch Loss= 0.012700, Training Accuracy= 1.00000
    Iter 211200, Minibatch Loss= 0.016038, Training Accuracy= 1.00000
    Iter 212480, Minibatch Loss= 0.020043, Training Accuracy= 0.98438
    Iter 213760, Minibatch Loss= 0.083469, Training Accuracy= 0.97656
    Iter 215040, Minibatch Loss= 0.020695, Training Accuracy= 0.99219
    Iter 216320, Minibatch Loss= 0.050699, Training Accuracy= 0.99219
    Iter 217600, Minibatch Loss= 0.060773, Training Accuracy= 0.97656
    Iter 218880, Minibatch Loss= 0.019434, Training Accuracy= 0.99219
    Iter 220160, Minibatch Loss= 0.047230, Training Accuracy= 0.99219
    Iter 221440, Minibatch Loss= 0.017340, Training Accuracy= 0.99219
    Iter 222720, Minibatch Loss= 0.008496, Training Accuracy= 1.00000
    Iter 224000, Minibatch Loss= 0.010664, Training Accuracy= 1.00000
    Iter 225280, Minibatch Loss= 0.025711, Training Accuracy= 0.99219
    Iter 226560, Minibatch Loss= 0.043708, Training Accuracy= 0.99219
    Iter 227840, Minibatch Loss= 0.026874, Training Accuracy= 0.99219
    Iter 229120, Minibatch Loss= 0.058900, Training Accuracy= 0.99219
    Iter 230400, Minibatch Loss= 0.012794, Training Accuracy= 1.00000
    Iter 231680, Minibatch Loss= 0.014411, Training Accuracy= 1.00000
    Iter 232960, Minibatch Loss= 0.040787, Training Accuracy= 0.97656
    Iter 234240, Minibatch Loss= 0.003678, Training Accuracy= 1.00000
    Iter 235520, Minibatch Loss= 0.065841, Training Accuracy= 0.98438
    Iter 236800, Minibatch Loss= 0.035435, Training Accuracy= 0.99219
    Iter 238080, Minibatch Loss= 0.067663, Training Accuracy= 0.97656
    Iter 239360, Minibatch Loss= 0.028159, Training Accuracy= 0.99219
    Iter 240640, Minibatch Loss= 0.012921, Training Accuracy= 1.00000
    Iter 241920, Minibatch Loss= 0.008625, Training Accuracy= 1.00000
    Iter 243200, Minibatch Loss= 0.006120, Training Accuracy= 1.00000
    Iter 244480, Minibatch Loss= 0.010980, Training Accuracy= 1.00000
    Iter 245760, Minibatch Loss= 0.011131, Training Accuracy= 0.99219
    Iter 247040, Minibatch Loss= 0.043961, Training Accuracy= 0.98438
    Iter 248320, Minibatch Loss= 0.008168, Training Accuracy= 1.00000
    Iter 249600, Minibatch Loss= 0.017708, Training Accuracy= 1.00000
    Iter 250880, Minibatch Loss= 0.010425, Training Accuracy= 1.00000
    Iter 252160, Minibatch Loss= 0.045483, Training Accuracy= 0.99219
    Iter 253440, Minibatch Loss= 0.030599, Training Accuracy= 0.99219
    Iter 254720, Minibatch Loss= 0.072293, Training Accuracy= 0.99219
    Iter 256000, Minibatch Loss= 0.021459, Training Accuracy= 0.99219
    Iter 257280, Minibatch Loss= 0.012665, Training Accuracy= 1.00000
    Iter 258560, Minibatch Loss= 0.036428, Training Accuracy= 0.99219
    Iter 259840, Minibatch Loss= 0.019121, Training Accuracy= 0.99219
    Iter 261120, Minibatch Loss= 0.004581, Training Accuracy= 1.00000
    Iter 262400, Minibatch Loss= 0.015861, Training Accuracy= 1.00000
    Iter 263680, Minibatch Loss= 0.024076, Training Accuracy= 0.99219
    Iter 264960, Minibatch Loss= 0.051509, Training Accuracy= 0.98438
    Iter 266240, Minibatch Loss= 0.006098, Training Accuracy= 1.00000
    Iter 267520, Minibatch Loss= 0.041859, Training Accuracy= 0.96875
    Iter 268800, Minibatch Loss= 0.009746, Training Accuracy= 1.00000
    Iter 270080, Minibatch Loss= 0.001752, Training Accuracy= 1.00000
    Iter 271360, Minibatch Loss= 0.046369, Training Accuracy= 0.97656
    Iter 272640, Minibatch Loss= 0.103629, Training Accuracy= 0.97656
    Iter 273920, Minibatch Loss= 0.057045, Training Accuracy= 0.98438
    Iter 275200, Minibatch Loss= 0.011060, Training Accuracy= 1.00000
    Iter 276480, Minibatch Loss= 0.046792, Training Accuracy= 0.98438
    Iter 277760, Minibatch Loss= 0.010364, Training Accuracy= 1.00000
    Iter 279040, Minibatch Loss= 0.010360, Training Accuracy= 1.00000
    Iter 280320, Minibatch Loss= 0.003965, Training Accuracy= 1.00000
    Iter 281600, Minibatch Loss= 0.009797, Training Accuracy= 1.00000
    Iter 282880, Minibatch Loss= 0.008067, Training Accuracy= 1.00000
    Iter 284160, Minibatch Loss= 0.058110, Training Accuracy= 0.98438
    Iter 285440, Minibatch Loss= 0.024091, Training Accuracy= 0.98438
    Iter 286720, Minibatch Loss= 0.027435, Training Accuracy= 0.99219
    Iter 288000, Minibatch Loss= 0.040484, Training Accuracy= 0.99219
    Iter 289280, Minibatch Loss= 0.012955, Training Accuracy= 1.00000
    Iter 290560, Minibatch Loss= 0.005558, Training Accuracy= 1.00000
    Iter 291840, Minibatch Loss= 0.021675, Training Accuracy= 0.99219
    Iter 293120, Minibatch Loss= 0.003508, Training Accuracy= 1.00000
    Iter 294400, Minibatch Loss= 0.004357, Training Accuracy= 1.00000
    Iter 295680, Minibatch Loss= 0.044073, Training Accuracy= 0.99219
    Iter 296960, Minibatch Loss= 0.004622, Training Accuracy= 1.00000
    Iter 298240, Minibatch Loss= 0.009959, Training Accuracy= 1.00000
    Iter 299520, Minibatch Loss= 0.006476, Training Accuracy= 1.00000
    Iter 300800, Minibatch Loss= 0.021495, Training Accuracy= 0.99219
    Iter 302080, Minibatch Loss= 0.029417, Training Accuracy= 0.98438
    Iter 303360, Minibatch Loss= 0.013953, Training Accuracy= 0.99219
    Iter 304640, Minibatch Loss= 0.008299, Training Accuracy= 1.00000
    Iter 305920, Minibatch Loss= 0.007523, Training Accuracy= 1.00000
    Iter 307200, Minibatch Loss= 0.007179, Training Accuracy= 1.00000
    Iter 308480, Minibatch Loss= 0.014411, Training Accuracy= 1.00000
    Iter 309760, Minibatch Loss= 0.027598, Training Accuracy= 0.98438
    Iter 311040, Minibatch Loss= 0.004308, Training Accuracy= 1.00000
    Iter 312320, Minibatch Loss= 0.044667, Training Accuracy= 0.99219
    Iter 313600, Minibatch Loss= 0.014869, Training Accuracy= 0.99219
    Iter 314880, Minibatch Loss= 0.031319, Training Accuracy= 0.99219
    Iter 316160, Minibatch Loss= 0.027762, Training Accuracy= 0.99219
    Iter 317440, Minibatch Loss= 0.012901, Training Accuracy= 1.00000
    Iter 318720, Minibatch Loss= 0.052712, Training Accuracy= 0.99219
    Iter 320000, Minibatch Loss= 0.035323, Training Accuracy= 0.99219
    Iter 321280, Minibatch Loss= 0.019957, Training Accuracy= 0.99219
    Iter 322560, Minibatch Loss= 0.056922, Training Accuracy= 0.99219
    Iter 323840, Minibatch Loss= 0.010080, Training Accuracy= 1.00000
    Iter 325120, Minibatch Loss= 0.016662, Training Accuracy= 0.99219
    Iter 326400, Minibatch Loss= 0.027193, Training Accuracy= 0.99219
    Iter 327680, Minibatch Loss= 0.005094, Training Accuracy= 1.00000
    Iter 328960, Minibatch Loss= 0.091339, Training Accuracy= 0.98438
    Iter 330240, Minibatch Loss= 0.010667, Training Accuracy= 0.99219
    Iter 331520, Minibatch Loss= 0.042124, Training Accuracy= 0.99219
    Iter 332800, Minibatch Loss= 0.036627, Training Accuracy= 0.99219
    Iter 334080, Minibatch Loss= 0.006127, Training Accuracy= 1.00000
    Iter 335360, Minibatch Loss= 0.083918, Training Accuracy= 0.97656
    Iter 336640, Minibatch Loss= 0.033910, Training Accuracy= 0.99219
    Iter 337920, Minibatch Loss= 0.030416, Training Accuracy= 0.99219
    Iter 339200, Minibatch Loss= 0.003682, Training Accuracy= 1.00000
    Iter 340480, Minibatch Loss= 0.006563, Training Accuracy= 1.00000
    Iter 341760, Minibatch Loss= 0.006030, Training Accuracy= 1.00000
    Iter 343040, Minibatch Loss= 0.007000, Training Accuracy= 1.00000
    Iter 344320, Minibatch Loss= 0.017850, Training Accuracy= 1.00000
    Iter 345600, Minibatch Loss= 0.008664, Training Accuracy= 1.00000
    Iter 346880, Minibatch Loss= 0.003662, Training Accuracy= 1.00000
    Iter 348160, Minibatch Loss= 0.007541, Training Accuracy= 1.00000
    Iter 349440, Minibatch Loss= 0.047478, Training Accuracy= 0.96875
    Iter 350720, Minibatch Loss= 0.007339, Training Accuracy= 1.00000
    Iter 352000, Minibatch Loss= 0.011754, Training Accuracy= 1.00000
    Iter 353280, Minibatch Loss= 0.018768, Training Accuracy= 1.00000
    Iter 354560, Minibatch Loss= 0.016439, Training Accuracy= 0.99219
    Iter 355840, Minibatch Loss= 0.015078, Training Accuracy= 1.00000
    Iter 357120, Minibatch Loss= 0.020177, Training Accuracy= 1.00000
    Iter 358400, Minibatch Loss= 0.007929, Training Accuracy= 1.00000
    Iter 359680, Minibatch Loss= 0.008112, Training Accuracy= 1.00000
    Iter 360960, Minibatch Loss= 0.012581, Training Accuracy= 0.99219
    Iter 362240, Minibatch Loss= 0.002359, Training Accuracy= 1.00000
    Iter 363520, Minibatch Loss= 0.022748, Training Accuracy= 0.99219
    Iter 364800, Minibatch Loss= 0.009527, Training Accuracy= 1.00000
    Iter 366080, Minibatch Loss= 0.074769, Training Accuracy= 0.99219
    Iter 367360, Minibatch Loss= 0.009048, Training Accuracy= 1.00000
    Iter 368640, Minibatch Loss= 0.018985, Training Accuracy= 0.99219
    Iter 369920, Minibatch Loss= 0.022493, Training Accuracy= 0.98438
    Iter 371200, Minibatch Loss= 0.010531, Training Accuracy= 1.00000
    Iter 372480, Minibatch Loss= 0.005892, Training Accuracy= 1.00000
    Iter 373760, Minibatch Loss= 0.012961, Training Accuracy= 1.00000
    Iter 375040, Minibatch Loss= 0.017029, Training Accuracy= 0.99219
    Iter 376320, Minibatch Loss= 0.024589, Training Accuracy= 0.98438
    Iter 377600, Minibatch Loss= 0.039714, Training Accuracy= 0.99219
    Iter 378880, Minibatch Loss= 0.019382, Training Accuracy= 0.99219
    Iter 380160, Minibatch Loss= 0.024236, Training Accuracy= 1.00000
    Iter 381440, Minibatch Loss= 0.005332, Training Accuracy= 1.00000
    Iter 382720, Minibatch Loss= 0.019701, Training Accuracy= 0.99219
    Iter 384000, Minibatch Loss= 0.064576, Training Accuracy= 0.98438
    Iter 385280, Minibatch Loss= 0.069708, Training Accuracy= 0.99219
    Iter 386560, Minibatch Loss= 0.035944, Training Accuracy= 0.99219
    Iter 387840, Minibatch Loss= 0.007348, Training Accuracy= 1.00000
    Iter 389120, Minibatch Loss= 0.021412, Training Accuracy= 0.99219
    Iter 390400, Minibatch Loss= 0.003215, Training Accuracy= 1.00000
    Iter 391680, Minibatch Loss= 0.003086, Training Accuracy= 1.00000
    Iter 392960, Minibatch Loss= 0.073474, Training Accuracy= 0.98438
    Iter 394240, Minibatch Loss= 0.024271, Training Accuracy= 0.99219
    Iter 395520, Minibatch Loss= 0.016379, Training Accuracy= 0.99219
    Iter 396800, Minibatch Loss= 0.009097, Training Accuracy= 1.00000
    Iter 398080, Minibatch Loss= 0.042525, Training Accuracy= 0.98438
    Iter 399360, Minibatch Loss= 0.020048, Training Accuracy= 0.99219
    Optimization Finished!
    ('Testing Accuracy:', 0.9835)
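    Note: the deprecation warning at the top of the log comes from tf.nn.softmax_cross_entropy_with_logits, which TF 1.x deprecates in favor of the _v2 variant. A likely one-line fix for the script (an assumption, not part of the original code) is:

    # Use the v2 op (TF 1.5+) and stop gradients on the labels explicitly.
    cost = tf.reduce_mean(tf.nn.softmax_cross_entropy_with_logits_v2(
        logits=pred, labels=tf.stop_gradient(y)))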
  • Original article: https://www.cnblogs.com/yuanzhenliu/p/9541795.html