  • Understanding Selected TensorFlow Functions (Part 1)

    Functions covered in this article:
    tf.nn.conv2d, tf.nn.relu, tf.nn.max_pool, tf.nn.dropout, tf.nn.sigmoid_cross_entropy_with_logits, tf.truncated_normal, tf.constant, tf.placeholder, tf.nn.bias_add,
    tf.reduce_mean, tf.squared_difference, tf.square, and tf.Variable.

    tf.nn.conv2d

    import tensorflow as tf
    
    a = tf.constant([1,1,1,0,0,0,1,1,1,0,0,0,1,1,1,0,0,1,1,0,0,1,1,0,0],dtype=tf.float32,shape=[1,5,5,1])
    b = tf.constant([1,0,1,0,1,0,1,0,1],dtype=tf.float32,shape=[3,3,1,1])
    c = tf.nn.conv2d(a, b, strides=[1, 1, 1, 1], padding='VALID')
    d = tf.nn.conv2d(a, b, strides=[1, 1, 1, 1], padding='SAME')
    with tf.Session() as sess:
        print ("c shape:")
        print (c.shape)
        print ("c value:")
        print (sess.run(c))
        print ("d shape:")
        print (d.shape)
        print ("d value:")
        print (sess.run(d))

    Then run:

    cd /home/ubuntu;
    python conv2d.py

    Output:

    c shape:
    (1, 3, 3, 1)
    c value:
    [[[[ 4.]
       [ 3.]
       [ 4.]]
    
      [[ 2.]
       [ 4.]
       [ 3.]]
    
      [[ 2.]
       [ 3.]
       [ 4.]]]]
    d shape:
    (1, 5, 5, 1)
    d value:
    [[[[ 2.]
       [ 2.]
       [ 3.]
       [ 1.]
       [ 1.]]
    
      [[ 1.]
       [ 4.]
       [ 3.]
       [ 4.]
       [ 1.]]
    
      [[ 1.]
       [ 2.]
       [ 4.]
       [ 3.]
       [ 3.]]
    
      [[ 1.]
       [ 2.]
       [ 3.]
       [ 4.]
       [ 1.]]
    
      [[ 0.]
       [ 2.]
       [ 2.]
       [ 1.]
       [ 1.]]]]
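
    The two shapes follow the standard output-size rules for each padding mode. A minimal sketch (plain Python, not part of the original script) that reproduces the spatial sizes 3 and 5 above, assuming the usual formulas ceil((in - kernel + 1) / stride) for 'VALID' and ceil(in / stride) for 'SAME':

    import math

    def conv_output_size(in_size, kernel_size, stride, padding):
        # 'VALID' keeps only fully covered positions; 'SAME' pads so the
        # output size depends only on the input size and stride.
        if padding == 'VALID':
            return math.ceil((in_size - kernel_size + 1) / stride)
        return math.ceil(in_size / stride)

    print(conv_output_size(5, 3, 1, 'VALID'))  # 3 -> shape (1, 3, 3, 1)
    print(conv_output_size(5, 3, 1, 'SAME'))   # 5 -> shape (1, 5, 5, 1)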

    tf.nn.relu
    import tensorflow as tf
    
    a = tf.constant([1,-2,0,4,-5,6])
    b = tf.nn.relu(a)
    with tf.Session() as sess:
        print (sess.run(b))

    Then run:

    cd /home/ubuntu;
    python relu.py

    Output:

    [1 0 0 4 0 6]


    tf.nn.max_pool
    import tensorflow as tf
    
    a = tf.constant([1,3,2,1,2,9,1,1,1,3,2,3,5,6,1,2],dtype=tf.float32,shape=[1,4,4,1])
    b = tf.nn.max_pool(a,ksize=[1, 2, 2, 1],strides=[1, 2, 2, 1],padding='VALID')
    c = tf.nn.max_pool(a,ksize=[1, 2, 2, 1],strides=[1, 2, 2, 1],padding='SAME')
    with tf.Session() as sess:
        print ("b shape:")
        print (b.shape)
        print ("b value:")
        print (sess.run(b))
        print ("c shape:")
        print (c.shape)
        print ("c value:")
        print (sess.run(c))

    Then run:

    cd /home/ubuntu;
    python max_pool.py

    Output:

    b shape:
    (1, 2, 2, 1)
    b value:
    [[[[ 9.]
       [ 2.]]
    
      [[ 6.]
       [ 3.]]]]
    c shape:
    (1, 2, 2, 1)
    c value:
    [[[[ 9.]
       [ 2.]]
    
      [[ 6.]
       [ 3.]]]]
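
    Here 'VALID' and 'SAME' give the same 2x2 result because the 4x4 input divides evenly into 2x2 windows with stride 2. As a sanity check (not part of the original script), the same pooling can be reproduced in NumPy:

    import numpy as np

    a = np.array([1,3,2,1,2,9,1,1,1,3,2,3,5,6,1,2], dtype=np.float32).reshape(4, 4)
    # Split the 4x4 matrix into non-overlapping 2x2 blocks and take each block's
    # max, which is what ksize=[1,2,2,1], strides=[1,2,2,1] does for one channel.
    pooled = a.reshape(2, 2, 2, 2).max(axis=(1, 3))
    print(pooled)  # [[9. 2.] [6. 3.]]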

    tf.nn.dropout
    import tensorflow as tf
    
    a = tf.constant([1,2,3,4,5,6],shape=[2,3],dtype=tf.float32)
    b = tf.placeholder(tf.float32)
    c = tf.nn.dropout(a, keep_prob=b, noise_shape=[2, 1], seed=1)
    with tf.Session() as sess:
        sess.run(tf.global_variables_initializer())
        print (sess.run(c,feed_dict={b:0.75}))

    Then run:

    cd /home/ubuntu;
    python dropout.py

    Output:

    [[ 0.          0.          0.        ]
     [ 5.33333349  6.66666651  8.        ]]
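
    With noise_shape=[2, 1] the keep/drop decision is made once per row and shared across that row's columns, and every kept element is scaled by 1/keep_prob = 1/0.75. In the run above the first row happened to be dropped and the second kept; a NumPy sketch of that particular outcome (dropout is random, so other runs differ):

    import numpy as np

    a = np.array([[1., 2., 3.], [4., 5., 6.]], dtype=np.float32)
    keep_prob = 0.75
    # One mask entry per row, as implied by noise_shape=[2, 1]; this is the
    # mask that produced the output shown above.
    mask = np.array([[0.], [1.]], dtype=np.float32)
    print(a * mask / keep_prob)  # [[0. 0. 0.] [5.333... 6.666... 8.]]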


    tf.nn.sigmoid_cross_entropy_with_logits

    import tensorflow as tf
    x = tf.constant([1,2,3,4,5,6,7],dtype=tf.float64)
    y = tf.constant([1,1,1,0,0,1,0],dtype=tf.float64)
    loss = tf.nn.sigmoid_cross_entropy_with_logits(labels = y,logits = x)
    with tf.Session() as sess:
        print (sess.run(loss))

    Then run:

    cd /home/ubuntu;
    python sigmoid_cross_entropy_with_logits.py

    Output:

    [  3.13261688e-01   1.26928011e-01   4.85873516e-02   4.01814993e+00
       5.00671535e+00   2.47568514e-03   7.00091147e+00]
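
    Each element is the per-example sigmoid cross-entropy y * -log(sigmoid(x)) + (1 - y) * -log(1 - sigmoid(x)), which TensorFlow evaluates in the numerically stable form max(x, 0) - x*y + log(1 + exp(-|x|)). That can be checked against the output above with a short NumPy sketch (not part of the original script):

    import numpy as np

    x = np.array([1, 2, 3, 4, 5, 6, 7], dtype=np.float64)  # logits
    y = np.array([1, 1, 1, 0, 0, 1, 0], dtype=np.float64)  # labels
    # Numerically stable form of the sigmoid cross-entropy.
    loss = np.maximum(x, 0) - x * y + np.log1p(np.exp(-np.abs(x)))
    print(loss)  # matches the TensorFlow result element by element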

    tf.truncated_normal

    import tensorflow as tf
    initial = tf.truncated_normal(shape=[3,3], mean=0, stddev=1)
    print(tf.Session().run(initial))

    Then run:

    python /home/ubuntu/truncated_normal.py

    Output:

    This prints a 3x3 matrix whose values all fall within (-2, 2): truncated_normal re-draws any sample that lands more than two standard deviations from the mean. You can also modify the source code to see how the output changes. One example run:

    [[-1.01231802 1.25015056 0.39860222]
    [ 0.43949991 -0.80240148 0.81758308]
    [-0.76539534 1.95935833 1.20631492]]
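
    To see the truncation effect on more than nine samples, here is a small check in the same TF 1.x style as the examples above (a sketch, not part of the original script): even with a large sample, every value stays within two standard deviations of the mean.

    import tensorflow as tf

    # Draw 10,000 values; truncated_normal re-draws anything outside
    # (mean - 2*stddev, mean + 2*stddev), i.e. (-2, 2) here.
    sample = tf.truncated_normal(shape=[10000], mean=0, stddev=1)
    with tf.Session() as sess:
        values = sess.run(sample)
        print(values.min(), values.max())  # both strictly inside (-2, 2)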

    tf.constant

    #!/usr/bin/python
    
    import tensorflow as tf
    import numpy as np
    a = tf.constant([1,2,3,4,5,6],shape=[2,3])
    b = tf.constant(-1,shape=[3,2])
    c = tf.matmul(a,b)
    
    e = tf.constant(np.arange(1,13,dtype=np.int32),shape=[2,2,3])
    f = tf.constant(np.arange(13,25,dtype=np.int32),shape=[2,3,2])
    g = tf.matmul(e,f)
    with tf.Session() as sess:
        print (sess.run(a))
        print ("##################################")
        print (sess.run(b))
        print ("##################################")
        print (sess.run(c))
        print ("##################################")
        print (sess.run(e))
        print ("##################################")
        print (sess.run(f))
        print ("##################################")
        print (sess.run(g))

    Then run:

    python /home/ubuntu/constant.py

    Output:

    a: a 2x3 tensor;
    b: a 3x2 tensor;
    c: a 2x2 tensor;
    e: a 2x2x3 tensor;
    f: a 2x3x2 tensor;
    g: a 2x2x2 tensor.
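
    To make those shapes concrete, the same products can be reproduced in NumPy (a sketch, not part of the original script); note that tf.matmul applied to the 3-D tensors e and f multiplies the 2x3 and 3x2 matrices batch by batch:

    import numpy as np

    a = np.array([[1, 2, 3], [4, 5, 6]])
    b = np.full((3, 2), -1)
    print(a @ b)  # [[ -6  -6] [-15 -15]] -- the 2x2 tensor c

    e = np.arange(1, 13, dtype=np.int32).reshape(2, 2, 3)
    f = np.arange(13, 25, dtype=np.int32).reshape(2, 3, 2)
    print(np.matmul(e, f))  # shape (2, 2, 2) -- the batched product g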

    tf.placeholder

    #!/usr/bin/python
    
    import tensorflow as tf
    import numpy as np
    
    x = tf.placeholder(tf.float32,[None,3])
    y = tf.matmul(x,x)
    with tf.Session() as sess:
        rand_array = np.random.rand(3,3)
        print(sess.run(y,feed_dict={x:rand_array}))

    Then run:

    python /home/ubuntu/placeholder.py

    Output:

    A 3x3 tensor is printed; the values differ from run to run because the input is random.

    [[ 1.04605961 0.45888701 0.6270988 ]
    [ 0.86465603 0.87210596 0.71620005]
    [ 0.54584444 0.44113758 0.6248076 ]]
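
    A placeholder only declares a shape and dtype; the actual value arrives through feed_dict at run time. Because y = tf.matmul(x, x) multiplies x by itself, the fed array must be square (3x3 here) even though the placeholder's first dimension is None. A deterministic variant of the same idea (a sketch, not part of the original script):

    import numpy as np
    import tensorflow as tf

    x = tf.placeholder(tf.float32, [None, 3])
    y = tf.matmul(x, x)
    with tf.Session() as sess:
        # Feeding the identity matrix makes the result reproducible: I @ I = I.
        print(sess.run(y, feed_dict={x: np.eye(3, dtype=np.float32)}))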



    tf.nn.bias_add

    #!/usr/bin/python
    
    import tensorflow as tf
    import numpy as np
    
    a = tf.constant([[1.0, 2.0],[1.0, 2.0],[1.0, 2.0]])
    b = tf.constant([2.0,1.0])
    c = tf.constant([1.0])
    sess = tf.Session()
    print (sess.run(tf.nn.bias_add(a, b)))
    # print (sess.run(tf.nn.bias_add(a, c)))  # error: bias length (1) must match a's last dimension (2)
    print ("##################################")
    print (sess.run(tf.add(a, b)))
    print ("##################################")
    print (sess.run(tf.add(a, c)))

    Output:

    [[ 3. 3.]
    [ 3. 3.]
    [ 3. 3.]]
    ##################################
    [[ 3. 3.]
    [ 3. 3.]
    [ 3. 3.]]
    ##################################
    [[ 2. 3.]
    [ 2. 3.]
    [ 2. 3.]]
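
    The commented-out line fails because tf.nn.bias_add requires the bias vector's length to equal the last dimension of the tensor (2 here), while tf.add follows ordinary broadcasting rules and therefore accepts both b and c. The same arithmetic in NumPy (a sketch, not part of the original script):

    import numpy as np

    a = np.array([[1., 2.], [1., 2.], [1., 2.]])
    b = np.array([2., 1.])   # length 2 == a.shape[-1], so bias_add accepts it
    c = np.array([1.])       # length 1, rejected by bias_add but broadcast by add
    print(a + b)  # same values as tf.nn.bias_add(a, b) and tf.add(a, b)
    print(a + c)  # same values as tf.add(a, c)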

    tf.reduce_mean

    #!/usr/bin/python
    
    import tensorflow as tf
    import numpy as np
    
    initial = [[1.,1.],[2.,2.]]
    x = tf.Variable(initial,dtype=tf.float32)
    init_op = tf.global_variables_initializer()
    with tf.Session() as sess:
        sess.run(init_op)
        print(sess.run(tf.reduce_mean(x)))
        print(sess.run(tf.reduce_mean(x,0))) # axis=0: mean of each column
        print(sess.run(tf.reduce_mean(x,1))) # axis=1: mean of each row

    Then run:

    python /home/ubuntu/reduce_mean.py

    Output:

    1.5
    [ 1.5  1.5]
    [ 1.  2.]
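
    The axis argument picks the dimension that is averaged away: with no axis the mean is over every element, axis=0 collapses the rows and leaves one mean per column, and axis=1 collapses the columns and leaves one mean per row. The equivalent NumPy calls (a sketch, not part of the original script):

    import numpy as np

    x = np.array([[1., 1.], [2., 2.]])
    print(x.mean())        # 1.5       -- mean over all elements
    print(x.mean(axis=0))  # [1.5 1.5] -- one mean per column
    print(x.mean(axis=1))  # [1.  2. ] -- one mean per row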


    tf.squared_difference

    #!/usr/bin/python
    
    import tensorflow as tf
    import numpy as np
    
    initial_x = [[1.,1.],[2.,2.]]
    x = tf.Variable(initial_x,dtype=tf.float32)
    initial_y = [[3.,3.],[4.,4.]]
    y = tf.Variable(initial_y,dtype=tf.float32)
    diff = tf.squared_difference(x,y)
    init_op = tf.global_variables_initializer()
    with tf.Session() as sess:
        sess.run(init_op)
        print(sess.run(diff))

    Then run:

    python /home/ubuntu/squared_difference.py

    Output:

    [[ 4.  4.]
     [ 4.  4.]]
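
    tf.squared_difference(x, y) computes (x - y)^2 element-wise, so each entry here is (1 - 3)^2 = (2 - 4)^2 = 4. The NumPy equivalent (a sketch, not part of the original script):

    import numpy as np

    x = np.array([[1., 1.], [2., 2.]])
    y = np.array([[3., 3.], [4., 4.]])
    print((x - y) ** 2)  # [[4. 4.] [4. 4.]]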

    tf.square

    #!/usr/bin/python
    import tensorflow as tf
    import numpy as np
    
    initial_x = [[1.,1.],[2.,2.]]
    x = tf.Variable(initial_x,dtype=tf.float32)
    x2 = tf.square(x)
    init_op = tf.global_variables_initializer()
    with tf.Session() as sess:
        sess.run(init_op)
        print(sess.run(x2))

    Then run:

    python /home/ubuntu/square.py

    Output:

    [[ 1.  1.]
     [ 4.  4.]]

     

    tf.Variable

    #!/usr/bin/python
    
    import tensorflow as tf
    initial = tf.truncated_normal(shape=[10,10],mean=0,stddev=1)
    W=tf.Variable(initial)
    list = [[1.,1.],[2.,2.]]
    X = tf.Variable(list,dtype=tf.float32)
    init_op = tf.global_variables_initializer()
    with tf.Session() as sess:
        sess.run(init_op)
        print ("##################(1)################")
        print (sess.run(W))
        print ("##################(2)################")
        print (sess.run(W[:2,:2]))
        op = W[:2,:2].assign(22.*tf.ones((2,2)))
        print ("###################(3)###############")
        print (sess.run(op))
        print ("###################(4)###############")
        print (W.eval(sess)) #computes and returns the value of this variable
        print ("####################(5)##############")
        print (W.eval())  #Usage with the default session
        print ("#####################(6)#############")
        print (W.dtype)
        print (sess.run(W.initial_value))
        print (sess.run(W.op))
        print (W.shape)
        print ("###################(7)###############")
        print (sess.run(X))

    Output:

    ##################(1)################
    [[-1.23091912 -1.15485024  0.23904395  0.34435439 -0.99782348 -0.45796475
      -1.2815994  -1.86255741  0.61719501 -0.23074889]
     [ 0.04772037 -1.87820387 -0.94470227  0.36448902 -0.61483711 -0.88883013
      -1.33075011 -0.2014154  -0.29572284 -0.64329118]
     [-0.46051967 -1.50215697  0.52736723 -0.64575762  0.40186197  0.888547
       0.41293475  0.58065104  0.42752498 -0.41847843]
     [ 0.2490586  -0.70486099  0.12240842 -0.99978852  0.2651979   1.02804005
      -0.58180624 -0.32164943  0.02628148  1.41673708]
     [ 0.45682913  0.25587147  0.21995042  0.7875219   0.05864362 -0.18229504
       1.59454536  1.06499553  0.31585202 -0.08250634]
     [ 1.28422952 -0.09098073  0.08750965  0.58767647 -0.18843929  1.00211585
      -0.34881082 -0.88564688  0.59491009 -0.25224382]
     [-1.40284967  0.22108991 -1.71350789 -0.02776204  1.19743824  1.53484929
      -0.51727623 -0.58549863 -0.1318036  -1.1405164 ]
     [-0.89546037  0.8151502  -0.05706482  0.14027117 -0.01335291  1.14979923
      -0.11841752 -0.07685678 -0.37184918 -0.05404587]
     [-1.04701281  0.47635376 -0.67598844  0.44912511 -0.19697872  0.68457508
      -0.41106322  0.9739325   1.16200626  0.34319773]
     [ 0.77753568 -0.06508502  0.3194975  -0.73810351  0.79470289 -0.99434441
       1.00614071 -0.59807277  1.38162911  0.42871621]]
    ##################(2)################
    [[-1.23091912 -1.15485024]
     [ 0.04772037 -1.87820387]]
    ###################(3)###############
    [[  2.20000000e+01   2.20000000e+01   2.39043951e-01   3.44354391e-01
       -9.97823477e-01  -4.57964748e-01  -1.28159940e+00  -1.86255741e+00
        6.17195010e-01  -2.30748892e-01]
     [  2.20000000e+01   2.20000000e+01  -9.44702268e-01   3.64489019e-01
       -6.14837110e-01  -8.88830125e-01  -1.33075011e+00  -2.01415405e-01
       -2.95722842e-01  -6.43291175e-01]
     [ -4.60519671e-01  -1.50215697e+00   5.27367234e-01  -6.45757616e-01
        4.01861966e-01   8.88547003e-01   4.12934750e-01   5.80651045e-01
        4.27524984e-01  -4.18478429e-01]
     [  2.49058604e-01  -7.04860985e-01   1.22408420e-01  -9.99788523e-01
        2.65197903e-01   1.02804005e+00  -5.81806242e-01  -3.21649432e-01
        2.62814816e-02   1.41673708e+00]
     [  4.56829131e-01   2.55871475e-01   2.19950423e-01   7.87521899e-01
        5.86436242e-02  -1.82295039e-01   1.59454536e+00   1.06499553e+00
        3.15852016e-01  -8.25063437e-02]
     [  1.28422952e+00  -9.09807310e-02   8.75096470e-02   5.87676466e-01
       -1.88439295e-01   1.00211585e+00  -3.48810822e-01  -8.85646880e-01
        5.94910085e-01  -2.52243817e-01]
     [ -1.40284967e+00   2.21089914e-01  -1.71350789e+00  -2.77620405e-02
        1.19743824e+00   1.53484929e+00  -5.17276227e-01  -5.85498631e-01
       -1.31803602e-01  -1.14051640e+00]
     [ -8.95460367e-01   8.15150201e-01  -5.70648164e-02   1.40271172e-01
       -1.33529110e-02   1.14979923e+00  -1.18417524e-01  -7.68567771e-02
       -3.71849179e-01  -5.40458746e-02]
     [ -1.04701281e+00   4.76353765e-01  -6.75988436e-01   4.49125111e-01
       -1.96978718e-01   6.84575081e-01  -4.11063224e-01   9.73932505e-01
        1.16200626e+00   3.43197733e-01]
     [  7.77535677e-01  -6.50850236e-02   3.19497496e-01  -7.38103509e-01
        7.94702888e-01  -9.94344413e-01   1.00614071e+00  -5.98072767e-01
        1.38162911e+00   4.28716213e-01]]
    ###################(4)###############
    [[  2.20000000e+01   2.20000000e+01   2.39043951e-01   3.44354391e-01
       -9.97823477e-01  -4.57964748e-01  -1.28159940e+00  -1.86255741e+00
        6.17195010e-01  -2.30748892e-01]
     [  2.20000000e+01   2.20000000e+01  -9.44702268e-01   3.64489019e-01
       -6.14837110e-01  -8.88830125e-01  -1.33075011e+00  -2.01415405e-01
       -2.95722842e-01  -6.43291175e-01]
     [ -4.60519671e-01  -1.50215697e+00   5.27367234e-01  -6.45757616e-01
        4.01861966e-01   8.88547003e-01   4.12934750e-01   5.80651045e-01
        4.27524984e-01  -4.18478429e-01]
     [  2.49058604e-01  -7.04860985e-01   1.22408420e-01  -9.99788523e-01
        2.65197903e-01   1.02804005e+00  -5.81806242e-01  -3.21649432e-01
        2.62814816e-02   1.41673708e+00]
     [  4.56829131e-01   2.55871475e-01   2.19950423e-01   7.87521899e-01
        5.86436242e-02  -1.82295039e-01   1.59454536e+00   1.06499553e+00
        3.15852016e-01  -8.25063437e-02]
     [  1.28422952e+00  -9.09807310e-02   8.75096470e-02   5.87676466e-01
       -1.88439295e-01   1.00211585e+00  -3.48810822e-01  -8.85646880e-01
        5.94910085e-01  -2.52243817e-01]
     [ -1.40284967e+00   2.21089914e-01  -1.71350789e+00  -2.77620405e-02
        1.19743824e+00   1.53484929e+00  -5.17276227e-01  -5.85498631e-01
       -1.31803602e-01  -1.14051640e+00]
     [ -8.95460367e-01   8.15150201e-01  -5.70648164e-02   1.40271172e-01
       -1.33529110e-02   1.14979923e+00  -1.18417524e-01  -7.68567771e-02
       -3.71849179e-01  -5.40458746e-02]
     [ -1.04701281e+00   4.76353765e-01  -6.75988436e-01   4.49125111e-01
       -1.96978718e-01   6.84575081e-01  -4.11063224e-01   9.73932505e-01
        1.16200626e+00   3.43197733e-01]
     [  7.77535677e-01  -6.50850236e-02   3.19497496e-01  -7.38103509e-01
        7.94702888e-01  -9.94344413e-01   1.00614071e+00  -5.98072767e-01
        1.38162911e+00   4.28716213e-01]]
    ####################(5)##############
    [[  2.20000000e+01   2.20000000e+01   2.39043951e-01   3.44354391e-01
       -9.97823477e-01  -4.57964748e-01  -1.28159940e+00  -1.86255741e+00
        6.17195010e-01  -2.30748892e-01]
     [  2.20000000e+01   2.20000000e+01  -9.44702268e-01   3.64489019e-01
       -6.14837110e-01  -8.88830125e-01  -1.33075011e+00  -2.01415405e-01
       -2.95722842e-01  -6.43291175e-01]
     [ -4.60519671e-01  -1.50215697e+00   5.27367234e-01  -6.45757616e-01
        4.01861966e-01   8.88547003e-01   4.12934750e-01   5.80651045e-01
        4.27524984e-01  -4.18478429e-01]
     [  2.49058604e-01  -7.04860985e-01   1.22408420e-01  -9.99788523e-01
        2.65197903e-01   1.02804005e+00  -5.81806242e-01  -3.21649432e-01
        2.62814816e-02   1.41673708e+00]
     [  4.56829131e-01   2.55871475e-01   2.19950423e-01   7.87521899e-01
        5.86436242e-02  -1.82295039e-01   1.59454536e+00   1.06499553e+00
        3.15852016e-01  -8.25063437e-02]
     [  1.28422952e+00  -9.09807310e-02   8.75096470e-02   5.87676466e-01
       -1.88439295e-01   1.00211585e+00  -3.48810822e-01  -8.85646880e-01
        5.94910085e-01  -2.52243817e-01]
     [ -1.40284967e+00   2.21089914e-01  -1.71350789e+00  -2.77620405e-02
        1.19743824e+00   1.53484929e+00  -5.17276227e-01  -5.85498631e-01
       -1.31803602e-01  -1.14051640e+00]
     [ -8.95460367e-01   8.15150201e-01  -5.70648164e-02   1.40271172e-01
       -1.33529110e-02   1.14979923e+00  -1.18417524e-01  -7.68567771e-02
       -3.71849179e-01  -5.40458746e-02]
     [ -1.04701281e+00   4.76353765e-01  -6.75988436e-01   4.49125111e-01
       -1.96978718e-01   6.84575081e-01  -4.11063224e-01   9.73932505e-01
        1.16200626e+00   3.43197733e-01]
     [  7.77535677e-01  -6.50850236e-02   3.19497496e-01  -7.38103509e-01
        7.94702888e-01  -9.94344413e-01   1.00614071e+00  -5.98072767e-01
        1.38162911e+00   4.28716213e-01]]
    #####################(6)#############
    <dtype: 'float32_ref'>
    [[-0.41857633 -0.2713519   0.30368868  0.20746167  1.85322762  1.31566119
       1.54675031 -1.72509181  0.05661546  0.07088134]
     [ 1.67809737  0.83413428 -0.46248889 -0.64880568  1.0052985   0.28734493
       1.02057004  1.30170429 -0.92802709 -0.13301572]
     [-1.3703959  -0.96703321  0.81257963 -0.88620949 -0.0416972   0.41219631
      -0.77539968 -0.87115741 -0.61586332 -1.07051158]
     [-1.20221102  1.009269    0.53348398 -0.78492016 -1.57486057 -0.37586671
       0.79054028  0.42812335  0.50074643 -0.22152463]
     [-0.38758773  0.26680526 -0.07168344 -0.19825138 -0.0245118   0.76605487
      -1.60584402 -0.83085275 -1.21274364  0.12311368]
     [ 0.92161274  0.96963346 -0.51853895  0.39782578 -0.11624574  0.23405044
      -0.77997881 -1.42478561 -0.46830443 -0.2615248 ]
     [ 0.1299911  -0.64964086  1.48451924  0.13839777 -0.78998685 -0.6932441
      -0.05188456  0.72245222 -0.12273535 -0.16151385]
     [-0.93579388  1.08634007 -0.35739595 -1.54274142  0.42254066  0.74695534
      -0.0469315  -1.41842675  0.41519207 -0.59990394]
     [-1.28783917 -1.86210358 -0.63155401 -0.37928078 -1.80430996 -0.81117511
       1.12262106  1.10448146 -0.10529845  1.29226148]
     [-1.38174736  1.05984509 -0.46125889  1.05563366 -1.37600601  0.44229579
       1.21501267  0.55204743  0.11826833  0.17191544]]
    None
    (10, 10)
    ###################(7)###############
    [[ 1.  1.]
     [ 2.  2.]]
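
    A few things worth noting in the output above: step (3) runs the slice assignment, so steps (4) and (5) print the same updated matrix with 22 in the top-left 2x2 block; in step (6), sess.run(W.initial_value) re-evaluates the truncated_normal initializer and therefore prints fresh random values rather than W's current contents, and sess.run(W.op) prints None because the variable op itself produces no fetchable value. A minimal sketch of the slice-assignment pattern on its own, in the same TF 1.x style (not part of the original script):

    import tensorflow as tf

    v = tf.Variable(tf.zeros([3, 3]))
    update = v[:2, :2].assign(tf.ones([2, 2]))  # assign into a slice, as in step (3)
    with tf.Session() as sess:
        sess.run(tf.global_variables_initializer())
        sess.run(update)
        print(sess.run(v))  # top-left 2x2 block is now ones, the rest still zeros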