  • Arithmetic operations, tensor conversions, matrix operations, and reductions

    Arithmetic operations

    TensorFlow's arithmetic operations are listed below:

    Operation                                                 Description

    tf.add(x, y, name=None)                                   Addition

    tf.sub(x, y, name=None)                                   Subtraction

    tf.mul(x, y, name=None)                                   Multiplication

    tf.div(x, y, name=None)                                   Division

    tf.mod(x, y, name=None)                                   Modulo

    tf.abs(x, name=None)                                      Absolute value

    tf.neg(x, name=None)                                      Negation (y = -x)

    tf.sign(x, name=None)                                     Sign: y = sign(x) = -1 if x < 0; 0 if x == 0; 1 if x > 0

    tf.inv(x, name=None)                                      Reciprocal (y = 1/x)

    tf.square(x, name=None)                                   Square (y = x * x = x^2)

    tf.round(x, name=None)                                    Round to the nearest integer

    # 'a' is [0.9, 2.5, 2.3, -4.4]
    tf.round(a) ==> [ 1.0, 3.0, 2.0, -4.0 ]

    tf.sqrt(x, name=None)                                     Square root (y = sqrt{x} = x^{1/2})

    tf.pow(x, y, name=None)                                   Element-wise power

    # tensor 'x' is [[2, 2], [3, 3]]
    # tensor 'y' is [[8, 16], [2, 3]]
    tf.pow(x, y) ==> [[256, 65536], [9, 27]]

    tf.exp(x, name=None)                                      Exponential (y = e^x)

    tf.log(x, name=None)                                      Logarithm: with one input, the natural log (ln x); with two inputs, the log of the first with the second as the base

    tf.maximum(x, y, name=None)                               Element-wise maximum (x > y ? x : y)

    tf.minimum(x, y, name=None)                               Element-wise minimum (x < y ? x : y)

    tf.cos(x, name=None)                                      Trigonometric cosine

    tf.sin(x, name=None)                                      Trigonometric sine

    tf.tan(x, name=None)                                      Trigonometric tangent

    tf.atan(x, name=None)                                     Inverse tangent (arctan)
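    A minimal runnable sketch of a few of these element-wise operations, assuming TensorFlow 2.x with eager execution; note that several names in the table come from the pre-1.0 API (tf.sub, tf.mul, tf.div, tf.neg, and tf.inv are now tf.subtract, tf.multiply, tf.divide, tf.negative, and tf.math.reciprocal):

    import tensorflow as tf  # assumes TensorFlow 2.x, eager execution on by default

    x = tf.constant([0.9, 2.5, 2.3, -4.4])
    y = tf.constant([1.0, 2.0, 3.0, 4.0])

    print(tf.add(x, y))        # element-wise sum
    print(tf.subtract(x, y))   # element-wise difference (tf.sub above)
    print(tf.multiply(x, y))   # element-wise product (tf.mul above)
    print(tf.abs(x))           # absolute value
    print(tf.sign(x))          # -1 / 0 / 1 per element
    print(tf.round(x))         # round to the nearest integer
    print(tf.pow(tf.constant([2.0, 3.0]), tf.constant([8.0, 2.0])))  # [256., 9.]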

    Tensor conversions

    Data type casting (Casting)

    Operation                                                 Description

    tf.string_to_number
    (string_tensor, out_type=None, name=None)                 Converts a string tensor to numbers

    tf.to_double(x, name='ToDouble')                          Casts to 64-bit floating point (float64)

    tf.to_float(x, name='ToFloat')                            Casts to 32-bit floating point (float32)

    tf.to_int32(x, name='ToInt32')                            Casts to 32-bit integer (int32)

    tf.to_int64(x, name='ToInt64')                            Casts to 64-bit integer (int64)

    tf.cast(x, dtype, name=None)                              Casts x (or x.values) to dtype

    # tensor 'a' is [1.8, 2.2], dtype=tf.float32
    tf.cast(a, tf.int32) ==> [1, 2]  # dtype=tf.int32
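    A minimal casting sketch, assuming TensorFlow 2.x, where tf.to_float / tf.to_int32 and friends have been folded into tf.cast and tf.string_to_number lives at tf.strings.to_number:

    import tensorflow as tf

    a = tf.constant([1.8, 2.2], dtype=tf.float32)
    print(tf.cast(a, tf.int32))   # [1, 2] -- the fractional part is truncated

    s = tf.constant(["3.14", "42"])
    print(tf.strings.to_number(s, out_type=tf.float32))  # [3.14, 42.0]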

    Shape operations (Shapes and Shaping)

    Operation                                                 Description

    tf.shape(input, name=None)                                Returns the shape of the input

    tf.size(input, name=None)                                 Returns the number of elements

    # 't' is [[[1, 1, 1], [2, 2, 2]], [[3, 3, 3], [4, 4, 4]]]
    size(t) ==> 12

    tf.rank(input, name=None)                                 Returns the rank of the tensor

    Note: this is not the same as matrix rank. The rank of a tensor is the
    number of indices needed to uniquely select each element, also known
    as its "order", "degree", or "ndims".

    # 't' is [[[1, 1, 1], [2, 2, 2]], [[3, 3, 3], [4, 4, 4]]]
    # shape of tensor 't' is [2, 2, 3]
    rank(t) ==> 3

    tf.reshape(tensor, shape, name=None)                      Changes the shape of the tensor

    tf.expand_dims(input, dim, name=None)                     Inserts a dimension of size 1 into the tensor

    # dim must lie in the range [-1 - input.dims(), input.dims()]
    # 't' is a tensor of shape [2]
    shape(expand_dims(t, 0)) ==> [1, 2]
    shape(expand_dims(t, 1)) ==> [2, 1]
    shape(expand_dims(t, -1)) ==> [2, 1]
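    A minimal sketch of the shape operations, assuming TensorFlow 2.x (where the dim argument of tf.expand_dims is called axis):

    import tensorflow as tf

    t = tf.constant([[[1, 1, 1], [2, 2, 2]], [[3, 3, 3], [4, 4, 4]]])
    print(tf.shape(t))   # [2, 2, 3]
    print(tf.size(t))    # 12
    print(tf.rank(t))    # 3

    r = tf.reshape(t, [3, 4])   # same 12 elements, new shape [3, 4]

    v = tf.constant([7, 8])                  # shape [2]
    print(tf.shape(tf.expand_dims(v, 0)))    # [1, 2]
    print(tf.shape(tf.expand_dims(v, -1)))   # [2, 1]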

    Slicing and Joining

    Operation                                                 Description

    tf.slice(input_, begin, size, name=None)                  Slices a tensor. If size[i] is -1, all remaining elements
                                                              in dimension i are included, i.e.
                                                              size[i] = input.dim_size(i) - begin[i]

    # 'input' is [[[1, 1, 1], [2, 2, 2]],
    #             [[3, 3, 3], [4, 4, 4]],
    #             [[5, 5, 5], [6, 6, 6]]]
    tf.slice(input, [1, 0, 0], [1, 1, 3]) ==> [[[3, 3, 3]]]
    tf.slice(input, [1, 0, 0], [1, 2, 3]) ==> [[[3, 3, 3],
                                                [4, 4, 4]]]
    tf.slice(input, [1, 0, 0], [2, 1, 3]) ==> [[[3, 3, 3]],
                                               [[5, 5, 5]]]

    tf.split(split_dim, num_split, value, name='split')       Splits a tensor into num_split tensors along one dimension

    # 'value' is a tensor with shape [5, 30]
    # Split 'value' into 3 tensors along dimension 1
    split0, split1, split2 = tf.split(1, 3, value)
    tf.shape(split0) ==> [5, 10]

    tf.concat(concat_dim, values, name='concat')              Concatenates tensors along one dimension

    t1 = [[1, 2, 3], [4, 5, 6]]
    t2 = [[7, 8, 9], [10, 11, 12]]
    tf.concat(0, [t1, t2]) ==> [[1, 2, 3], [4, 5, 6], [7, 8, 9], [10, 11, 12]]
    tf.concat(1, [t1, t2]) ==> [[1, 2, 3, 7, 8, 9], [4, 5, 6, 10, 11, 12]]

    To pack tensors together along a new axis, you can use
    tf.concat(axis, [tf.expand_dims(t, axis) for t in tensors]),
    which is equivalent to tf.pack(tensors, axis=axis).

    tf.reverse(tensor, dims, name=None)                       Reverses the tensor along the given dimensions. dims is a
                                                              list of booleans whose length equals rank(tensor)

    # tensor 't' is [[[[ 0,  1,  2,  3],
    #                  [ 4,  5,  6,  7],
    #                  [ 8,  9, 10, 11]],
    #                 [[12, 13, 14, 15],
    #                  [16, 17, 18, 19],
    #                  [20, 21, 22, 23]]]]
    # tensor 't' shape is [1, 2, 3, 4]
    # 'dims' is [False, False, False, True]
    reverse(t, dims) ==>
    [[[[ 3,  2,  1,  0],
       [ 7,  6,  5,  4],
       [11, 10,  9,  8]],
      [[15, 14, 13, 12],
       [19, 18, 17, 16],
       [23, 22, 21, 20]]]]

    tf.transpose(a, perm=None, name='transpose')              Permutes the dimensions of a tensor according to the list
                                                              perm; if perm is not given, it defaults to (n-1, ..., 0)

    # 'x' is [[1 2 3], [4 5 6]]
    tf.transpose(x) ==> [[1 4], [2 5], [3 6]]
    # Equivalently
    tf.transpose(x, perm=[1, 0]) ==> [[1 4], [2 5], [3 6]]
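    A minimal sketch of slicing and joining, assuming TensorFlow 2.x. The argument order shown above is the old one: current releases use tf.split(value, num_or_size_splits, axis) and tf.concat(values, axis), tf.pack has been renamed tf.stack, and tf.reverse takes a list of axis indices instead of a boolean mask:

    import tensorflow as tf

    x = tf.constant([[[1, 1, 1], [2, 2, 2]],
                     [[3, 3, 3], [4, 4, 4]],
                     [[5, 5, 5], [6, 6, 6]]])
    print(tf.slice(x, begin=[1, 0, 0], size=[1, 1, 3]))   # [[[3, 3, 3]]]

    value = tf.zeros([5, 30])
    s0, s1, s2 = tf.split(value, num_or_size_splits=3, axis=1)
    print(tf.shape(s0))   # [5, 10]

    t1 = tf.constant([[1, 2, 3], [4, 5, 6]])
    t2 = tf.constant([[7, 8, 9], [10, 11, 12]])
    print(tf.concat([t1, t2], axis=0))   # shape [4, 3]
    print(tf.concat([t1, t2], axis=1))   # shape [2, 6]
    print(tf.stack([t1, t2], axis=0))    # new leading axis, shape [2, 2, 3]

    print(tf.reverse(t1, axis=[1]))      # reverse along the last dimension
    print(tf.transpose(t1))              # [[1, 4], [2, 5], [3, 6]]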

    Matrix operations

    Operation                                                 Description

    tf.diag(diagonal, name=None)                              Returns a diagonal tensor with the given diagonal values

    # 'diagonal' is [1, 2, 3, 4]
    tf.diag(diagonal) ==>
    [[1, 0, 0, 0]
     [0, 2, 0, 0]
     [0, 0, 3, 0]
     [0, 0, 0, 4]]

    tf.diag_part(input, name=None)                            The inverse of the above: extracts the diagonal of a tensor

    tf.trace(x, name=None)                                    Computes the trace of a 2-D tensor, i.e. the sum of its diagonal values

    tf.transpose(a, perm=None, name='transpose')              Permutes the dimensions of a tensor

    tf.matmul(a, b, transpose_a=False, transpose_b=False,     Matrix multiplication
    a_is_sparse=False, b_is_sparse=False, name=None)

    tf.matrix_inverse(input, adjoint=None, name=None)         Computes the inverse of a square matrix
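    A minimal sketch of the matrix operations, assuming TensorFlow 2.x, where tf.diag, tf.diag_part, tf.trace, and tf.matrix_inverse now live under tf.linalg:

    import tensorflow as tf

    d = tf.linalg.diag(tf.constant([1.0, 2.0, 3.0, 4.0]))   # 4x4 diagonal matrix
    print(tf.linalg.diag_part(d))   # [1., 2., 3., 4.]
    print(tf.linalg.trace(d))       # 10.0

    a = tf.constant([[1.0, 2.0], [3.0, 4.0]])
    b = tf.constant([[5.0, 6.0], [7.0, 8.0]])
    print(tf.matmul(a, b))                     # standard matrix product
    print(tf.matmul(a, b, transpose_b=True))   # multiply a by the transpose of b
    print(tf.linalg.inv(a))                    # inverse of a square, invertible matrix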

    Reduction operations

    Operation                                                 Description

    tf.reduce_sum(input_tensor, reduction_indices=None)       Sums the input tensor along the axes given by reduction_indices

    # 'x' is [[1, 1, 1], [1, 1, 1]]
    tf.reduce_sum(x) ==> 6
    tf.reduce_sum(x, 0) ==> [2, 2, 2]
    tf.reduce_sum(x, 1) ==> [3, 3]
    tf.reduce_sum(x, 1, keep_dims=True) ==> [[3], [3]]
    tf.reduce_sum(x, [0, 1]) ==> 6

    tf.reduce_min(input_tensor, reduction_indices=None)       Minimum of the tensor's elements along the given axes

    tf.reduce_max(input_tensor, reduction_indices=None)       Maximum of the tensor's elements along the given axes

    tf.reduce_mean(input_tensor, reduction_indices=None)      Mean of the tensor's elements along the given axes

    tf.cumsum(x, axis=0, exclusive=False, reverse=False, name=None)    Cumulative sum

    tf.cumsum([a, b, c]) ==> [a, a + b, a + b + c]
    tf.cumsum([a, b, c], exclusive=True) ==> [0, a, a + b]
    tf.cumsum([a, b, c], reverse=True) ==> [a + b + c, b + c, c]
    tf.cumsum([a, b, c], exclusive=True, reverse=True) ==> [b + c, c, 0]
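    A minimal sketch of the reductions, assuming TensorFlow 2.x, where reduction_indices has been renamed axis and keep_dims is now keepdims:

    import tensorflow as tf

    x = tf.constant([[1, 1, 1], [1, 1, 1]])
    print(tf.reduce_sum(x))                          # 6
    print(tf.reduce_sum(x, axis=0))                  # [2, 2, 2]
    print(tf.reduce_sum(x, axis=1))                  # [3, 3]
    print(tf.reduce_sum(x, axis=1, keepdims=True))   # [[3], [3]]

    print(tf.reduce_min(x), tf.reduce_max(x))             # 1, 1
    print(tf.reduce_mean(tf.constant([1.0, 2.0, 3.0])))   # 2.0

    c = tf.constant([1, 2, 3])
    print(tf.cumsum(c))                   # [1, 3, 6]
    print(tf.cumsum(c, exclusive=True))   # [0, 1, 3]
    print(tf.cumsum(c, reverse=True))     # [6, 5, 3]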

  • Original article: https://www.cnblogs.com/yongfuxue/p/10095877.html