  • Theano introductory tutorial 1.6

    Theano example: logistic regression

    The script below builds a logistic regression classifier in Theano: it generates a random training set, defines the sigmoid model and cross-entropy cost as symbolic expressions, and uses T.grad together with shared variables to run plain gradient descent.

    # -*- coding: utf-8 -*-
    """
    Created on Fri Jun 06 08:56:54 2014

    @author: Administrator
    """

    import theano
    import numpy as np
    import theano.tensor as T


    def logistic_regression():
        rng = np.random

        # Random training set: N samples, `feats` features, binary labels.
        N = 400
        feats = 784
        D = (rng.randn(N, feats), rng.randint(size=N, low=0, high=2))
        training_steps = 10000

        # Symbolic inputs and shared (learnable) parameters.
        x = T.matrix('x')
        y = T.vector('y')
        w = theano.shared(rng.randn(feats), name='w')
        b = theano.shared(0., name='b')
        print "Initial model:"
        print w.get_value(), b.get_value()

        # Build the symbolic expression graph.
        p_1 = 1 / (1 + T.exp(-T.dot(x, w) - b))            # probability that the target is 1
        prediction = p_1 > 0.5                             # threshold at 0.5
        xent = -y * T.log(p_1) - (1 - y) * T.log(1 - p_1)  # cross-entropy loss
        cost = xent.mean() + 0.01 * (w ** 2).sum()         # mean loss plus a small L2 weight penalty
        gw, gb = T.grad(cost, [w, b])                      # gradients w.r.t. the parameters

        # Compile the training and prediction functions.
        train = theano.function(
            inputs=[x, y],
            outputs=[prediction, xent],
            updates=((w, w - 0.1 * gw), (b, b - 0.1 * gb)))
        predict = theano.function([x], prediction)

        # Plain gradient descent: each call to train updates w and b in place.
        for i in range(training_steps):
            pred, err = train(D[0], D[1])

        print 'Final model:'
        print w.get_value(), b.get_value()
        print 'target values for D:', D[1]
        print 'prediction on D:', predict(D[0])


    if __name__ == '__main__':
        logistic_regression()
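
    As a quick check of the trained model, the boolean predictions returned by predict can be compared with the labels in D[1]. The lines below are a minimal sketch of that check, not part of the original script; they assume they run where predict and D are in scope (for example, just before the final prints inside logistic_regression):

        # Fraction of training samples classified correctly (assumes predict and D are in scope).
        accuracy = np.mean(predict(D[0]) == D[1])
        print 'training accuracy:', accuracy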





  • Original post: https://www.cnblogs.com/fireae/p/3772656.html