    An Introduction to Statistical Learning with Applications in R: Chapter 5 Exercises

    1.

    We mainly use three standard variance and covariance identities:

    \mathrm{Var}(X+Y) = \mathrm{Var}(X) + \mathrm{Var}(Y) + 2\,\mathrm{Cov}(X,Y), \quad \mathrm{Var}(cX) = c^2\,\mathrm{Var}(X), \quad \mathrm{Cov}(cX,Y) = c\,\mathrm{Cov}(X,Y).

    Using these, the quantity to be minimized expands to

    \mathrm{Var}(\alpha X + (1-\alpha)Y) = \alpha^2\sigma_X^2 + (1-\alpha)^2\sigma_Y^2 + 2\alpha(1-\alpha)\sigma_{XY}.

    Differentiating with respect to \alpha and setting the derivative to zero gives Equation (5.6).
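    A minimal sketch of that last step, in standard notation (\sigma_X^2 = \mathrm{Var}(X), \sigma_Y^2 = \mathrm{Var}(Y), \sigma_{XY} = \mathrm{Cov}(X,Y)):

    \frac{d}{d\alpha}\Big[\alpha^2\sigma_X^2 + (1-\alpha)^2\sigma_Y^2 + 2\alpha(1-\alpha)\sigma_{XY}\Big]
      = 2\alpha\sigma_X^2 - 2(1-\alpha)\sigma_Y^2 + (2-4\alpha)\sigma_{XY} = 0
    \quad\Longrightarrow\quad
    \alpha = \frac{\sigma_Y^2 - \sigma_{XY}}{\sigma_X^2 + \sigma_Y^2 - 2\sigma_{XY}}.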

    2.

    (a)

    Each of the n observations is equally likely to be drawn, so the probability that the first bootstrap observation is the jth observation is 1/n, and the probability that it is not is 1 - 1/n.

    (b)

    The bootstrap samples with replacement, so the second bootstrap observation is again drawn from the full data set and the probability that it is not the jth observation is still 1 - 1/n.

    (c)

    Because bootstrap sampling is with replacement, the n draws are independent, so the probability that the jth observation is not in the bootstrap sample is (1 - 1/n)^n.

    (d)

    The probability that the jth observation is in the bootstrap sample is 1 - (1 - 1/5)^5 ≈ 67.2%.

    (e)

    63.4%

    (f)

    63.2%
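    These numbers follow from (c): the probability that the jth observation is in the bootstrap sample is 1 - (1 - 1/n)^n. A quick check in R:

    1 - (1 - 1/5)^5          # n = 5:     about 0.672
    1 - (1 - 1/100)^100      # n = 100:   about 0.634
    1 - (1 - 1/10000)^10000  # n = 10000: about 0.632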

    (g)

    # probability that the jth observation appears in a bootstrap sample of size n
    pr = function(n) return(1 - (1 - 1/n)^n)
    x = 1:1e+05
    plot(x, pr(x))  # the curve levels off quickly near 1 - exp(-1), roughly 0.632

    Exercises 3 and 4 are omitted.

    5.

    (a)

    library(ISLR)
    summary(Default)
    
    attach(Default)
    
    set.seed(1)
    glm.fit = glm(default ~ income + balance, data = Default, family = binomial)

    (b)

    # validation-set approach: split the data in half, fit on the training half,
    # and estimate the test error on the held-out half
    train = sample(dim(Default)[1], dim(Default)[1]/2)
    glm.fit = glm(default ~ income + balance, data = Default, family = binomial, subset = train)
    glm.probs = predict(glm.fit, Default[-train, ], type = "response")
    glm.pred = rep("No", dim(Default)[1]/2)
    glm.pred[glm.probs > 0.5] = "Yes"
    mean(glm.pred != Default[-train, ]$default)  # validation-set error rate

    (c)

    Repeat the code in (b) three times with different random splits of the data; the validation error rate varies somewhat from split to split (see the sketch below).
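    One way to do this without copy-pasting is to wrap (b) in a small helper and call it with different seeds; validation_error is just an illustrative name, not something from the book:

    validation_error = function(seed) {
      set.seed(seed)
      train = sample(dim(Default)[1], dim(Default)[1]/2)
      glm.fit = glm(default ~ income + balance, data = Default, family = binomial, subset = train)
      glm.probs = predict(glm.fit, Default[-train, ], type = "response")
      glm.pred = rep("No", dim(Default)[1]/2)
      glm.pred[glm.probs > 0.5] = "Yes"
      mean(glm.pred != Default[-train, ]$default)  # validation-set error rate for this split
    }
    validation_error(2)
    validation_error(3)
    validation_error(4)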

    (d)

    For this part, simply add the student dummy variable when fitting the logistic regression above; a sketch follows.
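    A minimal sketch, assuming the same split-and-evaluate steps as (b), with student added to the formula:

    train = sample(dim(Default)[1], dim(Default)[1]/2)
    glm.fit = glm(default ~ income + balance + student, data = Default, family = binomial, subset = train)
    glm.probs = predict(glm.fit, Default[-train, ], type = "response")
    glm.pred = rep("No", dim(Default)[1]/2)
    glm.pred[glm.probs > 0.5] = "Yes"
    mean(glm.pred != Default[-train, ]$default)  # test error with the student variable included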

    6.

    (a)

    library(ISLR)
    summary(Default)
    attach(Default)
    
    set.seed(1)
    glm.fit = glm(default ~ income + balance, data = Default, family = binomial)
    summary(glm.fit)

    (b)

    # returns the coefficient estimates fit on the bootstrap sample given by index
    boot.fn = function(data, index) {
      return(coef(glm(default ~ income + balance, data = data, family = binomial, subset = index)))
    }

    (c)

    library(boot)
    boot(Default, boot.fn, 50)
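    Optionally, the bootstrap standard errors can be set against the formula-based ones from the glm fit in (a):

    summary(glm.fit)$coefficients[, 2]  # standard errors reported by glm(), for comparison with the bootstrap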

    7.

    (a)

    library(ISLR)
    summary(Weekly)
    set.seed(1)
    attach(Weekly)
    
    glm.fit = glm(Direction ~ Lag1 + Lag2, data = Weekly, family = binomial)
    summary(glm.fit)

    (b)

    glm.fit = glm(Direction ~ Lag1 + Lag2, data = Weekly[-1, ], family = binomial)
    summary(glm.fit)

    (c)

    predict.glm(glm.fit, Weekly[1, ], type = "response") > 0.5  # TRUE means the model predicts "Up" for observation 1
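    To check whether this prediction is correct, compare it with the actual direction of the first observation:

    Weekly[1, ]$Direction  # actual market direction for observation 1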

    (d)

    # manual LOOCV: for each observation i, fit on the remaining n - 1 observations,
    # predict observation i, and record whether the prediction is wrong
    count = rep(0, dim(Weekly)[1])
    for (i in 1:(dim(Weekly)[1])) {
      glm.fit = glm(Direction ~ Lag1 + Lag2, data = Weekly[-i, ], family = binomial)
      is_up = predict.glm(glm.fit, Weekly[i, ], type = "response") > 0.5
      is_true_up = Weekly[i, ]$Direction == "Up"
      if (is_up != is_true_up) 
        count[i] = 1
    }
    sum(count)  # number of misclassified observations

    (e)

    mean(count)  # LOOCV estimate of the test error rate

    8.

    (a)

    n is 100 and p is 2; the data are generated from the quadratic model y = x - 2x^2 + ε.

    (b)

    set.seed(1)
    y = rnorm(100)
    x = rnorm(100)
    y = x - 2 * x^2 + rnorm(100)
    plot(x, y)

    (c)

    library(boot)
    Data = data.frame(x, y)
    set.seed(1)
    
    # LOOCV errors for polynomial fits of degree 1 to 4;
    # cv.glm()$delta returns the raw and bias-corrected CV estimates
    glm.fit = glm(y ~ x, data = Data)
    cv.glm(Data, glm.fit)$delta
    
    glm.fit = glm(y ~ poly(x, 2), data = Data)
    cv.glm(Data, glm.fit)$delta
    
    glm.fit = glm(y ~ poly(x, 3), data = Data)
    cv.glm(Data, glm.fit)$delta
    
    glm.fit = glm(y ~ poly(x, 4), data = Data)
    cv.glm(Data, glm.fit)$delta

    (d)

    # repeat (c) with a different seed
    set.seed(10)
    glm.fit = glm(y ~ x, data = Data)
    cv.glm(Data, glm.fit)$delta
    
    glm.fit = glm(y ~ poly(x, 2), data = Data)
    cv.glm(Data, glm.fit)$delta
    
    glm.fit = glm(y ~ poly(x, 3), data = Data)
    cv.glm(Data, glm.fit)$delta
    
    glm.fit = glm(y ~ poly(x, 4), data = Data)
    cv.glm(Data, glm.fit)$delta
    

    The results are identical to (c). LOOCV involves no random splitting; each observation is left out exactly once, so the estimate does not depend on the seed.

    (e)

    The quadratic model has the smallest LOOCV error, which is expected because the true relationship between y and x is quadratic.

    9.

    (a)

    library(MASS)
    summary(Boston)
    
    set.seed(1)
    attach(Boston)
    
    
    medv.mean = mean(medv)
    medv.mean

    (b)

    medv.err = sd(medv)/sqrt(length(medv))  # standard error of the sample mean
    medv.err

    (c)

    boot.fn = function(data, index) return(mean(data[index]))
    library(boot)
    bstrap = boot(medv, boot.fn, 1000)
    bstrap

    (d)

    t.test(medv)
    # approximate 95% CI using the bootstrap standard error from (c): t0 ± 2 * SE
    c(bstrap$t0 - 2 * 0.4119, bstrap$t0 + 2 * 0.4119)

    (e)

    medv.med = median(medv)
    medv.med

    (f)

    boot.fn = function(data, index) return(median(data[index]))
    boot(medv, boot.fn, 1000)

    (g)

    medv.tenth = quantile(medv, c(0.1))
    medv.tenth

    (h)

    boot.fn = function(data, index) return(quantile(data[index], c(0.1)))
    boot(medv, boot.fn, 1000)
    

      

    Original post: https://www.cnblogs.com/-Sai-/p/5471358.html