Code:
# -*- coding: utf-8 -*-
import urllib2
from random import choice
from scrapy.selector import Selector

# Very stable proxy: 107.151.152.218:80
# Working proxies tested via http://www.xicidaili.com/wn/ :
# '107.151.136.202:80', '222.124.130.34:8080', '103.14.196.74:8080',
# '107.151.142.114:80', '54.169.238.128:9999'
# agent = ['49.113.101.167:8090', '113.78.28.205:8090', '219.136.31.16:8090',
#          '119.131.83.227:8090', '221.221.206.208:8090', '116.52.16.57:8090',
#          '182.109.80.149:9000', '59.55.59.41:9000', '115.223.201.206:9000']
agent = ['107.151.136.194:80']
proxy = choice(agent)  # pick one proxy at random from the pool

# content = urllib2.urlopen('http://www.ip.cn/').read()  # direct fetch -> 111.192.249.170

# Route HTTP requests through the chosen proxy
handlers = [urllib2.ProxyHandler({'http': 'http://%s/' % proxy})]
opener = urllib2.build_opener(*handlers)
# opener = urllib2.build_opener()  # no proxy, for comparison

content = opener.open(urllib2.quote('http://www.ip.cn/', safe=":/"),
                      timeout=30).read()

# www.ip.cn echoes the caller's IP inside a <code> element
sel = Selector(text=content, type="html")
ip = sel.re(u'<code>\s*(.*?)\s*</code>')
print ip
When the proxy IP is working, the script prints the proxy IP address in use; otherwise it fails with a timeout or a connection error.
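Since a dead proxy shows up as exactly those two failure modes, it can be worth catching them explicitly instead of letting the script crash. A minimal sketch, reusing the opener and Selector from the script above; the exception split (URLError for connect-phase failures, socket.timeout for a stalled read) is an assumption about how the errors surface, not something the original code handles:

import socket
import urllib2

try:
    content = opener.open('http://www.ip.cn/', timeout=30).read()
    sel = Selector(text=content, type="html")
    print sel.re(u'<code>\s*(.*?)\s*</code>')  # IP the server saw, i.e. the proxy
except urllib2.URLError as e:
    # proxy unreachable, connection refused, or connect-phase timeout
    print 'connection error:', e.reason
except socket.timeout:
    # the connection opened but the read stalled past the 30-second timeout
    print 'timeout'

With a pool of several proxies in agent, this could also be looped so that a failed proxy is simply skipped and the next one tried.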
Done.