# Python 2 script: checks whether a domain's IP appears on public DNS
# blacklists (via bulkblacklist.com) and whether the domain itself is reachable.
import urllib
import urllib2
#import webbrowser
import re
import socket


def is_domain_in_black_list(domain, ip):
    """Query bulkblacklist.com for `domain` and return True if `ip`
    shows up among the IP addresses listed on the results page."""
    try_time = 3
    url = "http://www.bulkblacklist.com/"
    for i in range(try_time):
        try:
            # POST the domain to the lookup form.
            data = urllib.urlencode({'domains': domain})
            results = urllib2.urlopen(url, data, timeout=30)
            all_data = results.read()
            #with open("results.html", "w") as f:
            #    f.write(all_data)
            #print all_data
            #webbrowser.open("results.html")
            # Sanity check: the queried domain should appear in the response.
            assert all_data.find(domain) >= 0
            # Extract every IPv4 address listed on the results page.
            search = re.compile(r'(\d+\.\d+\.\d+\.\d+)')
            black_ip_list = search.findall(all_data)
            print black_ip_list
            return ip in black_ip_list
        except urllib2.URLError as e:
            if isinstance(e.reason, socket.timeout):
                print domain, "timeout:", e
            else:
                print domain, "err:", e
    # All retries failed.
    return False


def is_domain_access_normal(domain):
    """Return True if an HTTP GET of the domain succeeds within the timeout."""
    try_time = 3
    for i in range(try_time):
        try:
            url = "http://%s" % domain
            html = urllib2.urlopen(url, timeout=10).read()
            #print url, html
            return True
        except urllib2.URLError as e:
            if isinstance(e.reason, socket.timeout):
                print "timeout:", e
            else:
                print "other err:", e
    # All retries failed.
    return False


if __name__ == "__main__":
    domain = "tojoycloud.org"
    print "is_domain_in_black_list:", is_domain_in_black_list(domain, "1.1.2.3")
    print "is_domain_access_normal:", is_domain_access_normal(domain)