  • Python: collecting Baidu search-result links that match a specific URL pattern

    # coding: utf-8
    # Collect links from Baidu search results that match a given URL pattern (Python 2)
    import re
    import threading
    from Queue import Queue
    from argparse import ArgumentParser

    import requests
    from bs4 import BeautifulSoup as bs

    arg = ArgumentParser(description='baidu_url_collect py-script by xiaoye')
    arg.add_argument('keyword', help='keyword, e.g. "inurl:?id=" for searching SQLi sites')
    arg.add_argument('-p', '--page', help='number of result pages to fetch', dest='pagecount', type=int, default=10)
    arg.add_argument('-t', '--thread', help='number of worker threads', dest='thread_count', type=int, default=10)
    arg.add_argument('-o', '--outfile', help='file to save the results to', dest='outfile', default='result.txt')
    result = arg.parse_args()

    headers = {'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; WOW64; rv:50.0) Gecko/20100101 Firefox/50.0'}

    class Bd_url(threading.Thread):
        def __init__(self, que):
            threading.Thread.__init__(self)
            self._que = que

        def run(self):
            # keep taking search-result pages off the queue until it is empty
            while not self._que.empty():
                url = self._que.get()
                try:
                    self.bd_url_collect(url)
                except Exception as e:
                    print(e)

        def bd_url_collect(self, url):
            r = requests.get(url, headers=headers, timeout=3)
            soup = bs(r.content, 'lxml', from_encoding='utf-8')
            # the result links are the <a> tags carrying a data-click attribute on the Baidu results page
            bqs = soup.find_all(name='a', attrs={'data-click': re.compile(r'.'), 'class': None})
            for bq in bqs:
                # follow Baidu's redirect to get the real link
                r = requests.get(bq['href'], headers=headers, timeout=3)
                if r.status_code == 200:  # keep only links that respond with 200
                    print(r.url)
                    with open(result.outfile, 'a') as f:
                        f.write(r.url + '\n')

    def main():
        threads = []
        thread_count = result.thread_count
        que = Queue()
        # one Baidu results page per step of 10 in the pn parameter
        for i in range(0, result.pagecount * 10, 10):
            que.put('https://www.baidu.com/s?wd=' + result.keyword + '&pn=' + str(i))

        for i in range(thread_count):
            threads.append(Bd_url(que))

        for t in threads:
            t.start()

        for t in threads:
            t.join()

    if __name__ == '__main__':
        main()

    # usage:
    # python aaaaa.py "inurl:asp?id=" -p 30 -t 30
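
    If you want to sanity-check the link-resolution step on its own, here is a minimal Python 3 sketch of the same idea (the script above is Python 2). It assumes requests, beautifulsoup4 and lxml are installed; the data-click selector and the User-Agent header are taken from the script above, while the resolve_page name and the startswith('http') guard are illustrative additions, not part of the original script.

    # resolve_demo.py -- minimal Python 3 sketch of the link-resolution step
    import re
    import requests
    from bs4 import BeautifulSoup

    HEADERS = {'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; WOW64; rv:50.0) Gecko/20100101 Firefox/50.0'}

    def resolve_page(keyword, pn=0):
        # fetch one page of Baidu results; pn advances by 10 per page
        r = requests.get('https://www.baidu.com/s',
                         params={'wd': keyword, 'pn': pn},
                         headers=HEADERS, timeout=3)
        soup = BeautifulSoup(r.content, 'lxml')
        # result anchors carry a data-click attribute, as in the script above
        for a in soup.find_all('a', attrs={'data-click': re.compile(r'.'), 'class': None}):
            href = a.get('href')
            if not href or not href.startswith('http'):
                continue
            try:
                # Baidu wraps results in redirect URLs; after following the
                # redirect, resp.url holds the real target
                resp = requests.get(href, headers=HEADERS, timeout=3)
                if resp.status_code == 200:
                    print(resp.url)
            except requests.RequestException:
                continue

    if __name__ == '__main__':
        resolve_page('inurl:asp?id=')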
  • Original post: https://www.cnblogs.com/linyouyi/p/10533384.html