Skip to content

Commit 1b11c35

Browse files
committed
2.2.7 build
1 parent 93c6c96 commit 1b11c35

15 files changed

+139
-26
lines changed

CHANGELOG.md

+6
Original file line numberDiff line numberDiff line change
@@ -1,3 +1,9 @@
1+
#### 2.2.7
2+
- Added `--casing` option for casing style enforcement
3+
- Added `--ratelimit` option for explicitly defining requests/second
4+
- Fixed "decrease chunk size/use --stable" type errors in some cases
5+
- Fixed a bug in anomaly detection
6+
17
#### 2.2.6
28
- Fixed Arjun getting infinitely stuck on some webpages
39

arjun/__init__.py

+1-1
Original file line numberDiff line numberDiff line change
@@ -1 +1 @@
1-
__version__ = '2.2.6'
1+
__version__ = '2.2.7'

arjun/__main__.py

+17-9
Original file line numberDiff line numberDiff line change
@@ -13,6 +13,7 @@
1313
from arjun.core.utils import fetch_params, stable_request, random_str, slicer, confirm, populate, reader, nullify, prepare_requests, compatible_path
1414

1515
from arjun.plugins.heuristic import heuristic
16+
from arjun.plugins.wl import detect_casing, covert_to_case
1617

1718
arjun_dir = compatible_path(mem.__file__.replace(compatible_path('/core/config.py'), ''))
1819

@@ -25,7 +26,7 @@
2526
parser.add_argument('-d', help='Delay between requests in seconds. (default: 0)', dest='delay', type=float, default=0)
2627
parser.add_argument('-t', help='Number of concurrent threads. (default: 5)', dest='threads', type=int, default=5)
2728
parser.add_argument('-w', help='Wordlist file path. (default: {arjundir}/db/large.txt)', dest='wordlist', default=arjun_dir+'/db/large.txt')
28-
parser.add_argument('-m', help='Request method to use: GET/POST/XML/JSON/HEADERS. (default: GET)', dest='method', default='GET')
29+
parser.add_argument('-m', help='Request method to use: GET/POST/XML/JSON. (default: GET)', dest='method', default='GET')
2930
parser.add_argument('-i', help='Import target URLs from file.', dest='import_file', nargs='?', const=True)
3031
parser.add_argument('-T', help='HTTP request timeout in seconds. (default: 15)', dest='timeout', type=float, default=15)
3132
parser.add_argument('-c', help='Chunk size. The number of parameters to be sent at once', type=int, dest='chunks', default=250)
@@ -36,6 +37,7 @@
3637
parser.add_argument('--stable', help='Prefer stability over speed.', dest='stable', action='store_true')
3738
parser.add_argument('--include', help='Include this data in every request.', dest='include', default={})
3839
parser.add_argument('--disable-redirects', help='disable redirects', dest='disable_redirects', action='store_true')
40+
parser.add_argument('--casing', help='casing style for params e.g. like_this, likeThis, likethis', dest='casing')
3941
args = parser.parse_args() # arguments to be parsed
4042

4143
if args.quiet:
@@ -77,7 +79,11 @@
7779
passive_params = fetch_params(host)
7880
wordlist.update(passive_params)
7981
print('%s Collected %s parameters, added to the wordlist' % (info, len(passive_params)))
80-
wordlist = list(wordlist)
82+
if args.casing:
83+
delimiter, casing = detect_casing(args.casing)
84+
wordlist = [covert_to_case(word, delimiter, casing) for word in wordlist]
85+
else:
86+
wordlist = list(wordlist)
8187
except FileNotFoundError:
8288
exit('%s The specified file for parameters doesn\'t exist' % bad)
8389

@@ -118,11 +124,15 @@ def initialize(request, wordlist, single_url=False):
118124
return 'skipped'
119125
print('%s Probing the target for stability' % run)
120126
request['url'] = stable_request(url, request['headers'])
127+
mem.var['healthy_url'] = True
121128
if not request['url']:
122129
return 'skipped'
123130
else:
124131
fuzz = "z" + random_str(6)
125132
response_1 = requester(request, {fuzz[:-1]: fuzz[::-1][:-1]})
133+
mem.var['healthy_url'] = response_1.status_code not in (400, 413, 418, 429, 503)
134+
if not mem.var['healthy_url']:
135+
print('%s Target returned HTTP %i, this may cause problems.' % (bad, request.status_code))
126136
if single_url:
127137
print('%s Analysing HTTP response for anomalies' % run)
128138
response_2 = requester(request, {fuzz[:-1]: fuzz[::-1][:-1]})
@@ -139,16 +149,14 @@ def initialize(request, wordlist, single_url=False):
139149
reason = compare(response_3, factors, {zzuf[:-1]: zzuf[::-1][:-1]})[2]
140150
if not reason:
141151
break
142-
factors[reason] = False
143-
if single_url:
144-
print('%s Analysing HTTP response for potential parameter names' % run)
152+
factors[reason] = None
145153
if found:
146154
num = len(found)
147155
if words_exist:
148-
print('%s Heuristic scanner found %i parameters' % (good, num))
156+
print('%s Extracted %i parameters from response for testing' % (good, num))
149157
else:
150158
s = 's' if num > 1 else ''
151-
print('%s Heuristic scanner found %i parameter%s: %s' % (good, num, s, ', '.join(found)))
159+
print('%s Extracted %i parameter%s from response for testing: %s' % (good, num, s, ', '.join(found)))
152160
if single_url:
153161
print('%s Logicforcing the URL endpoint' % run)
154162
populated = populate(wordlist)
@@ -192,11 +200,11 @@ def main():
192200
count = 0
193201
for request in requests:
194202
url = request['url']
203+
print('%s Scanning %d/%d: %s' % (run, count, len(requests), url))
195204
these_params = initialize(request, wordlist, single_url=is_single)
196205
count += 1
197206
mem.var['kill'] = False
198207
mem.var['bad_req_count'] = 0
199-
print('%s Scanning %d/%d: %s' % (run, count, len(requests), url))
200208
if these_params == 'skipped':
201209
print('%s Skipped %s due to errors' % (bad, url))
202210
elif these_params:
@@ -205,7 +213,7 @@ def main():
205213
final_result[url]['method'] = request['method']
206214
final_result[url]['headers'] = request['headers']
207215
exporter(final_result)
208-
print('%s Parameters found: %s\n' % (good, ', '.join(final_result[url]['params'])))
216+
print('%s Parameters found: %-4s\n' % (good, ', '.join(final_result[url]['params'])))
209217
if not mem.var['json_file']:
210218
final_result = {}
211219
continue

arjun/core/anomaly.py

+1-1
Original file line numberDiff line numberDiff line change
@@ -44,7 +44,7 @@ def define(response_1, response_2, param, value, wordlist):
4444
elif remove_tags(body_1) == remove_tags(body_2):
4545
factors['same_plaintext'] = remove_tags(body_1)
4646
elif body_1 and body_2 and body_1.count('\\n') == body_2.count('\\n'):
47-
factors['lines_diff'] = diff_map(body_1, body_2)
47+
factors['lines_diff'] = diff_map(body_1, body_2)
4848
if param not in response_2.text:
4949
factors['param_missing'] = [word for word in wordlist if word in response_2.text]
5050
if value not in response_2.text:

arjun/core/error_handler.py

+4
Original file line numberDiff line numberDiff line change
@@ -4,6 +4,7 @@
44

55
from arjun.core.colors import bad
66

7+
78
def connection_refused():
89
"""
910
checks if a request should be retried if the server refused connection
@@ -17,6 +18,7 @@ def connection_refused():
1718
print('%s Target has rate limiting in place, please use --stable switch' % bad)
1819
return 'kill'
1920

21+
2022
def error_handler(response, factors):
2123
"""
2224
decides what to do after performing a HTTP request
@@ -26,6 +28,8 @@ def error_handler(response, factors):
2628
returns str
2729
"""
2830
if type(response) != str and response.status_code in (400, 413, 418, 429, 503):
31+
if not mem.var['healthy_url']:
32+
return 'ok'
2933
if response.status_code == 503:
3034
mem.var['kill'] = True
3135
print('%s Target is unable to process requests, try --stable switch' % bad)

arjun/core/exporter.py

+4
Original file line numberDiff line numberDiff line change
@@ -6,13 +6,15 @@
66

77
from arjun.core.utils import create_query_string
88

9+
910
def json_export(result):
1011
"""
1112
exports result to a file in JSON format
1213
"""
1314
with open(mem.var['json_file'], 'w+', encoding='utf8') as json_output:
1415
json.dump(result, json_output, sort_keys=True, indent=4)
1516

17+
1618
def burp_export(result):
1719
"""
1820
exports results to Burp Suite by sending request to Burp proxy
@@ -30,6 +32,7 @@ def burp_export(result):
3032
elif data['method'] == 'JSON':
3133
requests.post(url, json=populate(data['params']), headers=data['headers'], proxies=proxies, verify=False)
3234

35+
3336
def text_export(result):
3437
"""
3538
exports results to a text file, one url per line
@@ -48,6 +51,7 @@ def text_export(result):
4851
elif data['method'] == 'POST':
4952
text_file.write(clean_url + '\t' + query_string + '\n')
5053

54+
5155
def exporter(result):
5256
"""
5357
main exporter function that calls other export functions

arjun/core/importer.py

+15-12
Original file line numberDiff line numberDiff line change
@@ -1,5 +1,18 @@
11
import re
22

3+
burp_regex = re.compile(r'''(?m)^ <url><!\[CDATA\[(.+?)\]\]></url>
4+
<host ip="[^"]*">[^<]+</host>
5+
<port>[^<]*</port>
6+
<protocol>[^<]*</protocol>
7+
<method><!\[CDATA\[(.+?)\]\]></method>
8+
<path>.*</path>
9+
<extension>(.*)</extension>
10+
<request base64="(?:false|true)"><!\[CDATA\[([\s\S]+?)]]></request>
11+
<status>([^<]*)</status>
12+
<responselength>([^<]*)</responselength>
13+
<mimetype>([^<]*)</mimetype>''')
14+
15+
316
def reader(path, mode='string'):
417
"""
518
reads a file
@@ -11,6 +24,7 @@ def reader(path, mode='string'):
1124
else:
1225
return ''.join([line for line in file])
1326

27+
1428
def parse_request(string):
1529
"""
1630
parses http request
@@ -25,6 +39,7 @@ def parse_request(string):
2539
result['data'] = match.group(4)
2640
return result
2741

42+
2843
def parse_headers(string):
2944
"""
3045
parses headers
@@ -37,18 +52,6 @@ def parse_headers(string):
3752
result[splitted[0]] = ':'.join(splitted[1:]).strip()
3853
return result
3954

40-
burp_regex = re.compile(r'''(?m)^ <url><!\[CDATA\[(.+?)\]\]></url>
41-
<host ip="[^"]*">[^<]+</host>
42-
<port>[^<]*</port>
43-
<protocol>[^<]*</protocol>
44-
<method><!\[CDATA\[(.+?)\]\]></method>
45-
<path>.*</path>
46-
<extension>(.*)</extension>
47-
<request base64="(?:false|true)"><!\[CDATA\[([\s\S]+?)]]></request>
48-
<status>([^<]*)</status>
49-
<responselength>([^<]*)</responselength>
50-
<mimetype>([^<]*)</mimetype>''')
51-
5255

5356
def burp_import(path):
5457
"""

arjun/core/prompt.py

+1
Original file line numberDiff line numberDiff line change
@@ -1,6 +1,7 @@
11
import os
22
import tempfile
33

4+
45
def prompt(default=None):
56
"""
67
lets user paste input by opening a temp file in a text editor

arjun/core/requester.py

+1
Original file line numberDiff line numberDiff line change
@@ -11,6 +11,7 @@
1111

1212
warnings.filterwarnings('ignore') # Disable SSL related warnings
1313

14+
1415
@sleep_and_retry
1516
@limits(calls=mem.var['rate_limit'], period=1)
1617
def requester(request, payload={}):

arjun/core/utils.py

+3-2
Original file line numberDiff line numberDiff line change
@@ -153,7 +153,7 @@ def create_query_string(params):
153153
pair = param + '=' + random_str(4) + '&'
154154
query_string += pair
155155
if query_string.endswith('&'):
156-
query_string = query_string[:-1]
156+
query_string = query_string[:-1]
157157
return '?' + query_string
158158

159159

@@ -180,6 +180,7 @@ def extract_js(response):
180180
scripts.append(actual_parts[0])
181181
return scripts
182182

183+
183184
def parse_headers(string):
184185
"""
185186
parses headers
@@ -274,7 +275,7 @@ def prepare_requests(args):
274275
'headers': headers,
275276
'include': params
276277
}
277-
)
278+
)
278279
elif args.import_file:
279280
result = importer(args.import_file, mem.var['method'], headers, args.include)
280281
return result

arjun/plugins/commoncrawl.py

+2-1
Original file line numberDiff line numberDiff line change
@@ -2,9 +2,10 @@
22

33
from urllib.parse import urlparse
44

5+
56
def commoncrawl(host, page=0):
67
these_params = set()
7-
response = requests.get('http://index.commoncrawl.org/CC-MAIN-2020-29-index?url=*.%s&fl=url&page=%s&limit=10000' % (host, page), verify=False).text
8+
response = requests.get('http://index.commoncrawl.org/CC-MAIN-2024-42-index?url=*.%s&fl=url&page=%s&limit=10000' % (host, page), verify=False).text
89
if response.startswith('<!DOCTYPE html>'):
910
return ([], False, 'commoncrawl')
1011
urls = response.split('\n')

arjun/plugins/heuristic.py

+2
Original file line numberDiff line numberDiff line change
@@ -11,9 +11,11 @@
1111
re_empty_vars = re.compile(r'''(?:[;\n]|\bvar|\blet)(\w+)\s*=\s*(?:['"`]{1,2}|true|false|null)''')
1212
re_map_keys = re.compile(r'''['"](\w+?)['"]\s*:\s*['"`]''')
1313

14+
1415
def is_not_junk(param):
1516
return (re_not_junk.match(param) is not None)
1617

18+
1719
def heuristic(raw_response, wordlist):
1820
words_exist = False
1921
potential_params = []

arjun/plugins/otx.py

+1
Original file line numberDiff line numberDiff line change
@@ -2,6 +2,7 @@
22

33
from urllib.parse import urlparse
44

5+
56
def otx(host, page):
67
these_params = set()
78
data = requests.get('https://otx.alienvault.com/api/v1/indicators/hostname/%s/url_list?limit=50&page=%d' % (host, page+1), verify=False).json()

arjun/plugins/wayback.py

+1
Original file line numberDiff line numberDiff line change
@@ -2,6 +2,7 @@
22

33
from urllib.parse import urlparse
44

5+
56
def wayback(host, page):
67
payload = {
78
'url': host,

arjun/plugins/wl.py

+80
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,80 @@
1+
def detect_casing(string):
    """Detect the casing style and delimiter of the given string.

    Args:
        string: an example identifier, e.g. 'like_this' or 'likeThis'

    Returns:
        (delimiter, casing) tuple. delimiter is '-', '_', '.' or ''
        (no delimiter found). casing is a one-letter code:
        'l' lowercase, 'u' UPPERCASE, 'c' camelCase, 'p' PascalCase.
    """
    # All-lower / all-upper are unambiguous; for mixed case the first
    # character decides camelCase vs PascalCase.
    if string.islower():
        casing = "l"
    elif string.isupper():
        casing = "u"
    else:
        # Fixed: original had a redundant double assignment
        # (`casing = casing = ...`).
        casing = "c" if string[0].islower() else "p"

    # First matching delimiter wins, checked in the original priority
    # order; '' means the string has no recognized delimiter.
    delimiter = ""
    for candidate in ("-", "_", "."):
        if candidate in string:
            delimiter = candidate
            break

    return delimiter, casing
21+
22+
23+
def transform(parts, delimiter, casing):
24+
"""Combine list of strings to form a string with given casing style."""
25+
if len(parts) == 1:
26+
if casing == "l":
27+
return parts[0].lower()
28+
elif casing == "u":
29+
return parts[0].upper()
30+
return parts[0]
31+
32+
result = []
33+
for i, part in enumerate(parts):
34+
if casing == "l":
35+
transformed = part.lower()
36+
elif casing == "u":
37+
transformed = part.upper()
38+
elif casing == "c":
39+
if i == 0:
40+
transformed = part.lower()
41+
else:
42+
transformed = part.lower().title()
43+
else: # casing == "p"
44+
transformed = part.lower().title()
45+
46+
result.append(transformed)
47+
48+
return delimiter.join(result)
49+
50+
51+
def handle(text):
    """Split an identifier into its component words.

    Explicit delimiters take priority (checked in the order '-', '_',
    '.'); otherwise a mixed-case identifier is split at each uppercase
    letter. A string that is entirely one case (or has no case marks at
    all) comes back as a single-element list.

    Args:
        text: identifier string to split

    Returns:
        list of word strings
    """
    for sep in ("-", "_", "."):
        if sep in text:
            return text.split(sep)

    # No delimiter: an all-lower or all-upper string is a single word.
    if text.islower() or text.isupper():
        return [text]

    # Mixed case: start a new word at every uppercase character.
    words = []
    current = ""
    for ch in text:
        if ch.isupper():
            if current:
                words.append(current)
            current = ch
        else:
            current += ch
    if current:
        words.append(current)
    return words
75+
76+
77+
def covert_to_case(string, delimiter, casing):
    """Convert an identifier string to the given casing style.

    Breaks the string into words with handle(), then rebuilds it with
    transform() using the requested delimiter and casing code.
    NOTE(review): the name looks like a typo for 'convert_to_case', but
    it is imported by __main__.py under this spelling, so it stays.

    Args:
        string: identifier to convert
        delimiter: word separator to use in the output ('' for none)
        casing: 'l', 'u', 'c' or 'p' (see detect_casing)

    Returns:
        the converted identifier string
    """
    words = handle(string)
    return transform(words, delimiter, casing)

0 commit comments

Comments
 (0)