Skip to content

Commit 4219512

Browse files
authored
Merge pull request #171 from s0md3v/2.1.6
2.1.6
2 parents 01c755d + 76e3c0e commit 4219512

File tree

8 files changed

+51
-16
lines changed

8 files changed

+51
-16
lines changed

CHANGELOG.md

+6
Original file line numberDiff line numberDiff line change
@@ -1,3 +1,9 @@
1+
#### 2.1.6
2+
- Fixed multiple breaking bugs
3+
- Export results as they come in multi-target mode
4+
- Various improvements to output in multi-target mode
5+
- Changed default chunk size from 300 to 500 and default thread count from 2 to 5
6+
17
#### 2.1.5
28
- Fixed header comparison (will fix infinite bruteforce on some targets)
39
- Fixed catastrophic backtracking in some regexes (arjun used to get stuck)

arjun/__init__.py

+1-1
Original file line numberDiff line numberDiff line change
@@ -1 +1 @@
1-
__version__ = '2.1.5'
1+
__version__ = '2.1.6'

arjun/__main__.py

+31-12
Original file line numberDiff line numberDiff line change
@@ -10,7 +10,7 @@
1010
from arjun.core.bruter import bruter
1111
from arjun.core.exporter import exporter
1212
from arjun.core.requester import requester
13-
from arjun.core.anomaly import define
13+
from arjun.core.anomaly import define, compare
1414
from arjun.core.utils import fetch_params, stable_request, random_str, slicer, confirm, populate, reader, nullify, prepare_requests, compatible_path
1515

1616
from arjun.plugins.heuristic import heuristic
@@ -24,12 +24,12 @@
2424
parser.add_argument('-oT', help='Path for text output file.', dest='text_file')
2525
parser.add_argument('-oB', help='Port for output to Burp Suite Proxy. Default port is 8080.', dest='burp_port', nargs='?', const=8080)
2626
parser.add_argument('-d', help='Delay between requests in seconds. (default: 0)', dest='delay', type=float, default=0)
27-
parser.add_argument('-t', help='Number of concurrent threads. (default: 2)', dest='threads', type=int, default=2)
27+
parser.add_argument('-t', help='Number of concurrent threads. (default: 5)', dest='threads', type=int, default=5)
2828
parser.add_argument('-w', help='Wordlist file path. (default: {arjundir}/db/large.txt)', dest='wordlist', default=arjun_dir+'/db/large.txt')
2929
parser.add_argument('-m', help='Request method to use: GET/POST/XML/JSON. (default: GET)', dest='method', default='GET')
3030
parser.add_argument('-i', help='Import target URLs from file.', dest='import_file', nargs='?', const=True)
3131
parser.add_argument('-T', help='HTTP request timeout in seconds. (default: 15)', dest='timeout', type=float, default=15)
32-
parser.add_argument('-c', help='Chunk size. The number of parameters to be sent at once', type=int, dest='chunks', default=300)
32+
parser.add_argument('-c', help='Chunk size. The number of parameters to be sent at once', type=int, dest='chunks', default=500)
3333
parser.add_argument('-q', help='Quiet mode. No output.', dest='quiet', action='store_true')
3434
parser.add_argument('--headers', help='Add headers. Separate multiple headers with a new line.', dest='headers', nargs='?', const=True)
3535
parser.add_argument('--passive', help='Collect parameter names from passive sources like wayback, commoncrawl and otx.', dest='passive', nargs='?', const='-')
@@ -102,7 +102,7 @@ def narrower(request, factors, param_groups):
102102
return anomalous_params
103103

104104

105-
def initialize(request, wordlist):
105+
def initialize(request, wordlist, single_url=False):
106106
"""
107107
handles parameter finding process for a single request object
108108
returns 'skipped' (on error), list on success
@@ -118,27 +118,37 @@ def initialize(request, wordlist):
118118
else:
119119
fuzz = random_str(6)
120120
response_1 = requester(request, {fuzz: fuzz[::-1]})
121-
print('%s Analysing HTTP response for anomalies' % run)
121+
if single_url:
122+
print('%s Analysing HTTP response for anomalies' % run)
122123
fuzz = random_str(6)
123124
response_2 = requester(request, {fuzz: fuzz[::-1]})
124125
if type(response_1) == str or type(response_2) == str:
125126
return 'skipped'
126127
factors = define(response_1, response_2, fuzz, fuzz[::-1], wordlist)
127-
print('%s Analysing HTTP response for potential parameter names' % run)
128+
if single_url:
129+
print('%s Analysing HTTP response for potential parameter names' % run)
128130
found = heuristic(response_1.text, wordlist)
129131
if found:
130132
num = len(found)
131133
s = 's' if num > 1 else ''
132134
print('%s Heuristic scanner found %i parameter%s: %s' % (good, num, s, ', '.join(found)))
133-
print('%s Logicforcing the URL endpoint' % run)
135+
if single_url:
136+
print('%s Logicforcing the URL endpoint' % run)
134137
populated = populate(wordlist)
135138
param_groups = slicer(populated, int(len(wordlist)/mem.var['chunks']))
139+
prev_chunk_count = len(param_groups)
136140
last_params = []
137141
while True:
138142
param_groups = narrower(request, factors, param_groups)
143+
if len(param_groups) > prev_chunk_count:
144+
response_3 = requester(request, {fuzz: fuzz[::-1]})
145+
if compare(response_3, factors, [fuzz]) != '':
146+
print('%s Target is misbehaving. Try the --stable switch.' % bad)
147+
return []
139148
if mem.var['kill']:
140149
return 'skipped'
141150
param_groups = confirm(param_groups, last_params)
151+
prev_chunk_count = len(param_groups)
142152
if not param_groups:
143153
break
144154
confirmed_params = []
@@ -147,7 +157,7 @@ def initialize(request, wordlist):
147157
if reason:
148158
name = list(param.keys())[0]
149159
confirmed_params.append(name)
150-
print('%s name: %s, factor: %s' % (res, name, reason))
160+
print('%s parameter detected: %s, based on: %s' % (res, name, reason))
151161
return confirmed_params
152162

153163

@@ -169,12 +179,17 @@ def main():
169179
final_result[url]['params'] = these_params
170180
final_result[url]['method'] = request['method']
171181
final_result[url]['headers'] = request['headers']
182+
exporter(final_result)
183+
else:
184+
print('%s No parameters were discovered.' % info)
172185
elif type(request) == list:
173186
# in case of multiple targets
187+
count = 0
174188
for each in request:
189+
count += 1
175190
url = each['url']
176191
mem.var['kill'] = False
177-
print('%s Scanning: %s' % (run, url))
192+
print('%s Scanning %d/%d: %s' % (run, count, len(request), url))
178193
these_params = initialize(each, list(wordlist))
179194
if these_params == 'skipped':
180195
print('%s Skipped %s due to errors' % (bad, url))
@@ -183,12 +198,16 @@ def main():
183198
final_result[url]['params'] = these_params
184199
final_result[url]['method'] = each['method']
185200
final_result[url]['headers'] = each['headers']
186-
print('%s Parameters found: %s' % (good, ', '.join(final_result[url])))
201+
exporter(final_result)
202+
print('%s Parameters found: %s\n' % (good, ', '.join(final_result[url]['params'])))
203+
if not mem.var['json_file']:
204+
final_result = {}
205+
continue
206+
else:
207+
print('%s No parameters were discovered.\n' % info)
187208
except KeyboardInterrupt:
188209
exit()
189210

190-
exporter(final_result)
191-
192211

193212
if __name__ == '__main__':
194213
main()

arjun/core/anomaly.py

+5-1
Original file line numberDiff line numberDiff line change
@@ -57,6 +57,8 @@ def compare(response, factors, params):
5757
detects anomalies by comparing a HTTP response against a rule list
5858
returns string, list (anomaly, list of parameters that caused it)
5959
"""
60+
if response == '':
61+
return ('', [])
6062
these_headers = list(response.headers.keys())
6163
these_headers.sort()
6264
if factors['same_code'] and response.status_code != factors['same_code']:
@@ -67,7 +69,7 @@ def compare(response, factors, params):
6769
if factors['same_redirect'] and urlparse(response.headers.get('Location', '')).path != factors['same_redirect']:
6870
return ('redirection', params)
6971
elif factors['same_redirect'] and 'Location' in response.headers:
70-
if urlparse(response.headers.get['Location']).path != factors['same_redirect']:
72+
if urlparse(response.headers.get('Location', '')).path != factors['same_redirect']:
7173
return ('redirection', params)
7274
if factors['same_body'] and response.text != factors['same_body']:
7375
return ('body length', params)
@@ -87,6 +89,8 @@ def compare(response, factors, params):
8789
return ('param name reflection', params)
8890
if factors['value_missing']:
8991
for value in params.values():
92+
if type(value) != str:
93+
continue
9094
if value in response.text and re.search(r'[\'"\s]%s[\'"\s]' % value, response.text):
9195
return ('param value reflection', params)
9296
return ('', [])

arjun/core/exporter.py

+1-1
Original file line numberDiff line numberDiff line change
@@ -33,7 +33,7 @@ def text_export(result):
3333
"""
3434
exports results to a text file, one url per line
3535
"""
36-
with open(mem.var['text_file'], 'w+', encoding='utf8') as text_file:
36+
with open(mem.var['text_file'], 'a+', encoding='utf8') as text_file:
3737
for url, data in result.items():
3838
clean_url = url.lstrip('/')
3939
if data['method'] == 'JSON':

arjun/core/requester.py

+1-1
Original file line numberDiff line numberDiff line change
@@ -15,7 +15,7 @@ def requester(request, payload={}):
1515
central function for making http requests
1616
returns str on error otherwise response object of requests library
1717
"""
18-
if 'include' in request and request['include']:
18+
if len(request.get('include', '')) != 0:
1919
payload.update(request['include'])
2020
if mem.var['stable']:
2121
mem.var['delay'] = random.choice(range(6, 12))

arjun/core/utils.py

+2
Original file line numberDiff line numberDiff line change
@@ -122,6 +122,8 @@ def get_params(include):
122122
if include.startswith('{'):
123123
try:
124124
params = json.loads(str(include).replace('\'', '"'))
125+
if type(params) != dict:
126+
return {}
125127
return params
126128
except json.decoder.JSONDecodeError:
127129
return {}

arjun/plugins/heuristic.py

+4
Original file line numberDiff line numberDiff line change
@@ -3,6 +3,8 @@
33
from arjun.core.utils import extract_js
44

55
re_not_junk = re.compile(r'^[A-Za-z0-9_]+$')
6+
7+
68
def is_not_junk(param):
79
return (re_not_junk.match(param) is not None)
810

@@ -11,6 +13,8 @@ def is_not_junk(param):
1113
re_input_ids = re.compile(r'''(?i)<input.+?id=["']?([^"'\s>]+)''')
1214
re_empty_vars = re.compile(r'''(?:[;\n]|\bvar|\blet)(\w+)\s*=\s*(?:['"`]{1,2}|true|false|null)''')
1315
re_map_keys = re.compile(r'''['"](\w+?)['"]\s*:\s*['"`]''')
16+
17+
1418
def heuristic(response, wordlist):
1519
potential_params = []
1620

0 commit comments

Comments
 (0)