Skip to content

Commit

Permalink
Add files via upload
Browse files Browse the repository at this point in the history
  • Loading branch information
s0md3v authored Apr 6, 2019
1 parent e66cfdd commit d7f2a1b
Show file tree
Hide file tree
Showing 9 changed files with 1,822 additions and 57 deletions.
2 changes: 1 addition & 1 deletion core/dom.py
Original file line number Diff line number Diff line change
Expand Up @@ -7,7 +7,7 @@ def dom(response):
highlighted = []
sources = r'''document\.(URL|documentURI|URLUnencoded|baseURI|cookie|referrer)|location\.(href|search|hash|pathname)|window\.name|history\.(pushState|replaceState)(local|session)Storage'''
sinks = r'''eval|evaluate|execCommand|assign|navigate|getResponseHeaderopen|showModalDialog|Function|set(Timeout|Interval|Immediate)|execScript|crypto.generateCRMFRequest|ScriptElement\.(src|text|textContent|innerText)|.*?\.onEventName|document\.(write|writeln)|.*?\.innerHTML|Range\.createContextualFragment|(document|window)\.location'''
scripts = re.findall(r'(?i)(?s)<scrip[^>]*(.*?)</script>', response)
scripts = re.findall(r'(?i)(?s)<script[^>]*>(.*?)</script>', response)
for script in scripts:
script = script.split('\n')
num = 1
Expand Down
6 changes: 5 additions & 1 deletion core/photon.py
Original file line number Diff line number Diff line change
Expand Up @@ -3,6 +3,7 @@
from urllib.parse import urlparse


from plugins.retireJs import retireJs
from core.utils import getUrl, getParams
from core.requester import requester
from core.zetanize import zetanize
Expand Down Expand Up @@ -36,6 +37,7 @@ def rec(target):
inps.append({'name': name, 'value': value})
forms.append({0: {'action': url, 'method': 'get', 'inputs': inps}})
response = requester(url, params, headers, True, delay, timeout).text
retireJs(url, response)
forms.append(zetanize(response))
matches = findall(r'<[aA].*href=["\']{0,1}(.*?)["\']', response)
for link in matches: # iterate over the matches
Expand All @@ -53,9 +55,11 @@ def rec(target):
storage.add(main_url + '/' + link)
for x in range(level):
urls = storage - processed # urls to crawl = all urls - urls that have been crawled
# for url in urls:
# rec(url)
threadpool = concurrent.futures.ThreadPoolExecutor(
max_workers=threadCount)
futures = (threadpool.submit(rec, url) for url in urls)
for i, _ in enumerate(concurrent.futures.as_completed(futures)):
for i in concurrent.futures.as_completed(futures):
pass
return [forms, processed]
9 changes: 4 additions & 5 deletions core/requester.py
Original file line number Diff line number Diff line change
Expand Up @@ -5,8 +5,7 @@
import warnings

import core.config
from core.config import globalVariables
from core.utils import converter
from core.utils import converter, getVar
from core.log import setup_logger

logger = setup_logger(__name__)
Expand All @@ -15,9 +14,9 @@


def requester(url, data, headers, GET, delay, timeout):
if core.config.globalVariables['jsonData']:
if getVar('jsonData'):
data = converter(data)
elif core.config.globalVariables['path']:
elif getVar('path'):
url = converter(data, url)
data = []
GET, POST = True, False
Expand All @@ -37,7 +36,7 @@ def requester(url, data, headers, GET, delay, timeout):
if GET:
response = requests.get(url, params=data, headers=headers,
timeout=timeout, verify=False, proxies=core.config.proxies)
elif core.config.globalVariables['jsonData']:
elif getVar('jsonData'):
response = requests.get(url, json=data, headers=headers,
timeout=timeout, verify=False, proxies=core.config.proxies)
else:
Expand Down
49 changes: 47 additions & 2 deletions core/utils.py
Original file line number Diff line number Diff line change
Expand Up @@ -163,7 +163,7 @@ def getParams(url, data, GET):
if data[:1] == '?':
data = data[1:]
elif data:
if core.config.globalVariables['jsonData'] or core.config.globalVariables['path']:
if getVar('jsonData') or getVar('path'):
params = data
else:
try:
Expand Down Expand Up @@ -197,6 +197,51 @@ def writer(obj, path):

def reader(path):
with open(path, 'r') as f:
result = [line.strip(
result = [line.rstrip(
'\n').encode('utf-8').decode('utf-8') for line in f]
return result

def js_extractor(response):
    """Extract external JavaScript file URLs from an HTML response body.

    Returns the values of ``src`` attributes found on ``<script>`` tags,
    with any surrounding quote characters (', ", `) removed.
    """
    scripts = []
    # (?i) matches any capitalisation (script/SCRIPT/Script...), which the
    # old 'script|SCRIPT' alternation missed; [^>]*? keeps the match inside
    # a single tag instead of letting .*? spill past '>' into other markup.
    matches = re.findall(r'(?i)<script[^>]*?src=([^\s>]+)', response)
    for match in matches:
        # Strip the quoting around the attribute value, if any
        match = match.replace('\'', '').replace('"', '').replace('`', '')
        scripts.append(match)
    return scripts


def handle_anchor(parent_url, url):
    """Resolve *url* against *parent_url* and return an absolute URL.

    Handles already-absolute URLs, scheme-relative (//host/path),
    root-relative (/path) and document-relative references.
    """
    if parent_url.count('/') > 2:
        # Drop the trailing path segment (".../dir/page" -> ".../dir").
        # Slice it off instead of str.replace(): replace() removes EVERY
        # occurrence of the segment, corrupting URLs like ".../page/page".
        trailing = re.search(r'/[^/]*$', parent_url).group()
        if trailing != '/':
            parent_url = parent_url[:-len(trailing)]
    scheme = urlparse(parent_url).scheme
    if url[:4] == 'http':
        # Already absolute
        return url
    elif url[:2] == '//':
        # Scheme-relative: inherit the parent's scheme
        return scheme + ':' + url
    elif url[:1] == '/':
        # Root-relative
        return parent_url + url
    elif parent_url.endswith('/'):
        # Document-relative; parent already carries the separator.
        # (The old 'or url.startswith("/")' here was unreachable —
        # that case is consumed by the elif above.)
        return parent_url + url
    else:
        return parent_url + '/' + url


def deJSON(data):
    """Collapse doubled (JSON-escaped) backslashes into single ones."""
    escaped_backslash = '\\\\'
    single_backslash = '\\'
    return data.replace(escaped_backslash, single_backslash)


def getVar(name):
    """Return the value stored under *name* in the shared globals table.

    Raises KeyError if *name* has never been set.
    """
    store = core.config.globalVariables
    return store[name]

def updateVar(name, data, mode=None):
    """Write or extend an entry in the shared globals table.

    mode='append' appends to a list entry, mode='add' adds to a set
    entry, any other truthy mode is silently ignored (matching the
    original behaviour), and no mode replaces the value outright.
    """
    store = core.config.globalVariables
    if mode == 'append':
        store[name].append(data)
    elif mode == 'add':
        store[name].add(data)
    elif mode:
        # Unknown mode: deliberately a no-op, as before
        pass
    else:
        store[name] = data
Loading

0 comments on commit d7f2a1b

Please sign in to comment.