diff --git a/.gitignore b/.gitignore index d829975..a97d138 100644 --- a/.gitignore +++ b/.gitignore @@ -176,6 +176,10 @@ cython_debug/ .pypirc +# DPAT generated reports +dpat_report*/ + + # https://github.com/github/gitignore/blob/main/Global/VisualStudioCode.gitignore .vscode/* diff --git a/README.md b/README.md index 38072dd..c371798 100644 --- a/README.md +++ b/README.md @@ -31,24 +31,41 @@ Ideally there shouldn't be much to install, but I've included a requirements.txt `pip3 install -r requirements.txt` -### Neo4j Creds +### Backend selection and credentials + +Max supports two backends: + +- Legacy Neo4j (default): no flag or `--backend neo4j` +- BloodHound CE (preview): `--backend bhce` plus BHCE flags + +BHCE flags and env vars: + +- `--bhce-url` (env BHCE_URL) +- `--bhce-user` (env BHCE_USER) +- `--bhce-secret` (env BHCE_SECRET) + + + +Current BHCE support on this branch: + +- Supported: dpat, mark-owned, mark-hvt +- Legacy-only (use `--backend neo4j`): get-info, query, export, del-edge, add-spns, add-spw + +### Neo4j creds Neo4j credentials can be hardcoded at the beginning of the script, they can be provided as CLI arguments, or stored as environment variables. If either parameter is left blank, you will be prompted for the uname/password. To use environment variables, it is probably easiest to add a line (e.g., `export NEO4J_USERNAME='neo4j'`) within *~/.bashrc* or *~/.zshrc* to store the username since it isn't really sensitive. The database password can be set within your shell's tab prior to running Max. Adding a space before the export command should prevent it from appearing within history. ```bash export NEO4J_PASSWORD='bloodhound' # Notice whitespace before 'export' python3 max.py {module} {args} - ``` -``` -python3 max.py -u neo4j -p neo4j {module} {args} -``` +For BHCE (preview), you can set env vars as well: -``` -python3 max.py {module} {args} -Neo4j Username: neo4j -Neo4j Password: +```bash +export BHCE_URL='http://127.0.0.1:8080' +export BHCE_USER='user@example.com' +export BHCE_SECRET='your_password' ``` ### Quick Use @@ -60,9 +77,15 @@ python3 max.py {module} -h ``` Importing owned objects into BH -``` +```bash +# Legacy python3 max.py mark-owned -f owned.txt python3 max.py mark-owned -f owned.txt --add-note "Owned by repeated local admin" + +# BHCE +python3 max.py --backend bhce --bhce-url http://127.0.0.1:8080 \ + --bhce-user user@example.com --bhce-secret 'pass' \ + mark-owned -f owned.txt --add-note "Owned by repeated local admin" ``` Get list of users @@ -107,8 +130,15 @@ python3 max.py add-spns -i getuserspns-raw-output.txt ``` DPAT -``` -python3 max.py dpat -n ~/client/ntds.dit -c ~/.hashcat/hashcat.potfile -o ouputdir --html --sanitize +```bash +# Legacy +python3 max.py dpat -n ~/client/ntds.txt -c ~/.hashcat/hashcat.potfile -o outputdir --html --sanitize + +# BHCE (recommended to add --less for large graphs) +python3 max.py --backend bhce --bhce-url http://127.0.0.1:8080 \ + --bhce-user user@example.com --bhce-secret 'pass' \ + dpat -n ./samples/ntds_sample.txt -c ./samples/potfile_sample.txt \ + -S --less --own-cracked --add-crack-note -o ./dpat_report --html ``` Pet max diff --git a/bhce_client.py b/bhce_client.py new file mode 100644 index 0000000..79d115c --- /dev/null +++ b/bhce_client.py @@ -0,0 +1,261 @@ +import requests +from urllib.parse import urlparse +from typing import Any, Dict, List, Optional + +class BHCEClient: + """ + BloodHound Community Edition (BHCE) REST client with login/self/cypher support. 
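+
+    A minimal usage sketch (illustrative; values are placeholders, methods are defined below):
+
+        client = BHCEClient("http://127.0.0.1:8080")
+        if client.login("user@example.com", "secret"):
+            users = client.list_users(enabled=True)
+            client.cypher("MATCH (u:User) RETURN u LIMIT 1")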
+ """ + + def __init__(self, base_url: str, timeout: int = 20, verify: bool = True) -> None: + self.base_url = base_url.rstrip('/') + self.token: Optional[str] = None + self.timeout = timeout + self.verify = verify + self.session = requests.Session() + self._domain = urlparse(self.base_url).hostname or '' + if self._domain.endswith('.'): + self._domain = self._domain[:-1] + + # --- Internal helpers -------------------------------------------------- + + def _headers(self) -> Dict[str, str]: + headers = { + 'Accept': 'application/json', + } + # If we have a session token (obtained via username/password login), + # include it as a Bearer token for BHCE API authentication. + if self.token: + tok = str(self.token) + headers['Authorization'] = tok if tok.lower().startswith('bearer ') else f'Bearer {tok}' + return headers + + def _get(self, path: str, params: Optional[Dict[str, Any]] = None): + url = f"{self.base_url}{path}" + return self.session.get(url, headers=self._headers(), params=params, timeout=self.timeout, verify=self.verify) + + def _post(self, path: str, json: Optional[Dict[str, Any]] = None, extra_headers: Optional[Dict[str, str]] = None, allow_redirects: bool = True): + url = f"{self.base_url}{path}" + headers = self._headers() + headers['Content-Type'] = 'application/json' + if extra_headers: + headers.update(extra_headers) + return self.session.post(url, headers=headers, json=json, timeout=self.timeout, verify=self.verify, allow_redirects=allow_redirects) + + + # --- Auth/session ------------------------------------------------------ + def login(self, username: str, secret: str, otp: Optional[str] = None) -> bool: + """Login with username/secret; store server-issued session token if present. + + Uses /api/v2/login with login_method "secret". If no token is returned, + falls back to checking cookie-based auth by calling /api/v2/self. 
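+
+        Illustrative request body, mirroring what is built below (values are placeholders;
+        "one_time_passcode" is only included when an OTP is supplied):
+            {"login_method": "secret", "username": "user@example.com", "secret": "..."}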
+ """ + body: Dict[str, Any] = {"login_method": "secret", "username": username, "secret": secret} + if otp: + body["one_time_passcode"] = otp + try: + r = self._post('/api/v2/login', json=body, extra_headers={"Prefer": "0"}, allow_redirects=True) + except Exception: + return False + data: Dict[str, Any] = {} + try: + if r.headers.get('Content-Type','').startswith('application/json'): + data = r.json() or {} + except Exception: + data = {} + token = (((data.get('data') or {}).get('session_token')) or data.get('session_token')) + if r.status_code < 300 and token: + self.token = token + return self.get_self() is not None + if r.status_code < 300 and not token: + # possibly cookie auth; verify + return self.get_self() is not None + return False + + def get_self(self) -> Optional[Dict[str, Any]]: + try: + r = self._get('/api/v2/self') + if r.status_code < 300: + try: + return r.json() + except Exception: + return {} + except Exception: + pass + return None + + # --- Direct REST: users ------------------------------------------------- + def get_users(self, page: int = 1, page_size: int = 100, counts: bool = False) -> Optional[Dict[str, Any]]: + params = { + 'page': page, + 'page_size': page_size, + } + if counts: + params['counts'] = 'true' + try: + r = self._get('/api/v2/users', params=params) + if r.status_code < 300: + return r.json() + except Exception: + pass + return None + + def get_user(self, user_id: str, counts: bool = False) -> Optional[Dict[str, Any]]: + path = f"/api/v2/users/{user_id}" + params = {} + if counts: + params['counts'] = 'true' + try: + url = f"{self.base_url}{path}" + headers = self._headers() + headers['Prefer'] = '0' + r = self.session.get(url, headers=headers, params=params, timeout=self.timeout, verify=self.verify) + if r.status_code < 300: + return r.json() + except Exception: + pass + return None + + # --- Cypher query endpoint -------------------------------------------- + def cypher(self, query: str, include_properties: bool = True) -> Optional[Dict[str, Any]]: + try: + r = self._post('/api/v2/graphs/cypher', json={"query": query, "include_properties": include_properties}) + if r.status_code < 300: + # Read JSON if present; for mutation queries the body may be empty + try: + data = r.json() + if data: + return data + except Exception: + pass + # Return a minimal truthy object to indicate success on 2xx + return {"ok": True, "status": r.status_code} + except Exception: + pass + return None + + # --- Helpers built on cypher ------------------------------------------ + @staticmethod + def _extract_nodes(graph_response: Dict[str, Any]) -> List[Dict[str, Any]]: + data = (graph_response or {}).get('data') or {} + nodes = data.get('nodes') or {} + out: List[Dict[str, Any]] = [] + for node_id, node in nodes.items(): + props = node.get('properties') or {} + merged = {"id": node_id, **{k: v for k, v in node.items() if k != 'properties'}, "properties": props} + if 'name' not in merged: + merged['name'] = props.get('name') + if 'objectid' not in merged: + merged['objectid'] = props.get('objectid') or props.get('objectId') + out.append(merged) + return out + + def list_users(self, enabled: Optional[bool] = None) -> List[Dict[str, Any]]: + where = " WHERE u.enabled=true" if enabled is True else (" WHERE u.enabled=false" if enabled is False else "") + q = f"MATCH (u:User){where} RETURN u LIMIT 100000" + resp = self.cypher(q, include_properties=True) + if not resp: + return [] + return self._extract_nodes(resp) + + + def find_user_by_name_or_rid(self, username: str, rid: 
Optional[str]) -> Optional[Dict[str, Any]]:
+        """Find a user by SAM@DOMAIN prefix plus RID suffix, with fallback to exact name.
+
+        - Input username may be in the form DOMAIN\\USER or USER@DOMAIN.
+        - Prefer matching nodes where u.name STARTS WITH 'SAM@' AND u.objectid ends with '-RID'.
+        - Fallback to exact u.name match.
+        """
+        raw_user = username or ''
+        # Derive SAM from either DOMAIN\\SAM or SAM@DOMAIN
+        if '@' in raw_user:
+            sam = raw_user.split('@', 1)[0]
+        elif '\\' in raw_user:
+            sam = raw_user.split('\\', 1)[1]
+        else:
+            sam = raw_user
+
+        safe_user = raw_user.replace('\\', '\\\\').replace("'", "\\'")
+        safe_sam = sam.replace('\\', '\\\\').replace("'", "\\'")
+
+        if rid:
+            safe_rid = str(rid).replace("'", "").upper()
+            q = (
+                "MATCH (u:User) WHERE toUpper(u.name) STARTS WITH toUpper('" + safe_sam + "@') "
+                f"AND toUpper(u.objectid) ENDS WITH '-{safe_rid}' RETURN u LIMIT 1"
+            )
+        else:
+            q = f"MATCH (u:User) WHERE toUpper(u.name) = toUpper('{safe_user}') RETURN u LIMIT 1"
+
+        resp = self.cypher(q, include_properties=True)
+        if not resp:
+            return None
+        nodes = self._extract_nodes(resp)
+        if nodes:
+            return nodes[0]
+        # Fallback: ignore the name and match on the RID suffix alone
+        if rid:
+            q2 = f"MATCH (u:User) WHERE toUpper(u.objectid) ENDS WITH '-{safe_rid}' RETURN u LIMIT 5"
+            resp2 = self.cypher(q2, include_properties=True)
+            nodes2 = self._extract_nodes(resp2) if resp2 else []
+            return nodes2[0] if nodes2 else None
+        return None
+
+    def update_user_properties(self, user: Dict[str, Any], props: Dict[str, Any]) -> bool:
+        props = dict(props or {})
+        objid = (
+            user.get('objectId')
+            or user.get('objectid')
+            or (user.get('properties') or {}).get('objectid')
+            or (user.get('properties') or {}).get('objectId')
+        )
+        name = user.get('name') or (user.get('properties') or {}).get('name')
+        if not (objid or name):
+            return False
+
+        def _fmt(v: Any) -> str:
+            if v is None:
+                return 'null'
+            if isinstance(v, bool):
+                return 'true' if v else 'false'
+            s = str(v).replace('\\', '\\\\').replace("'", "\\'")
+            return f"'{s}'"
+
+        assignments: List[str] = [f"u.{k} = {_fmt(v)}" for k, v in props.items()]
+        if not assignments:
+            return True
+        # Escape the name outside the f-string so the expression stays valid on Python < 3.12
+        if objid:
+            where = f"u.objectid = '{objid}'"
+        else:
+            safe_name = name.replace("'", "\\'")
+            where = f"toUpper(u.name) = toUpper('{safe_name}')"
+        q = f"MATCH (u:User) WHERE {where} SET {', '.join(assignments)} RETURN u LIMIT 1"
+        try:
+            r = self._post('/api/v2/graphs/cypher', json={"query": q, "include_properties": False})
+            return r.status_code < 300
+        except Exception:
+            return False
+
+    def update_node_properties_by_name(self, name: str, props: Dict[str, Any]) -> bool:
+        """Generic helper: update arbitrary node by name with provided properties.
+
+        Returns True on any non-error response from the API.
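+
+        Illustrative call (this is how the mark-owned/mark-hvt BHCE paths in max.py use it):
+            update_node_properties_by_name("USER@DOMAIN.LOCAL", {"owned": True, "notes": "..."})
+        A property value of None is rendered as Cypher null.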
+ """ + if not name: + return False + + def _fmt(v: Any) -> str: + if v is None: + return 'null' + if isinstance(v, bool): + return 'true' if v else 'false' + s = str(v).replace('\\', '\\\\').replace("'", "\\'") + return f"'{s}'" + + props = dict(props or {}) + if not props: + return True + sets: List[str] = [f"n.{k} = {_fmt(v)}" for k, v in props.items()] + safe_name = name.replace('\\', '\\\\').replace("'", "\\'") + q = f"MATCH (n) WHERE toUpper(n.name) = toUpper('{safe_name}') SET {', '.join(sets)} RETURN n LIMIT 1" + try: + r = self._post('/api/v2/graphs/cypher', json={"query": q, "include_properties": False}) + return r.status_code < 300 + except Exception: + return False diff --git a/max.py b/max.py index 9ab836d..9ae460a 100644 --- a/max.py +++ b/max.py @@ -18,49 +18,89 @@ except ImportError: import cgi as htmllib from itertools import zip_longest +from bhce_client import BHCEClient + +# Backend types +BACKEND_NEO4J = "neo4j" +BACKEND_BHCE = "bhce" # option to hardcode URL & URI or put them in environment variables, these will be used for neo4j database "default" location global_url = "http://127.0.0.1:7474" if (not os.environ.get('NEO4J_URL', False)) else os.environ['NEO4J_URL'] global_uri = "/db/neo4j/tx/commit" if (not os.environ.get('NEO4J_URI', False)) else os.environ['NEO4J_URI'] +# BHCE defaults (preview) +bhce_url_default = os.environ.get('BHCE_URL', 'http://127.0.0.1:8080') +bhce_user_default = os.environ.get('BHCE_USER', '') +bhce_secret_default = os.environ.get('BHCE_SECRET', '') +bhce_otp_default = os.environ.get('BHCE_OTP', '') +bhce_insecure_default = os.environ.get('BHCE_INSECURE', 'false').lower() in ('1','true','yes','y') + # option to hardcode creds or put them in environment variables, these will be used as the username and password "defaults" global_username = 'neo4j' if (not os.environ.get('NEO4J_USERNAME', False)) else os.environ['NEO4J_USERNAME'] global_password = 'bloodhound' if (not os.environ.get('NEO4J_PASSWORD', False)) else os.environ['NEO4J_PASSWORD'] def do_test(args): + """Light connectivity check for the selected backend.""" + if getattr(args, 'backend', BACKEND_NEO4J) == BACKEND_NEO4J: + try: + requests.get(args.url + global_uri, timeout=5) + return True + except Exception: + return False + else: + # BHCE: Prefer a real auth check when creds are provided; else probe base URL. + try: + client = BHCEClient(args.bhce_url, verify=not args.bhce_insecure) + if getattr(args, 'bhce_user', '') and getattr(args, 'bhce_secret', ''): + if client.login(args.bhce_user, args.bhce_secret, args.bhce_otp or None): + return True + # Fallback to base URL probe + r = requests.get(args.bhce_url, timeout=5, verify=not args.bhce_insecure) + return r.status_code < 500 + except Exception: + return False + - try: - requests.get(args.url + global_uri) - return True - except: - return False +def _build_bhce_client(args) -> BHCEClient: + """Create and return a BHCE client with verify and optional login.""" + client = BHCEClient(args.bhce_url, verify=not args.bhce_insecure) + if args.bhce_user and args.bhce_secret: + client.login(args.bhce_user, args.bhce_secret, args.bhce_otp or None) + return client def do_query(args, query, data_format=None): + """Execute a query against the selected backend. 
- data_format = [data_format, "row"][data_format == None] - data = { - "statements" : [ - { - "statement" : query, - "resultDataContents" : [ data_format ] - } - ] - } - headers = {'Content-type': 'application/json', 'Accept': 'application/json; charset=UTF-8'} - auth = HTTPBasicAuth(args.username, args.password) + For legacy BloodHound (Neo4j), this sends a Cypher query via REST. + For BloodHound CE, raw Cypher is not supported; this will raise until mapped. + """ + if getattr(args, 'backend', BACKEND_NEO4J) == BACKEND_NEO4J: + data_format = [data_format, "row"][data_format == None] + data = { + "statements" : [ + { + "statement" : query, + "resultDataContents" : [ data_format ] + } + ] + } + headers = {'Content-type': 'application/json', 'Accept': 'application/json; charset=UTF-8'} + auth = HTTPBasicAuth(args.username, args.password) - r = requests.post(args.url + global_uri, auth=auth, headers=headers, json=data) + r = requests.post(args.url + global_uri, auth=auth, headers=headers, json=data) - if r.status_code == 401: - print("Authentication error: the supplied credentials are incorrect for the Neo4j database, specify new credentials with -u & -p or hardcode your credentials at the top of the script") - exit() - elif r.status_code >= 300: - print("Failed to retrieve data. Server returned status code: {}".format(r.status_code)) - exit() + if r.status_code == 401: + print("Authentication error: the supplied credentials are incorrect for the Neo4j database, specify new credentials with -u & -p or hardcode your credentials at the top of the script") + exit() + elif r.status_code >= 300: + print("Failed to retrieve data. Server returned status code: {}".format(r.status_code)) + exit() + else: + return r else: - return r + raise RuntimeError("This operation requires Neo4j/Cypher and isn’t yet implemented for BloodHound CE.") def get_query_output(entry,delimeter,cols_len=None,path=False): @@ -194,7 +234,7 @@ def get_info(args): }, "admincomps" : { "query" : "MATCH (n:Computer),(m:Computer) MATCH (n)-[r:MemberOf|AdminTo*1..]->(m) RETURN DISTINCT n.name,m.name ORDER BY n.name", - "columns" : ["AdminComputerName","VictimCompterName"] + "columns" : ["AdminComputerName","VictimComputerName"] }, "nolaps" : { "query" : "MATCH (c:Computer {haslaps:false}) RETURN c.name", @@ -328,7 +368,7 @@ def get_info(args): cols = queries["foreignprivs"]["columns"] elif (args.ownedtohvts): query = queries["owned-to-hvts"]["query"] - cols = queries["owned-to-hvts"]["query"] + cols = queries["owned-to-hvts"]["columns"] elif (args.unamesess != ""): query = queries["sessions"]["query"].format(uname=args.unamesess.upper().strip()) cols = queries["sessions"]["columns"] @@ -405,68 +445,104 @@ def get_info(args): def mark_owned(args): - if (args.clear): - - query = 'MATCH (n) WHERE n.owned=true SET n.owned=false' - r = do_query(args,query) - print("[+] 'Owned' attribute removed from all objects.") - - else: - - note_string = "" - if args.notes != "": - note_string = "SET n.notes=\"" + args.notes + "\"" + if getattr(args, 'backend', BACKEND_NEO4J) == BACKEND_NEO4J: + if (args.clear): + query = 'MATCH (n) WHERE n.owned=true SET n.owned=false' + r = do_query(args,query) + print("[+] 'Owned' attribute removed from all objects.") + else: + note_string = "" + if args.notes != "": + note_string = "SET n.notes=\"" + args.notes + "\"" - f = open(args.filename).readlines() + f = open(args.filename).readlines() - for line in f: + for line in f: - if args.userpass is True or args.store: - uname, passwd = line.strip().split(':') - 
uname = uname.upper() - if args.store: - passwd_query = "SET n.password=\"" + passwd + "\"" + if args.userpass is True or args.store: + uname, passwd = line.strip().split(':') + uname = uname.upper() + if args.store: + passwd_query = "SET n.password=\"" + passwd + "\"" + else: + passwd_query = "" else: - passwd_query = "" - else: - uname = line.upper().strip() + uname = line.upper().strip() - query = 'MATCH (n) WHERE n.name="{uname}" SET n.owned=true {notes} {passwd} RETURN n'.format(uname=uname,passwd=passwd_query,notes=note_string) - r = do_query(args, query) + query = 'MATCH (n) WHERE n.name="{uname}" SET n.owned=true {notes} {passwd} RETURN n'.format(uname=uname,passwd=passwd_query,notes=note_string) + r = do_query(args, query) - fail_resp = '{"results":[{"columns":["n"],"data":[]}],"errors":[]}' - if r.text == fail_resp: - print("[-] AD Object: " + uname + " could not be marked as owned") + fail_resp = '{"results":[{"columns":["n"],"data":[]}],"errors":[]}' + if r.text == fail_resp: + print("[-] AD Object: " + uname + " could not be marked as owned") + else: + print("[+] AD Object: " + uname + " marked as owned successfully") + else: + client = _build_bhce_client(args) + if args.clear: + ok = bool(client.cypher("MATCH (n) WHERE n.owned=true SET n.owned=false RETURN COUNT(n)", include_properties=False)) + print("[+] 'Owned' attribute removed from all objects." if ok else "[-] Failed to clear 'Owned' attribute") + return + # Mark inputs as owned; optionally set notes and store password + f = open(args.filename).readlines() + for line in f: + passwd = None + if args.userpass or args.store: + uname, passwd = line.strip().split(':', 1) else: + uname = line.strip() + uname = uname.upper() + props = {"owned": True} + if args.notes: + props["notes"] = args.notes + if args.store and passwd is not None: + props["password"] = passwd + ok = client.update_node_properties_by_name(uname, props) + if ok: print("[+] AD Object: " + uname + " marked as owned successfully") + else: + print("[-] AD Object: " + uname + " could not be marked as owned") def mark_hvt(args): + if getattr(args, 'backend', BACKEND_NEO4J) == BACKEND_NEO4J: + if (args.clear): + query = 'MATCH (n) WHERE n.highvalue=true SET n.highvalue=false' + r = do_query(args,query) + print("[+] 'High Value' attribute removed from all objects.") + else: + note_string = "" + if args.notes != "": + note_string = "SET n.notes=\"" + args.notes + "\"" - if (args.clear): + f = open(args.filename).readlines() - query = 'MATCH (n) WHERE n.highvalue=true SET n.highvalue=false' - r = do_query(args,query) - print("[+] 'High Value' attribute removed from all objects.") + for line in f: + query = 'MATCH (n) WHERE n.name="{uname}" SET n.highvalue=true {notes} RETURN n'.format(uname=line.upper().strip(),notes=note_string) + r = do_query(args, query) + fail_resp = '{"results":[{"columns":["n"],"data":[]}],"errors":[]}' + if r.text == fail_resp: + print("[-] AD Object: " + line.upper().strip() + " could not be marked as HVT") + else: + print("[+] AD Object: " + line.upper().strip() + " marked as HVT successfully") else: - - note_string = "" - if args.notes != "": - note_string = "SET n.notes=\"" + args.notes + "\"" - + client = _build_bhce_client(args) + if args.clear: + ok = bool(client.cypher("MATCH (n) WHERE n.highvalue=true SET n.highvalue=false RETURN COUNT(n)", include_properties=False)) + print("[+] 'High Value' attribute removed from all objects." 
if ok else "[-] Failed to clear 'High Value' attribute") + return f = open(args.filename).readlines() - for line in f: - - query = 'MATCH (n) WHERE n.name="{uname}" SET n.highvalue=true {notes} RETURN n'.format(uname=line.upper().strip(),notes=note_string) - r = do_query(args, query) - - fail_resp = '{"results":[{"columns":["n"],"data":[]}],"errors":[]}' - if r.text == fail_resp: - print("[-] AD Object: " + line.upper().strip() + " could not be marked as HVT") + name = line.upper().strip() + props = {"highvalue": True} + if args.notes: + props["notes"] = args.notes + ok = client.update_node_properties_by_name(name, props) + if ok: + print("[+] AD Object: " + name + " marked as HVT successfully") else: - print("[+] AD Object: " + line.upper().strip() + " marked as HVT successfully") + print("[-] AD Object: " + name + " could not be marked as HVT") def query_func(args): @@ -660,17 +736,17 @@ def add_spns(args): print("Invalid Option") count = 0 - for set in objects: + for spn_pair in objects: - query = statement.format(uname=set[1],comp=set[0]) + query = statement.format(uname=spn_pair[1],comp=spn_pair[0]) r = do_query(args, query) fail_resp = '{"results":[{"columns":["n","m"],"data":[]}],"errors":[]}' if r.text == fail_resp: - print("[-] Relationship " + set[0] + " -- HasSPNConfigured -> " + set[1] + " could not be added") + print("[-] Relationship " + spn_pair[0] + " -- HasSPNConfigured -> " + spn_pair[1] + " could not be added") else: - print("[+] Relationship " + set[0] + " -- HasSPNConfigured -> " + set[1] + " added") + print("[+] Relationship " + spn_pair[0] + " -- HasSPNConfigured -> " + spn_pair[1] + " added") count = count + 1 print('[+] HasSPNConfigured relationships created: ' + str(count)) @@ -718,30 +794,37 @@ def dpat_sanitize(args, pass_or_hash): def dpat_parse_ntds(lines, ntds_parsed): for line in lines: - if ":::" not in line or '$' in line: #filters out other lines in ntds/computer obj + # Skip obvious non-user lines and computer accounts + if '$' in line: continue line = line.replace("\r", "").replace("\n", "") - if (line == ""): + if not line: continue - else: - line = line.split(":") - # [ username, domain, rid, LM, NT, plaintext||None] + parts = line.split(":") + # Expect at least: NAME, RID, LM, NT + if len(parts) < 4: + continue + # [ username, domain, rid, LM, NT ] to_append = [] - if (line[0].split("\\")[0] == line[0]): + name_field = parts[0] + if (name_field.split("\\")[0] == name_field): # no domain found, local account - to_append.append(line[0]) + to_append.append(name_field) to_append.append("") else: - to_append.append(line[0].split("\\")[1]) - to_append.append(line[0].split("\\")[0]) - to_append.append(line[1]) - to_append.append(line[2]) - to_append.append(line[3]) + to_append.append(name_field.split("\\")[1]) + to_append.append(name_field.split("\\")[0]) + to_append.append(parts[1]) + to_append.append(parts[2]) + to_append.append(parts[3]) ntds_parsed.append(to_append) def dpat_map_users(args, users, potfile): count = 0 + bh_client = None + if getattr(args, 'backend', BACKEND_NEO4J) != BACKEND_NEO4J: + bh_client = _build_bhce_client(args) for user in users: try: nt_hash = user[4] @@ -751,12 +834,12 @@ def dpat_map_users(args, users, potfile): cracked_bool = 'false' password = None password_query = '' - if nt_hash in potfile: + if nt_hash and nt_hash.lower() in potfile: cracked_bool = 'true' - password = potfile[nt_hash] - elif lm_hash != "aad3b435b51404eeaad3b435b51404ee" and lm_hash in potfile: + password = potfile[nt_hash.lower()] + elif lm_hash and 
lm_hash.lower() != "aad3b435b51404eeaad3b435b51404ee" and lm_hash.lower() in potfile: cracked_bool = 'true' - password = potfile[lm_hash] + password = potfile[lm_hash.lower()] if password != None: if "$HEX[" in password: @@ -768,12 +851,26 @@ def dpat_map_users(args, users, potfile): cracked_query = "SET u.cracked={cracked_bool} SET u.nt_hash='{nt_hash}' SET u.lm_hash='{lm_hash}' SET u.ntds_uname='{ntds_uname}' {password}".format(cracked_bool=cracked_bool,nt_hash=nt_hash,lm_hash=lm_hash,ntds_uname=ntds_uname,password=password_query) query1 = "MATCH (u:User) WHERE u.name='{username1}' OR (u.name STARTS WITH '{username2}@' AND u.objectid ENDS WITH '-{rid}') {cracked_query} RETURN u.name,u.objectid".format(username1=username, username2=user[0].replace("\\","\\\\").replace("'","\\'").upper(), rid=user[2].upper(), cracked_query=cracked_query) - r1 = do_query(args,query1) - bh_users = json.loads(r1.text)['results'][0]['data'] - - # if bh_users == [] then the user was not found in BH - if bh_users != []: - count = count + 1 + if getattr(args, 'backend', BACKEND_NEO4J) == BACKEND_NEO4J: + r1 = do_query(args,query1) + bh_users = json.loads(r1.text)['results'][0]['data'] + if bh_users != []: + count = count + 1 + else: + # BHCE path: try to find and update user via REST (reuse a single client) + client = bh_client or _build_bhce_client(args) + user_node = client.find_user_by_name_or_rid(username, user[2]) + if user_node: + props = { + "cracked": cracked_bool == 'true', + "nt_hash": nt_hash, + "lm_hash": lm_hash, + "ntds_uname": ntds_uname, + } + if password is not None: + props["password"] = password + if client.update_user_properties(user_node, props): + count = count + 1 except Exception as g: print("[-] Mapping ERROR: {} FOR USER {}".format(g, user[0])) @@ -790,8 +887,14 @@ def dpat_func(args): if args.clear: print("[+] Clearing attributes from all users: cracked, password, nt_hash, lm_hash, ntds_uname") - clear_query = "MATCH (u:User) REMOVE u.cracked REMOVE u.nt_hash REMOVE u.lm_hash REMOVE u.ntds_uname REMOVE u.password" - do_query(args,clear_query) + if getattr(args, 'backend', BACKEND_NEO4J) == BACKEND_NEO4J: + clear_query = "MATCH (u:User) REMOVE u.cracked REMOVE u.nt_hash REMOVE u.lm_hash REMOVE u.ntds_uname REMOVE u.password" + do_query(args,clear_query) + else: + client = _build_bhce_client(args) + # Best-effort: iterate users and wipe properties + for u in client.list_users(): + client.update_user_properties(u, {"cracked": None, "password": None, "nt_hash": None, "lm_hash": None, "ntds_uname": None}) return if ((args.output) and (not args.csv and not args.html)): @@ -835,49 +938,70 @@ def dpat_func(args): # done parsing print("[+] Processing Potfile") - # password stats like counting reused cracked passwords - + # Build a map of hash -> plaintext; handle colons in plaintext and $HEX[...] values potfile = {} with open(args.crackfile,'r') as pot: - for line in pot.readlines(): + for raw in pot.readlines(): try: - line = line.strip().replace("$NT$", "").replace("$LM$", "") - if (line == ""): + line = raw.strip() + if not line or line.startswith('#'): continue - line = line.split(":") + # Normalize known prefixes from JTR style + line = line.replace("$NT$", "").replace("$LM$", "") - if len(line[0]) != 32: + # Split only on the first colon to preserve colons in plaintext + if ':' not in line: continue + hash_part, plain_part = line.split(":", 1) - potfile[line[0]] = line[1] + # Expect 32-hex NT/LM hash on the left + if len(hash_part) != 32: + continue - except: + # Decode $HEX[...] 
plaintext if present + plaintext = plain_part + if plaintext.startswith("$HEX[") and plaintext.endswith("]"): + hex_payload = plaintext[5:-1] + try: + decoded = binascii.unhexlify(hex_payload) + try: + plaintext = decoded.decode('utf-8') + except Exception: + plaintext = decoded.decode('latin-1', errors='replace') + except Exception: + # Keep original if decoding fails + pass + + # normalize to lowercase for case-insensitive matching + potfile[hash_part.lower()] = plaintext + except Exception: + # Be tolerant of malformed lines pass print('[+] Mapping NTDS users to BloodHound data') num_lines = len(ntds_parsed) - # create threads to parse file - procs = [] - num_threads = int(args.num_threads) - for t in range(0, num_threads): - start = math.ceil((num_lines / num_threads) * t) - end = math.ceil((num_lines / num_threads) * (t + 1)) - p = multiprocessing.Process(target=dpat_map_users, args=(args, ntds_parsed[ start : end ], potfile, )) - p.start() - procs.append(p) - for p_ in procs: - p_.join() - - - count_query = "MATCH (u:User) WHERE u.cracked IS NOT NULL RETURN COUNT(u.name)" - r = do_query(args,count_query) - resp = json.loads(r.text)['results'][0]['data'] - count = resp[0]['row'][0] + if getattr(args, 'backend', BACKEND_NEO4J) == BACKEND_NEO4J: + # create threads to parse file + procs = [] + num_threads = int(args.num_threads) + for t in range(0, num_threads): + start = math.ceil((num_lines / num_threads) * t) + end = math.ceil((num_lines / num_threads) * (t + 1)) + p = multiprocessing.Process(target=dpat_map_users, args=(args, ntds_parsed[ start : end ], potfile, )) + p.start() + procs.append(p) + for p_ in procs: + p_.join() + count_query = "MATCH (u:User) WHERE u.cracked IS NOT NULL RETURN COUNT(u.name)" + r = do_query(args,count_query) + resp = json.loads(r.text)['results'][0]['data'] + count = resp[0]['row'][0] + else: + # Run single-threaded mapping against BHCE to reuse one session + count = dpat_map_users(args, ntds_parsed, potfile) print("[+] BloodHound data queried successfully, {} NTDS users mapped to BH data".format(count)) - if count < 10: - print("[-] Warning: Less than 10 users mapped to BloodHound entries, verify the NTDS data matches the Neo4j data, continuing...") except Exception as e: print("[-] Error, {}".format(e)) @@ -891,25 +1015,44 @@ def dpat_func(args): if args.passwd: print("[+] Searching for users with password '{}'".format(args.passwd)) - query = "MATCH (u:User {{cracked:true}}) WHERE u.password='{pwd}' RETURN u.name".format(pwd=args.passwd.replace("\\","\\\\").replace("'","\\'")) - r = do_query(args,query) - resp = json.loads(r.text)['results'][0]['data'] - print("[+] Users: {}\n".format(len(resp))) - for entry in resp: - print(entry['row'][0]) + if getattr(args, 'backend', BACKEND_NEO4J) == BACKEND_NEO4J: + query = "MATCH (u:User {{cracked:true}}) WHERE u.password='{pwd}' RETURN u.name".format(pwd=args.passwd.replace("\\","\\\\").replace("'","\\'")) + r = do_query(args,query) + resp = json.loads(r.text)['results'][0]['data'] + print("[+] Users: {}\n".format(len(resp))) + for entry in resp: + print(entry['row'][0]) + else: + client = _build_bhce_client(args) + users = [u for u in client.list_users() if (u.get('properties') or {}).get('cracked') and (u.get('properties') or {}).get('password') == args.passwd] + print("[+] Users: {}\n".format(len(users))) + for u in users: + print((u.get('name') or (u.get('properties') or {}).get('name') or '')) return if args.usern: print("[+] Searching for password for user {}".format(args.usern)) - query = "MATCH (u:User) 
WHERE toUpper(u.name)='{uname}' OR toUpper(u.ntds_uname)='{uname}' RETURN u.name,u.password".format(uname=args.usern.upper().replace("\\","\\\\").replace("'","\\'")) - r = do_query(args,query) - resp = json.loads(r.text)['results'][0]['data'] - if resp == []: - print("[-] User {uname} not found".format(uname=args.usern)) - elif resp[0]['row'][1] == None: - print("[-] User {uname} not cracked, no password found".format(uname=args.usern)) + if getattr(args, 'backend', BACKEND_NEO4J) == BACKEND_NEO4J: + query = "MATCH (u:User) WHERE toUpper(u.name)='{uname}' OR toUpper(u.ntds_uname)='{uname}' RETURN u.name,u.password".format(uname=args.usern.upper().replace("\\","\\\\").replace("'","\\'")) + r = do_query(args,query) + resp = json.loads(r.text)['results'][0]['data'] + if resp == []: + print("[-] User {uname} not found".format(uname=args.usern)) + elif resp[0]['row'][1] == None: + print("[-] User {uname} not cracked, no password found".format(uname=args.usern)) + else: + print("[+] Password for user {uname}: {pwd}".format(uname=args.usern,pwd=dpat_sanitize(args, resp[0]['row'][1]))) else: - print("[+] Password for user {uname}: {pwd}".format(uname=args.usern,pwd=dpat_sanitize(args, resp[0]['row'][1]))) + client = _build_bhce_client(args) + u = client.find_user_by_name_or_rid(args.usern, None) + if not u: + print("[-] User {uname} not found".format(uname=args.usern)) + else: + pwd = (u.get('properties') or {}).get('password') + if not pwd: + print("[-] User {uname} not cracked, no password found".format(uname=args.usern)) + else: + print("[+] Password for user {uname}: {pwd}".format(uname=args.usern,pwd=dpat_sanitize(args, pwd))) return ### @@ -997,7 +1140,7 @@ def dpat_func(args): "label" : "Accounts With Explicit Admin Rights Cracked" }, { - "query" : "MATCH p2=(u:User {cracked:true})-[r1:MemberOf*1..]->(g:Group)-[r2:AdmintTo]->(n2) RETURN DISTINCT u.enabled,u.ntds_uname,u.password,u.nt_hash", + "query" : "MATCH p2=(u:User {cracked:true})-[r1:MemberOf*1..]->(g:Group)-[r2:AdminTo]->(n2) RETURN DISTINCT u.enabled,u.ntds_uname,u.password,u.nt_hash", "label" : "Accounts With Group Delegated Admin Rights Cracked" }, { @@ -1028,68 +1171,141 @@ def dpat_func(args): query_output_data = [] hashes = {} - query = "MATCH (u:User) WHERE u.nt_hash IS NOT NULL RETURN u.nt_hash,u.ntds_uname" - r = do_query(args,query) - resp = json.loads(r.text)['results'][0]['data'] - - for entry in resp: - if entry['row'][0] not in hashes: - hashes[entry['row'][0]] = [entry['row'][1]] - else: - hashes[entry['row'][0]].append(entry['row'][1]) - import time - for search_value in queries: - - # start = time.time() - - query = search_value['query'] - label = search_value['label'] - if (label not in query_counts): - query_counts[label] = 0 - print("[+] Querying for \"" + label + "\"") - dat = { 'label' : label } - dat['enabled'] = [] - dat['disabled'] = [] - + if getattr(args, 'backend', BACKEND_NEO4J) == BACKEND_NEO4J: + query = "MATCH (u:User) WHERE u.nt_hash IS NOT NULL RETURN u.nt_hash,u.ntds_uname" r = do_query(args,query) resp = json.loads(r.text)['results'][0]['data'] - # end = time.time() - # print("[*] Done in {} seconds".format(end-start)) for entry in resp: - query_counts[label] += 1 # TODO - status_flag = "disabled" - if entry['row'][0]: - status_flag = "enabled" + if entry['row'][0] not in hashes: + hashes[entry['row'][0]] = [entry['row'][1]] + else: + hashes[entry['row'][0]].append(entry['row'][1]) + else: + client = _build_bhce_client(args) + for u in client.list_users(): + props = u.get('properties') or {} + if 
props.get('nt_hash'): + hashes.setdefault(props['nt_hash'], []).append(props.get('ntds_uname') or props.get('name') or '') + import time + if getattr(args, 'backend', BACKEND_NEO4J) == BACKEND_BHCE: + # For BHCE, rewrite queries to return nodes and parse from node properties + client = _build_bhce_client(args) + def _bhce_return_nodes(q: str) -> str: + u = q.upper() + idx = u.rfind(" RETURN ") + if idx != -1: + return q[:idx] + " RETURN DISTINCT u" + return q.strip() + " RETURN DISTINCT u" + + for search_value in queries: + query = search_value['query'] + label = search_value['label'] + if (label not in query_counts): + query_counts[label] = 0 + print("[+] Querying for \"" + label + "\"") + dat = { 'label' : label } + dat['enabled'] = [] + dat['disabled'] = [] + + q_nodes = _bhce_return_nodes(query) + resp = client.cypher(q_nodes, include_properties=True) + nodes = [] + if resp: + # Extract nodes map -> list + data = (resp or {}).get('data') or {} + for _, node in (data.get('nodes') or {}).items(): + nodes.append(node) + + for node in nodes: + props = node.get('properties') or {} + query_counts[label] += 1 + status_flag = 'enabled' if props.get('enabled') else 'disabled' + if "cracked" in label.lower(): + uname = props.get('ntds_uname') or props.get('name') or '' + pwd = props.get('password') + nth = props.get('nt_hash') + try: + user = [uname, pwd, (0 if pwd is None else len(pwd)), nth] + dat[status_flag].append(user) + except Exception: + pass + else: + try: + uname = props.get('ntds_uname') or props.get('name') or '' + nth = props.get('nt_hash') + pwd = props.get('password') + shared_list = hashes.get(nth, []) if nth else [] + share_count = len(shared_list) + if share_count > 30: + all_hashes_shared = "Shared Hash List > 30" + else: + all_hashes_shared = ', '.join(shared_list) + user = [uname, nth, all_hashes_shared, share_count, pwd] + dat[status_flag].append(user) + except Exception: + pass if "cracked" in label.lower(): - try: - user = [entry['row'][1], entry['row'][2], len(entry['row'][2]), entry['row'][3]] - dat[status_flag].append(user) - except: - pass + dat['columns'] = ["Username", "Password", "Password Length", "NT Hash"] + dat['enabled'] = sorted(dat['enabled'], key = lambda x: -1 if x[1] is None else len(x[1]), reverse=True) + dat['disabled'] = sorted(dat['disabled'], key = lambda x: -1 if x[1] is None else len(x[1]), reverse=True) else: - try: - share_count = len(hashes[entry['row'][2]]) - if share_count > 30: - all_hashes_shared = "Shared Hash List > 30" - else: - all_hashes_shared = ', '.join(hashes[entry['row'][2]]) - user = [entry['row'][1], entry['row'][2], all_hashes_shared, share_count, entry['row'][3]] - dat[status_flag].append(user) - except: - pass + dat['columns'] = ["Username", "NT Hash", "Users Sharing this Hash", "Share Count", "Password"] + dat['enabled'] = sorted(dat['enabled'], key = lambda x: -1 if x[3] is None else x[3], reverse=True) + dat['disabled'] = sorted(dat['disabled'], key = lambda x: -1 if x[3] is None else x[3], reverse=True) - if "cracked" in label.lower(): - dat['columns'] = ["Username", "Password", "Password Length", "NT Hash"] - dat['enabled'] = sorted(dat['enabled'], key = lambda x: -1 if x[1] is None else len(x[1]), reverse=True) - dat['disabled'] = sorted(dat['disabled'], key = lambda x: -1 if x[1] is None else len(x[1]), reverse=True) + query_output_data.append(dat) + else: + for search_value in queries: + # start = time.time() + query = search_value['query'] + label = search_value['label'] + if (label not in query_counts): + 
query_counts[label] = 0 + print("[+] Querying for \"" + label + "\"") + dat = { 'label' : label } + dat['enabled'] = [] + dat['disabled'] = [] - else: - dat['columns'] = ["Username", "NT Hash", "Users Sharing this Hash", "Share Count", "Password"] - dat['enabled'] = sorted(dat['enabled'], key = lambda x: -1 if x[3] is None else x[3], reverse=True) - dat['disabled'] = sorted(dat['disabled'], key = lambda x: -1 if x[3] is None else x[3], reverse=True) + r = do_query(args,query) + resp = json.loads(r.text)['results'][0]['data'] + # end = time.time() + # print("[*] Done in {} seconds".format(end-start)) + for entry in resp: + query_counts[label] += 1 # TODO + status_flag = "disabled" + if entry['row'][0]: + status_flag = "enabled" + + if "cracked" in label.lower(): + try: + user = [entry['row'][1], entry['row'][2], len(entry['row'][2]), entry['row'][3]] + dat[status_flag].append(user) + except: + pass + else: + try: + share_count = len(hashes[entry['row'][2]]) + if share_count > 30: + all_hashes_shared = "Shared Hash List > 30" + else: + all_hashes_shared = ', '.join(hashes[entry['row'][2]]) + user = [entry['row'][1], entry['row'][2], all_hashes_shared, share_count, entry['row'][3]] + dat[status_flag].append(user) + except: + pass + + if "cracked" in label.lower(): + dat['columns'] = ["Username", "Password", "Password Length", "NT Hash"] + dat['enabled'] = sorted(dat['enabled'], key = lambda x: -1 if x[1] is None else len(x[1]), reverse=True) + dat['disabled'] = sorted(dat['disabled'], key = lambda x: -1 if x[1] is None else len(x[1]), reverse=True) - query_output_data.append(dat) + else: + dat['columns'] = ["Username", "NT Hash", "Users Sharing this Hash", "Share Count", "Password"] + dat['enabled'] = sorted(dat['enabled'], key = lambda x: -1 if x[3] is None else x[3], reverse=True) + dat['disabled'] = sorted(dat['disabled'], key = lambda x: -1 if x[3] is None else x[3], reverse=True) + + query_output_data.append(dat) ### ### Get the Group Stats ready @@ -1097,31 +1313,77 @@ def dpat_func(args): # TODO: Output group members in html output if not args.less: + if getattr(args, 'backend', BACKEND_NEO4J) == BACKEND_NEO4J: - print("[+] Querying for Group Statistics") - group_query_data = {} - group_data = [] + print("[+] Querying for Group Statistics") + group_query_data = {} + group_data = [] - query = "MATCH (u:User)-[:MemberOf]->(g:Group) RETURN DISTINCT g.name,u.name,u.cracked" - r = do_query(args,query) - resp = json.loads(r.text)['results'][0]['data'] - for entry in resp: - group_name = entry['row'][0] - username = entry['row'][1] - crack_status = entry['row'][2] - - if group_name not in group_query_data: - group_query_data[group_name] = [[username,crack_status]] - else: - group_query_data[group_name].append([username,crack_status]) + query = "MATCH (u:User)-[:MemberOf]->(g:Group) RETURN DISTINCT g.name,u.name,u.cracked" + r = do_query(args,query) + resp = json.loads(r.text)['results'][0]['data'] + for entry in resp: + group_name = entry['row'][0] + username = entry['row'][1] + crack_status = entry['row'][2] - for group_name in group_query_data: - cracked_total = sum(user[1] == True for user in group_query_data[group_name]) - if cracked_total == 0: - continue - perc = round(100 * float(cracked_total / len(group_query_data[group_name])), 2) - group_data.append([group_name,perc,cracked_total,len(group_query_data[group_name])]) - group_data = sorted(group_data, key = lambda x: x[1], reverse=True) + if group_name not in group_query_data: + group_query_data[group_name] = [[username,crack_status]] 
+ else: + group_query_data[group_name].append([username,crack_status]) + + for group_name in group_query_data: + cracked_total = sum(user[1] == True for user in group_query_data[group_name]) + if cracked_total == 0: + continue + perc = round(100 * float(cracked_total / len(group_query_data[group_name])), 2) + group_data.append([group_name,perc,cracked_total,len(group_query_data[group_name])]) + group_data = sorted(group_data, key = lambda x: x[1], reverse=True) + else: + # BHCE: build group statistics by parsing MemberOf edges + print("[+] Querying for Group Statistics (BHCE)") + client = _build_bhce_client(args) + resp = client.cypher("MATCH p=(u:User)-[:MemberOf]->(g:Group) RETURN p", include_properties=True) + data = (resp or {}).get('data') or {} + nodes = data.get('nodes') or {} + edges = data.get('edges') or [] + # Build id -> node properties + node_props = {} + for nid, node in nodes.items(): + props = node.get('properties') or {} + props_f = dict(props) + props_f['kind'] = node.get('kind') + props_f['label'] = node.get('label') + node_props[nid] = props_f + # Map group name -> list of [username, cracked] + group_query_data = {} + for e in edges: + if e.get('kind') != 'MemberOf': + continue + uid = e.get('source') + gid = e.get('target') + u = node_props.get(uid, {}) + g = node_props.get(gid, {}) + if not u or not g: + continue + if (u.get('kind') or '').lower() != 'user' or (g.get('kind') or '').lower() != 'group': + # Some graphs may have reversed edges; attempt swap + u, g = g, u + if (u.get('kind') or '').lower() != 'user' or (g.get('kind') or '').lower() != 'group': + continue + gname = g.get('name') or g.get('label') or '' + uname = u.get('name') or '' + cracked = bool(u.get('cracked')) + group_query_data.setdefault(gname, []).append([uname, cracked]) + # Compute stats + group_data = [] + for gname, members in group_query_data.items(): + cracked_total = sum(1 for _, c in members if c) + if cracked_total == 0: + continue + perc = round(100 * float(cracked_total / len(members)), 2) + group_data.append([gname, perc, cracked_total, len(members)]) + group_data = sorted(group_data, key=lambda x: x[1], reverse=True) ### ### Get the Overall Stats ready @@ -1130,33 +1392,49 @@ def dpat_func(args): print("[+] Generating Overall Statistics") # all password hashes - query = "MATCH (u:User) WHERE u.cracked IS NOT NULL RETURN u.ntds_uname,u.password,u.nt_hash,u.pwdlastset" - r = do_query(args,query) - resp = json.loads(r.text)['results'][0]['data'] - num_pass_hashes = len(resp) + if getattr(args, 'backend', BACKEND_NEO4J) == BACKEND_NEO4J: + query = "MATCH (u:User) WHERE u.cracked IS NOT NULL RETURN u.ntds_uname,u.password,u.nt_hash,u.pwdlastset" + r = do_query(args,query) + resp = json.loads(r.text)['results'][0]['data'] + bh_rows = [[e['row'][0], e['row'][1], e['row'][2], e['row'][3]] for e in resp] + else: + client = _build_bhce_client(args) + bh_rows = [] + for u in client.list_users(): + p = u.get('properties') or {} + if p.get('cracked'): + bh_rows.append([p.get('ntds_uname') or p.get('name'), p.get('password'), p.get('nt_hash'), p.get('pwdlastset')]) + num_pass_hashes = len(bh_rows) num_pass_hashes_list = [] - for entry in resp: + for entry in bh_rows: length = '' - if entry['row'][1] != None: - length = len(entry['row'][1]) + if entry[1] != None: + length = len(entry[1]) try: - num_pass_hashes_list.append([entry['row'][0], entry['row'][1], length, entry['row'][2], datetime.datetime.fromtimestamp(entry['row'][3])], ) + num_pass_hashes_list.append([entry[0], entry[1], length, 
entry[2], datetime.datetime.fromtimestamp(entry[3])], ) except: - num_pass_hashes_list.append([entry['row'][0], entry['row'][1], length, entry['row'][2], ''], ) + num_pass_hashes_list.append([entry[0], entry[1], length, entry[2], ''], ) num_pass_hashes_list = sorted(num_pass_hashes_list, key = lambda x: -1 if x[1] is None else len(x[1]), reverse=True) # unique password hashes - query = "MATCH (u:User) RETURN COUNT(DISTINCT(u.nt_hash))" - r = do_query(args,query) - resp = json.loads(r.text)['results'][0]['data'] - num_uniq_hash = resp[0]['row'][0] + if getattr(args, 'backend', BACKEND_NEO4J) == BACKEND_NEO4J: + query = "MATCH (u:User) RETURN COUNT(DISTINCT(u.nt_hash))" + r = do_query(args,query) + resp = json.loads(r.text)['results'][0]['data'] + num_uniq_hash = resp[0]['row'][0] + else: + num_uniq_hash = len(hashes) # passwords cracked, uniques - query = "MATCH (u:User {cracked:True}) RETURN COUNT(DISTINCT(u)),COUNT(DISTINCT(u.password))" - r = do_query(args,query) - resp = json.loads(r.text)['results'][0]['data'] - num_cracked = resp[0]['row'][0] - num_uniq_cracked = resp[0]['row'][1] + if getattr(args, 'backend', BACKEND_NEO4J) == BACKEND_NEO4J: + query = "MATCH (u:User {cracked:True}) RETURN COUNT(DISTINCT(u)),COUNT(DISTINCT(u.password))" + r = do_query(args,query) + resp = json.loads(r.text)['results'][0]['data'] + num_cracked = resp[0]['row'][0] + num_uniq_cracked = resp[0]['row'][1] + else: + num_cracked = len(bh_rows) + num_uniq_cracked = len(set([r[1] for r in bh_rows if r[1]])) # password percentages if (num_pass_hashes > 0): @@ -1168,55 +1446,105 @@ def dpat_func(args): perc_uniq_cracked = 00.00 # lm hash stats - query = "MATCH (u:User) WHERE u.lm_hash IS NOT NULL AND NOT u.lm_hash='aad3b435b51404eeaad3b435b51404ee' RETURN u.lm_hash,count(u.lm_hash)" - r = do_query(args,query) - resp = json.loads(r.text)['results'][0]['data'] lm_hash_counts = {} - for entry in resp: - lm_hash_counts[entry['row'][0]] = entry['row'][1] + if getattr(args, 'backend', BACKEND_NEO4J) == BACKEND_NEO4J: + query = "MATCH (u:User) WHERE u.lm_hash IS NOT NULL AND NOT u.lm_hash='aad3b435b51404eeaad3b435b51404ee' RETURN u.lm_hash,count(u.lm_hash)" + r = do_query(args,query) + resp = json.loads(r.text)['results'][0]['data'] + for entry in resp: + lm_hash_counts[entry['row'][0]] = entry['row'][1] + else: + client = _build_bhce_client(args) + for u in client.list_users(): + p = u.get('properties') or {} + lm = p.get('lm_hash') + if lm and lm != 'aad3b435b51404eeaad3b435b51404ee': + lm_hash_counts[lm] = lm_hash_counts.get(lm, 0) + 1 non_blank_lm = sum(lm_hash_counts.values()) uniq_lm = len(lm_hash_counts) # lm hash users - query = "MATCH (u:User) WHERE u.lm_hash IS NOT NULL AND NOT u.lm_hash='aad3b435b51404eeaad3b435b51404ee' RETURN u.name,u.lm_hash" - r = do_query(args,query) - resp = json.loads(r.text)['results'][0]['data'] - lm_hash_list = [] - for entry in resp: - user = [entry['row'][0], dpat_sanitize(args, entry['row'][1])] - user.append(lm_hash_counts[entry['row'][1]]) - lm_hash_list.append(user) + if getattr(args, 'backend', BACKEND_NEO4J) == BACKEND_NEO4J: + query = "MATCH (u:User) WHERE u.lm_hash IS NOT NULL AND NOT u.lm_hash='aad3b435b51404eeaad3b435b51404ee' RETURN u.name,u.lm_hash" + r = do_query(args,query) + resp = json.loads(r.text)['results'][0]['data'] + for entry in resp: + user = [entry['row'][0], dpat_sanitize(args, entry['row'][1])] + user.append(lm_hash_counts[entry['row'][1]]) + lm_hash_list.append(user) + else: + client = _build_bhce_client(args) + for u in client.list_users(): + p = 
u.get('properties') or {} + lm = p.get('lm_hash') + if lm and lm != 'aad3b435b51404eeaad3b435b51404ee': + name = p.get('name') or u.get('name') or '' + lm_hash_list.append([name, dpat_sanitize(args, lm), lm_hash_counts.get(lm, 1)]) lm_hash_list = sorted(lm_hash_list, key = lambda x: x[2], reverse=True) # matching username/password - query = "MATCH (u:User {cracked:true}) WHERE toUpper(SPLIT(u.name,'@')[0])=toUpper(u.password) RETURN u.ntds_uname,u.password,u.nt_hash" - r = do_query(args,query) - resp = json.loads(r.text)['results'][0]['data'] user_pass_match_list = [] - for entry in resp: - user_pass_match_list.append([entry['row'][0],dpat_sanitize(args,entry['row'][1]),len(entry['row'][1]),entry['row'][2]]) + if getattr(args, 'backend', BACKEND_NEO4J) == BACKEND_NEO4J: + query = "MATCH (u:User {cracked:true}) WHERE toUpper(SPLIT(u.name,'@')[0])=toUpper(u.password) RETURN u.ntds_uname,u.password,u.nt_hash" + r = do_query(args,query) + resp = json.loads(r.text)['results'][0]['data'] + for entry in resp: + user_pass_match_list.append([entry['row'][0],dpat_sanitize(args,entry['row'][1]),len(entry['row'][1]),entry['row'][2]]) + else: + client = _build_bhce_client(args) + for u in client.list_users(): + p = u.get('properties') or {} + if p.get('cracked') and p.get('password') and p.get('name'): + simple = (p.get('name') or '').split('@')[0] + if simple.upper() == str(p.get('password')).upper(): + user_pass_match_list.append([p.get('ntds_uname') or p.get('name'), dpat_sanitize(args,p.get('password')), len(p.get('password')), p.get('nt_hash')]) user_pass_match = len(user_pass_match_list) # Get Password Length Stats - query = "MATCH (u:User {cracked:true}) WHERE NOT u.password='' RETURN COUNT(SIZE(u.password)), SIZE(u.password) AS sz ORDER BY sz DESC" - r = do_query(args,query) - resp = json.loads(r.text)['results'][0]['data'] password_lengths = [] - for entry in resp: - password_lengths.append(entry['row']) + if getattr(args, 'backend', BACKEND_NEO4J) == BACKEND_NEO4J: + query = "MATCH (u:User {cracked:true}) WHERE NOT u.password='' RETURN COUNT(SIZE(u.password)), SIZE(u.password) AS sz ORDER BY sz DESC" + r = do_query(args,query) + resp = json.loads(r.text)['results'][0]['data'] + for entry in resp: + password_lengths.append(entry['row']) + else: + client = _build_bhce_client(args) + lengths = {} + for u in client.list_users(): + p = u.get('properties') or {} + pwd = p.get('password') + if p.get('cracked') and pwd: + l = len(pwd) + lengths[l] = lengths.get(l, 0) + 1 + for l, c in sorted(lengths.items(), key=lambda x: x[0], reverse=True): + password_lengths.append([c, l]) # Get Password (Complexity) Stats # sort from most reused to least reused dict to list of tuples # get the first instance of not repeated password to be min'd later - query = "MATCH (u:User {cracked:true}) WHERE NOT u.password='' RETURN COUNT(u.password) AS countpwd, u.password ORDER BY countpwd DESC" - r = do_query(args,query) - resp = json.loads(r.text)['results'][0]['data'] repeated_passwords = [] - tot_num_repeated_passwords = len(resp) - for entry in resp: - if entry['row'][0] > 1: - repeated_passwords.append(entry['row']) + if getattr(args, 'backend', BACKEND_NEO4J) == BACKEND_NEO4J: + query = "MATCH (u:User {cracked:true}) WHERE NOT u.password='' RETURN COUNT(u.password) AS countpwd, u.password ORDER BY countpwd DESC" + r = do_query(args,query) + resp = json.loads(r.text)['results'][0]['data'] + tot_num_repeated_passwords = len(resp) + for entry in resp: + if entry['row'][0] > 1: + repeated_passwords.append(entry['row']) 
+ else: + client = _build_bhce_client(args) + counts = {} + for u in client.list_users(): + p = u.get('properties') or {} + pwd = p.get('password') + if p.get('cracked') and pwd: + counts[pwd] = counts.get(pwd, 0) + 1 + for pwd, cnt in sorted(counts.items(), key=lambda x: x[1], reverse=True): + if cnt > 1: + repeated_passwords.append([cnt, pwd]) + tot_num_repeated_passwords = len(repeated_passwords) num_repeated_passwords = len(repeated_passwords) # Passwords not meeting Complexity Requirement @@ -1228,15 +1556,24 @@ def dpat_func(args): lambda s: any(x in special_chars for x in s) ] - query = "MATCH (u:User {cracked:true}) WHERE NOT u.password='' RETURN u.password,u.ntds_uname" - r = do_query(args,query) - resp = json.loads(r.text)['results'][0]['data'] password_complexity = [] - for entry in resp: - if sum(rule(entry['row'][0]) for rule in rules) >= 3: - password_complexity.append([entry['row'][1],entry['row'][0],True]) - else: - password_complexity.append([entry['row'][1],entry['row'][0],False]) + if getattr(args, 'backend', BACKEND_NEO4J) == BACKEND_NEO4J: + query = "MATCH (u:User {cracked:true}) WHERE NOT u.password='' RETURN u.password,u.ntds_uname" + r = do_query(args,query) + resp = json.loads(r.text)['results'][0]['data'] + for entry in resp: + if sum(rule(entry['row'][0]) for rule in rules) >= 3: + password_complexity.append([entry['row'][1],entry['row'][0],True]) + else: + password_complexity.append([entry['row'][1],entry['row'][0],False]) + else: + client = _build_bhce_client(args) + for u in client.list_users(): + p = u.get('properties') or {} + pwd = p.get('password') + if p.get('cracked') and pwd: + meets = sum(rule(pwd) for rule in rules) >= 3 + password_complexity.append([p.get('ntds_uname') or p.get('name'), pwd, meets]) password_complexity = sorted(password_complexity, key = lambda x: x[2]) # all stats @@ -1254,26 +1591,49 @@ def dpat_func(args): [len(repeated_passwords), "Password Reuse Stats", ['Count', 'Password'], repeated_passwords], ] - if not args.less: + if getattr(args, 'backend', BACKEND_NEO4J) == BACKEND_NEO4J and not args.less: stats.append([len(group_data), "Groups Cracked by Percentage", ["Group Name", "Percent Cracked", "Cracked Users", "Total Users"], group_data]) # set all users with cracked passwords as owned if args.own_cracked: print("[+] Marking cracked users as owned") - own_cracked_query="MATCH (u:User {cracked:True}) SET u.owned=true" - do_query(args,own_cracked_query) + if getattr(args, 'backend', BACKEND_NEO4J) == BACKEND_NEO4J: + own_cracked_query = "MATCH (u:User {cracked:true}) SET u.owned=true" + do_query(args, own_cracked_query) + else: + client = _build_bhce_client(args) + for u in client.list_users(): + p = u.get('properties') or {} + if p.get('cracked'): + client.update_user_properties(u, {"owned": True}) # Add a note to users with cracked passwords indicating that they have been cracked if args.add_crack_note: print('[+] Adding notes to cracked users') - add_crack_note_query="MATCH (u:User {cracked=True} SET u.notes=\"Password Cracked\"" - do_query(args,add_crack_note_query) + if getattr(args, 'backend', BACKEND_NEO4J) == BACKEND_NEO4J: + add_crack_note_query = "MATCH (u:User {cracked:true}) SET u.notes='Password Cracked'" + do_query(args, add_crack_note_query) + else: + client = _build_bhce_client(args) + for u in client.list_users(): + p = u.get('properties') or {} + if p.get('cracked'): + # append or set a simple note property + note = 'Password Cracked' + existing = p.get('notes') + new_note = note if not existing else f"{existing}; 
{note}" + client.update_user_properties(u, {"notes": new_note}) # clear the "cracked" tag if not args.store and not args.noparse: print("[+] Purging information from the database") - clear_query = "MATCH (u:User) REMOVE u.cracked REMOVE u.nt_hash REMOVE u.lm_hash REMOVE u.ntds_uname REMOVE u.password" - do_query(args,clear_query) + if getattr(args, 'backend', BACKEND_NEO4J) == BACKEND_NEO4J: + clear_query = "MATCH (u:User) REMOVE u.cracked REMOVE u.nt_hash REMOVE u.lm_hash REMOVE u.ntds_uname REMOVE u.password" + do_query(args, clear_query) + else: + client = _build_bhce_client(args) + for u in client.list_users(): + client.update_user_properties(u, {"cracked": None, "nt_hash": None, "lm_hash": None, "ntds_uname": None, "password": None}) ### ### Output methods @@ -1395,6 +1755,15 @@ def write_html_report(self, filebase, filename): print("[+] Writing HTML files") + # Helper to build safe deterministic filenames for report parts + def _fname(label: str) -> str: + s = ''.join([c if (c.isalnum() or c in ' _-().') else '_' for c in str(label)]) + s = s.replace(' ', '_') + # collapse duplicate underscores + while '__' in s: + s = s.replace('__', '_') + return f"{s}.html" + # add overall stats for stat in stats: @@ -1404,7 +1773,7 @@ def write_html_report(self, filebase, filename): else: hbt = HtmlBuilder() hbt.add_table_to_html(stat[3], stat[2]) - filename = hbt.write_html_report(filebase, ''.join([stat[1].replace(' ','_'),".html"])) + filename = hbt.write_html_report(filebase, _fname(stat[1])) summary_table.append((stat[0], stat[1],"Details")) # add BH query results @@ -1423,7 +1792,7 @@ def write_html_report(self, filebase, filename): hbt = HtmlBuilder() hbt.add_table_to_html(all_entries, cols) - filename = hbt.write_html_report(filebase, ''.join([item['label'].replace(' ','_'),".html"])) + filename = hbt.write_html_report(filebase, _fname(item['label'])) summary_table.append((len(all_entries), item['label'],"Details")) hb.add_table_to_html(summary_table, summary_table_headers, 2) @@ -1457,8 +1826,8 @@ def write_html_report(self, filebase, filename): print("|{:^10}|{:^85}|".format("Count", "Description")) print(" " + "="*96) - for set in stats: - print("|{:^10}|{:^85}|".format(set[0], set[1])) + for stat_row in stats: + print("|{:^10}|{:^85}|".format(stat_row[0], stat_row[1])) for item in query_output_data: print("|{:^10}|{:^85}|".format(len(item['enabled']) + len(item['disabled']),item['label'])) @@ -1532,10 +1901,18 @@ def main(): general = parser.add_argument_group("Optional Arguments") - # generic function parameters + # backend selection and connection parameters + general.add_argument("--backend", dest="backend", choices=[BACKEND_NEO4J, BACKEND_BHCE], default=BACKEND_NEO4J, help="Select backend: 'neo4j' (legacy) or 'bhce' (community edition, preview)") + # Neo4j params (legacy) general.add_argument("-u",dest="username",default=global_username,help="Neo4j database username (Default: {})".format(global_username)) general.add_argument("-p",dest="password",default=global_password,help="Neo4j database password (Default: {})".format(global_password)) general.add_argument("--url",dest="url",default=global_url,help="Neo4j database URL (Default: {})".format(global_url)) + # BloodHound CE params (preview) + general.add_argument("--bhce-url", dest="bhce_url", default=bhce_url_default, help="BloodHound CE base URL (Default: {})".format(bhce_url_default)) + general.add_argument("--bhce-user", dest="bhce_user", default=bhce_user_default, help="BloodHound CE username (env BHCE_USER)") + 
general.add_argument("--bhce-secret", dest="bhce_secret", default=bhce_secret_default, help="BloodHound CE password/secret (env BHCE_SECRET)") + general.add_argument("--bhce-otp", dest="bhce_otp", default=bhce_otp_default, help="Optional one-time passcode for BHCE login (env BHCE_OTP)") + general.add_argument("--bhce-insecure", dest="bhce_insecure", action="store_true", default=bhce_insecure_default, help="Disable TLS verification for BHCE (env BHCE_INSECURE)") # three options for the function parser._positionals.title = "Available Modules" @@ -1651,17 +2028,29 @@ def main(): if not do_test(args): - print("Connection error: restart Neo4j console or verify the the following URL is available: {}".format(args.url)) + if args.backend == BACKEND_NEO4J: + print("Connection error: restart Neo4j console or verify the the following URL is available: {}".format(args.url)) + else: + print("Connection error: verify BloodHound CE is reachable at {} (and credentials if required).".format(args.bhce_url)) exit() if args.command == None: print("Error: use a module or use -h/--help to see help") return - if args.username == "": - args.username = input("Neo4j Username: ") - if args.password == "": - args.password = getpass.getpass(prompt="Neo4j Password: ") + # Prompt for Neo4j creds only when using the legacy backend + if args.backend == BACKEND_NEO4J: + if args.username == "": + args.username = input("Neo4j Username: ") + if args.password == "": + args.password = getpass.getpass(prompt="Neo4j Password: ") + + # Temporary: gate modules not yet implemented for BHCE + unsupported_bhce = {"query", "export", "del-edge", "add-spns", "add-spw", "get-info"} + if args.backend == BACKEND_BHCE and args.command in unsupported_bhce: + print("This module uses Neo4j/Cypher in the current version and isn’t implemented for BloodHound CE yet on this branch.") + print("Tip: run with --backend neo4j for legacy, or watch this branch for CE support.") + return if args.command == "get-info": get_info(args) diff --git a/wiki/bh-legacy-cypher-catalog.md b/wiki/bh-legacy-cypher-catalog.md new file mode 100644 index 0000000..a273754 --- /dev/null +++ b/wiki/bh-legacy-cypher-catalog.md @@ -0,0 +1,156 @@ +# BloodHound Legacy Cypher Catalog (for Max) + +This document inventories all Neo4j/Cypher usage in `max.py` to aid migration to BloodHound Community Edition (BHCE). 
+
+## Labels, relationships, and properties used
+
+- Node labels
+  - User, Group, Computer, Domain
+- Relationships (edges)
+  - MemberOf, HasSession, AdminTo, AllExtendedRights, AddMember, ForceChangePassword, GenericAll, GenericWrite, Owns, WriteDacl, WriteOwner, ReadLAPSPassword, ReadGMSAPassword, Contains, GpLink, CanRDP, CanPSRemote, ExecuteDCOM, AllowedToDelegate, AddAllowedToAct, AllowedToAct, SQLAdmin, HasSIDHistory, HasSPNConfigured, SharesPasswordWith
+- Node properties (selection or output)
+  - name, enabled, objectid, unconstraineddelegation, dontreqpreauth, hasspn, description, haslaps, passwordnotreqd, lastlogon, lastlogontimestamp, operatingsystem, highvalue, owned, serviceprincipalnames, cracked, nt_hash, lm_hash, ntds_uname, password, pwdlastset, sidhistory, domain
+
+## Module: get-info
+- Users (optionally filtered by `enabled`)
+  - MATCH (u:User) {enabled} RETURN u.name
+- Computers
+  - MATCH (n:Computer) RETURN n.name
+- Groups
+  - MATCH (n:Group) RETURN n.name
+- Group members (recursive)
+  - MATCH (g:Group {name:"{gname}"}) MATCH (n)-[r:MemberOf*1..]->(g) RETURN DISTINCT n.name
+- Group list for user
+  - MATCH (u {name:"{uname}"}) MATCH (u)-[r:MemberOf*1..]->(g:Group) RETURN DISTINCT g.name
+- All group memberships
+  - MATCH (n),(g:Group) MATCH (n)-[r:MemberOf]->(g) RETURN DISTINCT g.name,n.name
+- Domain Admins
+  - MATCH (n:User)-[r:MemberOf*1..]->(g:Group) WHERE g.objectid ENDS WITH '-512' RETURN DISTINCT n.name
+- DA sessions
+  - MATCH (u:User)-[r:MemberOf*1..]->(g:Group) WHERE g.objectid ENDS WITH '-512' WITH COLLECT(u) AS das MATCH (u2:User)<-[r2:HasSession]-(c:Computer) WHERE u2 IN das RETURN DISTINCT u2.name,c.name ORDER BY u2.name
+- Domain Controllers
+  - MATCH (n:Computer)-[r:MemberOf*1..]->(g:Group) WHERE g.objectid ENDS WITH '-516' RETURN DISTINCT n.name
+- Unconstrained delegation objects not DCs
+  - MATCH (g:Group) WHERE g.objectid ENDS WITH '-516' MATCH (c:Computer)-[MemberOf]->(g) WITH COLLECT(c) AS dcs MATCH (n {unconstraineddelegation:true}) WHERE NOT n IN dcs RETURN n.name
+- AS-REP roastable users
+  - MATCH (n:User) WHERE n.dontreqpreauth=TRUE RETURN n.name
+- Kerberoastable users
+  - MATCH (n:User {hasspn:true}) RETURN n.name
+- Kerberoastable users with local admin paths
+  - MATCH (n:User {hasspn:true}) MATCH p=shortestPath((n)-[r:AdminTo|MemberOf*1..4]->(c:Computer)) RETURN DISTINCT n.name
+- Sessions for a user
+  - MATCH (m {name:'{uname}'})<-[r:HasSession]-(n:Computer) RETURN DISTINCT n.name
+- Local admin to computers
+  - MATCH (m {name:'{uname}'})-[r:AdminTo|MemberOf*1..4]->(n:Computer) RETURN DISTINCT n.name
+- Admins of a computer (shortest path)
+  - MATCH p=shortestPath((m:Computer {name:'{comp}'})<-[r:AdminTo|MemberOf*1..]-(n)) RETURN DISTINCT n.name
+- Owned objects
+  - MATCH (n) WHERE n.owned=true RETURN n.name
+- Groups of owned objects
+  - MATCH (n {owned:true}) MATCH (n)-[r:MemberOf*1..]->(g:Group) RETURN DISTINCT n.name,g.name
+- High value targets
+  - MATCH (n) WHERE n.highvalue=true RETURN n.name
+- Descriptions
+  - MATCH (n) WHERE n.description IS NOT NULL RETURN n.name,n.description
+- Computer-to-computer admin relationships
+  - MATCH (n:Computer),(m:Computer) MATCH (n)-[r:MemberOf|AdminTo*1..]->(m) RETURN DISTINCT n.name,m.name ORDER BY n.name
+- No LAPS
+  - MATCH (c:Computer {haslaps:false}) RETURN c.name
+- PasswordNotRequired
+  - MATCH (u:User {passwordnotreqd:true}) {enabled} RETURN u.name
+- Password last set older than X days
+  - MATCH (u:User) WHERE u.pwdlastset < (datetime().epochseconds - ({days} * 86400)) AND NOT u.pwdlastset IN [-1.0,0.0] RETURN u.name,date(datetime({epochSeconds:toInteger(u.pwdlastset)})) AS changedate ORDER BY changedate DESC
+- SID history
+  - MATCH (n) WHERE n.sidhistory<>[] UNWIND n.sidhistory AS x OPTIONAL MATCH (d:Domain) WHERE x CONTAINS d.objectid OPTIONAL MATCH (m {objectid:x}) RETURN n.name,x,d.name,m.name ORDER BY n.name
+- Unsupported OS
+  - MATCH (c:Computer) WHERE toLower(c.operatingsystem) =~ '.*(2000|2003|2008|xp|vista| 7 |me).*' RETURN c.name,c.operatingsystem
+- Foreign domain privileges
+  - MATCH p=(n1)-[r]->(n2) WHERE NOT n1.domain=n2.domain RETURN DISTINCT n1.name,TYPE(r),n2.name ORDER BY TYPE(r)
+- Owned to HVT paths
+  - MATCH shortestPath((n {owned:True})-[*1..]->(m {highvalue:True})) RETURN DISTINCT n.name
+- Path utilities
+  - MATCH p=shortestPath((n1 {name:'{start}'})-[rels*1..]->(n2 {name:'{end}'})) RETURN p
+  - MATCH p=allShortestPaths((n1 {name:'{start}'})-[rels*1..]->(n2 {name:'{end}'})) RETURN p
+  - MATCH p=allShortestPaths((n1 {name:'{start}'})-[rels*1..]->(n2 {highvalue:true})) RETURN p
+  - MATCH p=allShortestPaths((n1 {owned:true})-[rels*1..]->(n2 {highvalue:true})) RETURN p
+- Owned admins (owned users -> computer AdminTo)
+  - match (u:User {owned: True})-[r:AdminTo|MemberOf*1..]->(c:Computer) return c.name, "AdministratedBy", u.name order by c, u
+- Stale accounts/computers (by lastlogon/lastlogontimestamp)
+  - WITH datetime().epochseconds - ({threshold_days} * 86400) AS threshold MATCH (u:User {enabled:TRUE}) WHERE u.lastlogon < threshold AND u.lastlogontimestamp < threshold RETURN u.name
+  - WITH datetime().epochseconds - ({threshold_days} * 86400) AS threshold MATCH (c:Computer {enabled:TRUE}) WHERE c.lastlogon < threshold AND c.lastlogontimestamp < threshold RETURN c.name
+
+## Module: mark-owned
+- Clear owned flag
+  - MATCH (n) WHERE n.owned=true SET n.owned=false
+- Mark object as owned (optional notes/password)
+  - MATCH (n) WHERE n.name="{uname}" SET n.owned=true [SET n.notes=...] [SET n.password=...] RETURN n
+
+## Module: mark-hvt
+- Clear highvalue
+  - MATCH (n) WHERE n.highvalue=true SET n.highvalue=false
+- Mark object as highvalue (optional notes)
+  - MATCH (n) WHERE n.name="{uname}" SET n.highvalue=true [SET n.notes=...] RETURN n
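+
+On the BHCE side, these two modules reduce to property updates rather than Cypher `SET` statements. A minimal sketch of `mark-owned`, assuming the `BHCEClient.list_users()` / `update_user_properties()` helpers this branch already uses in `dpat_func` (the real module's name matching and non-user objects may need more handling):
+
+```python
+def bhce_mark_owned(client, names, note=None):
+    """Set owned=true (and optionally notes) on users whose name appears in `names`."""
+    wanted = {n.upper() for n in names}
+    for u in client.list_users():
+        props = u.get('properties') or {}
+        if (props.get('name') or '').upper() in wanted:
+            updates = {"owned": True}
+            if note:
+                updates["notes"] = note
+            client.update_user_properties(u, updates)
+```
+
+Clearing the flag (the `SET n.owned=false` query above) would follow the same pattern, pushing `{"owned": False}` to every currently owned object; `mark-hvt` is identical with `highvalue` in place of `owned`.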
+
+## Module: query
+- Pass-through Cypher queries (row/graph)
+
+## Module: export
+- For each edge in the set, get outbound targets
+  - MATCH (n1 {name:'{node_name}'}) MATCH (n1)-[r:{EDGE}]->(n2) RETURN DISTINCT n2.name
+
+## Module: del-edge
+- Delete edges globally or from a starting node
+  - MATCH ({name:"{startingnode}"})-[r:{EDGE}]->() DELETE r RETURN COUNT (DISTINCT("{startingnode}"))
+  - MATCH p=()-[r:{EDGE}]->() DELETE r RETURN COUNT(DISTINCT(p))
+
+## Module: add-spns
+- Create HasSPNConfigured edges
+  - MATCH (n:User {name:"{uname}"}) MATCH (m:Computer {name:"{comp}"}) MERGE (m)-[r:HasSPNConfigured {isacl: false}]->(n) return n,m
+- Pull users with SPNs from BH
+  - MATCH (n:User {hasspn:true}) RETURN n.name,n.serviceprincipalnames
+
+## Module: add-spw
+- Create bidirectional SharesPasswordWith
+  - MATCH (n {name:"{name1}"}),(m {name:"{name2}"}) MERGE (n)-[r1:SharesPasswordWith]->(m) MERGE (m)-[r2:SharesPasswordWith]->(n) return n,m
+
+## Module: dpat (Domain Password Audit Tool)
+- Map NTDS users to BH and tag properties
+  - MATCH (u:User) WHERE u.name='{username1}' OR (u.name STARTS WITH '{username2}@' AND u.objectid ENDS WITH '-{rid}') SET u.cracked={bool} SET u.nt_hash='{nt}' SET u.lm_hash='{lm}' SET u.ntds_uname='{ntds}' [SET u.password='{pwd}'] RETURN u.name,u.objectid
+- Clear DPAT tags
+  - MATCH (u:User) REMOVE u.cracked REMOVE u.nt_hash REMOVE u.lm_hash REMOVE u.ntds_uname REMOVE u.password
+- Count mapped users
+  - MATCH (u:User) WHERE u.cracked IS NOT NULL RETURN COUNT(u.name)
+- Lookups
+  - MATCH (u:User {cracked:true}) WHERE u.password='{pwd}' RETURN u.name
+  - MATCH (u:User) WHERE toUpper(u.name)='{uname}' OR toUpper(u.ntds_uname)='{uname}' RETURN u.name,u.password
+- DPAT summary queries (selection below; see code for full list)
+  - MATCH (u:User) RETURN DISTINCT u.enabled,u.ntds_uname,u.nt_hash,u.password
+  - MATCH (u:User {cracked:true}) RETURN DISTINCT u.enabled,u.ntds_uname,u.password,u.nt_hash
+  - MATCH (u:User {cracked:true,hasspn:true}) RETURN DISTINCT u.enabled,u.ntds_uname,u.password,u.nt_hash
+  - MATCH (u:User {cracked:true,dontreqpreauth:true}) RETURN DISTINCT u.enabled,u.ntds_uname,u.password,u.nt_hash
+  - MATCH (u:User {cracked:true,unconstraineddelegation:true}) RETURN DISTINCT u.enabled,u.ntds_uname,u.password,u.nt_hash
+  - MATCH (u:User {cracked:true}) WHERE u.lastlogon < (datetime().epochseconds - (182 * 86400)) AND NOT u.lastlogon IN [-1.0, 0.0] RETURN DISTINCT u.enabled,u.ntds_uname,u.password,u.nt_hash
+  - MATCH (u:User {cracked:true}) WHERE u.pwdlastset < (datetime().epochseconds - (365 * 86400)) AND NOT u.pwdlastset IN [-1.0, 0.0] RETURN DISTINCT u.enabled,u.ntds_uname,u.password,u.nt_hash
+  - Group-based DA/EA/Admin groups, cracked variants (regex on objectid -512, -519, -544)
+  - Path-based “intense” queries using shortestPath/allShortestPaths
+- Supporting data for stats
+  - MATCH (u:User) WHERE u.nt_hash IS NOT NULL RETURN u.nt_hash,u.ntds_uname
+  - MATCH (u:User)-[:MemberOf]->(g:Group) RETURN DISTINCT g.name,u.name,u.cracked
+  - MATCH (u:User) WHERE u.cracked IS NOT NULL RETURN u.ntds_uname,u.password,u.nt_hash,u.pwdlastset
+  - MATCH (u:User) RETURN COUNT(DISTINCT(u.nt_hash))
+  - MATCH (u:User {cracked:True}) RETURN COUNT(DISTINCT(u)),COUNT(DISTINCT(u.password))
+  - MATCH (u:User) WHERE u.lm_hash IS NOT NULL AND NOT u.lm_hash='aad3b435b51404eeaad3b435b51404ee' RETURN u.lm_hash,count(u.lm_hash)
+  - MATCH (u:User) WHERE u.lm_hash IS NOT NULL AND NOT u.lm_hash='aad3b435b51404eeaad3b435b51404ee' RETURN u.name,u.lm_hash
+  - MATCH (u:User {cracked:true}) WHERE toUpper(SPLIT(u.name,'@')[0])=toUpper(u.password) RETURN u.ntds_uname,u.password,u.nt_hash
+  - MATCH (u:User {cracked:true}) WHERE NOT u.password='' RETURN COUNT(SIZE(u.password)), SIZE(u.password) AS sz ORDER BY sz DESC
+  - MATCH (u:User {cracked:true}) WHERE NOT u.password='' RETURN COUNT(u.password) AS countpwd, u.password ORDER BY countpwd DESC
+- Post-processing helper actions
+  - MATCH (u:User {cracked:True}) SET u.owned=true
+  - MATCH (u:User {cracked=True} SET u.notes="Password Cracked" [Note: code has a small syntax issue here]
+
+## Notes
+- Path-returning queries use data_format="graph" and expect the neo4j REST graph response.
+- Many selections rely on AD-specific semantics of `objectid` suffixes (-512, -516, -519, -544).
+- Some queries intentionally include UNWIND/list processing to extract Users from paths.
+
+This catalog should be used to define BHCE-equivalent data fetches and mutations.
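+
+For the read-heavy dpat selections, one workable pattern (the one this branch already uses inside `dpat_func`) is to pull users once via `BHCEClient.list_users()` and filter on node properties in Python instead of running per-selection Cypher. A minimal sketch for the "cracked and kerberoastable" selection; the column order mirrors the legacy query, everything else is an assumption:
+
+```python
+def cracked_kerberoastable(client):
+    """BHCE-equivalent of:
+    MATCH (u:User {cracked:true,hasspn:true}) RETURN DISTINCT u.enabled,u.ntds_uname,u.password,u.nt_hash"""
+    rows = []
+    for u in client.list_users():
+        p = u.get('properties') or {}
+        if p.get('cracked') and p.get('hasspn'):
+            rows.append([p.get('enabled'), p.get('ntds_uname'), p.get('password'), p.get('nt_hash')])
+    return rows
+```
+
+Aggregations such as hash-reuse counts or password-length distributions can be computed the same way (e.g., with `collections.Counter`); only the path-based "intense" queries are likely to need BHCE's Cypher API directly.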