diff --git a/.github/actions/python-setup/action.yml b/.github/actions/python-setup/action.yml index d547756a877..ebab2b5258c 100644 --- a/.github/actions/python-setup/action.yml +++ b/.github/actions/python-setup/action.yml @@ -12,11 +12,13 @@ runs: if: ${{ runner.os == 'Linux' }} shell: bash run: | - sudo apt update && sudo apt-get install -y --no-install-recommends libxml2-dev libxslt-dev python3-dev libgeoip-dev ssdeep libfuzzy-dev p7zip-full innoextract unrar upx + sudo apt update && sudo apt-get install -y --no-install-recommends libxml2-dev libxslt-dev python3-dev libgeoip-dev ssdeep libfuzzy-dev 7zip innoextract unrar upx - name: Install poetry shell: bash run: PIP_BREAK_SYSTEM_PACKAGES=1 pip install poetry poetry-plugin-export + #- name: Python Poetry Action + # uses: abatilo/actions-poetry@v3.0.1 - name: Set up Python ${{ inputs.python-version }} uses: actions/setup-python@v5 @@ -27,4 +29,4 @@ runs: - name: Install requirements shell: bash run: | - PIP_BREAK_SYSTEM_PACKAGES=1 poetry install --no-interaction --no-root + PIP_BREAK_SYSTEM_PACKAGES=1 poetry install --no-interaction diff --git a/.github/copilot-instructions.md b/.github/copilot-instructions.md new file mode 100644 index 00000000000..5bc323d83fe --- /dev/null +++ b/.github/copilot-instructions.md @@ -0,0 +1,58 @@ +# Copilot Instructions for CAPEv2 + +## General Architecture +- CAPEv2 is an automated malware analysis platform, based on Cuckoo Sandbox, with extensions for dynamic, static, and network analysis. +- The backend is mainly Python, using SQLAlchemy for the database and Django/DRF for the web API. +- Main components include: + - `lib/cuckoo/core/database.py`: database logic and ORM. + - `web/apiv2/views.py`: REST API endpoints (Django REST Framework). + - `lib/cuckoo/common/`: shared utilities, configuration, helpers. + - `storage/`: analysis results and temporary files. +- Typical flow: sample upload → DB registration → VM assignment → analysis → result storage → API query. + +## Conventions and Patterns +- Heavy use of SQLAlchemy 2.0 ORM, with explicit sessions and nested transactions (`begin_nested`). +- Database models (Sample, Task, Machine, etc.) are always managed via `Database` object methods. +- API endpoints always return a dict with `error`, `data`, and, if applicable, `error_value` keys. +- Validation and request argument parsing is centralized in helpers (`parse_request_arguments`, etc.). +- Integrity errors (e.g., duplicates) are handled with `try/except IntegrityError` and recovery of the existing object. +- Tags are managed as comma-separated strings and normalized before associating to models. +- Code avoids mutable global variables; configuration is accessed via `Config` objects. + +## Developer Workflows +- No Makefile or standard build scripts; dependency management is usually via `poetry` or `pip`. +- For testing, use virtual environments and run scripts manually. +- Typical backend startup is via Django (`manage.py runserver`), and analysis workers are launched separately. +- Database changes require manual migrations (see Alembic comments in `database.py`). + +## Integrations and Dependencies +- Optional integration with MongoDB and Elasticsearch, controlled by configuration (`reporting.conf`). +- The system can use different compression tools (zlib, 7zip) depending on config. +- Sample analysis may invoke external utilities (e.g., Sflock, PE parsers). 
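+- Configuration access example (illustrative sketch only; the `reporting` section and `mongodb` fields are examples — check the files in `conf/` for the real keys):
+  ```python
+  from lib.cuckoo.common.config import Config
+
+  repconf = Config("reporting")  # loads conf/reporting.conf
+  if repconf.mongodb.enabled:
+      mongo_host = repconf.mongodb.get("host", "127.0.0.1")
+  ```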
+ +## Key Pattern Examples +- IntegrityError handling example: + ```python + try: + with self.session.begin_nested(): + self.session.add(sample) + except IntegrityError: + sample = self.session.scalar(select(Sample).where(Sample.md5 == file_md5)) + ``` +- API response example: + ```python + return Response({"error": False, "data": result}) + ``` +- Tag assignment example: + ```python + tags = ",".join(set(_tags)) + ``` + +## Key Files +- `lib/cuckoo/core/database.py`: database logic, sample/task registration, machine management. +- `web/apiv2/views.py`: REST endpoints, validation, high-level business logic. +- `lib/cuckoo/common/`: utilities, helpers, configuration. + +--- + +If you introduce new endpoints, helpers, or models, follow the validation, error handling, and standard response patterns. See the files above for implementation examples. diff --git a/.github/workflows/export-requirements.yml b/.github/workflows/export-requirements.yml index a91b837935b..c54114b5747 100644 --- a/.github/workflows/export-requirements.yml +++ b/.github/workflows/export-requirements.yml @@ -21,12 +21,12 @@ jobs: uses: actions/checkout@v4 - name: Install poetry - run: pip install poetry + run: pip install poetry poetry-plugin-export --user - name: Set up Python ${{ matrix.python-version }} uses: actions/setup-python@v5 with: - # check-latest: true + check-latest: true python-version: ${{ matrix.python-version }} cache: 'poetry' diff --git a/.github/workflows/python-package-windows.yml b/.github/workflows/python-package-windows.yml index 061dd2476ae..5b8daa86df2 100644 --- a/.github/workflows/python-package-windows.yml +++ b/.github/workflows/python-package-windows.yml @@ -15,7 +15,7 @@ jobs: timeout-minutes: 20 strategy: matrix: - python-version: ["3.10", "3.11"] + python-version: ["3.10"] steps: - name: Check out repository code diff --git a/.github/workflows/python-package.yml b/.github/workflows/python-package.yml index d271e493800..1df8fd5c0a3 100644 --- a/.github/workflows/python-package.yml +++ b/.github/workflows/python-package.yml @@ -11,11 +11,11 @@ on: jobs: test: - runs-on: ubuntu-latest + runs-on: ubuntu-22.04 # ubuntu-latest timeout-minutes: 20 strategy: matrix: - python-version: ["3.10", "3.11"] + python-version: ["3.10"] steps: - name: Check out repository code uses: actions/checkout@v4 @@ -34,32 +34,24 @@ jobs: - name: Install pyattck run: | - poetry run pip install pyattck==7.1.2 maco + poetry run pip install git+https://github.com/CAPESandbox/pyattck maco - name: Run Ruff - run: poetry run ruff . --line-length 132 --ignore E501,E402 + run: poetry run ruff check . --output-format=github . - name: Run unit tests run: poetry run python -m pytest --import-mode=append - - name: See if any parser changed - uses: dorny/paths-filter@v3 - id: changes - with: - filters: | - src: - - 'modules/processing/parsers/CAPE/*.py' - - - name: Test parsers only if any parser changed - if: steps.changes.outputs.src == 'true' - run: poetry run python -m pytest tests_parsers -s --import-mode=append + # see the mypy configuration in pyproject.toml + - name: Run mypy + run: poetry run mypy format: runs-on: ubuntu-latest timeout-minutes: 20 strategy: matrix: - python-version: ["3.10", "3.11"] + python-version: ["3.10"] if: ${{ github.ref == 'refs/heads/master' }} steps: @@ -71,13 +63,6 @@ jobs: with: python-version: ${{ matrix.python-version }} - - name: Format with black - run: poetry run black . - - # to be replaced with ruff - - name: Format imports with isort - run: poetry run isort . 
- - name: Commit changes if any # Skip this step if being run by nektos/act if: ${{ !env.ACT }} @@ -85,6 +70,8 @@ jobs: git config user.name "GitHub Actions" git config user.email "action@github.com" if output=$(git status --porcelain) && [ ! -z "$output" ]; then + git pull + git add . git commit -m "style: Automatic code formatting" -a git push fi diff --git a/README.md b/README.md index 62909436d14..b074bb2a75c 100644 --- a/README.md +++ b/README.md @@ -152,11 +152,11 @@ A huge thank you to @D00m3dR4v3n for single-handedly porting CAPE to Python 3. ## Installation recommendations and scripts for optimal performance * Python3 * agent.py is tested with python (3.7.2|3.8) x86. __You should use x86 python version inside of the VM!__ - * host tested with python3 version 3.7, 3.8, 3.10, but newer versions should work too + * host tested with python3 version 3.10, 3.12, but newer versions should work too * __Only rooter should be executed as root__, the rest as __cape__ user. Running as root will mess with permissions. 1. Become familiar with the [documentation](https://capev2.readthedocs.io/en/latest/) and __do read ALL__ config files inside of `conf` folder! -2. For best compabitility we strongly suggest installing on [Ubuntu 22.04 LTS](https://ubuntu.com/#download) and using Windows 10 21H2 as target. +2. For best compabitility we strongly suggest installing on [Ubuntu 24.04 LTS](https://ubuntu.com/#download) and using Windows 10 21H2 as target. 3. `kvm-qemu.sh` and `cape2.sh` __SHOULD BE__ executed from `tmux` session to prevent any OS problems if ``ssh`` connections breaks. 4. [KVM](https://github.com/kevoreilly/CAPEv2/blob/master/installer/kvm-qemu.sh) is recommended as the hypervisor. * Replace `` with a real pattern. @@ -228,3 +228,4 @@ If you use CAPEv2 in your work, please cite it as specified in the "Cite this re ### Docs * [ReadTheDocs](https://capev2.readthedocs.io/en/latest/#) +* [DeepWiki](https://deepwiki.com/kevoreilly/CAPEv2/1-overview) - AI generated, some might be wrong but generally pretty accurate. diff --git a/agent/agent.py b/agent/agent.py index b8968b7f016..0b05c1cccd7 100644 --- a/agent/agent.py +++ b/agent/agent.py @@ -9,7 +9,6 @@ import http.server import ipaddress import json -import multiprocessing import os import platform import random @@ -30,7 +29,7 @@ from zipfile import ZipFile try: - import re2 as re + import re2 as re # type: ignore except ImportError: import re @@ -44,7 +43,7 @@ if sys.maxsize > 2**32 and sys.platform == "win32": sys.exit("You should install python3 x86! 
not x64") -AGENT_VERSION = "0.18" +AGENT_VERSION = "0.20" AGENT_FEATURES = [ "execpy", "execute", @@ -96,7 +95,7 @@ def _missing_(cls, value): AGENT_BROWSER_EXT_PATH = "" AGENT_BROWSER_LOCK = Lock() ANALYZER_FOLDER = "" -agent_mutexes = {} +agent_mutexes: dict = {} """Holds handles of mutexes held by the agent.""" state = { "status": Status.INIT, @@ -177,12 +176,12 @@ def __init__(self): def run( self, - host: ipaddress.IPv4Address = "0.0.0.0", + host: ipaddress.IPv4Address = ipaddress.IPv4Address("0.0.0.0"), port: int = 8000, - event: multiprocessing.Event = None, + event = None, ): socketserver.ThreadingTCPServer.allow_reuse_address = True - self.s = socketserver.ThreadingTCPServer((host, port), self.handler) + self.s = socketserver.ThreadingTCPServer((str(host), port), self.handler) # tell anyone waiting that they're good to go if event: @@ -226,7 +225,6 @@ def handle(self, obj): self.close_connection = True def shutdown(self): - # BaseServer also features a .shutdown() method, but you can't use # that from the same thread as that will deadlock the whole thing. if hasattr(self, "s"): @@ -248,7 +246,7 @@ def __init__(self, status_code=200, **kwargs): def init(self): pass - def json(self): + def json(self) -> str: for valkey in self.values: if isinstance(self.values[valkey], bytes): self.values[valkey] = self.values[valkey].decode("utf8", "replace") @@ -324,8 +322,8 @@ def headers(self, obj): class request: - form = {} - files = {} + form: dict = {} + files: dict = {} client_ip = None client_port = None method = None @@ -334,7 +332,7 @@ class request: } -app = MiniHTTPServer() +app: MiniHTTPServer = MiniHTTPServer() def isAdmin(): @@ -378,7 +376,7 @@ def get_subprocess_status(): """Return the subprocess status.""" async_subprocess = state.get("async_subprocess") message = "Analysis status" - exitcode = async_subprocess.exitcode + exitcode = async_subprocess.poll() if exitcode is None or (sys.platform == "win32" and exitcode == 259): # Process is still running. 
state["status"] = Status.RUNNING @@ -546,7 +544,7 @@ def do_mkdir(): @app.route("/mktemp", methods=("GET", "POST")) def do_mktemp(): suffix = request.form.get("suffix", "") - prefix = request.form.get("prefix", "tmp") + prefix = request.form.get("prefix", "") dirpath = request.form.get("dirpath") try: @@ -562,11 +560,13 @@ def do_mktemp(): @app.route("/mkdtemp", methods=("GET", "POST")) def do_mkdtemp(): suffix = request.form.get("suffix", "") - prefix = request.form.get("prefix", "tmp") + prefix = request.form.get("prefix", "") dirpath = request.form.get("dirpath") try: dirpath = tempfile.mkdtemp(suffix=suffix, prefix=prefix, dir=dirpath) + if sys.platform == "win32": + subprocess.call(["icacls", dirpath, "/inheritance:e", "/grant", "BUILTIN\\Users:(OI)(CI)(RX)"]) except Exception: return json_exception("Error creating temporary directory") @@ -713,9 +713,7 @@ def background_subprocess(command_args, cwd, base64_encode, shell=False): def spawn(args, cwd, base64_encode, shell=False): """Kick off a subprocess in the background.""" - run_subprocess_args = [args, cwd, base64_encode, shell] - proc = multiprocessing.Process(target=background_subprocess, name=f"child process {args[1]}", args=run_subprocess_args) - proc.start() + proc = subprocess.Popen(args, cwd=cwd, shell=shell) state["status"] = Status.RUNNING state["description"] = "" state["async_subprocess"] = proc @@ -765,7 +763,7 @@ def do_browser_ext(): AGENT_BROWSER_LOCK.acquire() if not AGENT_BROWSER_EXT_PATH: try: - ext_tmpdir = tempfile.mkdtemp(prefix="tmp") + ext_tmpdir = tempfile.mkdtemp(prefix="") except Exception: AGENT_BROWSER_LOCK.release() return json_exception("Error creating temporary directory") @@ -799,7 +797,6 @@ def do_kill(): if __name__ == "__main__": - multiprocessing.set_start_method("spawn") parser = argparse.ArgumentParser() parser.add_argument("host", nargs="?", default="0.0.0.0") parser.add_argument("port", type=int, nargs="?", default=8000) diff --git a/agent/test_agent.py b/agent/test_agent.py index 64156b7cac0..02f9499ee9a 100644 --- a/agent/test_agent.py +++ b/agent/test_agent.py @@ -9,12 +9,14 @@ import pathlib import random import shutil +import subprocess import sys import tempfile import time import unittest import uuid import zipfile +from typing import Optional from unittest import mock from urllib.parse import urljoin @@ -38,8 +40,8 @@ class TestAgentFunctions: @mock.patch("sys.platform", "win32") def test_get_subprocess_259(self): mock_process_id = 999998 - mock_subprocess = mock.Mock(spec=multiprocessing.Process) - mock_subprocess.exitcode = 259 + mock_subprocess = mock.Mock(spec=subprocess.Popen) + mock_subprocess.poll = mock.Mock(return_value=259) mock_subprocess.pid = mock_process_id with mock.patch.dict(agent.state, {"async_subprocess": mock_subprocess}): actual = agent.get_subprocess_status() @@ -180,7 +182,7 @@ def test_delete_mutex_win32_200(self): class TestAgent: """Test the agent API.""" - agent_process: multiprocessing.Process = None + agent_process: Optional[multiprocessing.Process] = None def setup_method(self): agent.state = {"status": agent.Status.INIT, "description": "", "async_subprocess": None} @@ -391,8 +393,8 @@ def test_mkdir_invalid(self): def test_mktemp_valid(self): form = { "dirpath": DIRPATH, - "prefix": make_temp_name(), - "suffix": "tmp", + "prefix": "", + "suffix": "", } js = self.post_form("mktemp", form) assert js["message"] == "Successfully created temporary file" @@ -417,8 +419,8 @@ def test_mkdtemp_valid(self): """Ensure we can use the mkdtemp endpoint.""" form = { 
"dirpath": DIRPATH, - "prefix": make_temp_name(), - "suffix": "tmp", + "prefix": "", + "suffix": "", } js = self.post_form("mkdtemp", form) assert js["message"] == "Successfully created temporary directory" @@ -464,7 +466,7 @@ def test_store_invalid(self): # destination file path is invalid upload_file = {"file": ("test_data.txt", "test data\ntest data\n")} - form = {"filepath": os.path.join(DIRPATH, make_temp_name(), "tmp")} + form = {"filepath": os.path.join(DIRPATH, make_temp_name(), "")} js = self.post_form("store", form, 500, files=upload_file) assert js["message"].startswith("Error storing file") diff --git a/analyzer/linux/analyzer.py b/analyzer/linux/analyzer.py index 6add3119edc..28c78bf335c 100644 --- a/analyzer/linux/analyzer.py +++ b/analyzer/linux/analyzer.py @@ -71,7 +71,7 @@ def monitor_new_processes(parent_pid, interval=0.25): new_processes = current_processes - known_processes for pid in new_processes: - log.info(f"New child process detected: {pid}") + log.info("New child process detected: %s", str(pid)) dump_memory(pid) add_pids(pid) # Add the new process to PROCESS_LIST @@ -118,20 +118,20 @@ def dump_memory(pid): chunk = mem_file.read(end - start) output_file.write(chunk) except (OSError, ValueError) as e: - log.error(f"Could not read memory range {start:x}-{end:x}: {e}") + log.error("Could not read memory range %s: {e}", f"{start:x}-{end:x}", str(e)) maps_file.close() mem_file.close() output_file.close() except FileNotFoundError: - log.error(f"Process with PID {pid} not found.") + log.error("Process with PID %s not found.", str(pid)) except PermissionError: - log.error(f"Permission denied to access process with PID {pid}.") + log.error("Permission denied to access process with PID %s.", str(pid)) if os.path.exists(f"{MEM_PATH}/{pid}.dmp"): upload_to_host(f"{MEM_PATH}/{pid}.dmp", f"memory/{pid}.dmp") DUMPED_LIST.add(pid) else: - log.error(f"Memdump file not found in guest machine for PID {pid}") + log.error("Memdump file not found in guest machine for PID %s", str(pid)) class Analyzer: diff --git a/analyzer/linux/lib/api/screenshot.py b/analyzer/linux/lib/api/screenshot.py index 2273e6b2ade..eb93756781c 100644 --- a/analyzer/linux/lib/api/screenshot.py +++ b/analyzer/linux/lib/api/screenshot.py @@ -139,7 +139,7 @@ async def is_gnome(self): log.info("Detected non-Gnome desktop environment.") else: self._is_gnome = True - log.info(f"Detected Gnome version {version}") + log.info("Detected Gnome version %s", str(version)) name = "org.gnome.Screenshot" resp = await self.bus.request_name(name) if resp not in ( @@ -205,8 +205,8 @@ async def take_screenshot_gnome(self): "http://www.freedesktop.org/standards/dbus/1.0/introspect.dtd"> - - + + @@ -260,7 +260,7 @@ async def handler(response, results): if response == 0: await queue.put(urllib.parse.urlparse(results["uri"].value).path) else: - log.warning(f"Received non-zero response when taking screenshot: {response}") + log.warning("Received non-zero response when taking screenshot: %s", str(response)) await queue.put(None) # Set up the signal handler diff --git a/analyzer/linux/lib/common/results.py b/analyzer/linux/lib/common/results.py index 365b089fa7d..b43ca019d05 100644 --- a/analyzer/linux/lib/common/results.py +++ b/analyzer/linux/lib/common/results.py @@ -38,7 +38,7 @@ def upload_to_host(file_path, dump_path, pids="", ppids="", metadata="", categor nc.send(buf, retry=True) buf = infd.read(BUFSIZE) except Exception as e: - log.error("Exception uploading file %s to host: %s", file_path, e, exc_info=True) + 
log.exception("Exception uploading file %s to host: %s", file_path, e) finally: if nc: nc.close() diff --git a/analyzer/linux/modules/auxiliary/filecollector.py b/analyzer/linux/modules/auxiliary/filecollector.py index c68da449ce5..83fe88f4987 100755 --- a/analyzer/linux/modules/auxiliary/filecollector.py +++ b/analyzer/linux/modules/auxiliary/filecollector.py @@ -51,7 +51,6 @@ def __init__(self, options, config): self.thread.join(0.5) def run(self): - if not HAVE_PYINOTIFY: log.info("Missed dependency: pip3 install pyinotify") return False diff --git a/analyzer/linux/modules/auxiliary/screenshots.py b/analyzer/linux/modules/auxiliary/screenshots.py index 36f25818e7b..b1ef4c83100 100644 --- a/analyzer/linux/modules/auxiliary/screenshots.py +++ b/analyzer/linux/modules/auxiliary/screenshots.py @@ -11,6 +11,7 @@ if HAVE_PIL and HAVE_DBUS_NEXT: from PIL import Image + from lib.api.screenshot import Screenshot, ScreenshotGrabber, ScreenshotsUnsupported from lib.common.abstracts import Auxiliary diff --git a/analyzer/linux/modules/packages/zip.py b/analyzer/linux/modules/packages/zip.py index 0cc17c6b775..20e475b7071 100644 --- a/analyzer/linux/modules/packages/zip.py +++ b/analyzer/linux/modules/packages/zip.py @@ -17,7 +17,6 @@ class Zip(Package): - real_package = None def prepare(self): diff --git a/analyzer/windows/analyzer.py b/analyzer/windows/analyzer.py index deb7a583b87..362b46be381 100644 --- a/analyzer/windows/analyzer.py +++ b/analyzer/windows/analyzer.py @@ -691,7 +691,7 @@ def analysis_loop(self, aux_modules): try: Process(pid=pid).upload_memdump() except Exception as e: - log.error(e, exc_info=True) + log.exception(e) log.info("Process with pid %s appears to have terminated", pid) if pid in self.process_list.pids: self.process_list.remove_pid(pid) @@ -915,7 +915,7 @@ def dump_file(self, filepath, metadata="", pids="", ppids="", category="files"): except (IOError, socket.error) as e: log.error('Unable to upload dropped file at path "%s": %s', filepath, e) except Exception as e: - log.error(e, exc_info=True) + log.exception(e) def delete_file(self, filepath, pid=None): """A file is about to removed and thus should be dumped right away.""" @@ -1508,8 +1508,7 @@ def dispatch(self, data): try: response = fn(arguments) except Exception as e: - log.error(e, exc_info=True) - log.exception("Pipe command handler exception occurred (command %s args %s)", command, arguments) + log.exception("Pipe command handler exception occurred (command %s args %s). %s", command, arguments, str(e)) return response @@ -1536,7 +1535,7 @@ def dispatch(self, data): # When user set wrong package, Example: Emotet package when submit doc, package only is for EXE! 
except CuckooError: - log.info("You probably submitted the job with wrong package", exc_info=True) + log.exception("You probably submitted the job with wrong package") data["status"] = "exception" data["description"] = "You probably submitted the job with wrong package" try: diff --git a/analyzer/windows/bin/PPLinject.exe b/analyzer/windows/bin/PPLinject.exe index 6b4077b6471..c040aa61f24 100644 Binary files a/analyzer/windows/bin/PPLinject.exe and b/analyzer/windows/bin/PPLinject.exe differ diff --git a/analyzer/windows/bin/PPLinject64.exe b/analyzer/windows/bin/PPLinject64.exe index 7053b028bcf..5dcfef78a57 100644 Binary files a/analyzer/windows/bin/PPLinject64.exe and b/analyzer/windows/bin/PPLinject64.exe differ diff --git a/analyzer/windows/bin/loader.exe b/analyzer/windows/bin/loader.exe index 277794e1fc6..0142b1a338d 100644 Binary files a/analyzer/windows/bin/loader.exe and b/analyzer/windows/bin/loader.exe differ diff --git a/analyzer/windows/bin/loader_x64.exe b/analyzer/windows/bin/loader_x64.exe index 020ed940f4b..c895b0bfaab 100644 Binary files a/analyzer/windows/bin/loader_x64.exe and b/analyzer/windows/bin/loader_x64.exe differ diff --git a/analyzer/windows/data/yara/Al-khaser.yar b/analyzer/windows/data/yara/Al-khaser.yar deleted file mode 100644 index 0eab395fa8a..00000000000 --- a/analyzer/windows/data/yara/Al-khaser.yar +++ /dev/null @@ -1,12 +0,0 @@ -rule Al_khaser -{ - meta: - author = "kevoreilly" - description = "Al-khaser bypass" - cape_options = "bp0=$print_check_result_x86,bp0=$print_check_result_x64,action0=setecx:0,count=1,no-logs=2" - strings: - $print_check_result_x86 = {89 45 FC 53 56 8B C1 89 95 C4 FD FF FF 89 85 C8 FD FF FF 57 6A F5 83 F8 01 75 47 FF 15 [4] 8B D8 8D 8D E4 FD FF FF BA 16 00 00 00 66 90} - $print_check_result_x64 = {48 89 84 24 50 02 00 00 8B F1 83 F9 01 B9 F5 FF FF FF 48 8B EA 75 41 FF 15 [4] 48 8D 7C 24 30 B9 16 00 00 00 48 8B D8} - condition: - uint16(0) == 0x5A4D and any of ($print_check_result*) -} diff --git a/analyzer/windows/data/yara/Amatera.yar b/analyzer/windows/data/yara/Amatera.yar new file mode 100644 index 00000000000..51dfa0897d8 --- /dev/null +++ b/analyzer/windows/data/yara/Amatera.yar @@ -0,0 +1,14 @@ +rule Amatera +{ + meta: + author = "kevoreilly" + description = "Amatera syscall capture" + cape_options = "sysbp=$sysenter" + hash = "35eb93548a0c037d392f870c05e0e9fb1aeff3a5a505e1d4a087f7465ed1f6af" + strings: + $sysenter = {64 FF 15 C0 00 00 00 C3} + $harness = {0F B7 55 EC 52 E8 [4] 83 C4 04 C7 45 F0 [4] 8B 45 ?? 50 [0-40] FF 55 F0 83 C4 ?? 8B E5 5D C3} + $socket = {66 89 [2] 6A 00 6A ?? 8D [3] 68 (03|07) 20 01 00 8B 4D F8 E8 [4] 0F B6 (C0|C8) 85 (C0|C9) 75 04 32 C0 EB} + condition: + uint16(0) == 0x5A4D and all of them +} diff --git a/analyzer/windows/data/yara/DarkGateLoader.yar b/analyzer/windows/data/yara/DarkGateLoader.yar index c9ececb6c0f..c44be35608f 100644 --- a/analyzer/windows/data/yara/DarkGateLoader.yar +++ b/analyzer/windows/data/yara/DarkGateLoader.yar @@ -6,7 +6,7 @@ rule DarkGateLoader cape_options = "bp0=$decrypt1+30,bp0=$decrypt2+29,action0=dump:eax::ebx,bp1=$decrypt3+80,action1=dumpsize:eax,bp2=$decrypt3+124,hc2=1,action2=dump:eax,count=0" packed = "b15e4b4fcd9f0d23d902d91af9cc4e01417c426e55f6e0b4ad7256f72ac0231a" strings: - $loader = {6C 6F 61 64 65 72} + $loader = "loader" $decrypt1 = {B? 01 00 00 00 8B [3] E8 [4] 8B D7 32 54 [4] 88 54 18 FF 4? 4? 75} $decrypt2 = {B? 01 00 00 00 8B [2] E8 [4] 8B D7 2B D3 [4] 88 54 18 FF 4? 4? 75} $decrypt3 = {89 85 [4] 8B 85 [4] 8B F0 8D BD [4] B? 
10 [3] F3 A5 8B 85 [4] 33 D2 [2] 8B 85 [4] 99} diff --git a/analyzer/windows/data/yara/Formbook.yar b/analyzer/windows/data/yara/Formbook.yar index a1d3d50adf6..efb9a24a3a5 100644 --- a/analyzer/windows/data/yara/Formbook.yar +++ b/analyzer/windows/data/yara/Formbook.yar @@ -3,7 +3,7 @@ rule FormhookA meta: author = "kevoreilly" description = "Formbook Anti-hook Bypass" - cape_options = "clear,bp0=$remap_ntdll_0,action0=setedx:ntdll,count0=1,bp1=$remap_ntdll_1,action1=setdst:ntdll,count1=1,force-sleepskip=1" + cape_options = "clear,bp0=$remap_ntdll_0,action0=setedx:ntdll,count0=1,bp1=$remap_ntdll_1,action1=setdst:ntdll,count1=1" packed = "9e38c0c3c516583da526016c4c6a671c53333d3d156562717db79eac63587522" packed = "b8e44f4a0d92297c5bb5b217c121f0d032850b38749044face2b0014e789adfb" strings: @@ -18,12 +18,13 @@ rule FormhookB meta: author = "kevoreilly" description = "Formbook Anti-hook Bypass" - cape_options = "clear,bp0=$entry,action0=scan,hc0=1,bp1=$new_remap+6,action1=setdst:ntdll,count=0,force-sleepskip=1" + cape_options = "clear,bp0=$entry,action0=scan,hc0=1,bp1=$new_remap,action1=setdst:ntdll,count1=0,bp2=$code+14,count=0" packed = "08c5f44d57f5ccc285596b3d9921bf7fbbbf7f9a827bb3285a800e4c9faf6731" strings: - $remap_ntdll = {33 96 [2] 00 00 8D 86 [2] 00 00 68 F0 00 00 00 50 89 [2-5] E8 [4-10] 6A 00 6A 0? 8D 4D ?? 51 6A} + $decode = {B8 67 66 66 66 F7 E? C1 FA 03 8B ?2 C1 E? 1F 03 ?2} $entry = {55 8B EC 83 EC ?4 53 56 57 [480-520] 8B E5 5D C3} - $new_remap = {90 90 90 90 90 90 8B (86 [2] 00 00|46 ??|06) 5F 5E 5B 8B E5 5D C3} + $new_remap = {8B (86 [2] 00 00|46 ??|06) 5F 5E 5B 8B E5 5D C3} + $code = {8B 4E 18 50 6A 00 51 57 56 E8 9A 18 00 00 8B 55 10 8B 45 0C 8B 0F 83 C4 1C 52 50 FF D1 5F 5E 5D C3} condition: 2 of them } @@ -61,27 +62,14 @@ rule FormconfB meta: author = "kevoreilly" description = "Formbook Config Extraction" - cape_options = "clear,bp0=$c2_1,bp0=$c2_2+38,action0=string:rcx+1,bp1=$decoy,action1=string:rcx+1,bp2=$config,action2=scan,count=0,typestring=Formbook Config" + cape_options = "clear,bp0=$c2_1,bp0=$c2_2,action0=string:rcx,bp1=$decoy,action1=string:rdi,bp2=$config,action2=scan,bp3=$sleep+5,action3=skip,count=0,typestring=Formbook Config" packed = "60571b2683e7b753a77029ebe9b5e1cb9f3fbfa8d6a43e4b7239eefd13141ae4" strings: $c2_1 = {44 0F B6 5D ?? 45 84 DB 74 ?? 48 8D 4D [1-5] 41 80 FB 2F 74 11 0F B6 41 01 48 FF C1 FF C3 44 0F B6 D8 84 C0 75} - $c2_2 = {49 8D 8D [2] 00 00 B2 ?? E8 [4] 4D 8D 85 [2] 00 00 49 8D 8D [2] 00 00 BA ?? 00 00 00 E8} + $c2_2 = {40 53 48 83 EC 20 48 8B DA 48 85 C9 74 28 80 39 00 74 23 48 85 D2 74 1E 48 8B D1 41 B8 04 00 00 00 48 8B CB E8} $decoy = {45 3B B5 [2] 00 00 [0-7] 44 8D 1C 33 48 8D 7D [1-5] 42 C6 44 [2] 00 [0-4] 48 8B CF E8} $config = {40 55 53 56 57 41 54 41 55 41 56 41 57 48 8D AC 24 [4] 48 81 EC [2] 00 00 45 33 F6 33 C0 4C 8B E9 4C 89 75} + $sleep = {B9 88 13 00 00 FF D7 44 8B 9B [4] 41 81 FB 00 01 00 00 75 ?? 48 39 B3 [4] 74 ?? 
8B 83 [4] 05 00 20 00 00 39 B0} condition: 2 of them } - -rule FormconfC -{ - meta: - author = "kevoreilly" - description = "Formbook Config Extraction" - cape_options = "clear,bp0=$c2,hc0=1,action0=string:rcx+1,bp1=$decoy,action1=string:rcx+1,count=0,typestring=Formbook Config" - packed = "0270016f451f9ba630f2ea4e2ea006fb89356627835b560bb2f4551a735ba0e1" - strings: - $c2 = {49 8D 95 [2] 00 00 49 8D 8D [2] 00 00 41 B8 07 00 00 00 E8 [4] 49 8B CD 45 88 [3] 00 00 E8 [4] 33 C0} - $decoy = {48 8B CF E8 [4] 48 8B D7 44 8B C0 49 8B 85 [4] 49 (8D 8C 04 [2] 00 00|8D 0C 04) E8 [4] 48 8B CF E8} - condition: - all of them -} diff --git a/analyzer/windows/data/yara/ModiLoader.yar b/analyzer/windows/data/yara/ModiLoader.yar index c8783d53cdc..36dc76915a4 100644 --- a/analyzer/windows/data/yara/ModiLoader.yar +++ b/analyzer/windows/data/yara/ModiLoader.yar @@ -1,4 +1,18 @@ -rule ModiLoader { +rule ModiLoader +{ + meta: + author = "kevoreilly" + description = "ModiLoader detonation shim" + cape_options = "exclude-apis=NtAllocateVirtualMemory:NtProtectVirtualMemory" + hash = "1f0cbf841a6bc18d632e0bc3c591266e77c99a7717a15fc4b84d3e936605761f" + strings: + $epilog1 = {81 C2 A1 03 00 00 87 D1 29 D3 33 C0 5A 59 59 64 89 10 68} + $epilog2 = {6A 00 6A 01 8B 45 ?? 50 FF 55 ?? 33 C0 5A 59 59 64 89 10 68} + condition: + uint16(0) == 0x5a4d and all of them +} + +rule ModiLoaderOld { meta: author = "ditekSHen" description = "ModiLoader detonation shim" diff --git a/analyzer/windows/data/yara/NitrogenLoader.yar b/analyzer/windows/data/yara/NitrogenLoader.yar index 0c37500494a..6103b91b925 100644 --- a/analyzer/windows/data/yara/NitrogenLoader.yar +++ b/analyzer/windows/data/yara/NitrogenLoader.yar @@ -38,4 +38,25 @@ rule NitrogenLoaderBypass $exit = {33 C9 E8 [4] E8 [4] 48 8D 84 24 [4] 48 89 44 24 ?? 4? B? E4 00 00 00 4? 8B 05 [4] B? 03 00 00 00 48 8D} condition: all of them -} \ No newline at end of file +} + +rule NitrogenLoaderConfig +{ + meta: + author = "enzok" + description = "NitrogenLoader Config Extraction" + cape_options = "bp0=$decrypt1*+1,bp1=$key*,hc0=1,count=0,action0=string:rcx,action1=string:rdx,typestring=NitrogenLoader Config" + strings: + $decrypt1 = {48 63 4? 24 ?? 33 D2 48 [0-3] F7 B4 24 [4] 48 8B C2 48 8B 8C 24 [4] 0F BE 04 01} + $decrypt2 = {8B ?? 24 [1-4] 33 C8 8B C1 48 63 4C 24 ?? 48 8B 94 24 [4] 88 04 0A} + $decrypt3 = {8B 8C 24 ?? ?? ?? ?? 2B C8 8B C1 48 63 4C 24 ?? 48 8B 94 24 [4] 88 04 0A} + $key = {74 ?? E8 [4] 85 C0 75 ?? 4? 8B 0D [3] 00 4? 8D 15 [3] 00 E8} + $taskman_1 = {E8 [4] B9 61 00 00 00 88 84 24 [4] E8 [4] B9 73 00 00 00 88 84 24 [4] E8 [4] B9 6B 00 00 00 88 84 24 [4] E8 [3] FF} + $taskman_2 = {B9 4D 00 00 00 88 84 24 [4] E8 [4] B9 61 00 00 00 88 84 24 [4] E8 [4] B9 6E 00 00 00 88 84 24 [4] E8 [3] FF} + $taskman_3 = {B9 61 00 00 00 88 84 24 [4] E8 [4] B9 67 00 00 00 88 84 24 [4] E8 [4] B9 65 00 00 00 88 84 24 [4] E8 [3] FF} + $taskman_4 = {B9 72 00 00 00 88 84 24 [4] E8 [4] 31 C9 88 84 24 [4] E8 [3] FF} + $rc4decrypt_1 = {48 89 ?? 48 89 ?? E8 [4] 48 8B ?? 24 [1-4] 4? 89 ?? 48 89 ?? 4? 89 C1 89 EA E8 [4] 48 89} + $rc4decrypt_2 = {E8 [4] 8B ?? 24 [1-4] 4? 89 ?? 48 89 ?? 4? 
89 C1 E8 [3] FF} + condition: + any of ($decrypt*) or ($key and (3 of ($taskman_*) and 1 of ($rc4decrypt_*))) +} diff --git a/analyzer/windows/data/yara/Pafish.yar b/analyzer/windows/data/yara/Pafish.yar deleted file mode 100644 index 5da94455018..00000000000 --- a/analyzer/windows/data/yara/Pafish.yar +++ /dev/null @@ -1,14 +0,0 @@ -rule Pafish -{ - meta: - author = "kevoreilly" - description = "Pafish bypass" - cape_options = "bp0=$rdtsc_vmexit_32-2,bp1=$rdtsc_vmexit_32-2,bp0=$rdtsc_vmexit_64+36,bp1=$rdtsc_vmexit_64+36,action0=skip,action1=skip,count=1" - hash = "9e7d694ed87ae95f9c25af5f3a5cea76188cd7c1c91ce49c92e25585f232d98e" - hash = "ff24b9da6cddd77f8c19169134eb054130567825eee1008b5a32244e1028e76f" - strings: - $rdtsc_vmexit_32 = {8B 45 E8 80 F4 00 89 C? 8B 45 EC 80 F4 00 89 C? 89 F? 09 ?? 85 C0 75 07} - $rdtsc_vmexit_64 = {48 8B 45 F0 48 BA CD CC CC CC CC CC CC CC 48 F7 E2 48 89 D0 48 C1 E8 03 48 89 45 F0 48 81 7D F0 ?? 0? 00 00 77 07} - condition: - uint16(0) == 0x5A4D and any of them -} diff --git a/analyzer/windows/data/yara/Socks5Systemz.yar b/analyzer/windows/data/yara/Socks5Systemz.yar index 8e00078e272..8493b3b8986 100644 --- a/analyzer/windows/data/yara/Socks5Systemz.yar +++ b/analyzer/windows/data/yara/Socks5Systemz.yar @@ -3,16 +3,17 @@ rule Socks5Systemz meta: author = "kevoreilly" description = "Socks5Systemz config extraction" - cape_options = "br0=user32::wsprintfA,action1=string:[esp],count=0,typestring=Socks5Systemz Config" + cape_options = "br0=user32::wsprintfA,br1=ntdll::sprintf,action2=string:[esp],action3=string:[esp],count=0,typestring=Socks5Systemz Config" packed = "9b997d0de3fe83091726919a0dc653e22f8f8b20b1bb7d0b8485652e88396f29" strings: $chunk1 = {0F B6 84 8A [4] E9 [3] (00|FF)} $chunk2 = {0F B6 04 8D [4] E9 [3] (00|FF)} - $chunk3 = {0F B6 04 8D [4] E9 [3] (00|FF)} - $chunk4 = {0F B6 04 8D [4] E9 [3] (00|FF)} - $chunk5 = {66 0F 6F 05 [4] E9 [3] (00|FF)} - $chunk6 = {F0 0F B1 95 [4] E9 [3] (00|FF)} - $chunk7 = {83 FA 04 E9 [3] (00|FF)} + $chunk3 = {66 0F 6F 05 [4] E9 [3] (00|FF)} + $chunk4 = {F0 0F B1 95 [4] E9 [3] (00|FF)} + $chunk5 = {83 FA 04 E9 [3] (00|FF)} + $chunk6 = {8A 04 8D [4] E9 [3] (00|FF)} + $chunk7 = {83 C4 04 83 C4 04 E9} + $chunk8 = {83 C2 04 87 14 24 5C E9} condition: - uint16(0) == 0x5A4D and 6 of them + uint16(0) == 0x5A4D and 5 of them } diff --git a/analyzer/windows/dll/capemon.dll b/analyzer/windows/dll/capemon.dll index 5a47c15e36d..b0e7d73ec76 100755 Binary files a/analyzer/windows/dll/capemon.dll and b/analyzer/windows/dll/capemon.dll differ diff --git a/analyzer/windows/dll/capemon_x64.dll b/analyzer/windows/dll/capemon_x64.dll index 4f96b7bf52f..25c23084f65 100755 Binary files a/analyzer/windows/dll/capemon_x64.dll and b/analyzer/windows/dll/capemon_x64.dll differ diff --git a/analyzer/windows/dll/version.dll b/analyzer/windows/dll/version.dll new file mode 100644 index 00000000000..388d91199d5 Binary files /dev/null and b/analyzer/windows/dll/version.dll differ diff --git a/analyzer/windows/dll/version_x64.dll b/analyzer/windows/dll/version_x64.dll new file mode 100644 index 00000000000..51e942113ba Binary files /dev/null and b/analyzer/windows/dll/version_x64.dll differ diff --git a/analyzer/windows/lib/api/process.py b/analyzer/windows/lib/api/process.py index e4b2b6f7592..fef7ad5ade1 100644 --- a/analyzer/windows/lib/api/process.py +++ b/analyzer/windows/lib/api/process.py @@ -13,7 +13,7 @@ import urllib.error import urllib.parse import urllib.request -from ctypes import byref, c_buffer, c_int, c_ulong, create_string_buffer, 
sizeof +from ctypes import byref, c_buffer, c_int, c_ulong, create_string_buffer, sizeof, windll, ArgumentError from pathlib import Path from shutil import copy @@ -43,13 +43,15 @@ CAPEMON64_NAME, LOADER32_NAME, LOADER64_NAME, - TTD32_NAME, - TTD64_NAME, LOGSERVER_PREFIX, PATHS, PIPE, SHUTDOWN_MUTEX, TERMINATE_EVENT, + TTD32_NAME, + TTD64_NAME, + SIDELOADER32_NAME, + SIDELOADER64_NAME, ) from lib.common.defines import ( KERNEL32, @@ -65,6 +67,13 @@ from lib.core.compound import create_custom_folders from lib.core.config import Config +# CSIDL constants +CSIDL_WINDOWS = 0x0024 +CSIDL_SYSTEM = 0x0025 +CSIDL_SYSTEMX86 = 0x0029 +CSIDL_PROGRAM_FILES = 0x0026 +CSIDL_PROGRAM_FILESX86 = 0x002a + IOCTL_PID = 0x222008 IOCTL_CUCKOO_PATH = 0x22200C PATH_KERNEL_DRIVER = "\\\\.\\DriverSSDT" @@ -94,6 +103,20 @@ def get_referrer_url(interest): return f"http://www.google.com/url?sa=t&rct=j&q=&esrc=s&source=web&cd={itemidx}&ved={vedstr}&url={escapedurl}&ei={eistr}&usg={usgstr}" +def nt_path_to_dos_path_ansi(nt_path: str) -> str: + drive_letters = "ABCDEFGHIJKLMNOPQRSTUVWXYZ" + nt_path_bytes = nt_path.encode("utf-8", errors="ignore") + for letter in drive_letters: + drive = f"{letter}:" + target = create_string_buffer(1024) + res = KERNEL32.QueryDosDeviceA(drive.encode("ascii"), target, 1024) + if res != 0: + device_path = target.value + if nt_path_bytes.startswith(device_path): + converted = nt_path_bytes.replace(device_path, drive.encode("ascii"), 1) + return converted.decode("utf-8", errors="ignore") + return nt_path + def NT_SUCCESS(val): return val >= 0 @@ -228,6 +251,12 @@ def get_filepath(self): return "" + def get_folder_path(self, csidl): + """Use SHGetFolderPathW to get the system folder path for a given CSIDL.""" + buf = create_string_buffer(MAX_PATH) + windll.shell32.SHGetFolderPathA(None, csidl, None, 0, buf) + return buf.value.decode('utf-8', errors='ignore') + def get_image_name(self): """Get the image name; returns an empty string on error.""" if not self.h_process: @@ -278,6 +307,58 @@ def get_parent_pid(self): return None + def detect_dll_sideloading(self, directory_path: str) -> bool: + """Detect potential DLL sideloading in the provided directory.""" + try: + directory = Path(directory_path) + if not directory.is_dir(): + return False + + if (directory/"capemon.dll").exists(): + return False + + # Early exit if directory is a known system location + try: + system_dirs = { + Path(self.get_folder_path(CSIDL_WINDOWS)).resolve(), + Path(self.get_folder_path(CSIDL_SYSTEM)).resolve(), + Path(self.get_folder_path(CSIDL_SYSTEMX86)).resolve(), + Path(self.get_folder_path(CSIDL_PROGRAM_FILES)).resolve(), + Path(self.get_folder_path(CSIDL_PROGRAM_FILESX86)).resolve() + } + if directory.resolve() in system_dirs: + return False + except (OSError, ArgumentError, ValueError) as e: + log.warning("detect_dll_sideloading: failed to retrieve system paths: %s", e) + return False + + try: + local_dlls = {f.name.lower() for f in directory.glob("*.dll") if f.is_file()} + if not local_dlls: + return False + except (OSError, PermissionError) as e: + log.warning("detect_dll_sideloading: could not list DLLs in %s: %s", directory_path, e) + return False + + # Build set of known system DLLs + known_dlls = set() + for sys_dir in system_dirs: + try: + if sys_dir.exists(): + known_dlls.update(f.name.lower() for f in sys_dir.glob("*.dll") if f.is_file()) + except (OSError, PermissionError) as e: + log.debug("detect_dll_sideloading: skipping system dir %s: %s", sys_dir, e) + + suspicious = local_dlls & known_dlls + if 
suspicious: + for dll in suspicious: + log.info("Potential dll side-loading detected in local directory: %s", dll) + return bool(suspicious) + + except Exception as e: + log.error("detect_dll_sideloading: unexpected error with path %s: %s", directory_path, e) + return False + def kernel_analyze(self): """zer0m0n kernel analysis""" log.info("Starting kernel analysis") @@ -535,7 +616,7 @@ def ttd_stop(self): if result.stderr: log.error(" ".join(result.stderr.split())) - log.info("Stopped TTD for %s process with pid %d: %s", bit_str, self.pid) + log.info("Stopped TTD for %s process with pid %d", bit_str, self.pid) return True @@ -601,7 +682,6 @@ def is_64bit(self): return False def write_monitor_config(self, interest=None, nosleepskip=False): - config_path = os.path.join(Path.cwd(), "dll", f"{self.pid}.ini") log.info("Monitor config for %s: %s", self, config_path) @@ -706,6 +786,12 @@ def inject(self, interest=None, nosleepskip=False): self.write_monitor_config(interest, nosleepskip) + path = os.path.dirname(nt_path_to_dos_path_ansi(self.get_filepath())) + + if self.detect_dll_sideloading(path) and self.has_msimg32(path): + self.deploy_version_proxy(path) + return True + log.info("%s DLL to inject is %s, loader %s", bit_str, dll, bin_name) try: @@ -759,7 +845,7 @@ def upload_memdump(self): try: upload_to_host(file_path, os.path.join("memory", f"{self.pid}.dmp"), category="memory") except Exception as e: - log.error(e, exc_info=True) + log.exception(e) log.error(os.path.join("memory", f"{self.pid}.dmp")) log.error(file_path) log.info("Memory dump of %s uploaded", self) @@ -770,3 +856,41 @@ def __str__(self): """Get a string representation of this process.""" image_name = self.get_image_name() or "???" return f"<{self.__class__.__name__} {self.pid} {image_name}>" + + def has_msimg32(self, directory_path: str) -> bool: + """Check if msimg32.dll exists in directory""" + try: + return any( + f.name.lower() == "msimg32.dll" + for f in Path(directory_path).glob("*") + if f.is_file() + ) + except (OSError, PermissionError): + return False + + def deploy_version_proxy(self, directory_path: str): + """Deploy version.dll proxy loader""" + if self.is_64bit(): + dll = CAPEMON64_NAME + side_dll = SIDELOADER64_NAME + bit_str = "64-bit" + else: + dll = CAPEMON32_NAME + side_dll = SIDELOADER32_NAME + bit_str = "32-bit" + + dll = os.path.join(Path.cwd(), dll) + + if not os.path.exists(dll): + log.warning("invalid path %s for monitor DLL to be sideloaded in %s, sideloading aborted", dll, self) + return + + try: + copy(dll, os.path.join(directory_path, "capemon.dll")) + copy(side_dll, os.path.join(directory_path, "version.dll")) + copy(os.path.join(Path.cwd(), "dll", f"{self.pid}.ini"), os.path.join(directory_path, "config.ini")) + except OSError as e: + log.error("Failed to copy DLL: %s", e) + return + log.info("%s DLL to sideload is %s, sideloader %s", bit_str, os.path.join(directory_path, "capemon.dll"), os.path.join(directory_path, "version.dll")) + return diff --git a/analyzer/windows/lib/common/constants.py b/analyzer/windows/lib/common/constants.py index e18566c587b..ec0e9811496 100644 --- a/analyzer/windows/lib/common/constants.py +++ b/analyzer/windows/lib/common/constants.py @@ -28,6 +28,8 @@ LOADER64_NAME = f"bin\\{random_string(8)}.exe" TTD32_NAME = "bin\\wow64\\TTD.exe" TTD64_NAME = "bin\\TTD.exe" +SIDELOADER32_NAME = "dll\\version.dll" +SIDELOADER64_NAME = "dll\\version_x64.dll" # Options OPT_APPDATA = "appdata" diff --git a/analyzer/windows/lib/common/results.py 
b/analyzer/windows/lib/common/results.py index b6983a52f7d..b552bbe1c79 100644 --- a/analyzer/windows/lib/common/results.py +++ b/analyzer/windows/lib/common/results.py @@ -61,7 +61,7 @@ def upload_to_host(file_path, dump_path, pids="", ppids="", metadata="", categor size -= read_size buf = infd.read(BUFSIZE) except Exception as e: - log.error("Exception uploading file %s to host: %s", file_path, e, exc_info=True) + log.exception("Exception uploading file %s to host: %s", file_path, e) def upload_buffer_to_host(buffer, dump_path, filepath=False, pids="", ppids="", metadata="", category="", duplicated=False): diff --git a/analyzer/windows/lib/common/zip_utils.py b/analyzer/windows/lib/common/zip_utils.py index e9e822e5e75..7ea21702a6a 100644 --- a/analyzer/windows/lib/common/zip_utils.py +++ b/analyzer/windows/lib/common/zip_utils.py @@ -1,16 +1,12 @@ import hashlib import logging import os +import re import shutil import subprocess from pathlib import Path from zipfile import BadZipfile, ZipFile -try: - import re2 as re -except ImportError: - import re - from lib.common.constants import OPT_MULTI_PASSWORD from lib.common.exceptions import CuckooPackageError from lib.common.hashing import hash_file @@ -18,7 +14,8 @@ log = logging.getLogger(__name__) -FILE_NAME_REGEX = re.compile("[\s]{2}((?:[a-zA-Z0-9\.\-,_\\\\]+( [a-zA-Z0-9\.\-,_\\\\]+)?)+)\\r") +# FILE_NAME_REGEX = re.compile("[\s]{2}((?:[a-zA-Z0-9\.\-,_\\\\]+( [a-zA-Z0-9\.\-,_\\\\]+)?)+)\\r") +FILE_NAME_REGEX = re.compile(r"\s{2}((?:[a-zA-Z0-9.\-,_\\]+(?: [a-zA-Z0-9.\-,_\\]+)?)*)\r") FILE_EXT_OF_INTEREST = [ ".bat", ".cmd", @@ -50,7 +47,7 @@ def extract_archive(seven_zip_path, archive_path, extract_path, password="infect @param extract_path: where to extract @param password: archive password @param try_multiple_passwords: we will be splitting the password on the ':' symbol, - and trying each one to extract the archive + and trying each one to extract the archive """ log.debug([seven_zip_path, "x", "-p", "-y", f"-o{extract_path}", archive_path]) p = subprocess.run( @@ -60,7 +57,7 @@ def extract_archive(seven_zip_path, archive_path, extract_path, password="infect stdout=subprocess.PIPE, ) stdoutput, stderr = p.stdout, p.stderr - log.debug(f"{p.stdout} {p.stderr}") + log.debug("%s %s", p.stdout, p.stderr) if try_multiple_passwords: passwords = password.split(":") @@ -84,9 +81,9 @@ def extract_archive(seven_zip_path, archive_path, extract_path, password="infect stdout=subprocess.PIPE, ) stdoutput, stderr = p.stdout, p.stderr - log.debug(f"{p.stdout} {p.stderr}") + log.debug("%s - %s", p.stdout, p.stderr) if b"Wrong password" in stderr: - log.debug(f"The provided password '{pword}' was incorrect") + log.debug("The provided password '%s' was incorrect", str(pword)) continue else: # We did it! @@ -151,7 +148,7 @@ def extract_zip(zip_path, extract_path, password=b"infected", recursion_depth=1, @param password: ZIP password @param recursion_depth: how deep we are in a nested archive @param try_multiple_passwords: we will be splitting the password on the ':' symbol, - and trying each one to extract the archive + and trying each one to extract the archive """ # Test if zip file contains a file named as itself. 
if is_overwritten(zip_path): @@ -195,7 +192,7 @@ def extract_zip(zip_path, extract_path, password=b"infected", recursion_depth=1, raise CuckooPackageError("Invalid Zip file") from e except RuntimeError as e: if "Bad password for file" in repr(e): - log.debug(f"Password '{pword}' was unsuccessful in extracting the archive.") + log.debug("Password '%s' was unsuccessful in extracting the archive.", str(pword)) password_fail = True continue else: @@ -203,7 +200,7 @@ def extract_zip(zip_path, extract_path, password=b"infected", recursion_depth=1, try: archive.extractall(path=extract_path, pwd=pword) except RuntimeError as e: - raise CuckooPackageError(f"Unable to extract Zip file: {e}") from e + raise CuckooPackageError("Unable to extract Zip file: %s", str(e)) from e finally: if recursion_depth < 4: # Extract nested archives. @@ -227,7 +224,7 @@ def extract_zip(zip_path, extract_path, password=b"infected", recursion_depth=1, log.error("Error extracting nested Zip file %s with details: %s", name, run_err) if password_fail: - raise CuckooPackageError(f"Unable to extract password-protected Zip file with the password(s): {passwords}") + raise CuckooPackageError("Unable to extract password-protected Zip file with the password(s): %s", str(passwords)) def is_overwritten(zip_path): @@ -264,7 +261,7 @@ def winrar_extractor(winrar_binary, extract_path, archive_path): stdout=subprocess.PIPE, ) # stdoutput, stderr = p.stdout, p.stderr - log.debug(p.stdout + p.stderr) + log.debug("%s - %s", p.stdout, p.stderr) return os.listdir(extract_path) @@ -289,11 +286,11 @@ def upload_extracted_files(root, files_at_root): for entry in files_at_root: try: file_path = os.path.join(root, entry) - log.info("Uploading {0} to host".format(file_path)) + log.info("Uploading %s to host", str(file_path)) filename = f"files/{hash_file(hashlib.sha256, file_path)}" upload_to_host(file_path, filename, metadata=Path(entry).name, duplicated=False) except Exception as e: - log.warning(f"Couldn't upload file {Path(entry).name} to host {e}") + log.warning("Couldn't upload file %s to host %s", str(Path(entry).name), str(e)) def attempt_multiple_passwords(options: dict, password: str) -> bool: diff --git a/analyzer/windows/lib/core/packages.py b/analyzer/windows/lib/core/packages.py index c41ebe4ba6f..2fbf6ac22e0 100644 --- a/analyzer/windows/lib/core/packages.py +++ b/analyzer/windows/lib/core/packages.py @@ -147,5 +147,7 @@ def choose_package(file_type, file_name, exports, target): return "autoit" elif file_name.endswith(("cmd", "bat")) or b"@echo off" in file_content: return "batch" + elif file_name.endswith(".rdp"): + return "rdp" else: return "generic" diff --git a/analyzer/windows/lib/core/pipe.py b/analyzer/windows/lib/core/pipe.py index c8fecc6aba5..c5f399ae3e2 100644 --- a/analyzer/windows/lib/core/pipe.py +++ b/analyzer/windows/lib/core/pipe.py @@ -224,7 +224,7 @@ def stop(self): if h.is_alive(): h.stop() except Exception as e: - log.error(e, exc_info=True) + log.exception(e) def disconnect_pipes(): diff --git a/analyzer/windows/modules/auxiliary/amsi.py b/analyzer/windows/modules/auxiliary/amsi.py index 4fa8f5074ea..05750811be8 100644 --- a/analyzer/windows/modules/auxiliary/amsi.py +++ b/analyzer/windows/modules/auxiliary/amsi.py @@ -30,7 +30,6 @@ import logging import sys import threading -import traceback import uuid logger = logging.getLogger(__name__) @@ -945,7 +944,7 @@ def _unpackSimpleType(self, record, info, event_property): # if there is no data remaining then return if user_data_remaining <= 0: - logger.warning("No 
more user data left, returning none for field {:s}".format(name_field)) + logger.warning("No more user data left, returning none for field %s", str(name_field)) return {name_field: None} in_type = event_property.epi_u1.nonStructType.InType @@ -986,7 +985,7 @@ def _unpackSimpleType(self, record, info, event_property): if status != ERROR_SUCCESS: # We can handle this error and still capture the data. - logger.warning("Failed to get data field data for {:s}, incrementing by reported size".format(name_field)) + logger.warning("Failed to get data field data for %s, incrementing by reported size", str(name_field)) self.index += property_length return {name_field: None} @@ -1002,7 +1001,7 @@ def _unpackSimpleType(self, record, info, event_property): data = formatted_data.value # Convert the formatted data if necessary - if out_type in TDH_CONVERTER_LOOKUP and type(data) != TDH_CONVERTER_LOOKUP[out_type]: + if out_type in TDH_CONVERTER_LOOKUP and type(data) is TDH_CONVERTER_LOOKUP[out_type]: data = TDH_CONVERTER_LOOKUP[out_type](data) return {name_field: data} @@ -1135,7 +1134,7 @@ def _processEvent(self, record): if record.contents.EventHeader.Flags & EVENT_HEADER_FLAG_EXTENDED_INFO: parsed_data["EventExtendedData"] = self._parseExtendedData(record) except Exception as e: - logger.warning("Unable to parse event: {}".format(e)) + logger.warning("Unable to parse event: %s", str(e)) try: out.update(parsed_data) @@ -1143,8 +1142,7 @@ def _processEvent(self, record): if self.event_callback: self.event_callback(out) except Exception as e: - logger.error("Exception during callback: {}".format(e)) - logger.error(traceback.format_exc()) + logger.exception("Exception during callback: %s", str(e)) class TraceProperties: @@ -1170,7 +1168,7 @@ def __init__(self, event_callback=None): raise OSError("AMSI not supported on this platform") from err self.provider = None self.properties = TraceProperties() - self.session_name = "{:s}".format(str(uuid.uuid4())) + self.session_name = str(uuid.uuid4()) self.running = False self.event_callback = event_callback self.trace_logfile = None diff --git a/analyzer/windows/modules/auxiliary/browsermonitor.py b/analyzer/windows/modules/auxiliary/browsermonitor.py index 6989f190b20..4e0ce43a16d 100644 --- a/analyzer/windows/modules/auxiliary/browsermonitor.py +++ b/analyzer/windows/modules/auxiliary/browsermonitor.py @@ -35,7 +35,7 @@ def _find_browser_extension(self): for directory in temp_dir_list: # TOR Browser saves directly to %temp% if directory.startswith("bext_") and directory.endswith(".json"): - log.debug(f"Found extension logs: {self.browser_logfile}") + log.debug("Found extension logs: %s", self.browser_logfile) self.browser_logfile = os.path.join(temp_dir, directory) break tmp_directory_path = os.path.join(temp_dir, directory) @@ -47,7 +47,7 @@ def _find_browser_extension(self): for file in tmp_dir_files: if file.startswith("bext_") and file.endswith(".json"): self.browser_logfile = os.path.join(temp_dir, directory, file) - log.debug(f"Found extension logs: {self.browser_logfile}") + log.debug("Found extension logs: %s", self.browser_logfile) break time.sleep(1) diff --git a/analyzer/windows/modules/auxiliary/disguise.py b/analyzer/windows/modules/auxiliary/disguise.py index 9f8745dbe4c..4d8c2d0db7f 100644 --- a/analyzer/windows/modules/auxiliary/disguise.py +++ b/analyzer/windows/modules/auxiliary/disguise.py @@ -30,7 +30,6 @@ from lib.common.rand import random_integer, random_string log = logging.getLogger(__name__) -PERSISTENT_ROUTE_GATEWAY = "192.168.1.1" si = 
subprocess.STARTUPINFO() si.dwFlags |= subprocess.STARTF_USESHOWWINDOW @@ -243,18 +242,14 @@ def randomizeUUID(self): # Replace the UUID with the new UUID SetValueEx(key, "MachineGuid", 0, REG_SZ, createdUUID) - def add_persistent_route(self): - self.run_as_system( - ["C:\\Windows\\System32\\ROUTE.exe", "-p", "add", "0.0.0.0", "mask", "0.0.0.0", PERSISTENT_ROUTE_GATEWAY] - ) - self.run_as_system( - ["C:\\Windows\\System32\\ROUTE.exe", "-p", "change", "0.0.0.0", "mask", "0.0.0.0", PERSISTENT_ROUTE_GATEWAY] - ) + def add_persistent_route(self, gateway: str): + self.run_as_system(["C:\\Windows\\System32\\ROUTE.exe", "-p", "add", "0.0.0.0", "mask", "0.0.0.0", gateway]) + self.run_as_system(["C:\\Windows\\System32\\ROUTE.exe", "-p", "change", "0.0.0.0", "mask", "0.0.0.0", gateway]) def start(self): if self.config.windows_static_route: - log.info(f"Config for route is: {str(self.config.windows_static_route)}") - self.add_persistent_route() + log.info("Config for route is: %s", str(self.config.windows_static_route)) + self.add_persistent_route(self.config.windows_static_route_gateway) self.change_productid() self.set_office_mrus() self.ramnit() diff --git a/analyzer/windows/modules/auxiliary/dns_etw.py b/analyzer/windows/modules/auxiliary/dns_etw.py index 4c52da10b60..093d50f7288 100644 --- a/analyzer/windows/modules/auxiliary/dns_etw.py +++ b/analyzer/windows/modules/auxiliary/dns_etw.py @@ -22,8 +22,8 @@ HAVE_ETW = True except ImportError as e: log.debug( - f"Could not load auxiliary module DNS_ETW due to '{e}'\nIn order to use DNS_ETW functionality, it " - "is required to have pywintrace setup in python" + "Could not load auxiliary module DNS_ETW due to '%s'\nIn order to use DNS_ETW functionality, it " + "is required to have pywintrace setup in python", str(e) ) __author__ = "[Canadian Centre for Cyber Security] @CybercentreCanada" @@ -43,7 +43,6 @@ def encode(data, encoding="utf-8"): if HAVE_ETW: class ETW_provider(ETW): - def __init__( self, ring_buf_size=1024, @@ -120,7 +119,7 @@ def on_event(self, event_tufo): if event_id not in self.event_id_filters: return if self.no_conout is False: - log.info("{:d} ({:s})\n{:s}\n".format(event_id, event["Task Name"], pprint.pformat(encode(event)))) + log.info("%d (%s)\n%s\n", event_id, event["Task Name"], pprint.pformat(encode(event))) if event["QueryName"] in SAFELIST: return # Event 3010 query @@ -227,5 +226,5 @@ def stop(self): # file_name = file_path_list[-1] # process = file_path_list[-2] dumppath = os.path.join("DNS_ETW", "etw_dns.json") - log.debug("DNS_ETW Aux Module is uploading %s" % f) + log.debug("DNS_ETW Aux Module is uploading %s", f) upload_to_host(f, dumppath) diff --git a/analyzer/windows/modules/auxiliary/evtx.py b/analyzer/windows/modules/auxiliary/evtx.py index 41f47c3d16f..1899812f736 100644 --- a/analyzer/windows/modules/auxiliary/evtx.py +++ b/analyzer/windows/modules/auxiliary/evtx.py @@ -12,7 +12,6 @@ class Evtx(Thread, Auxiliary): - evtx_dump = "evtx.zip" windows_logs = [ diff --git a/analyzer/windows/modules/auxiliary/filepickup.py b/analyzer/windows/modules/auxiliary/filepickup.py index 0adb7305e9d..fbfa3e6059f 100644 --- a/analyzer/windows/modules/auxiliary/filepickup.py +++ b/analyzer/windows/modules/auxiliary/filepickup.py @@ -29,7 +29,7 @@ def start(self): def stop(self): if hasattr(self, "file_to_get"): if self.file_to_get: - log.info(f"Uploading {self.file_to_get}") + log.info("Uploading %s", self.file_to_get) upload_to_host(self.file_to_get, os.path.join("files", os.path.basename(self.file_to_get))) self.do_run = 
False diff --git a/analyzer/windows/modules/auxiliary/human.py b/analyzer/windows/modules/auxiliary/human.py index c7fb4c8c519..6851589d9cd 100644 --- a/analyzer/windows/modules/auxiliary/human.py +++ b/analyzer/windows/modules/auxiliary/human.py @@ -517,7 +517,7 @@ def run(self): pass else: for instruction in GIVEN_INSTRUCTIONS: - log.info("Instruction: %s" % instruction) + log.info("Instruction: %s", instruction) try: if instruction.lower() == CLICK_CMD: click_mouse() @@ -536,7 +536,7 @@ def run(self): if match and len(match.regs) == 2: interval = int(match.group(1)) except Exception as e: - log.error("One of the instruction given is invalid: %s with error %s" % (instruction, e)) + log.error("One of the instruction given is invalid: %s with error %s", instruction, str(e)) continue while self.do_run: diff --git a/analyzer/windows/modules/auxiliary/permissions.py b/analyzer/windows/modules/auxiliary/permissions.py index 8b8bb711f1a..17b655119b9 100644 --- a/analyzer/windows/modules/auxiliary/permissions.py +++ b/analyzer/windows/modules/auxiliary/permissions.py @@ -34,7 +34,6 @@ def start(self): log.debug("Adjusting permissions for %s", locations) for location in locations: - # First add a non-inherited permission for Admin Read+Execute # icacls /grant:r "BUILTIN\Administrators:(OI)(CI)(RX)" "BUILTIN\\Administrators:(RX)" /t /c /q modify_admin_params = [ diff --git a/analyzer/windows/modules/auxiliary/recentfiles.py b/analyzer/windows/modules/auxiliary/recentfiles.py index e6ec3cf4d23..04a8cc427c9 100644 --- a/analyzer/windows/modules/auxiliary/recentfiles.py +++ b/analyzer/windows/modules/auxiliary/recentfiles.py @@ -70,7 +70,7 @@ def start(self): ext = random.choice(self.extensions) filepath = os.path.join(dirpath, "%s.%s" % (filename, ext)) open(filepath, "wb").write(os.urandom(random.randint(30, 999999))) - log.debug("Wrote 'recentfile' %s to disk." % filepath) + log.debug("Wrote 'recentfile' %s to disk.", filepath) SHELL32.SHAddToRecentDocs(SHARD_PATHA, filepath) diff --git a/analyzer/windows/modules/auxiliary/watchdownloads.py b/analyzer/windows/modules/auxiliary/watchdownloads.py new file mode 100644 index 00000000000..c2aefa5ffda --- /dev/null +++ b/analyzer/windows/modules/auxiliary/watchdownloads.py @@ -0,0 +1,76 @@ +# Copyright (C) 2025 Xiang Chen +# This file is part of CAPE Sandbox +# See the file 'docs/LICENSE' for copying permission. + +import logging +import os +import time +from threading import Thread + +from lib.common.abstracts import Auxiliary +from lib.common.results import upload_to_host + +log = logging.getLogger(__name__) + +folders_to_monitor = [ + os.path.join(os.environ["HOMEPATH"], "downloads"), +] + + +HAVE_WATCHDOG = False +try: + from watchdog.events import EVENT_TYPE_DELETED, FileSystemEvent, FileSystemEventHandler + from watchdog.observers import Observer + + class MyEventHandler(FileSystemEventHandler): + def on_any_event(self, event: FileSystemEvent) -> None: + if event.event_type == EVENT_TYPE_DELETED: + return + try: + filename = os.path.basename(event.src_path) + if not filename.endswith((".part", "desktop.ini")): + log.info("Monitor uploading %s", filename) + upload_to_host(event.src_path, f"files/{filename}") + except Exception as e: + log.exception("Can't upload new file %s to host. 
%s", event.src_path, str(e)) + + HAVE_WATCHDOG = True +except ImportError as e: + log.debug("Could not load auxiliary module WatchDownloads due to '%s'", str(e)) + + +class WatchDownloads(Auxiliary, Thread): + """Collect CPU/memory usage info from monitored processes""" + + def __init__(self, options, config): + Auxiliary.__init__(self, options, config) + Thread.__init__(self) + self.enabled = self.config.watchdownloads + self.do_run = True + + def stop(self): + """Stop collecting info""" + self.do_run = False + + def run(self): + """Run capturing of info. + @return: operation status. + """ + if not self.enabled: + return False + + event_handler = MyEventHandler() + observer = Observer() + for folder in folders_to_monitor: + log.info("Monitoring %s", folder) + observer.schedule(event_handler, folder, recursive=True) + observer.start() + + try: + while self.do_run: + time.sleep(1) + finally: + observer.stop() + observer.join() + + return True diff --git a/analyzer/windows/modules/auxiliary/wmi_etw.py b/analyzer/windows/modules/auxiliary/wmi_etw.py new file mode 100644 index 00000000000..cbd2f5e379e --- /dev/null +++ b/analyzer/windows/modules/auxiliary/wmi_etw.py @@ -0,0 +1,192 @@ +import json +import logging +import os +import pprint +from collections.abc import Iterable, Mapping + +from lib.common.abstracts import Auxiliary +from lib.common.results import upload_to_host +from lib.core.config import Config + +log = logging.getLogger(__name__) + +SAFELIST = [] + +ETW = False +HAVE_ETW = False +try: + from etw import ETW, ProviderInfo + from etw import evntrace as et + from etw.GUID import GUID + + HAVE_ETW = True +except ImportError as e: + log.debug( + "Could not load auxiliary module WMI_ETW due to '%s'\nIn order to use WMI_ETW functionality, it " + "is required to have pywintrace setup in python", str(e) + ) + +__author__ = "[Andrea Oliveri starting from code of Canadian Centre for Cyber Security]" + + +def encode(data, encoding="utf-8"): + if isinstance(data, str): + return data.encode(encoding, "ignore") + elif isinstance(data, Mapping): + return dict(map(encode, data.items())) + elif isinstance(data, Iterable): + return type(data)(map(encode, data)) + else: + return data + + +if HAVE_ETW: + + class ETW_provider(ETW): + + def __init__( + self, + ring_buf_size=4096, + max_str_len=4096, + min_buffers=0, + max_buffers=0, + level=et.TRACE_LEVEL_INFORMATION, # If >= 5 print more useless (?) stuff + any_keywords=None, + all_keywords=None, + filters=None, + event_callback=None, + logfile=None, + no_conout=False, + ): + """ + Initializes an instance of WMI_ETW. The default parameters represent a very typical use case and should not be + overridden unless the user knows what they are doing. + + :param ring_buf_size: The size of the ring buffer used for capturing events. + :param max_str_len: The maximum length of the strings the proceed the structure. + Unless you know what you are doing, do not modify this value. + :param min_buffers: The minimum number of buffers for an event tracing session. + Unless you know what you are doing, do not modify this value. + :param max_buffers: The maximum number of buffers for an event tracing session. + Unless you know what you are doing, do not modify this value. + :param level: Logging level + :param any_keywords: List of keywords to match + :param all_keywords: List of keywords that all must match + :param filters: List of filters to apply to capture. + :param event_callback: Callback for processing events + :param logfile: Path to logfile. 
+ :param no_conout: If true does not output live capture to console. + """ + + self.logfile = logfile + self.no_conout = no_conout + if event_callback: + self.event_callback = event_callback + else: + self.event_callback = self.on_event + + providers = [ + ProviderInfo( + "Microsoft-Windows-WMI-Activity", + GUID("{1418EF04-B0B4-4623-BF7E-D74AB47BBDAA}"), + level, + any_keywords, + all_keywords, + ) + ] + self.event_id_filters = [] + super().__init__( + session_name="WMI_ETW", + ring_buf_size=ring_buf_size, + max_str_len=max_str_len, + min_buffers=min_buffers, + max_buffers=max_buffers, + event_callback=self.event_callback, + task_name_filters=filters, + providers=providers, + event_id_filters=self.event_id_filters, + ) + + def on_event(self, event_tufo): + """ + Starts the capture using ETW. + :param event_tufo: tufo containing event information + :param logfile: Path to logfile. + :param no_conout: If true does not output live capture to console. + :return: Does not return anything. + """ + event_id, event = event_tufo + + if self.no_conout is False: + log.info("%d (%s)\n%s\n", event_id, event["Task Name"], pprint.pformat(encode(event))) + + if self.logfile is not None: + with open(self.logfile, "a") as file: + json.dump({"event_id": event_id, "event": event}, file) + file.write("\n") + + def start(self): + super().start() + + def stop(self): + super().stop() + + class WMI_ETW(Auxiliary): + """ETW logging""" + + def __init__(self, options, config): + Auxiliary.__init__(self, options, config) + self.config = Config(cfg="analysis.conf") + self.enabled = self.config.wmi_etw + self.do_run = self.enabled + + self.output_dir = "C:\\wmi\\" + try: + os.mkdir(self.output_dir) + except Exception as e: + print(e) + import traceback + + log.exception(traceback.format_exc()) + + self.log_file = os.path.join(self.output_dir, "wmi_provider.log") + if HAVE_ETW: + self.capture = ETW_provider(logfile=self.log_file, level=255, no_conout=True) + + def start(self): + if not self.enabled or not HAVE_ETW: + return False + try: + log.debug("Starting WMI ETW") + # Start WMI_ETW_provider in the background + self.capture.start() + except Exception as e: + print(e) + import traceback + + log.exception(traceback.format_exc()) + return True + + def stop(self): + if not HAVE_ETW: + return + log.debug("Stopping WMI_ETW...") + self.capture.stop() + files_to_upload = set() + + for d in os.listdir(self.output_dir): + path = os.path.join(self.output_dir, d) + if os.path.isfile(path): + files_to_upload.add(path) + continue + for f in os.listdir(path): + file_path = os.path.join(path, f) + files_to_upload.add(file_path) + continue + + # Upload the ETW log files to the host. + log.debug(files_to_upload) + for f in files_to_upload: + dumppath = os.path.join("aux", "wmi_etw.json") + log.debug("WMI_ETW Aux Module is uploading %s", f) + upload_to_host(f, dumppath) diff --git a/analyzer/windows/modules/packages/Ie4uinit.py b/analyzer/windows/modules/packages/Ie4uinit.py new file mode 100644 index 00000000000..1d3cd52bae7 --- /dev/null +++ b/analyzer/windows/modules/packages/Ie4uinit.py @@ -0,0 +1,35 @@ +# Copyright (C) 2010-2015 Cuckoo Foundation. +# This file is part of Cuckoo Sandbox - http://www.cuckoosandbox.org +# See the file 'docs/LICENSE' for copying permission. 
+# https://lolbas-project.github.io/lolbas/Binaries/Ie4uinit/ +# https://bohops.com/2018/02/26/leveraging-inf-sct-fetch-execute-techniques-for-bypass-evasion-persistence/ +# https://bohops.com/2018/03/10/leveraging-inf-sct-fetch-execute-techniques-for-bypass-evasion-persistence-part-2/ + +import os +import shutil +from lib.common.abstracts import Package + + +class IE4uinit(Package): + """Ie4uinit analysis package.""" + + PATHS = [ + ("SystemRoot", "system32", "ie4uinit.exe"), + # ("Windows", "SysWOW64", "ie4uinit.exe"), + ] + summary = "Executes commands from a specially prepared ie4uinit.inf file." + description = """Executes commands from a specially prepared ie4uinit.inf file.""" + + def start(self, path): + # rundll32.exe ieadvpack.dll,LaunchINFSection test.inf,,1, <- Requires DefaultInstall as entry point + # check if named " ie4uinit.inf" + ie4uinit = self.get_path_app_in_path("ie4uinit.exe") + dirname = os.path.dirname(path) + local_ie4uinit = os.path.join(dirname, "ie4uinit.exe") + shutil.copy(ie4uinit, local_ie4uinit) + ie4uinit = local_ie4uinit + if not path.endswith("ie4uinit.inf"): + new_file = os.path.join(dirname, "ie4uinit.inf") + shutil.copy(path, new_file) + path = new_file + return self.execute(ie4uinit, "-BaseSettings", path) diff --git a/analyzer/windows/modules/packages/archive.py b/analyzer/windows/modules/packages/archive.py index 394085c6b98..95264d281ca 100644 --- a/analyzer/windows/modules/packages/archive.py +++ b/analyzer/windows/modules/packages/archive.py @@ -108,7 +108,7 @@ def start(self, path): files_at_root = [os.path.join(r, f).replace(f"{root}\\", "") for r, _, files in os.walk(root) for f in files] log.debug(files_at_root) if set(file_names) != set(files_at_root): - log.debug(f"Replacing {file_names} with {files_at_root}") + log.debug("Replacing %s with %s", str(file_names), str(files_at_root)) file_names = files_at_root upload_extracted_files(root, files_at_root) @@ -123,12 +123,12 @@ def start(self, path): try: shutil.copytree(d, os.path.join("C:\\", item)) except Exception as e: - log.warning(f"Couldn't copy {d} to root of C: {e}") + log.warning("Couldn't copy %s to root of C: %s", d, str(e)) else: try: shutil.copy(d, "C:\\") except Exception as e: - log.warning(f"Couldn't copy {d} to root of C: {e}") + log.warning("Couldn't copy %s to root of C: %s", d, str(e)) file_name = self.options.get(OPT_FILE) # If no file name is provided via option, discover files to execute. diff --git a/analyzer/windows/modules/packages/dll.py b/analyzer/windows/modules/packages/dll.py index 1c047bf2154..068c951a0a0 100644 --- a/analyzer/windows/modules/packages/dll.py +++ b/analyzer/windows/modules/packages/dll.py @@ -97,7 +97,7 @@ def start(self, path): # If the user has not enabled multi, but requested multiple functions, log it and default to #1 elif not enable_multi and (":" in function or "-" in function or ".." in function): - log.warning(f"You need to enable the `{_OPT_ENABLE_MULTI}` option if you want to run multiple functions.") + log.warning("You need to enable the `%s` option if you want to run multiple functions.", str(_OPT_ENABLE_MULTI)) # Setting function to the first ordinal number since the user does not want use to run multiple functions. 
function = "#1" diff --git a/analyzer/windows/modules/packages/firefox_ext.py b/analyzer/windows/modules/packages/firefox_ext.py index 72002671752..69d76c520b7 100644 --- a/analyzer/windows/modules/packages/firefox_ext.py +++ b/analyzer/windows/modules/packages/firefox_ext.py @@ -1,11 +1,15 @@ # Copyright (C) 2024 fdiaz@virustotal.com # This file is part of Cuckoo Sandbox - http://www.cuckoosandbox.org # See the file 'docs/LICENSE' for copying permission. +import base64 +import logging +import os import time import webbrowser from lib.common.abstracts import Package +log = logging.getLogger(__name__) class Firefox_Ext(Package): """Firefox analysis package (with extension).""" @@ -14,10 +18,36 @@ class Firefox_Ext(Package): ("ProgramFiles", "Mozilla Firefox", "firefox.exe"), ] summary = "Opens the URL in firefox." - description = """Spawns firefox.exe and opens the supplied URL.""" + description = """Spawns firefox.exe and opens the supplied URL. + + Allows setting a custom user-agent string via the 'user_agent' option. + The value should be provided base64-encoded. + """ + + # set your current firefox profile path (about:profiles) + profile_path = None def start(self, url): - webbrowser.register("firefox", None, webbrowser.BackgroundBrowser(self.get_path("firefox.exe"))) + user_agent = self.options.get("user_agent") + log.debug("User agent option value: %s", user_agent) + try: + base64.b64decode(user_agent) + except Exception: + log.error("Invalid base64 encoded user agent provided.") + user_agent = None + if user_agent and self.profile_path: + config = os.path.join(self.profile_path, 'prefs.js') + ua_decoded = base64.b64decode(user_agent).decode('utf-8') + ua_config = f'user_pref("general.useragent.override", "{ua_decoded}");\n' + try: + os.makedirs(os.path.dirname(config), exist_ok=True) + with open(config, 'a') as file: + file.write(ua_config) + log.info("Successfully appended user agent to prefs.js: %s", ua_decoded) + except Exception as e: + log.error("Failed to write user agent to prefs.js: %s", e) + firefox_path = self.get_path("firefox.exe") + webbrowser.register("firefox", None, webbrowser.BackgroundBrowser(firefox_path)) firefox = webbrowser.get("firefox") firefox.open("about:blank") time.sleep(7) # Rough estimate, change based on your setup times. 
diff --git a/analyzer/windows/modules/packages/msix.py b/analyzer/windows/modules/packages/msix.py index 13bf6c58400..dfa372cbdbe 100644 --- a/analyzer/windows/modules/packages/msix.py +++ b/analyzer/windows/modules/packages/msix.py @@ -38,7 +38,7 @@ def start(self, path): if len(file_names) and "config.json" in file_names: extract_zip(path, orig_path.parent) - log.debug(f"Extracted {len(file_names)} files from {path} to {orig_path.parent}") + log.debug("Extracted %d files from %s to %s", len(file_names), str(path), str(orig_path.parent)) with suppress(Exception): config_path = str(orig_path.with_name("config.json")) @@ -51,10 +51,10 @@ def start(self, path): if script_paths: path = str(orig_path.with_name(script_paths[0])) args = f'-NoProfile -ExecutionPolicy bypass -File "{path}"' - log.debug(f"msix file contains script {path}") + log.debug("msix file contains script %s", str(path)) if not args: - args = f"-NoProfile -ExecutionPolicy bypass {os.getcwd()}\data\msix.ps1 {path}" + args = fr"-NoProfile -ExecutionPolicy bypass {os.getcwd()}\data\msix.ps1 {path}" # now we need to get app id and launch it return self.execute(powershell, args, powershell) diff --git a/analyzer/windows/modules/packages/pub.py b/analyzer/windows/modules/packages/pub.py index 9da03cc3437..1602615d8b7 100644 --- a/analyzer/windows/modules/packages/pub.py +++ b/analyzer/windows/modules/packages/pub.py @@ -30,7 +30,6 @@ def __init__(self, options=None, config=None): The .pub filename extension will be added automatically.""" def set_keys(self): - baseOfficeKeyPath = r"Software\Microsoft\Office" installedVersions = [] try: diff --git a/analyzer/windows/modules/packages/pub2016.py b/analyzer/windows/modules/packages/pub2016.py index 950c1fa6048..196a395aefb 100644 --- a/analyzer/windows/modules/packages/pub2016.py +++ b/analyzer/windows/modules/packages/pub2016.py @@ -26,7 +26,6 @@ def __init__(self, options=None, config=None): The .pub filename extension will be added automatically.""" def set_keys(self): - baseOfficeKeyPath = r"Software\Microsoft\Office" installedVersions = [] try: diff --git a/analyzer/windows/modules/packages/rdp.py b/analyzer/windows/modules/packages/rdp.py index 016df77bbf4..f6124a67f98 100644 --- a/analyzer/windows/modules/packages/rdp.py +++ b/analyzer/windows/modules/packages/rdp.py @@ -2,7 +2,7 @@ from lib.common.common import check_file_extension -class Exe(Package): +class RDP(Package): """RDP analysis package.""" PATHS = [ diff --git a/analyzer/windows/prescripts/prescript_detection.py b/analyzer/windows/prescripts/prescript_detection.py index 46ff0f4510d..09aa2a165a8 100644 --- a/analyzer/windows/prescripts/prescript_detection.py +++ b/analyzer/windows/prescripts/prescript_detection.py @@ -249,17 +249,17 @@ def add_file_to_path(src_path, dst_path, overwrite=False): if os.path.exists(dst_path) and overwrite: # in case of the src and dst are the same file if os.path.samefile(src_path, dst_path): - log.info(f"Same file {dst_path} already in the victim vm") + log.info("Same file %s already in the victim vm", str(dst_path)) return os.remove(dst_path) shutil.copyfile(src=src_path, dst=dst_path) - log.info(f"File {dst_path} modified in the victim vm") + log.info("File %s modified in the victim vm", str(dst_path)) elif os.path.exists(dst_path): - log.info(f"File {dst_path} already in the victim vm") + log.info("File %s already in the victim vm", str(dst_path)) return else: shutil.copyfile(src=src_path, dst=dst_path) - log.info(f"File {dst_path} added to victim vm") + log.info("File %s added to 
victim vm", str(dst_path)) def run_script(script_path, args, timeout): @@ -268,12 +268,12 @@ def run_script(script_path, args, timeout): subprocess.check_output("python " + exec, timeout=timeout, stderr=subprocess.STDOUT) else: subprocess.check_output(exec, timeout=timeout, stderr=subprocess.STDOUT) - log.info(f"Running script {script_path} with parameters {args} on the victim vm") + log.info("Running script %s with parameters %s on the victim vm", str(script_path), str(args)) def add_directory(path): os.makedirs(path, exist_ok=True) - log.info(f"Folder {path} added to victim vm") + log.info("Folder %s added to victim vm", str(path)) def registry_path_to_winreg(path): @@ -304,7 +304,7 @@ def create_registry(path, key, value, value_type): RegistryKey = CreateKey(path, key) SetValueEx(RegistryKey, key, 0, value_type, value) CloseKey(RegistryKey) - log.info(f"Created registry {path}, with key {key} and value {value} on the victim vm") + log.info("Created registry %s, with key %s and value %s on the victim vm", str(path), str(key), str(value)) def modify_registry(path, key, value, value_type): @@ -312,9 +312,9 @@ def modify_registry(path, key, value, value_type): try: RegistryKey = OpenKey(path, key, 0, KEY_ALL_ACCESS) except Exception as _: - log.info(f"The target registry doesn't exist on the victim vm at path {path} with key {key}") + log.info("The target registry doesn't exist on the victim vm at path %s with key %s", str(path), str(key)) SetValueEx(RegistryKey, key, 0, value_type, value) - log.info(f"Modified registry {path}, with key {key} to value {value} on the victim vm") + log.info("Modified registry %s, with key %s to value %s on the victim vm", str(path), str(key), str(value)) def create_scheduled_task( @@ -346,7 +346,7 @@ def create_scheduled_task( tr.SetTrigger(trigger) pf = new_task.QueryInterface(pythoncom.IID_IPersistFile) pf.Save(None, 1) - log.info(f"Scheduled task {task_name} created on the victim vm") + log.info("Scheduled task %s created on the victim vm", str(task_name)) def create_scheduled_task2( @@ -567,7 +567,7 @@ def modify_scheduled_task( folder.DeleteTask(task_name, 0) else: folder.RegisterTaskDefinition(task_name, modified_task, TASK_CREATION.TASK_UPDATE.value, "", "", 0) - log.info(f"Scheduled task {task_name} modified on the victim vm") + log.info("Scheduled task %s modified on the victim vm", str(task_name)) def create_trigger( @@ -598,8 +598,8 @@ def create_trigger( def change_execution_dir(dir): - log.info(f"Changing execution directory to {dir}") - log.warn("Changing directory not available in prescript testing") + log.info("Changing execution directory to %s", dir) + log.warning("Changing directory not available in prescript testing") def main(args): @@ -681,8 +681,8 @@ def main(args): args=params_dict[ACTIONS_PARAMETERS[parsed_action][1]], timeout=int(params_dict[ACTIONS_PARAMETERS[parsed_action][2]]), ) - log.info(f"Runned script with {params_dict}") - print(f"Runned script with {params_dict}") + log.info("Runned script with %s", str(params_dict)) + # print(f"Runned script with {params_dict}") elif parsed_action == LIST_OF_VALID_ACTIONS[1]: add_file_to_path( src_path=params_dict[ACTIONS_PARAMETERS[parsed_action][0]], @@ -690,15 +690,15 @@ def main(args): overwrite=bool(params_dict[ACTIONS_PARAMETERS[parsed_action][2]]), ) log.info( - f"Adding file from {params_dict[ACTIONS_PARAMETERS[parsed_action][0]]} to {params_dict[ACTIONS_PARAMETERS[parsed_action][1]]}" - ) - print( - f"Adding file from {params_dict[ACTIONS_PARAMETERS[parsed_action][0]]} to 
{params_dict[ACTIONS_PARAMETERS[parsed_action][1]]}" + "Adding file from %s to %s", params_dict[ACTIONS_PARAMETERS[parsed_action][0]], params_dict[ACTIONS_PARAMETERS[parsed_action][1]] ) + # print( + # f"Adding file from {params_dict[ACTIONS_PARAMETERS[parsed_action][0]]} to {params_dict[ACTIONS_PARAMETERS[parsed_action][1]]}" + # ) elif parsed_action == LIST_OF_VALID_ACTIONS[2]: add_directory(path=params_dict[ACTIONS_PARAMETERS[parsed_action][0]]) - log.info(f"Created directory with {params_dict}") - print(f"Created directory with {params_dict}") + log.info("Created directory with %s", str(params_dict)) + # print(f"Created directory with {params_dict}") elif parsed_action == LIST_OF_VALID_ACTIONS[3]: value_type = identify_registry_value_type(params_dict[ACTIONS_PARAMETERS[parsed_action][2]]) create_registry( @@ -707,8 +707,8 @@ def main(args): value=params_dict[ACTIONS_PARAMETERS[parsed_action][2]], value_type=value_type, ) - log.info(f"Created registry with {params_dict}") - print(f"Created registry with {params_dict}") + log.info("Created registry with %s", str(params_dict)) + # print(f"Created registry with {params_dict}") elif parsed_action == LIST_OF_VALID_ACTIONS[4]: value_type = identify_registry_value_type(params_dict[ACTIONS_PARAMETERS[parsed_action][2]]) modify_registry( @@ -717,8 +717,8 @@ def main(args): value=params_dict[ACTIONS_PARAMETERS[parsed_action][2]], value_type=value_type, ) - log.info(f"Modified registry with {params_dict}") - print(f"Modified registry with {params_dict}") + log.info("Modified registry with %s", str(params_dict)) + # print(f"Modified registry with {params_dict}") elif parsed_action == LIST_OF_VALID_ACTIONS[5]: parsed_params_dict = {} for param in ACTIONS_PARAMETERS[parsed_action]: @@ -747,15 +747,15 @@ def main(args): else: parsed_params_dict[param] = params_dict[param] create_scheduled_task2(**parsed_params_dict) - log.info(f"Created scheduled task with {params_dict}") - print(f"Created scheduled task with {params_dict}") + log.info("Created scheduled task with %s", str(params_dict)) + # print(f"Created scheduled task with {params_dict}") elif parsed_action == LIST_OF_VALID_ACTIONS[6]: create_scheduled_task_from_xml( task_name=params_dict[ACTIONS_PARAMETERS[parsed_action][0]], xml_path=params_dict[ACTIONS_PARAMETERS[parsed_action][1]], ) - log.info(f"Created scheduled task from xml with {params_dict}") - print(f"Created scheduled task from xml with {params_dict}") + log.info("Created scheduled task from xml with %s", str(params_dict)) + # print(f"Created scheduled task from xml with {params_dict}") elif parsed_action == LIST_OF_VALID_ACTIONS[7]: parsed_params_dict = {} for param in ACTIONS_PARAMETERS[parsed_action]: @@ -770,16 +770,16 @@ def main(args): else: parsed_params_dict[param] = params_dict[param] modify_scheduled_task(**parsed_params_dict) - log.info(f"Modified scheduled task with {params_dict}") - print(f"Modified scheduled task with {params_dict}") + log.info("Modified scheduled task with %s", str(params_dict)) + # print(f"Modified scheduled task with {params_dict}") elif parsed_action == LIST_OF_VALID_ACTIONS[8]: change_execution_dir(path=params_dict[ACTIONS_PARAMETERS[parsed_action][0]]) - log.info(f"Changed execution dir to {params_dict[ACTIONS_PARAMETERS[parsed_action][0]]}") - print(f"Changed execution dir to {params_dict[ACTIONS_PARAMETERS[parsed_action][0]]}") + log.info("Changed execution dir to %s", params_dict[ACTIONS_PARAMETERS[parsed_action][0]]) + # print(f"Changed execution dir to 
{params_dict[ACTIONS_PARAMETERS[parsed_action][0]]}") except Exception as e: - log.debug(f"Invalid action {action} with parameters {params_dict} --> {e}") - print(f"Invalid action {action} with parameters {params_dict} --> {e}") + log.debug("Invalid action %s with parameters %s --> %s", str(action), str(params_dict), str(e)) + # print(f"Invalid action {action} with parameters {params_dict} --> {e}") if __name__ == "__main__": diff --git a/analyzer/windows/tests/lib/common/test_abstracts.py b/analyzer/windows/tests/lib/common/test_abstracts.py index 1607df26f29..2df0f991625 100644 --- a/analyzer/windows/tests/lib/common/test_abstracts.py +++ b/analyzer/windows/tests/lib/common/test_abstracts.py @@ -6,7 +6,6 @@ class TestPackageConfiguration(unittest.TestCase): - def test_private_package_configuration(self): # test analysis package package_module = self.__class__.__module__ diff --git a/analyzer/windows/tests/test_analyzer.py b/analyzer/windows/tests/test_analyzer.py index a8db4ec4241..353b6401a68 100644 --- a/analyzer/windows/tests/test_analyzer.py +++ b/analyzer/windows/tests/test_analyzer.py @@ -81,7 +81,6 @@ def test_prepare(self, set_lock, init_logging, config, pipeserver): class TestAnalyzerChoosePackage(unittest.TestCase): - def test_choose_package_shellcode(self): test = analyzer.Analyzer() test.config = MagicMock() diff --git a/changelog.md b/changelog.md index eadbd064d4e..31c31993039 100644 --- a/changelog.md +++ b/changelog.md @@ -1,3 +1,122 @@ +### [11.06.2025] +* __Action required!__ For users of Python 3.12+ in guest, update the agent to solve #2621 affecting e.g. MSI detonation +* Agent update: Fix issue with analyzer directory creation lacking required ACLs for Python 3.12, remove predictable "tmp" prefix for directory name(s) (fixes #2621) + +### [10.06.2025] +* Monitor update: WMI hooks: add handling for VT_NULL and enable WMI_Get logging + +### [06.06.2025] +* Monitor updates: + * WMI hooks + * Fix format string vulnerability in debugger StringsOutput() function + +### [03.06.2025] +* Monitor update: Fix bug in retarget_relative_displacement() relative offset calculation (thanks @ClaudioWayne) + +### [23.05.2025] +* Socks5Systemz update: detection & config extraction, parser update also required (CAPE-parsers repo) +* Monitor updates: + * Trace: do not wrap GetExportNameByAddress() in try/catch and do not use StepOverRegister in BreakOnReturnCallback() + * Debugger: fix br1 (break on return) config option parsing (config.c) + * Replace remaining uses of pipe("INFO:...) with DebugOutput() + * Trace: switch from using ScyllaGetExportNameByAddress() to GetExportNameByAddress() + * Harden ScanForExport() function (used by GetExportNameByAddress()) + +### [8.05.2025] +* PPLInject: Improve logging output if DLL transaction fails due to insufficiently large transaction target DLL in %SYSTEM% +* Monitor update: Allow monitor to load without config ini file - defaults to standalone mode + +### [28.04.2025] +* Monitor updates: + * .NET JIT cache dumps: off by default, configurable limit with option jit-dumps=X + * Windows Loader Snaps: vDbgPrintExWithPrefixInternal hook & option 'snaps=1' for loader snaps output in analysis log + * Disable AMSI dumps by default (and uncheck web submission tickbox) + * Native hookset (ntdll only) option: native=1 + * CryptDuplicateKey hook (thanks @KillerInstinct) + +### [02.04.2025] +* Monitor updates: + * Trace: allow custom stepping behavior with 'stepmode' option, stepmode=1 steps into short calls (e.g. 
Rhadamanthys control flow flattening) + * Hooking: replace sprintf calls with internal non-allocating implementation (num_to_hex(), uuid_to_string()) + * CommandLineToArgvW hook +* Cleaners update + * Now you can specify time range as 12h, 50m, 3d. + * Improved bulk cleanup for speed. Mongodb's calls collection now has task_id value. This allows to cleanup it faster. + * Servers that runs for years, might need to update their `tasks_tags` table schema by hand, only if you getting `ForeignKey violation`. +``` + ALTER TABLE tasks_tags DROP CONSTRAINT tasks_tags_task_id_fkey, ADD CONSTRAINT tasks_tags_task_id_fkey FOREIGN KEY (task_id) REFERENCES tasks (id) ON DELETE CASCADE; + ALTER TABLE tasks_tags DROP CONSTRAINT tasks_tags_tag_id_fkey, ADD CONSTRAINT tasks_tags_tag_id_fkey FOREIGN KEY (tag_id) REFERENCES tags (id) ON DELETE CASCADE; +``` + +### [14.03.2025] CAPA and FLOSS +* CAPA and FLOSS configs are moved to `integrations.conf` + +### [01.03.2025] VirusTotal and MalwareBazaar +* We have moved VirusTotal and MalwareBazaar to generic downloader so you can enable then in `integrations.conf` + * Downlod service allows you to set order + simplifies adding another services + * For `API` use `tasks/create/download_services/` instead of `tasks/create/vtdl/`. Example of data: `data={"hashes":"hash1,hash2"}` + +### [28.02.2025] +* Monitor updates: + * NtCreateSection hook: add file path (from handle) to logging (thanks @scccccccccc) + * NtCreateSection LdrpCallInitRoutine hook: add coverage-module breakpoint setting, fix 64-bit address logging + * Trace: improve logging of conditional jump target addresses, on by default) + +### [27.02.2025] +* Monitor update: Improve handling of bogus VirtualSize values in PE section table during dumping (e.g. e4f4afa1b85113656d4788a4f48fa5263b31b922d3e345093e082486193b0275) + +### [26.02.2025] +* Monitor updates: + * Fix import reconstruction entrypoint setting - thanks @shuiyc + * Add hooks for MsiInstallProductA/W - thanks @KillerInstinct + * Add protected-pids config option (protected-pids=0 to disable, on by default) + +### [11.02.2025] +* `selfextract.conf` renamed to `integrations.conf`. + * Please rename your config file. + +### [10.02.2025] +* We are now on `Poetry v2`. If you see next message, you need to upgrade your `poetry` version. + * This one might be tricky as depends if your `poetry` was installed with `apt` or script. But something like this should works: + * `curl -sSL https://install.python-poetry.org | POETRY_HOME=/etc/poetry python3 -` +``` +The Poetry configuration is invalid: + - Additional properties are not allowed ('requires-poetry' was unexpected) +``` +* If you see missed `crispy_bootstrap4`. Just run `poetry install` as `cape` user. + +### [05.02.2025] +* Monitor update: Fix hooking deadlock with delay-loaded dlls & make LdrpCallInitRoutine hook transparent + +### [28.01.2025] +* Require `poetry>=2.0`. + +### [27.01.2025] +* Monitor update: Fix import reconstruction (advanced submission option) + +### [23.01.2025] +* Monitor update: Fix bug with dumping PE images with abnormally large PE header (e.g. 5ba3d13c57f6b08f34d8ec4f200091b458fdf48141c23ce959d9cda4804a7f5e) (thanks Kevin Ross) + +### [07.01.2025] +* Monitor update: Fix bug with dump limit being tripped by AMSI preventing unpacked capture in e.g. e69ab87e878305285eab44652fa72b0168b53d2c9d95d5e40ae6311a5b6eec7b (thanks @YungBinary) + +### [31.12.2024] +* Monitor updates: + * CoGetClassObject hook: add inspect_clsid for improved injection (e.g. 
38a9847cb5ce4918bdfee2d54d5d3b79e1399cce15c7b68d86e8f0a5f48e3131) + * SetFileInformationByHandle hook + * GetComputerNameExW hook: add fake results for all NameTypes (e.g. 8056b8ff55c452cc87e35d69928cccbcfc5af848db1abb4fe0364510986e068b) + * RmStartSession hook (thanks para0x0dise) + * LdrpCallInitRoutine hook for Win10+ + +### [01.11.2024] Parsers +* Malware config parsers aka parsers are moved out of core of CAPE. + * Now they are at their own [repository](https://github.com/CAPESandbox/CAPE-parsers). + * Feature added. `load=X`, where `X` is one of those: all/core/community + * All = core and community + * Exclude parsers. Allows to not load some particular parsers. `exclude_parsers=["name1", "name2"]` + * Your custom parsers from `custom/parsers/` will still load and overwrite cape carser if name matches. +* __Action required!__ `cd /opt/CAPEv2 && poetry install` + ### [04.10.2024] * Monitor update: Add GetClassObject hook to handle UAC bypass technique using CMSTPLUA COM object * PrivateLoader direct syscall capture @@ -492,7 +611,7 @@ rule X_cryptor { * You need to download version for your CPU and extract it to `data/NETReactorSlayer.CLI` * In case if you are on x64 host, then just run: `poetry run python utils/community.py -waf` * Add execution permission with `chmod a+x data/NETReactorSlayer.CLI` -* Now each section inside of `selfextract.conf` has timeout value. Default is 60 seconds +* Now each section inside of `integrations.conf` has timeout value. Default is 60 seconds ### [24.12.2022] * Monitor updates: Fix NtAllocateVirtualMemoryEx & NtMapViewOfSectionEx hooks and rebuild with Visual Studio 2022 diff --git a/conf/default/api.conf.default b/conf/default/api.conf.default index 96e249df910..a86be5729d8 100644 --- a/conf/default/api.conf.default +++ b/conf/default/api.conf.default @@ -75,8 +75,7 @@ status = yes rps = 1/s rpm = 2/m -# Submit VTDL tasks to Cuckoo. -[vtdl] +[downloading_services] enabled = no auth_only = no allmachines = no @@ -205,6 +204,13 @@ auth_only = no rps = 1/s #rpm = 10/m +# Pull a PCAP from a specific task +[tasktlspcap] +enabled = yes +auth_only = no +rps = 1/s +#rpm = 10/m + # Pull a EVTX from a specific task [taskevtx] enabled = yes @@ -362,3 +368,9 @@ rpm = 4/m # Allow to request stop of the analysis inside of the VM [user_stop] enabled = no + +[mitmdump] +enabled = no +auth_only = no +rps = 1/s +rpm = 4/m diff --git a/conf/default/auxiliary.conf.default b/conf/default/auxiliary.conf.default index ec8aeb84c73..94a5707242b 100644 --- a/conf/default/auxiliary.conf.default +++ b/conf/default/auxiliary.conf.default @@ -22,6 +22,9 @@ browser = yes curtain = no digisig = yes disguise = yes +# This is only useful in case you use KVM's dnsmasq. You need to set windows_static_route_gateway. Disguise must be enabled +windows_static_route = no +windows_static_route_gateway = 192.168.1.1 evtx = no human_windows = yes human_linux = no @@ -38,12 +41,13 @@ permissions = no pre_script = no during_script = no filecollector = yes -# This is only useful in case you use KVM's dnsmasq. You need to change your range inside of analyzer/windows/modules/auxiliary/disguise.py. 
Disguise must be enabled -windows_static_route = no tracee_linux = no sslkeylogfile = no # Requires setting up browser extension, check extra/browser_extension browsermonitor = no +wmi_etw = no +dns_etw = no +watchdownloads = no [AzSniffer] # Enable or disable the use of Azure Network Watcher packet capture feature, disable standard sniffer if this is in use to not create concurrent .pcap files @@ -80,6 +84,12 @@ enabled = no [Mitmdump] # Enable or disable the use of mitmdump (mitmproxy) to get dump.har [yes/no]. -# This module requires installed mitmproxy see install_mitmproxy +# This module requires mitmproxy to be installed see install_mitmproxy # (https://github.com/kevoreilly/CAPEv2/blob/master/installer/cape2.sh#L1320) enabled = no + +[PolarProxy] +# Enable or disable the use of PolarProxy to get dump.pcap with decrypted TLS streams [yes/no]. +# This module requires PolarProxy to be installed see install_polarproxy. +# Use add the options "polarproxy=1" when submitting a sample. +enabled = no diff --git a/conf/default/cuckoo.conf.default b/conf/default/cuckoo.conf.default index afb2098afab..a0da4fb527b 100644 --- a/conf/default/cuckoo.conf.default +++ b/conf/default/cuckoo.conf.default @@ -1,5 +1,8 @@ [cuckoo] +# Ignore Signals, will quit CAPE inmediatelly instead wait jobs to finish +ignore_signals = yes + # Which category of tasks do you want to analyze? categories = static, pcap, url, file @@ -33,6 +36,11 @@ scaling_semaphore = off # A configurable wait time between updating the limit value of the scaling bounded semaphore scaling_semaphore_update_timer = 10 +# Specify a timeout for tasks, useful if you are bound to timely reports awaited by users +task_timeout = off +task_pending_timeout = 0 +task_timeout_scan_interval = 30 + # Enable creation of memory dump of the analysis machine before shutting # down. Even if turned off, this functionality can also be enabled at # submission. Currently available for: VirtualBox and libvirt modules (KVM). @@ -214,12 +222,13 @@ freespace = 2000 [cleaner] # Invoke cleanup if <= of free space detected. see/set freespace/freespace_processing +# format is 1d, 12h, 120m enabled = no # set any value to 0 to disable it. In days -binaries_days = 5 -tmp_days = 5 +binaries = 0 +tmp = 0 # Remove analysis folder -analysis_days = 5 +analysis = 0 # Delete mongo data mongo = no # Clean orphan files in mongodb diff --git a/conf/default/integrations.conf.default b/conf/default/integrations.conf.default new file mode 100644 index 00000000000..a129eab9560 --- /dev/null +++ b/conf/default/integrations.conf.default @@ -0,0 +1,155 @@ +# This config is to be able to enable/disable things like MSI/NSIS/UnAutoIt, 3rd part services integraitons, etc + +[general] +pefiles = yes +dotnet = no +office = no +java = no +pdf = no +lnk = no +windows_script = no +elf = no +hwp = no + +# Number of workers for pool to run them in parallel +max_workers = 6 + +[mandiant_intel] +enabled = no +api_access = +api_secret = + +# Create your apikey: https://threatfox.abuse.ch/api/#auth_key +# MalwareBazaar uses this key too +[abusech] +threatfox = no +malwarebazaar = no +apikey = + + +# sudo apt install msitools +[msi_extract] +enabled = yes +binary = /usr/bin/msiextract +timeout = 60 + +[kixtart_extract] +enabled = yes +timeout = 60 + +[vbe_extract] +enabled = yes +timeout = 60 + +[batch_extract] +enabled = yes +timeout = 60 + +# REPO DOESN'T EXIST ANYMORE! 
+# cd /opt/CAPEv2/data/ +# snap install go --classic +# git clone https://github.com/x0r19x91/UnAutoIt && cd UnAutoIt +# GOOS="linux" GOARCH="amd64" go build -o UnAutoIt +[UnAutoIt_extract] +enabled = yes +binary = data/UnAutoIt/UnAutoIt +timeout = 60 + +[RarSFX_extract] +enabled = yes +timeout = 60 + +# apt install upx-ucl +[UPX_unpack] +enabled = yes +timeout = 60 + +# Nsis, 7Zip SFX, etc +[SevenZip_unpack] +binary = data/7zz +enabled = yes +timeout = 60 + +# Use https://github.com/gdesmar/innoextract +[Inno_extract] +enabled = yes +binary = data/innoextract +timeout = 60 + +# https://github.com/mstrobel/procyon/releases +[procyon] +enabled = yes +binary = data/procyon.jar +timeout = 60 + +# sudo apt install de4dot +[de4dot_deobfuscate] +enabled = yes +binary = /usr/bin/de4dot +extra_args = +timeout = 60 + +# https://github.com/otavepto/NETReactorSlayer/releases +[eziriz_deobfuscate] +enabled = yes +binary = data/NETReactorSlayer.CLI +extra_args = --no-pause True +timeout = 60 + +[office_one] +enabled = yes +timeout = 60 + +[msix_extract] +enabled = no +timeout = 60 + +# PE file overlay +[overlay] +enabled = yes +timeout = 60 + +[UnGPG_extract] +enabled = no +timeout = 60 + +[pyinstaller] +enabled = no +timeout = 60 + +# Community +# FLARE capa -> to update rules: poetry run python utils/community.py -cr +[flare_capa] +enabled = no +# Generate it always or generate on demand only(user need to click button to generate it), still should be enabled to use this feature on demand +on_demand = no +# Analyze binary payloads +static = no +# Analyze CAPE payloads +cape = no +# Analyze ProcDump +procdump = no +# behavior analysis summary +behavior = no + +# external download services +[downloaders] +# You can overwrite the downloaders order, must match filename without ".py". If name is missed is the same as disabled. Example: +# order = virustotal,malwarebazaar +order = + +[virustotal] +enabled = no +apikey = + +# Community +[floss] +enabled = no +on_demand = yes +static_strings = no +stack_strings = yes +decoded_strings = yes +tight_strings = yes +min_length = 5 +# Download FLOSS signatures from https://github.com/mandiant/flare-floss/tree/master/sigs +sigs_path = data/flare-signatures diff --git a/conf/default/kvm.conf.default b/conf/default/kvm.conf.default index 9d46ec13e3a..403e7fbe549 100644 --- a/conf/default/kvm.conf.default +++ b/conf/default/kvm.conf.default @@ -42,8 +42,8 @@ ip = 192.168.122.105 # Example: MSIX - Windows >= 10 # tags = winxp,acrobat_reader_6 -# (Optional) Specify the snapshot name to use. If you do not specify a snapshot -# name, the KVM MachineManager will use the current snapshot. +# (Optional) Specify the snapshot name to use. +# If empty, it will use the current/latest snapshot. 
# Example (Snapshot1 is the snapshot name): # snapshot = Snapshot1 diff --git a/conf/default/polarproxy.conf.default b/conf/default/polarproxy.conf.default new file mode 100644 index 00000000000..3ecf2da62cb --- /dev/null +++ b/conf/default/polarproxy.conf.default @@ -0,0 +1,26 @@ +[cfg] +# bin path to PolarProxy +bin = /opt/PolarProxy/PolarProxy + +# Host ip where PolarProxy is listening +host = 192.168.122.1 + +# Interface where PolarProxy is listening +interface = virbr0 + +# PKCS#12 certificate/private key file +cert = /opt/PolarProxy/PolarProxy-key-crt.p12 + +# Password to unlock PKCS#12 file +password = CHANGEME + +# See https://www.netresec.com/?page=TlsFirewall for details on PolarProxy TLS firewall +# Newline separated file containing domain regexes for PolarProxy to not MITM +bypass_list = /opt/PolarProxy/bypass-domains.txt +# Newline separated file containing domain regexes for PolarProxy to block connections to +block_list = /opt/PolarProxy/block-domains.txt + +# bin path to mergecap +mergecap = /usr/bin/mergecap + +# Future options like custom ports, cert paths, etc diff --git a/conf/default/processing.conf.default b/conf/default/processing.conf.default index dbd297d6286..31b912d1ccc 100644 --- a/conf/default/processing.conf.default +++ b/conf/default/processing.conf.default @@ -22,20 +22,6 @@ enabled = no [analysisinfo] enabled = yes -# Community -# FLARE capa -> to update rules utils/community.py -cr -# install -> cd /tmp && git clone --recurse-submodules https://github.com/fireeye/capa.git && cd capa && git submodule update --init rules && python -m poetry run pip install . -[flare_capa] -enabled = no -# Generate it always or generate on demand only(user need to click button to generate it), still should be enabled to use this feature on demand -on_demand = no -# Analyze binary payloads -static = no -# Analyze CAPE payloads -cape = no -# Analyze ProcDump -procdump = no - # Community [decompression] enabled = no @@ -77,11 +63,12 @@ enabled = no processtree = no # Platform specific platform = linux +update_file_descriptors = yes [debug] enabled = yes # Amount of text (bytes) -buffer = 8192 +buffer = 0 [detections] enabled = yes @@ -150,6 +137,7 @@ definitions = data/trid/triddefs.trd [die] # Detect it Easy enabled = no +binary = /usr/bin/diec [virustotal] enabled = yes @@ -165,14 +153,11 @@ do_file_lookup = yes do_url_lookup = yes urlscrub = (^http:\/\/serw\.clicksor\.com\/redir\.php\?url=|&InjectedParam=.+$) +# Since Suricata 8, socket mode is deprecated. [suricata] -# Notes on getting this to work check install_suricata function: -# https://github.com/kevoreilly/CAPEv2/blob/master/installer/cape2.sh - -enabled = yes -#Runmode "cli" or "socket" -runmode = socket -#Outputfiles +enabled = no +runmode = cli +# Outputfiles # if evelog is specified, it will be used instead of the per-protocol log files evelog = eve.json @@ -188,13 +173,14 @@ fileslog = files-json.log filesdir = files # Amount of text to carve from plaintext files (bytes) buffer = 8192 -#Used for creating an archive of extracted files + #Used for creating an archive of extracted files 7zbin = /usr/bin/7z zippass = infected -##Runmode "cli" options +# Runmode "cli" options bin = /usr/bin/suricata conf = /etc/suricata/suricata.yaml -##Runmode "socket" Options + +# Runmode "socket" Options. Deprecated since Suricata 8. 
socket_file = /tmp/suricata-command.socket # Community @@ -306,10 +292,15 @@ modules_path = modules/processing/parsers/RATDecoders/ enabled = yes modules_path = modules/processing/parsers/malduck/ +# installed from PYPI [CAPE_extractors] enabled = yes # Must ends with / -modules_path = modules/processing/parsers/CAPE/ +modules_path = custom/parsers/ +# Config parsers all/core/community +parsers = all +# list of comma separated parsers. Ex: stealc,lumma +exclude= # Community [reversinglabs] @@ -321,17 +312,11 @@ key = [script_log_processing] enabled = yes -# Community -[floss] +[html_scraper] enabled = no -on_demand = yes -static_strings = no -stack_strings = yes -decoded_strings = yes -tight_strings = yes -min_length = 5 -# Download FLOSS signatures from https://github.com/mandiant/flare-floss/tree/master/sigs -sigs_path = data/flare-signatures -[html_scraper] +# Community +[polarproxy] +# Enable when using the PolarProxy option during analysis. This will merge the tls.pcap containing +# plain-text TLS streams into the task PCAP. enabled = no diff --git a/conf/default/reporting.conf.default b/conf/default/reporting.conf.default index bf4944528e3..694f5e5433b 100644 --- a/conf/default/reporting.conf.default +++ b/conf/default/reporting.conf.default @@ -4,11 +4,6 @@ # You can also add additional options under the section of your module and # they will be available in your Python class. -# Generate CAPE's analysis summary by FLARE/Mandiant's CAPA -[flare_capa_summary] -enabled = yes -on_demand= no - # Community [cents] enabled = no @@ -31,7 +26,7 @@ procdump = yes # Community [pcap2cert] -enabled = yes +enabled = no # Community [litereport] @@ -54,6 +49,7 @@ drive_credentials_location = data/google_creds.json enabled = yes indent = 4 encoding = latin-1 +store_compressed = no # Community [reporthtml] @@ -151,10 +147,6 @@ user = admin pass = admin realm = Moloch -# Community -[resubmitexe] -enabled = no -resublimit = 5 # Community [compression] @@ -223,4 +215,4 @@ enabled = no enabled = no [browserext] -enabled = no \ No newline at end of file +enabled = no diff --git a/conf/default/selfextract.conf.default b/conf/default/selfextract.conf.default deleted file mode 100644 index b39903710fe..00000000000 --- a/conf/default/selfextract.conf.default +++ /dev/null @@ -1,96 +0,0 @@ -# This config is to be able to enable/disable things like MSI/NSIS/UnAutoIt etc - -[general] -pefiles = yes -dotnet = yes -office = yes -java = yes -pdf = yes -lnk = yes -windows_script = yes -elf = yes -hwp = yes - -# Number of workers for pool to run them in parallel -max_workers = 6 - -# sudo apt install msitools -[msi_extract] -enabled = yes -binary = /usr/bin/msiextract -timeout = 60 - -[kixtart_extract] -enabled = yes -timeout = 60 - -[vbe_extract] -enabled = yes -timeout = 60 - -[batch_extract] -enabled = yes -timeout = 60 - -# REPO DOESN'T EXIST ANYMORE! 
-# cd /opt/CAPEv2/data/ -# snap install go --classic -# git clone https://github.com/x0r19x91/UnAutoIt && cd UnAutoIt -# GOOS="linux" GOARCH="amd64" go build -o UnAutoIt -[UnAutoIt_extract] -enabled = yes -binary = data/UnAutoIt/UnAutoIt -timeout = 60 - -[RarSFX_extract] -enabled = yes -timeout = 60 - -# apt install upx-ucl -[UPX_unpack] -enabled = yes -timeout = 60 - -# Nsis, 7Zip SFX, etc -[SevenZip_unpack] -enabled = yes -timeout = 60 - -# sudo apt install innoextract -[Inno_extract] -enabled = yes -binary = /usr/bin/innoextract -timeout = 60 - -# https://github.com/mstrobel/procyon/releases -[procyon] -enabled = yes -binary = data/procyon.jar -timeout = 60 - -# sudo apt install de4dot -[de4dot_deobfuscate] -enabled = yes -binary = /usr/bin/de4dot -extra_args = -timeout = 60 - -# https://github.com/otavepto/NETReactorSlayer/releases -[eziriz_deobfuscate] -enabled = yes -binary = data/NETReactorSlayer.CLI -extra_args = --no-pause True -timeout = 60 - -[office_one] -enabled = yes -timeout = 60 - -[msix_extract] -enabled = no -timeout = 60 - -# PE file overlay -[overlay] -enabled = yes -timeout = 60 diff --git a/conf/default/web.conf.default b/conf/default/web.conf.default index 5d9538a18e4..a6c14bd7385 100644 --- a/conf/default/web.conf.default +++ b/conf/default/web.conf.default @@ -7,6 +7,8 @@ enabled = no captcha = no 2fa = no # To enable Oauth check https://django-allauth.readthedocs.io and web/web/settings.py. +# Allow only SSO for users with specific domain. Can be allow to all if empty. +social_auth_email_domain = example.com [registration] enabled = no @@ -44,6 +46,8 @@ anon_viewable = no existent_tasks = no top_detections = yes top_asn = yes +# Enable checking for samples in MongoDB before fetching from external sources. Disable to reduce MongoDB load when the database is large. 
+check_sample_in_mongodb = no # hostname of the cape instance hostname = 127.0.0.1 ;hostname = www.capesandbox.com @@ -59,6 +63,10 @@ reports_dl_allowed_to_all = yes expose_process_log = no # Show button to reprocess the task reprocess_tasks = no +# Allows you to define URL splitter, "," is default +url_splitter = , +# Limit number of files extracted from archive in demux.py +demux_files_limit = 10 # ratelimit for anon users [ratelimit] @@ -90,7 +98,7 @@ enabled = no enabled = no [vtupload] -# Don't forget to set VT key in aux.conf under virustotaldl +# Don't forget to set apikey in integrations.conf under virustotal enabled = no #No means delete is disabled on webgui @@ -194,6 +202,7 @@ guest_width = 1280 guest_height = 1024 # rdp settings guest_rdp_port = 3389 +ignore_rdp_cert = false [packages] # VM tags may be used to specify on which guest machines a sample should be run @@ -215,15 +224,5 @@ csrf_trusted_origins = # Example: packages = chrome,chromium,firefox packages = -# external download services -[download_services] -# adds an option in the web interface to upload samples via VirusTotal/MalwareBazaar -# downloads for a comma-separated list of MD5/SHA1/SHA256 hashes -virustotal = no -# note that unlike the VirusTotal processing module, the key required -# here is a Intelligence API key, not a Public API key -vtkey = -malwarebazaar = no - [yara_detail] enabled = no diff --git a/custom/.gitignore b/custom/.gitignore index f39a0c14d32..8e9508a9a89 100644 --- a/custom/.gitignore +++ b/custom/.gitignore @@ -1,2 +1,3 @@ +/* !/README.md !/.gitignore diff --git a/data/.gitignore b/data/.gitignore new file mode 100644 index 00000000000..3c287c17ceb --- /dev/null +++ b/data/.gitignore @@ -0,0 +1,6 @@ +# Ignore binaries use for self-extracting. +UnAutoIt +7zz +innoextract +procyon.jar +NETReactorSlayer.CLI diff --git a/data/dnsbl.py b/data/dnsbl.py new file mode 100644 index 00000000000..fd862f8c634 --- /dev/null +++ b/data/dnsbl.py @@ -0,0 +1,41 @@ +dnsbl_servers = ( + "zen.spamhaus.org", + "dnsbl.sorbs.net", + "bl.spamcop.net", + "cbl.abuseat.org", + "b.barracudacentral.org", + "dnsbl-1.uceprotect.net", + "dnsbl-2.uceprotect.net", + "dnsbl-3.uceprotect.net", + "dnsbl.dronebl.org", + "noptr.spamrats.com", + "multi.surbl.org", + "psbl.surriel.com", + "dnsbl.invaluement.com", + "dyna.spamrats.com", + "spam.spamrats.com", + "dul.dnsbl.sorbs.net", + "dynip.rothen.com", + "spamsources.fabel.dk", + "truncate.gbudb.net", + "db.wpbl.info", + "dnsbl.zapbl.net", + "combined.rbl.msrbl.net", + "tor.dan.me.uk", + "relays.nether.net", + "rbl.efnetrbl.org", + "bl.kundenserver.de", + "rbl.interserver.net", + "rbl.rbldns.ru", + "all.rbl.jp", + "sbl.spamhaus.org", + "xbl.spamhaus.org", + "pbl.spamhaus.org", + "dnsbl-4.uceprotect.net", + "dnsbl-5.uceprotect.net", + "dnsbl-6.uceprotect.net", + "spamrbl.imp.ch", + "bogons.cymru.com", + "rbl.realtimeblacklist.com", + "http.dnsbl.sorbs.net", +) diff --git a/data/html/report.html b/data/html/report.html index 1e73d77fe45..c67812c1fa4 100644 --- a/data/html/report.html +++ b/data/html/report.html @@ -2,7 +2,7 @@ {% block content %} {% include "sections/info.html" %} {% include "sections/errors.html" %} - {% if results.info.category == "file" %} + {% if results.info.category == "file,pcap,static" %} {% include "sections/file.html" %} {% elif results.info.category == "url" %} {% include "sections/url.html" %} @@ -10,10 +10,12 @@ {% include "sections/signatures.html" %} {% include "sections/screenshots.html" %} {% include "sections/network.html" %} - {% include 
"sections/dropped.html" %} - {% include "sections/payloads.html" %} - {% include "sections/behavior.html" %} - {% if results.memory %} - {% include "sections/volatility.html" %} + {% if results.info.category == "file" %} + {% include "sections/dropped.html" %} + {% include "sections/payloads.html" %} + {% include "sections/behavior.html" %} + {% if results.memory %} + {% include "sections/volatility.html" %} + {% endif %} {% endif %} {% endblock %} diff --git a/data/html/sections/info.html b/data/html/sections/info.html index 43f534c0adc..7058558cd99 100644 --- a/data/html/sections/info.html +++ b/data/html/sections/info.html @@ -62,7 +62,7 @@

Sandbox Info
{% endif %}
-{% if results.CAPE.configs %}
+{% if "CAPE" in results and results.CAPE.configs %}
Malware config(s)
{% for config_block in results.CAPE.configs %}
diff --git a/data/html/sections/payloads.html b/data/html/sections/payloads.html
index b7289917a66..64494cb31ac 100644
--- a/data/html/sections/payloads.html
+++ b/data/html/sections/payloads.html
@@ -3,7 +3,7 @@
Payloads
- {% if results.CAPE.payloads %}
+ {% if "CAPE" in results and results.CAPE.payloads %}
{% for file in results.CAPE.payloads %}
{% if summary_report %} diff --git a/data/safelist/disposable_domain_list.txt b/data/safelist/disposable_domain_list.txt index d59877fa5bf..156767fe3aa 100644 --- a/data/safelist/disposable_domain_list.txt +++ b/data/safelist/disposable_domain_list.txt @@ -27079,3 +27079,4 @@ kiabws.online upived.online relay.firefox.com bupt.edu.cn +motivue.com diff --git a/data/yara/CAPE/AdaptixBeacon.yar b/data/yara/CAPE/AdaptixBeacon.yar new file mode 100644 index 00000000000..efa3c297850 --- /dev/null +++ b/data/yara/CAPE/AdaptixBeacon.yar @@ -0,0 +1,16 @@ +rule AdaptixBeacon +{ + meta: + author = "enzok" + description = "AdaptixBeacon Payload" + cape_type = "AdaptixBeacon Payload" + hash = "f78f5803be5704420cbb2e0ac3c57fcb3d9cdf443fbf1233c069760bee115b5d" + strings: + $conf_1 = {8D ?? ?? E8 [3] 00 4? 89 [1-2] 4? 8B 4C 24 ?? E8 [3] 00 4? 8B 53 48 66 [0-1] 89 04} + $conf_2 = {E8 [3] 00 48 8B 4C 24 ?? 48 89 43 78 E8 [3] 00 48 8B 4C 24 ?? 89 83 80 00 00 00 E8 [3] 00 03 83 80 00 00 00 48 8B 4C 24} + $conf_3 = {E8 [3] 00 4? 8B 4C 24 ?? 4? 89 ?? 4? 89 43 58 E8 [3] 00 4? 8B 4C 24 ?? 4? 89 ?? 4? 89 43 60 E8 [3] 00 4? 8B 4C 24 ?? 4? 89 ?? 4? 89 43 68} + $wininet_1 = {B9 77 00 00 00 4? 89 50 28 E8 [4] B9 69 00 00 00 88 44 24 ?? E8 [4] B9 6E 00 00 00 88 44 24} + $wininet_2 = {B9 69 00 00 00 88 44 24 ?? E8 [4] B9 6E 00 00 00 88 44 24 ?? E8 [4] B9 65 00 00 00 88 44 24} + condition: + 1 of ($conf_*) and 1 of ($wininet_*) +} \ No newline at end of file diff --git a/data/yara/CAPE/AgentTesla.yar b/data/yara/CAPE/AgentTesla.yar index 555f83b570d..0410093a7e9 100644 --- a/data/yara/CAPE/AgentTesla.yar +++ b/data/yara/CAPE/AgentTesla.yar @@ -110,18 +110,6 @@ rule AgentTeslaV3 { (uint16(0) == 0x5a4d and (8 of ($s*) or (6 of ($s*) and 4 of ($g*)))) or (2 of ($m*)) } -rule AgentTeslaXor -{ - meta: - author = "kevoreilly" - description = "AgentTesla xor-based config decoding" - cape_type = "AgentTesla Payload" - strings: - $decode = {06 91 06 61 20 [4] 61 D2 9C 06 17 58 0A 06 7E [4] 8E 69 FE 04 2D ?? 2A} - condition: - uint16(0) == 0x5A4D and any of them -} - rule AgentTeslaV4 { meta: diff --git a/data/yara/CAPE/Amatera.yar b/data/yara/CAPE/Amatera.yar new file mode 100644 index 00000000000..6af16ae67db --- /dev/null +++ b/data/yara/CAPE/Amatera.yar @@ -0,0 +1,14 @@ +rule Amatera +{ + meta: + author = "kevoreilly" + description = "Amatera Payload" + cape_type = "Amatera Payload" + hash = "35eb93548a0c037d392f870c05e0e9fb1aeff3a5a505e1d4a087f7465ed1f6af" + strings: + $sysenter = {64 FF 15 C0 00 00 00 C3} + $harness = {0F B7 55 EC 52 E8 [4] 83 C4 04 C7 45 F0 [4] 8B 45 ?? 50 [0-40] FF 55 F0 83 C4 ?? 8B E5 5D C3} + $socket = {66 89 [2] 6A 00 6A ?? 8D [3] 68 (03|07) 20 01 00 8B 4D F8 E8 [4] 0F B6 (C0|C8) 85 (C0|C9) 75 04 32 C0 EB} + condition: + uint16(0) == 0x5A4D and all of them +} diff --git a/data/yara/CAPE/Arkei.yar b/data/yara/CAPE/Arkei.yar index f168915ed1e..7eff6c52021 100644 --- a/data/yara/CAPE/Arkei.yar +++ b/data/yara/CAPE/Arkei.yar @@ -1,7 +1,7 @@ rule Arkei { meta: - author = "kevoreilly" + author = "kevoreilly, YungBinary" description = "Arkei Payload" cape_type = "Arkei Payload" strings: @@ -19,6 +19,32 @@ rule Arkei $v7 = "files\\cc_" ascii wide $v8 = "files\\autofill_" ascii wide $v9 = "files\\cookies_" ascii wide + + $loaded_modules = { + 64 A1 30 00 00 00 + 8B 40 0C + 8B 40 0C + 8B 00 + 8B 00 + 8B 40 18 + 89 45 FC + 8B 45 FC + 8B E5 + 5D + C3 + } + + $language_check = { + FF 15 ?? ?? ?? ?? + 0F B7 C0 + 89 45 ?? + 81 7D ?? 3F 04 ?? ?? 
+ 7F + } + + $ext1 = ".zoo" ascii + $ext2 = ".arc" ascii + condition: - uint16(0) == 0x5A4D and (all of ($string*) or 7 of ($v*)) + uint16(0) == 0x5A4D and (($loaded_modules and $language_check and $ext1 and $ext2) or (all of ($string*) or 7 of ($v*))) } diff --git a/data/yara/CAPE/AsyncRAT.yar b/data/yara/CAPE/AsyncRAT.yar index 84a02f65c2e..d7a42a60ee6 100644 --- a/data/yara/CAPE/AsyncRAT.yar +++ b/data/yara/CAPE/AsyncRAT.yar @@ -1,40 +1,287 @@ -rule AsyncRAT -{ - meta: - author = "kevoreilly, JPCERT/CC Incident Response Group" - description = "AsyncRAT Payload" - cape_type = "AsyncRAT Payload" - strings: - $salt = {BF EB 1E 56 FB CD 97 3B B2 19 02 24 30 A5 78 43 00 3D 56 44 D2 1E 62 B9 D4 F1 80 E7 E6 C3 39 41} - $b1 = {00 00 00 0D 53 00 48 00 41 00 32 00 35 00 36 00 00} - $b2 = {09 50 00 6F 00 6E 00 67 00 00} - $string1 = "Pastebin" ascii wide nocase - $string2 = "Pong" wide - $string3 = "Stub.exe" ascii wide - $kitty = "StormKitty" ascii - condition: - uint16(0) == 0x5A4D and not $kitty and ($salt and (2 of ($str*) or 1 of ($b*))) or (all of ($b*) and 2 of ($str*)) -} - rule AsyncRAT_kingrat { meta: author = "jeFF0Falltrades" - cape_type = "AsyncRAT Payload" + cape_type = "AsyncRAT Payload" strings: $str_async = "AsyncClient" wide ascii nocase $str_aes_exc = "masterKey can not be null or empty" wide ascii $str_schtasks = "schtasks /create /f /sc onlogon /rl highest" wide ascii + $byte_aes_key_base = { 7E [3] 04 73 [3] 06 80 } + $byte_aes_salt_base = { BF EB 1E 56 FB CD 97 3B B2 19 } + $patt_verify_hash = { 7e [3] 04 6f [3] 0a 6f [3] 0a 74 [3] 01 } + $patt_config = { 72 [3] 70 80 [3] 04 } + $dcrat_1 = "dcrat" wide ascii nocase $dcrat_2 = "qwqdan" wide ascii $dcrat_3 = "YW1zaS5kbGw=" wide ascii $dcrat_4 = "VmlydHVhbFByb3RlY3Q=" wide ascii $dcrat_5 = "save_Plugin" wide ascii + + $ww2 = "WorldWindClient" wide fullword nocase + $ww3 = "WorldWindStealer" wide fullword nocase + $ww4 = "*WorldWind Pro - Results:*" wide fullword nocase + $ww5 = /WorldWind(\s)?Stealer/ ascii wide + + $prynt = /Prynt(\s)?Stealer/ ascii wide + + condition: + (not any of ($dcrat*) and not any of ($ww*) and not $prynt) and 6 of them and #patt_config >= 10 +} + +rule StormKitty { + meta: + author = "ditekSHen" + description = "StormKitty infostealer payload" + cape_type = "StormKitty Payload" + strings: + $x1 = "\\ARTIKA\\Videos\\Chrome-Password-Recovery" ascii + $x2 = "https://github.com/LimerBoy/StormKitty" fullword ascii + $x3 = "StormKitty" fullword ascii + $s1 = "GetBSSID" fullword ascii + $s2 = "GetAntivirus" fullword ascii + $s3 = "C:\\Users\\Public\\credentials.txt" fullword wide + $s4 = "^([a-zA-Z0-9_\\-\\.]+)@([a-zA-Z0-9_\\-\\.]+)\\.([a-zA-Z]{2,5})$" fullword wide + $s5 = "BCrypt.BCryptGetProperty() (get size) failed with status code:{0}" fullword wide + $s6 = "\"encrypted_key\":\"(.*?)\"" fullword wide + + $ww2 = "WorldWindClient" wide fullword nocase + $ww3 = "WorldWindStealer" wide fullword nocase + $ww4 = "*WorldWind Pro - Results:*" wide fullword nocase + $ww5 = /WorldWind(\s)?Stealer/ ascii wide + + $prynt = /Prynt(\s)?Stealer/ ascii wide + + condition: + uint16(0) == 0x5a4d and (not any of ($ww*) and not $prynt) and (2 of ($x*) or 5 of ($s*) or (3 of ($s*) and 1 of ($x*))) +} + + +rule WorldWind { + meta: + author = "ditekSHen" + description = "Detects WorldWind infostealer" + cape_type = "WorldWind Payload" + strings: + $c1 = /WorldWind(\s)?Stealer/ ascii wide + $x2 = "@FlatLineStealer" ascii wide + $x3 = "@CashOutGangTalk" ascii wide + $m1 = ".Passwords.Targets." 
ascii + $m2 = ".Modules.Keylogger" ascii + $m3 = ".Modules.Clipper" ascii + $m4 = ".Modules.Implant" ascii + $s1 = "--- Clipper" wide + $s2 = "Downloading file: \"{file}\"" wide + $s3 = "/bot{0}/getUpdates?offset={1}" wide + $s4 = "send command to bot!" wide + $s5 = " *Keylogger " fullword wide + $s6 = "*Stealer" wide + $s7 = "Bot connected" wide + condition: + uint16(0) == 0x5a4d and 1 of ($c*) and (1 of ($x*) or 2 of ($m*) or 3 of ($s*)) +} + + +rule Prynt { + meta: + author = "ditekSHen" + description = "Detects Prynt infostealer" + cape_type = "Prynt Payload" + strings: + $c1 = /Prynt(\s)?Stealer/ ascii wide + $x2 = "@FlatLineStealer" ascii wide + $x3 = "@CashOutGangTalk" ascii wide + $m1 = ".Passwords.Targets." ascii + $m2 = ".Modules.Keylogger" ascii + $m3 = ".Modules.Clipper" ascii + $m4 = ".Modules.Implant" ascii + $s1 = "--- Clipper" wide + $s2 = "Downloading file: \"{file}\"" wide + $s3 = "/bot{0}/getUpdates?offset={1}" wide + $s4 = "send command to bot!" wide + $s5 = " *Keylogger " fullword wide + $s6 = "*Stealer" wide + $s7 = "Bot connected" wide + condition: + uint16(0) == 0x5a4d and 1 of ($c*) and (1 of ($x*) or 2 of ($m*) or 3 of ($s*)) +} + + +rule XWorm { + meta: + author = "ditekSHen" + description = "Detects XWorm" + cape_type = "XWorm Payload" + strings: + $x1 = "XWorm " wide nocase + $x2 = /XWorm\s(V|v)\d+\.\d+/ fullword wide + $s1 = "RunBotKiller" fullword wide + $s2 = "XKlog.txt" fullword wide + $s3 = /(shell|reg)fuc/ fullword wide + $s4 = "closeshell" fullword ascii + $s5 = { 62 00 79 00 70 00 73 00 73 00 00 ?? 63 00 61 00 6c 00 6c 00 75 00 61 00 63 00 00 ?? 73 00 63 00 } + $s6 = { 44 00 44 00 6f 00 73 00 54 00 00 ?? 43 00 69 00 6c 00 70 00 70 00 65 00 72 00 00 ?? 50 00 45 00 } + $s7 = { 69 00 6e 00 6a 00 52 00 75 00 6e 00 00 ?? 73 00 74 00 61 00 72 00 74 00 75 00 73 00 62 } + $s8 = { 48 6f 73 74 00 50 6f 72 74 00 75 70 6c 6f 61 64 65 72 00 6e 61 6d 65 65 65 00 4b 45 59 00 53 50 4c 00 4d 75 74 65 78 78 00 } + $v2_1 = "PING!" fullword wide + $v2_2 = "Urlhide" fullword wide + $v2_3 = /PC(Restart|Shutdown)/ fullword wide + $v2_4 = /(Start|Stop)(DDos|Report)/ fullword wide + $v2_5 = /Offline(Get|Keylogger)/ wide + $v2_6 = "injRun" fullword wide + $v2_7 = "Xchat" fullword wide + $v2_8 = "UACFunc" fullword ascii wide + condition: + uint16(0) == 0x5a4d and ((1 of ($x*) and (3 of ($s*) or 3 of ($v2*))) or 6 of them) +} + +rule xworm_kingrat { + meta: + author = "jeFF0Falltrades" + cape_type = "XWorm payload" + strings: + $str_xworm = "xworm" wide ascii nocase + $str_xwormmm = "Xwormmm" wide ascii + $str_xclient = "XClient" wide ascii + $str_default_log = "\\Log.tmp" wide ascii + $str_create_proc = "/create /f /RL HIGHEST /sc minute /mo 1 /t" wide ascii + $str_ddos_start = "StartDDos" wide ascii + $str_ddos_stop = "StopDDos" wide ascii + $str_timeout = "timeout 3 > NUL" wide ascii + $byte_md5_hash = { 7e [3] 04 28 [3] 06 6f } + $patt_config = { 72 [3] 70 80 [3] 04 } + condition: + 5 of them and #patt_config >= 7 +} + +rule DCRat { + meta: + author = "ditekSHen" + description = "DCRat payload" + cape_type = "DCRat Payload" + strings: + // DCRat + $dc1 = "DCRatBuild" ascii + $dc2 = "DCStlr" ascii + $x1 = "px\">
DCRat Keylogger" wide + $x2 = "DCRat-Log#" wide + $x3 = "DCRat.Code" wide + $string1 = "CaptureBrowsers" fullword ascii + $string2 = "DecryptBrowsers" fullword ascii + $string3 = "Browsers.IE10" ascii + $string4 = "Browsers.Chromium" ascii + $string5 = "WshShell" ascii + $string6 = "SysMngmts" fullword ascii + $string7 = "LoggerData" fullword ascii + // DCRat Plugins/Libraries + $plugin = "DCRatPlugin" fullword ascii + // AntiVM + $av1 = "AntiVM" ascii wide + $av2 = "vmware" fullword wide + $av3 = "VirtualBox" fullword wide + $av4 = "microsoft corporation" fullword wide + $av5 = "VIRTUAL" fullword wide + $av6 = "DetectVirtualMachine" fullword ascii + $av7 = "Select * from Win32_ComputerSystem" fullword wide + // Plugin_AutoStealer, Plugin_AutoKeylogger + $pl1 = "dcratAPI" fullword ascii + $pl2 = "dsockapi" fullword ascii + $pl3 = "file_get_contents" fullword ascii + $pl4 = "classthis" fullword ascii + $pl5 = "typemdt" fullword ascii + $pl6 = "Plugin_AutoStealer" ascii wide + $pl7 = "Plugin_AutoKeylogger" ascii wide + // variant + $v1 = "Plugin couldn't process this action!" wide + $v2 = "Unknown command!" wide + $v3 = "PLUGINCONFIGS" wide + $v4 = "Saving log..." wide + $v5 = "~Work.log" wide + $v6 = "MicrophoneNum" fullword wide + $v7 = "WebcamNum" fullword wide + $v8 = "%SystemDrive% - Slow" wide + $v9 = "%UsersFolder% - Fast" wide + $v10 = "%AppData% - Very Fast" wide + $v11 = /\[(Up|Down|Enter|ESC|CTRL|Shift|Win|Tab|CAPSLOCK: (ON|OFF))\]<\/span>/ wide + $px1 = "[Browsers] Scanned elements: " wide + $px2 = "[Browsers] Grabbing cookies" wide + $px3 = "[Browsers] Grabbing passwords" wide + $px4 = "[Browsers] Grabbing forms" wide + $px5 = "[Browsers] Grabbing CC" wide + $px6 = "[Browsers] Grabbing history" wide + $px7 = "[StealerPlugin] Invoke: " wide + $px8 = "[Other] Grabbing steam" wide + $px9 = "[Other] Grabbing telegram" wide + $px10 = "[Other] Grabbing discord tokens" wide + $px11 = "[Other] Grabbing filezilla" wide + $px12 = "[Other] Screenshots:" wide + $px13 = "[Other] Clipboard" wide + $px14 = "[Other] Saving system information" wide + condition: + uint16(0) == 0x5a4d and (all of ($dc*) or all of ($string*) or 2 of ($x*) or 6 of ($v*) or 5 of ($px*)) or ($plugin and (4 of ($av*) or 5 of ($pl*))) +} + +rule dcrat_kingrat { + meta: + author = "jeFF0Falltrades" + cape_type = "DCRat Payload" + strings: + $venom_1 = "VenomRAT" wide ascii nocase + $venom_2 = "HVNC_REPLY_MESSAGE" wide ascii + $str_aes_exc = "masterKey can not be null or empty" wide ascii + $str_b64_amsi = "YW1zaS5kbGw=" wide ascii + $str_b64_virtual_protect = "VmlydHVhbFByb3RlY3Q=" wide ascii + $str_dcrat = "dcrat" wide ascii nocase + $str_plugin = "save_Plugin" wide ascii + $str_qwqdan = "qwqdan" wide ascii $byte_aes_key_base = { 7E [3] 04 73 [3] 06 80 } - $byte_aes_salt_base = { BF EB 1E 56 FB CD 97 3B B2 19 } + $patt_config = { 72 [3] 70 80 [3] 04 } $patt_verify_hash = { 7e [3] 04 6f [3] 0a 6f [3] 0a 74 [3] 01 } + + condition: + (not any of ($venom*)) and 5 of them and #patt_config >= 10 +} + +rule QuasarRAT { + meta: + author = "ditekshen" + description = "QuasarRAT payload" + cape_type = "QuasarRAT Payload" + strings: + $s1 = "GetKeyloggerLogsResponse" fullword ascii + $s2 = "GetKeyloggerLogs" fullword ascii + $s3 = "/>Log created on" wide + $s4 = "User: {0}{3}Pass: {1}{3}Host: {2}" wide + $s5 = "Domain: {1}{0}Cookie Name: {2}{0}Value: {3}{0}Path: {4}{0}Expired: {5}{0}HttpOnly: {6}{0}Secure: {7}" wide + $s6 = "grabber_" wide + $s7 = "" ascii + $s8 = "k__BackingField" fullword ascii + $s9 = "" ascii + $s10 = 
"add_OnHotKeysDown" ascii + $mutex = "QSR_MUTEX_" ascii wide + $ua1 = "Mozilla/5.0 (Windows NT 6.3; rv:48.0) Gecko/20100101 Firefox/48.0" fullword wide + $us2 = "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_9_3) AppleWebKit/537.75.14 (KHTML, like Gecko) Version/7.0.3 Safari/7046A194A" fullword wide + condition: + uint16(0) == 0x5a4d and ($mutex or (all of ($ua*) and 2 of them) or 6 of ($s*)) +} + +rule quasarrat_kingrat { + meta: + author = "jeFF0Falltrades" + cape_type = "QuasarRAT Payload" + strings: + $str_quasar = "Quasar." wide ascii + $str_hidden = "set_Hidden" wide ascii + $str_shell = "DoShellExecuteResponse" wide ascii + $str_close = "echo DONT CLOSE THIS WINDOW!" wide ascii + $str_pause = "ping -n 10 localhost > nul" wide ascii + $str_aes_exc = "masterKey can not be null or empty" wide ascii + $byte_aes_key_base = { 7E [3] 04 73 [3] 06 25 } + $byte_aes_salt_base = { BF EB 1E 56 FB CD 97 3B B2 19 } + $byte_special_folder = { 7e 73 [4] 28 [4] 80 } $patt_config = { 72 [3] 70 80 [3] 04 } + $patt_verify_hash = { 7e [3] 04 6f [3] 0a 6f [3] 0a 74 [3] 01 } condition: - (not any of ($dcrat*)) and 6 of them and #patt_config >= 10 + 6 of them and #patt_config >= 10 } diff --git a/data/yara/CAPE/CobaltStrikeBeacon.yar b/data/yara/CAPE/CobaltStrikeBeacon.yar index aaf01584892..5e766690b9a 100644 --- a/data/yara/CAPE/CobaltStrikeBeacon.yar +++ b/data/yara/CAPE/CobaltStrikeBeacon.yar @@ -17,9 +17,9 @@ rule CobaltStrikeBeacon $pwsh1 = "IEX (New-Object Net.Webclient).DownloadString('http" ascii $pwsh2 = "powershell -nop -exec bypass -EncodedCommand \"%s\"" fullword ascii $ver3a = {69 68 69 68 69 6b ?? ?? 69} - $ver3b = {69 69 69 69} + $ver3b = "iiiiiiiiiiiiiiii" $ver4a = {2e 2f 2e 2f 2e 2c ?? ?? 2e} - $ver4b = {2e 2e 2e 2e} + $ver4b = "................" $a1 = "%02d/%02d/%02d %02d:%02d:%02d" xor(0x00-0xff) $a2 = "Started service %s on %s" xor(0x00-0xff) $a3 = "%s as %s\\%s: %d" xor(0x00-0xff) diff --git a/data/yara/CAPE/DCRat.yar b/data/yara/CAPE/DCRat.yar deleted file mode 100644 index 4ca7696dfd0..00000000000 --- a/data/yara/CAPE/DCRat.yar +++ /dev/null @@ -1,87 +0,0 @@ -rule DCRat { - meta: - author = "ditekSHen" - description = "DCRat payload" - cape_type = "DCRat Payload" - strings: - // DCRat - $dc1 = "DCRatBuild" ascii - $dc2 = "DCStlr" ascii - $x1 = "px\">
DCRat Keylogger" wide - $x2 = "DCRat-Log#" wide - $x3 = "DCRat.Code" wide - $string1 = "CaptureBrowsers" fullword ascii - $string2 = "DecryptBrowsers" fullword ascii - $string3 = "Browsers.IE10" ascii - $string4 = "Browsers.Chromium" ascii - $string5 = "WshShell" ascii - $string6 = "SysMngmts" fullword ascii - $string7 = "LoggerData" fullword ascii - // DCRat Plugins/Libraries - $plugin = "DCRatPlugin" fullword ascii - // AntiVM - $av1 = "AntiVM" ascii wide - $av2 = "vmware" fullword wide - $av3 = "VirtualBox" fullword wide - $av4 = "microsoft corporation" fullword wide - $av5 = "VIRTUAL" fullword wide - $av6 = "DetectVirtualMachine" fullword ascii - $av7 = "Select * from Win32_ComputerSystem" fullword wide - // Plugin_AutoStealer, Plugin_AutoKeylogger - $pl1 = "dcratAPI" fullword ascii - $pl2 = "dsockapi" fullword ascii - $pl3 = "file_get_contents" fullword ascii - $pl4 = "classthis" fullword ascii - $pl5 = "typemdt" fullword ascii - $pl6 = "Plugin_AutoStealer" ascii wide - $pl7 = "Plugin_AutoKeylogger" ascii wide - // variant - $v1 = "Plugin couldn't process this action!" wide - $v2 = "Unknown command!" wide - $v3 = "PLUGINCONFIGS" wide - $v4 = "Saving log..." wide - $v5 = "~Work.log" wide - $v6 = "MicrophoneNum" fullword wide - $v7 = "WebcamNum" fullword wide - $v8 = "%SystemDrive% - Slow" wide - $v9 = "%UsersFolder% - Fast" wide - $v10 = "%AppData% - Very Fast" wide - $v11 = /\[(Up|Down|Enter|ESC|CTRL|Shift|Win|Tab|CAPSLOCK: (ON|OFF))\]<\/span>/ wide - $px1 = "[Browsers] Scanned elements: " wide - $px2 = "[Browsers] Grabbing cookies" wide - $px3 = "[Browsers] Grabbing passwords" wide - $px4 = "[Browsers] Grabbing forms" wide - $px5 = "[Browsers] Grabbing CC" wide - $px6 = "[Browsers] Grabbing history" wide - $px7 = "[StealerPlugin] Invoke: " wide - $px8 = "[Other] Grabbing steam" wide - $px9 = "[Other] Grabbing telegram" wide - $px10 = "[Other] Grabbing discord tokens" wide - $px11 = "[Other] Grabbing filezilla" wide - $px12 = "[Other] Screenshots:" wide - $px13 = "[Other] Clipboard" wide - $px14 = "[Other] Saving system information" wide - condition: - uint16(0) == 0x5a4d and (all of ($dc*) or all of ($string*) or 2 of ($x*) or 6 of ($v*) or 5 of ($px*)) or ($plugin and (4 of ($av*) or 5 of ($pl*))) -} - -rule dcrat_kingrat { - meta: - author = "jeFF0Falltrades" - cape_type = "DCRat Payload" - strings: - $venom_1 = "VenomRAT" wide ascii nocase - $venom_2 = "HVNC_REPLY_MESSAGE" wide ascii - $str_aes_exc = "masterKey can not be null or empty" wide ascii - $str_b64_amsi = "YW1zaS5kbGw=" wide ascii - $str_b64_virtual_protect = "VmlydHVhbFByb3RlY3Q=" wide ascii - $str_dcrat = "dcrat" wide ascii nocase - $str_plugin = "save_Plugin" wide ascii - $str_qwqdan = "qwqdan" wide ascii - $byte_aes_key_base = { 7E [3] 04 73 [3] 06 80 } - $patt_config = { 72 [3] 70 80 [3] 04 } - $patt_verify_hash = { 7e [3] 04 6f [3] 0a 6f [3] 0a 74 [3] 01 } - - condition: - (not any of ($venom*)) and 5 of them and #patt_config >= 10 -} diff --git a/data/yara/CAPE/Latrodectus.yar b/data/yara/CAPE/Latrodectus.yar index 221365c6c10..a8b6c7f444d 100644 --- a/data/yara/CAPE/Latrodectus.yar +++ b/data/yara/CAPE/Latrodectus.yar @@ -10,7 +10,7 @@ rule Latrodectus $fnvhash2 = {8B 0C 24 33 C8 8B C1 89 04 24 69 04 24 93 01 00 01} $procchk1 = {E8 [3] FF 85 C0 74 [2] FF FF FF FF E9 [4] E8 [4] 89 44 24 ?? E8 [4] 83 F8 4B 73 ?? 83 [3] 06} $procchk2 = {72 [2] FF FF FF FF E9 [4] E8 [4] 83 F8 32 73 ?? 83 [3] 06} - $version = {C7 44 2? ?? 0? 00 00 00 C7 44 2? ?? 0? 00 00 00 C7 44 2? ?? 01 00 00 00 8B} + $version = {C7 44 2? ?? ?? 
00 00 00 C7 44 2? ?? ?? 00 00 00 8B 05 [4] 89} condition: all of them } @@ -28,7 +28,7 @@ rule Latrodectus_AES $key = {C6 44 2? ?? ?? [150] C6 44 2? ?? ?? B8 02} $aes_ctr_1 = {8B 44 24 ?? FF C8 89 44 24 ?? 83 7C 24 ?? 00 7C ?? 4? 63 44 24 ?? 4? 8B 4C 24 ?? 0F B6 84 01 F0 00 00 00 3D FF 00 00 00} $aes_ctr_2 = {48 03 C8 48 8B C1 0F B6 ?? 48 63 4C 24 ?? 0F B6 4C 0C ?? 33 C1 48 8B 4C 24 ?? 48 8B 54 24 ?? 48 03 D1 48 8B CA 88 01} - $version = {C7 44 2? ?? 0? 00 00 00 C7 44 2? ?? 0? 00 00 00 C7 44 2? ?? 01 00 00 00 8B} + $version = {C7 44 2? ?? ?? 00 00 00 C7 44 2? ?? ?? 00 00 00 8B 05 [4] 89} condition: all of them -} +} \ No newline at end of file diff --git a/data/yara/CAPE/Lumma.yar b/data/yara/CAPE/Lumma.yar index 1422e550b62..1ca8dafefa0 100644 --- a/data/yara/CAPE/Lumma.yar +++ b/data/yara/CAPE/Lumma.yar @@ -10,6 +10,7 @@ rule Lumma $decode1 = {C1 (E9|EA) 02 [0-3] 0F B6 (44|4C) ?? FF 83 (F8|F9) 3D 74 05 83 (F8|F9) 2E 75 01 (49|4A) [0-30] 2E 75} $decode2 = {B0 40 C3 B0 3F C3 89 C8 04 D0 3C 09 77 06 80 C1 04 89 C8 C3 89 C8 04 BF 3C} $decode3 = {B0 40 C3 B0 3F C3 80 F9 30 72 ?? 80 F9 39 77 06 80 C1 04 89 C8 C3} + $decode4 = {89 C8 04 D0 3C 09 77 ?? [3-11] 89 C8 [0-1] C3 89 C8 04 BF 3C 1A 72 ?? 89 C8 04 9F 3C} condition: uint16(0) == 0x5a4d and any of them } diff --git a/data/yara/CAPE/NetTraveler.yar b/data/yara/CAPE/NetTraveler.yar index 61ba7b97f51..d11f2297166 100644 --- a/data/yara/CAPE/NetTraveler.yar +++ b/data/yara/CAPE/NetTraveler.yar @@ -5,7 +5,10 @@ rule NetTraveler description = "NetTraveler Payload" cape_type = "NetTraveler Payload" strings: - $string1 = {4E 61 6D 65 3A 09 25 73 0D 0A 54 79 70 65 3A 09 25 73 0D 0A 53 65 72 76 65 72 3A 09 25 73 0D 0A} // "Name: %s Type: %s Server: %s " + $string1 = { 4E 61 6D 65 3A 09 25 73 + 0D 0A 54 79 70 65 3A 09 + 25 73 0D 0A 53 65 72 76 + 65 72 3A 09 25 73 0D 0A } // Name:\t%s\r\nType:\t%s\r\nServer:\t%s\r\n $string2 = "Password Expiried Time:" $string3 = "Memory: Total:%dMB,Left:%dMB (for %.2f%s)" diff --git a/data/yara/CAPE/NitrogenLoader.yar b/data/yara/CAPE/NitrogenLoader.yar index 1939fc68e00..857ea03e55d 100644 --- a/data/yara/CAPE/NitrogenLoader.yar +++ b/data/yara/CAPE/NitrogenLoader.yar @@ -4,15 +4,28 @@ rule NitrogenLoader author = "enzok" description = "Nitrogen Loader" cape_type = "NitrogenLoader Loader" - hash = "7b603d63a23201ff0b6ffa9acdd650df9caa1731837d559d93b3d8ce1d82a962" + hash1 = "7b603d63a23201ff0b6ffa9acdd650df9caa1731837d559d93b3d8ce1d82a962" + hash2 = "50c2afd792bfe2966133ee385054eaae1f73b04e013ef3434ef2407f99d7f037" + hash3 = "4926dee7da0da522c34ffeebb32f28703fd689a52543332c8d28ccfea223f43a" strings: - $aes1 = {63 7c 77 7b f2 6b 6f c5 30 01 67 2b fe d7 ab 76 ca 82 c9 7d fa} - $aes2 = {52 09 6a d5 30 36 a5 38 bf 40 a3 9e 81 f3 d7 fb 7c e3 39 82 9b} - $string1 = "BASS_GetEAXParameters" + $stringaes1 = {63 7c 77 7b f2 6b 6f c5 30 01 67 2b fe d7 ab 76 ca 82 c9 7d fa} + $stringaes2 = {52 09 6a d5 30 36 a5 38 bf 40 a3 9e 81 f3 d7 fb 7c e3 39 82 9b} + $string1 = "GetComputerNameExA" $string2 = "LoadResource" $syscallmakehashes = {48 89 4C 24 ?? 48 89 54 24 ?? 4? 89 44 24 ?? 4? 89 4C 24 ?? 4? 83 EC ?? B? [4] E8 [3] 00} $syscallnumber = {49 89 C3 B? [4] E8 [3] 00} $syscall = {48 83 C4 ?? 4? 8B 4C 24 ?? 4? 8B 54 24 ?? 4? 8B 44 24 ?? 4? 8B 4C 24 ?? 4? 89 CA 4? FF E3} + $decryptstr1 = {33 D2 48 8B 04 24 B? 0C 00 00 00 48 F7 F1 48 8B C2 48 C1 E0 02 0F B6 C8 48 8B 44 24 ?? 48 D3 E8 48 25 AB 00 00 00} + $decryptrsc1 = {48 63 4? 24 ?? 33 D2 48 [0-3] F7 B4 24 [4] 48 8B C2 48 8B 8C 24 [4] 0F BE 04 01} + $decryptrsc2 = {8B ?? 
24 [1-4] 33 C8 8B C1 48 63 4C 24 ?? 48 8B 94 24 [4] 88 04 0A} + $decryptrsc3 = {8B 8C 24 ?? ?? ?? ?? 2B C8 8B C1 48 63 4C 24 ?? 48 8B 94 24 [4] 88 04 0A} + $decryptstrs = {33 D2 48 8B 04 24 B9 0C 00 00 00 48 F7 F1 48 8B C2 48 C1 E0 02 0F B6 C8 48 8B 44 24 ?? 48 D3 E8 48 25 AB 00 00 00} + $taskman_1 = {E8 [4] B9 61 00 00 00 88 84 24 [4] E8 [4] B9 73 00 00 00 88 84 24 [4] E8 [4] B9 6B 00 00 00 88 84 24 [4] E8 [3] FF} + $taskman_2 = {B9 4D 00 00 00 88 84 24 [4] E8 [4] B9 61 00 00 00 88 84 24 [4] E8 [4] B9 6E 00 00 00 88 84 24 [4] E8 [3] FF} + $taskman_3 = {B9 61 00 00 00 88 84 24 [4] E8 [4] B9 67 00 00 00 88 84 24 [4] E8 [4] B9 65 00 00 00 88 84 24 [4] E8 [3] FF} + $taskman_4 = {B9 72 00 00 00 88 84 24 [4] E8 [4] 31 C9 88 84 24 [4] E8 [3] FF} + $rc4decrypt_1 = {48 89 ?? 4? 89 ?? E8 [4] 4? 8B ?? 24 [1-4] 4? 89 ?? 4? 89 ?? 4? 89 C1 [0-1] 89 ?? E8 [4] 4? 89} + $rc4decrypt_2 = {E8 [4] 8B ?? 24 [1-4] 4? 89 ?? 48 89 ?? 4? 89 C1 E8 [3] FF} condition: - all of ($aes*) and all of ($string*) and any of ($syscall*) + (2 of ($string*) and any of ($syscall*)) or 4 of ($decrypt*) or (3 of ($taskman_*) and all of ($rc4decrypt_*)) } diff --git a/data/yara/CAPE/Obfuscar.yar b/data/yara/CAPE/Obfuscar.yar new file mode 100644 index 00000000000..0639a5c5a25 --- /dev/null +++ b/data/yara/CAPE/Obfuscar.yar @@ -0,0 +1,12 @@ +rule Obfuscar +{ + meta: + author = "kevoreilly" + description = "Obfuscar xor routime" + // cape_type = "AgentTesla Payload" + // https://github.com/obfuscar/obfuscar/blob/65e9ced171e0f2a92d2c64c479c3a1ec3624802a/Obfuscar/Obfuscator.cs#L1693 + strings: + $decode = {06 91 06 61 20 [4] 61 D2 9C 06 17 58 0A 06 7E [4] 8E 69 FE 04 2D ?? 2A} + condition: + uint16(0) == 0x5A4D and any of them +} diff --git a/data/yara/CAPE/QuasarRAT.yar b/data/yara/CAPE/QuasarRAT.yar deleted file mode 100644 index 8877430d23c..00000000000 --- a/data/yara/CAPE/QuasarRAT.yar +++ /dev/null @@ -1,43 +0,0 @@ -rule QuasarRAT { - meta: - author = "ditekshen" - description = "QuasarRAT payload" - cape_type = "QuasarRAT Payload" - strings: - $s1 = "GetKeyloggerLogsResponse" fullword ascii - $s2 = "GetKeyloggerLogs" fullword ascii - $s3 = "/>Log created on" wide - $s4 = "User: {0}{3}Pass: {1}{3}Host: {2}" wide - $s5 = "Domain: {1}{0}Cookie Name: {2}{0}Value: {3}{0}Path: {4}{0}Expired: {5}{0}HttpOnly: {6}{0}Secure: {7}" wide - $s6 = "grabber_" wide - $s7 = "" ascii - $s8 = "k__BackingField" fullword ascii - $s9 = "" ascii - $s10 = "add_OnHotKeysDown" ascii - $mutex = "QSR_MUTEX_" ascii wide - $ua1 = "Mozilla/5.0 (Windows NT 6.3; rv:48.0) Gecko/20100101 Firefox/48.0" fullword wide - $us2 = "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_9_3) AppleWebKit/537.75.14 (KHTML, like Gecko) Version/7.0.3 Safari/7046A194A" fullword wide - condition: - uint16(0) == 0x5a4d and ($mutex or (all of ($ua*) and 2 of them) or 6 of ($s*)) -} - -rule quasarrat_kingrat { - meta: - author = "jeFF0Falltrades" - cape_type = "QuasarRAT Payload" - strings: - $str_quasar = "Quasar." wide ascii - $str_hidden = "set_Hidden" wide ascii - $str_shell = "DoShellExecuteResponse" wide ascii - $str_close = "echo DONT CLOSE THIS WINDOW!" 
wide ascii - $str_pause = "ping -n 10 localhost > nul" wide ascii - $str_aes_exc = "masterKey can not be null or empty" wide ascii - $byte_aes_key_base = { 7E [3] 04 73 [3] 06 25 } - $byte_aes_salt_base = { BF EB 1E 56 FB CD 97 3B B2 19 } - $byte_special_folder = { 7e 73 [4] 28 [4] 80 } - $patt_config = { 72 [3] 70 80 [3] 04 } - $patt_verify_hash = { 7e [3] 04 6f [3] 0a 6f [3] 0a 74 [3] 01 } - - condition: - 6 of them and #patt_config >= 10 -} diff --git a/data/yara/CAPE/Quickbind.yar b/data/yara/CAPE/Quickbind.yar index 4acda8970fc..5ecf40b25cc 100644 --- a/data/yara/CAPE/Quickbind.yar +++ b/data/yara/CAPE/Quickbind.yar @@ -5,13 +5,14 @@ rule Quickbind description = "Quickbind" cape_type = "Quickbind Payload" strings: - $anti_appdirs = {E8 [4] 83 F8 0? 7? ?? E8} - $anti_procs_ram = {E8 [4] 83 F8 0? 7? ?? E8 [4] 3D (FF 0E | 00 0F | FF 16) 00 00} - $anti_ram = {E8 [4] 3D (FF 1F | 00 20 | 00 17 | FF 0E | FF 16 | FF 2F) 00 00} - $mutex_1 = {FF [1-5] 3D B7 00 00 00 74 [7-10] 25 89 00 00 00} - $mutex_2 = {FF 15 [4] 4? 89 C? 4? 85 C? 74 ?? FF 15 [4] 3D B7 00 00 00} - $mutex_3 = {FF 15 [4] 4? 89 44 24 ?? 4? 83 7C 24 ?? 00 74 ?? FF 15 [4] 3D B7 00 00 00} - $sleep = {B9 64 00 00 00 [0-7] FF} + $anti_appdirs = {E8 [4] 83 F8 0? 7? ?? E8} + $anti_procs_ram = {E8 [4] 83 F8 0? 7? ?? E8 [4] 3D (FF 0E | 00 0F | FF 16) 00 00} + $anti_procs = {4C 89 F1 [0-9] FF D3 83 7C 24 ?? (03 | 07)} + $anti_ram = {E8 [4] 3D (FF 1F | 00 20 | 00 17 | FF 0E | FF 16 | FF 2F) 00 00} + $sleep = {B9 64 00 00 00 [0-7] FF} + $mutex_api = "CreateMutexW" + $mutex_error = {FF [1-5] 3D B7 00 00 00} condition: - all of ($anti_*) and 1 of ($mutex_*) and $sleep + //any of them + 3 of ($anti_*) and all of ($mutex_*) and $sleep } diff --git a/data/yara/CAPE/SmokeLoader.yar b/data/yara/CAPE/SmokeLoader.yar index f593544fe5c..988425e5421 100644 --- a/data/yara/CAPE/SmokeLoader.yar +++ b/data/yara/CAPE/SmokeLoader.yar @@ -7,8 +7,8 @@ rule SmokeLoader strings: $rc4_decrypt64 = {41 8D 41 01 44 0F B6 C8 42 0F B6 [2] 41 8D 04 12 44 0F B6 D0 42 8A [2] 42 88 [2] 42 88 [2] 42 0F B6 [2] 03 CA 0F B6 C1 8A [2] 30 0F 48 FF C7 49 FF CB 75} $rc4_decrypt32 = {47 B9 FF 00 00 00 23 F9 8A 54 [2] 0F B6 C2 03 F0 23 F1 8A 44 [2] 88 44 [2] 88 54 [2] 0F B6 4C [2] 0F B6 C2 03 C8 81 E1 FF 00 00 00 8A 44 [2] 30 04 2B 43 3B 9C 24 [4] 72 C0} - $fetch_c2_64 = {00 48 8D 05 [3] FF 48 8B CB 48 8B 14 D0 48 8B 5C 24 ?? 48 83 C4 20 5F E9} + $fetch_c2_64 = {74 ?? B? E8 03 00 00 B9 58 02 00 00 FF [5] 48 FF C? 
75 F0 [6-10] 48 8D 05} $fetch_c2_32 = {8B 96 [2] (00|01) 00 8B CE 5E 8B 14 95 [4] E9} condition: - 2 of them + 2 of them } diff --git a/data/yara/CAPE/Socks5Systemz.yar b/data/yara/CAPE/Socks5Systemz.yar index 6c11a120f6d..603cf9c4c87 100644 --- a/data/yara/CAPE/Socks5Systemz.yar +++ b/data/yara/CAPE/Socks5Systemz.yar @@ -8,11 +8,12 @@ rule Socks5Systemz strings: $chunk1 = {0F B6 84 8A [4] E9 [3] (00|FF)} $chunk2 = {0F B6 04 8D [4] E9 [3] (00|FF)} - $chunk3 = {0F B6 04 8D [4] E9 [3] (00|FF)} - $chunk4 = {0F B6 04 8D [4] E9 [3] (00|FF)} - $chunk5 = {66 0F 6F 05 [4] E9 [3] (00|FF)} - $chunk6 = {F0 0F B1 95 [4] E9 [3] (00|FF)} - $chunk7 = {83 FA 04 E9 [3] (00|FF)} + $chunk3 = {66 0F 6F 05 [4] E9 [3] (00|FF)} + $chunk4 = {F0 0F B1 95 [4] E9 [3] (00|FF)} + $chunk5 = {83 FA 04 E9 [3] (00|FF)} + $chunk6 = {8A 04 8D [4] E9 [3] (00|FF)} + $chunk7 = {83 C4 04 83 C4 04 E9} + $chunk8 = {83 C2 04 87 14 24 5C E9} condition: - uint16(0) == 0x5A4D and 6 of them + uint16(0) == 0x5A4D and 5 of them } diff --git a/data/yara/CAPE/XWorm.yar b/data/yara/CAPE/XWorm.yar deleted file mode 100644 index 76e401a3e47..00000000000 --- a/data/yara/CAPE/XWorm.yar +++ /dev/null @@ -1,46 +0,0 @@ -rule XWorm { - meta: - author = "ditekSHen" - description = "Detects XWorm" - cape_type = "XWorm Payload" - strings: - $x1 = "XWorm " wide nocase - $x2 = /XWorm\s(V|v)\d+\.\d+/ fullword wide - $s1 = "RunBotKiller" fullword wide - $s2 = "XKlog.txt" fullword wide - $s3 = /(shell|reg)fuc/ fullword wide - $s4 = "closeshell" fullword ascii - $s5 = { 62 00 79 00 70 00 73 00 73 00 00 ?? 63 00 61 00 6c 00 6c 00 75 00 61 00 63 00 00 ?? 73 00 63 00 } - $s6 = { 44 00 44 00 6f 00 73 00 54 00 00 ?? 43 00 69 00 6c 00 70 00 70 00 65 00 72 00 00 ?? 50 00 45 00 } - $s7 = { 69 00 6e 00 6a 00 52 00 75 00 6e 00 00 ?? 73 00 74 00 61 00 72 00 74 00 75 00 73 00 62 } - $s8 = { 48 6f 73 74 00 50 6f 72 74 00 75 70 6c 6f 61 64 65 72 00 6e 61 6d 65 65 65 00 4b 45 59 00 53 50 4c 00 4d 75 74 65 78 78 00 } - $v2_1 = "PING!" fullword wide - $v2_2 = "Urlhide" fullword wide - $v2_3 = /PC(Restart|Shutdown)/ fullword wide - $v2_4 = /(Start|Stop)(DDos|Report)/ fullword wide - $v2_5 = /Offline(Get|Keylogger)/ wide - $v2_6 = "injRun" fullword wide - $v2_7 = "Xchat" fullword wide - $v2_8 = "UACFunc" fullword ascii wide - condition: - uint16(0) == 0x5a4d and ((1 of ($x*) and (3 of ($s*) or 3 of ($v2*))) or 6 of them) -} - -rule xworm_kingrat { - meta: - author = "jeFF0Falltrades" - cape_type = "XWorm payload" - strings: - $str_xworm = "xworm" wide ascii nocase - $str_xwormmm = "Xwormmm" wide ascii - $str_xclient = "XClient" wide ascii - $str_default_log = "\\Log.tmp" wide ascii - $str_create_proc = "/create /f /RL HIGHEST /sc minute /mo 1 /t" wide ascii - $str_ddos_start = "StartDDos" wide ascii - $str_ddos_stop = "StopDDos" wide ascii - $str_timeout = "timeout 3 > NUL" wide ascii - $byte_md5_hash = { 7e [3] 04 28 [3] 06 6f } - $patt_config = { 72 [3] 70 80 [3] 04 } - condition: - 5 of them and #patt_config >= 7 - } diff --git a/data/yara/CAPE/Zloader.yar b/data/yara/CAPE/Zloader.yar index fa82d11d034..362941020c4 100644 --- a/data/yara/CAPE/Zloader.yar +++ b/data/yara/CAPE/Zloader.yar @@ -8,7 +8,7 @@ rule Zloader strings: $rc4_init = {31 [1-3] 66 C7 8? 00 01 00 00 00 00 90 90 [0-5] 8? [5-90] 00 01 00 00 [0-15] (74|75)} $decrypt_conf = {83 C4 04 84 C0 74 5? 
E8 [4] E8 [4] E8 [4] E8 [4] ?8 [4] ?8 [4] ?8} - $decrypt_conf_1 = {48 8d [5] [0-6] e8 [4] 48 [3-4] 48 [3-4] 48 [6] E8} + $decrypt_conf_1 = {48 8d [5-11] e8 [4] 48 [3-4] 48 [3-4] 48 [6] E8} $decrypt_conf_2 = {48 8d [5] 4? [5] e8 [4] 48 [3-4] 48 8d [5] E8 [4] 48} $decrypt_key_1 = {66 89 C2 4? 8D 0D [3] 00 4? B? FC 03 00 00 E8 [4] 4? 83 C4 [1-2] C3} $decrypt_key_2 = {48 8d 0d [3] 00 66 89 ?? 4? 89 F0 4? [2-5] E8 [4-5] 4? 83 C4} diff --git a/dev_utils/elasticsearchdb.py b/dev_utils/elasticsearchdb.py index 6b92867ec01..defcfca51b6 100644 --- a/dev_utils/elasticsearchdb.py +++ b/dev_utils/elasticsearchdb.py @@ -92,7 +92,7 @@ def get_calls_index(): def delete_analysis_and_related_calls(task_id: str): analyses = elastic_handler.search(index=get_analysis_index(), query=get_query_by_info_id(task_id))["hits"]["hits"] if analyses: - log.debug("Deleting analysis data for Task %s" % task_id) + log.debug("Deleting analysis data for Task %s", task_id) for analysis in analyses: analysis = analysis["_source"] for process in analysis["behavior"].get("processes", []): @@ -100,7 +100,7 @@ def delete_analysis_and_related_calls(task_id: str): elastic_handler.delete_by_query(index=get_calls_index(), body={"query": {"match": {"_id": call}}}) elastic_handler.delete_by_query(index=get_analysis_index(), body={"query": get_query_by_info_id(task_id)}) - log.debug("Deleted previous ElasticsearchDB data for Task %s" % task_id) + log.debug("Deleted previous ElasticsearchDB data for Task %s", task_id) def scroll(scroll_id: str) -> dict: diff --git a/dev_utils/mongo_hooks.py b/dev_utils/mongo_hooks.py index 0f794d19520..6270a86763a 100644 --- a/dev_utils/mongo_hooks.py +++ b/dev_utils/mongo_hooks.py @@ -1,11 +1,12 @@ import itertools import logging -from pymongo import UpdateOne +from pymongo import UpdateOne, errors from dev_utils.mongodb import ( mongo_bulk_write, mongo_delete_data, + mongo_delete_data_range, mongo_delete_many, mongo_find, mongo_find_one, @@ -82,8 +83,12 @@ def normalize_files(report): request = normalize_file(file_dict, report["info"]["id"]) if request: requests.append(request) - if requests: - mongo_bulk_write(FILES_COLL, requests, ordered=False) + + try: + if requests: + mongo_bulk_write(FILES_COLL, requests, ordered=False) + except errors.OperationFailure as exc: + log.error("Mongo hook 'normalize_files' failed with code %d: %s", exc.code, exc) return report @@ -150,6 +155,24 @@ def remove_task_references_from_files(task_ids): ) +@mongo_hook(mongo_delete_data_range, "analysis") +def remove_task_references_from_files_range(*, range_start: int = 0, range_end: int = 0): + """Remove the given task_ids from the TASK_IDS_KEY field on "files" + documents that were referenced by those tasks that are being deleted. + """ + range_query = {} + if range_start > 0: + range_query["$gte"] = range_start + if range_end > 0: + range_query["$lt"] = range_end + if range_query: + mongo_update_many( + FILES_COLL, + {TASK_IDS_KEY: {"$elemMatch": range_query}}, + {"$pull": {TASK_IDS_KEY: range_query}}, + ) + + def delete_unused_file_docs(): """Delete entries in the FILES_COLL collection that are no longer referenced by any analysis tasks. This should typically be invoked @@ -165,6 +188,7 @@ def collect_file_dicts(report) -> itertools.chain: """Return an iterable containing all of the candidates for files from various parts of the report to be normalized. 
""" + # ToDo extend to self extract file_dicts = [] target_file = report.get("target", {}).get("file", None) if target_file: @@ -172,4 +196,6 @@ def collect_file_dicts(report) -> itertools.chain: file_dicts.append(report.get("dropped", None) or []) file_dicts.append(report.get("CAPE", {}).get("payloads", None) or []) file_dicts.append(report.get("procdump", None) or []) + if report.get("suricata", {}).get("files", []): + file_dicts.append(list(filter(None, [file_info.get("file_info", []) for file_info in report.get("suricata", {}).get("files", [])]))) return itertools.chain.from_iterable(file_dicts) diff --git a/dev_utils/mongodb.py b/dev_utils/mongodb.py index 3debefdf1ba..cb32967087e 100644 --- a/dev_utils/mongodb.py +++ b/dev_utils/mongodb.py @@ -1,8 +1,11 @@ import collections import functools +import itertools import logging import time -from typing import Callable, Sequence, Union +from typing import Any, Callable, Sequence + +from bson import ObjectId from lib.cuckoo.common.config import Config @@ -75,6 +78,7 @@ def mongo_hook(mongo_funcs, collection): mongo_find, mongo_find_one, mongo_delete_data, + mongo_delete_data_range, ), f"{mongo_func} can not have hooks applied" def decorator(f): @@ -186,31 +190,94 @@ def mongo_drop_database(database: str): conn.drop_database(database) -def mongo_delete_data(task_ids: Union[int, Sequence[int]]): +def mongo_delete_data(task_ids: int | Sequence[int]) -> None: + """Delete the specified task or tasks.""" try: if isinstance(task_ids, int): task_ids = [task_ids] - analyses_tmp = [] - found_task_ids = [] - tasks = mongo_find("analysis", {"info.id": {"$in": task_ids}}, {"behavior.processes.calls": 1, "info.id": 1}) - - for task in tasks or []: - for process in task.get("behavior", {}).get("processes", []): - if process.get("calls"): - mongo_delete_many("calls", {"_id": {"$in": process["calls"]}}) - analyses_tmp.append(task["_id"]) - task_id = task.get("info", {}).get("id", None) - if task_id is not None: - found_task_ids.append(task_id) - - if analyses_tmp: - mongo_delete_many("analysis", {"_id": {"$in": analyses_tmp}}) - if found_task_ids: - for hook in hooks[mongo_delete_data]["analysis"]: - hook(found_task_ids) + if task_ids: + mongo_delete_calls(task_ids=task_ids) + mongo_delete_many("analysis", {"info.id": {"$in": task_ids}}) + for hook in hooks[mongo_delete_data]["analysis"]: + hook(task_ids) + except Exception as e: + log.exception(e) + + +def mongo_delete_data_range(*, range_start: int = 0, range_end: int = 0) -> None: + """Delete tasks in a specific range.""" + INFO_ID = "info.id" + try: + info_id_query = {} + if range_start > 0: + info_id_query["$gte"] = range_start + if range_end > 0: + info_id_query["$lt"] = range_end + if info_id_query: + mongo_delete_calls_by_task_id_in_range(range_start=range_start, range_end=range_end) + mongo_delete_many("analysis", {INFO_ID: info_id_query}) + for hook in hooks[mongo_delete_data_range]["analysis"]: + hook(range_start=range_start, range_end=range_end) except Exception as e: - log.error(e, exc_info=True) + log.exception(e) + + +def mongo_delete_calls(task_ids: Sequence[int] | None) -> None: + """Delete calls by primary key. 
+ + This obtains the call IDs from the analysis collection, which are then used + to delete calls in batches.""" + log.info("attempting to delete calls for %d tasks", len(task_ids)) + + query = {"info.id": {"$in": task_ids}} + projection = {"behavior.processes.calls": 1} + tasks: list[dict[str, Any]] = mongo_find("analysis", query, projection) + + if not tasks: + return + + delete_target_ids: list[ObjectId] = [] + + def get_call_ids_from_task(task: dict[str, Any]) -> list[ObjectId]: + """Get the call IDs from an analysis document.""" + processes = task.get("behavior", {}).get("processes", []) + calls = [proc.get("calls", []) for proc in processes] + return list(itertools.chain.from_iterable(calls)) + + for task in tasks: + delete_target_ids.extend(get_call_ids_from_task(task)) + + delete_target_ids = list(set(delete_target_ids)) + chunk_size = 1000 + for idx in range(0, len(delete_target_ids), chunk_size): + mongo_delete_many("calls", {"_id": {"$in": delete_target_ids[idx : idx + chunk_size]}}) + + +def mongo_delete_calls_by_task_id(task_ids: Sequence[int]) -> None: + """Delete calls by querying the calls collection by the task_id field. + + Note, the task_id field was added to the calls collection in 9999881. + Objects added to the collection prior to this will not be deleted. Use + mongo_delete_calls for backwards compatibility. + """ + mongo_delete_many("calls", {"task_id": {"$in": task_ids}}) + + +def mongo_delete_calls_by_task_id_in_range(*, range_start: int = 0, range_end: int = 0) -> None: + """Delete calls by querying the calls collection by the task_id field. + + Note, the task_id field was added to the calls collection in 9999881. + Objects added to the collection prior to this will be persist. Use + mongo_delete_calls for backwards compatibility. + """ + task_id_query: dict[str, int] = {} + if range_start > 0: + task_id_query["$gte"] = range_start + if range_end > 0: + task_id_query["$lt"] = range_end + if task_id_query: + mongo_delete_many("calls", {"task_id": task_id_query}) def mongo_is_cluster(): diff --git a/docs/book/src/installation/guest/cloning.rst b/docs/book/src/installation/guest/cloning.rst index 2fc73018a35..dc2a91ddaf7 100644 --- a/docs/book/src/installation/guest/cloning.rst +++ b/docs/book/src/installation/guest/cloning.rst @@ -14,3 +14,7 @@ There is a `Python command-line utility`_ available that can automate this proce The new virtual machine will also contain all of the settings of the original one, which is not good. Now you need to proceed by repeating the steps explained in :doc:`network`, :doc:`agent`, and :doc:`saving` for this new machine. + +One alternative to manually make the clones unique is to enable the disguise auxiliary module, windows_static_route and windows_static_route_gateway in conf/auxiliary.conf. +The auxiliary option is applicable to dnsmasq user which can't set the default gateway there because of the usage of an isolated routing in kvm. +One could run it once and snapshot to apply the modification or running the auxiliary module at every analysis. diff --git a/docs/book/src/installation/guest/creation.rst b/docs/book/src/installation/guest/creation.rst index f834ab8629b..945e63736ad 100644 --- a/docs/book/src/installation/guest/creation.rst +++ b/docs/book/src/installation/guest/creation.rst @@ -17,7 +17,7 @@ guide, so please refer to the virtualization software's official documentation. .. note:: For analysis purposes, it is recommended to use Windows 10 21H2 with User - Access Control disabled. CAPE also supports Windows 7. 
+ Access Control disabled. .. note:: diff --git a/docs/book/src/installation/guest/requirements.rst b/docs/book/src/installation/guest/requirements.rst index 528fa0a6eb8..a8c6806a8c7 100644 --- a/docs/book/src/installation/guest/requirements.rst +++ b/docs/book/src/installation/guest/requirements.rst @@ -18,7 +18,7 @@ Python is a strict requirement for the CAPE guest component (*analyzer*) to run version of Python can be 64-bit (x64). You can download the proper `Windows`_ / `Linux`_ installer from the `official website`_. -Python versions > 3.6 are preferred. +Python versions > 3.10 and < 3.13 are preferred. .. important:: When installing Python, it is recommended to select the `Add Python to PATH` option. And remove from that PATH `%USERPROFILE%\AppData\Local\Microsoft\WindowsApps` @@ -40,7 +40,7 @@ CAPE guest component. They include: The recommended installation is the execution of the following commands:: > python -m pip install --upgrade pip - > python -m pip install Pillow==9.5.0 + > python -m pip install Pillow These Python libraries are not strictly required by CAPE, but you are encouraged to install them if you want to have access to all available features. Make sure diff --git a/docs/book/src/installation/guest_physical/creation.rst b/docs/book/src/installation/guest_physical/creation.rst index 67d3b5c8f7d..949eb799231 100644 --- a/docs/book/src/installation/guest_physical/creation.rst +++ b/docs/book/src/installation/guest_physical/creation.rst @@ -15,7 +15,7 @@ guide, so please refer to the official documentation. chapter. .. note:: - For analysis purposes, you are recommended to use Windows 7 with User + For analysis purposes, you are recommended to use Windows 10 21H2 with User Access Control disabled. diff --git a/docs/book/src/installation/host/routing.rst b/docs/book/src/installation/host/routing.rst index 7ce89cf55d4..165e374f68c 100644 --- a/docs/book/src/installation/host/routing.rst +++ b/docs/book/src/installation/host/routing.rst @@ -79,6 +79,8 @@ Following is the list of available routing options. +-------------------------+--------------------------------------------------+ | :ref:`routing_tor` | Routes all traffic through Tor. | +-------------------------+--------------------------------------------------+ +| :ref:`routing_tun` | Route traffic though any "tun" interface | ++-------------------------+--------------------------------------------------+ | :ref:`routing_vpn` | Routes all traffic through one of perhaps | | | multiple pre-defined VPN endpoints. | +-------------------------+--------------------------------------------------+ @@ -86,6 +88,7 @@ Following is the list of available routing options. | | multiple pre-defined VPN endpoints. | +-------------------------+--------------------------------------------------+ + Using Per-Analysis Network Routing ================================== @@ -358,6 +361,18 @@ correctly. .. _`latest stable version of Tor here`: https://www.torproject.org/docs/debian.html.en + +.. _routing_tun: + +Tun Routing +^^^^^^^^^^^ +This allows you to route via any ``tun`` interface. You can pass the tun +interface name on demand per analysis. The interface name can be ``tunX`` +or ``tun_foo``. This assumes you create the tunnel inferface outside of CAPE. + +Then you set the ``route=tun_foo`` on the ``/apiv2/tasks/create/file/`` +API call. + .. _routing_vpn: VPN Routing @@ -454,13 +469,13 @@ VPN persistence & auto-restart `source`_:: 6. Reload the daemons: # sudo systemctl daemon-reload - 1. Start the OpenVPN service: + 7. 
Start the OpenVPN service: # sudo systemctl start openvpn - 2. Test if it is working by checking the external IP: + 8. Test if it is working by checking the external IP: # curl ifconfig.co - 3. If curl is not installed: + 9. If curl is not installed: # sudo apt install curl .. _`source`: https://www.ivpn.net/knowledgebase/linux/linux-autostart-openvpn-in-systemd-ubuntu/ @@ -568,7 +583,7 @@ Assuming you already have any VM running, to test the internet connection using $ sudo python3 router_manager.py -r internet -e --vm-name win1 --verbose $ sudo python3 router_manager.py -r internet -d --vm-name win1 --verbose -The ``-e`` flag is used to enable a route and ``-d`` is used to disable it. You can read more about all the options the utility has by running:: +The ``-e`` flag is used to enable a route and ``-d`` is used to disable it. You can read more about all the options the utility has by running:: $ sudo python3 router_manager.py -h diff --git a/docs/book/src/introduction/what.rst b/docs/book/src/introduction/what.rst index 3b61fffaa36..fe58fa65021 100644 --- a/docs/book/src/introduction/what.rst +++ b/docs/book/src/introduction/what.rst @@ -129,7 +129,7 @@ The following picture explains CAPE's main architecture: :align: center The recommended setup is *GNU/Linux* (Ubuntu LTS preferably) as the Host and -*Windows 7* as a Guest. +*Windows 10 21H2* as a Guest. Obtaining CAPE ================ diff --git a/docs/book/src/usage/monitor.rst b/docs/book/src/usage/monitor.rst index bda7a3bef41..05fba0606d2 100644 --- a/docs/book/src/usage/monitor.rst +++ b/docs/book/src/usage/monitor.rst @@ -198,6 +198,7 @@ Importing instruction traces into disassembler ============================================== It is possible to import CAPE's debugger output into a dissassembler. One example procedure is as follow: + * Highlight CFG in disassembler: .. code-block:: bash diff --git a/docs/book/src/usage/web.rst b/docs/book/src/usage/web.rst index da0b231e685..5dc42fc1ad7 100644 --- a/docs/book/src/usage/web.rst +++ b/docs/book/src/usage/web.rst @@ -90,7 +90,7 @@ To extend the capabilities of control what users can do check `Django migrations .. _`Django migrations a primer`: https://realpython.com/django-migrations-a-primer/ -In few works you need to add new fields to ``models.py`` and run ``poetry run python3 manage.py makemigrations`` +In few words you need to add new fields to ``models.py`` and run ``poetry run python3 manage.py makemigrations`` Exposed to internet @@ -99,7 +99,7 @@ Exposed to internet To get rid of many bots/scrappers so we suggest deploying this amazing project `Nginx Ultimate bad bot blocker`_, follow the README for installation steps * Enable web auth with captcha in `conf/web.conf` properly to avoid any brute force. -* Enable `ReCaptcha`_. You will need to set ``Public`` and ``Secret`` keys in ``web/web/settings.py`` +* Enable `ReCaptcha`_. You will need to set ``RECAPTCHA_PUBLIC_KEY`` and ``RECAPTCHA_PRIVATE_KEY`` keys in ``web/web/local_settings.py`` * You might need to "Verify" and set as "Stuff user" to your admin in the Django admin panel and add your domain to Sites in Django admin too * `AllAuth`_ aka SSO authentication with Google, Github, etc. `Video Tutorial`_ & `StackOverflow Example`_: * Note ``SITE_ID=1`` in django admin is ``example.com`` rename it to your domain to get it working @@ -132,8 +132,8 @@ Run .. 
code:: bash - sudo systemctl daemon-reload - sudo service cape-web restart + sudo systemctl daemon-reload + sudo service cape-web restart NGINX ----- @@ -141,9 +141,10 @@ Next, install NGINX and configure it to be a reverse proxy to Gunicorn. .. code:: bash - sudo apt install nginx + sudo apt install nginx -Create a configuration file at ``/etc/nginx/conf.d/cape`` +Create a configuration file at ``/etc/nginx/conf.d/cape``. +You might need to add ``include /etc/nginx/conf.d/*.conf;`` to ``http`` section inside of ``/etc/nginx/nginx.conf``. Replace ``www.capesandbox.com`` with your actual hostname. @@ -159,14 +160,14 @@ Replace ``www.capesandbox.com`` with your actual hostname. location ^~ /.well-known/acme-challenge/ { - default_type "text/plain"; - root /var/www/html; - break; - } + default_type "text/plain"; + root /var/www/html; + break; + } - location = /.well-known/acme-challenge/ { - return 404; - } + location = /.well-known/acme-challenge/ { + return 404; + } location / { proxy_pass http://127.0.0.1:8000; @@ -210,8 +211,8 @@ Now enable the nginx configuration by executing the following: .. code:: bash - rm -f /etc/nginx/conf.d/default - ln -s /etc/nginx/conf.d/cape /etc/nginx/conf.d/default + rm -f /etc/nginx/conf.d/default + ln -s /etc/nginx/conf.d/cape /etc/nginx/conf.d/default If you want to block users from changing their own email addresses, add the following `location` directive inside of the `server` directive: @@ -287,15 +288,14 @@ Let's Encrypt certificates If you would like to install a free Let's Encrypt certificate on your NGINX server, follow these steps, replacing ``capesandbox.com`` with your actual -hostname. +hostname. Use ``cape2.sh`` to install dependencies. But also ensure that instruction +are up to date with this https://certbot.eff.org/ Install `certbot`. .. code-block:: bash - sudo snap install core; sudo snap refresh core - sudo snap install --classic certbot - sudo ln -s /snap/bin/certbot /usr/bin/certbot + sudo cape2.sh letsencrypt Request the certificate @@ -474,3 +474,44 @@ an error like ``django.db.utils.OperationalError: no such table: auth_user`` may be raised. In order to solve it just execute the ``web/manage.py`` utility with the ``migrate`` option:: $ sudo -u cape poetry run python3 web/manage.py migrate + + +Slow web/API searches when using MongoDB as backend +--------------------------------------------------- + +* Check server lack of resources as memory ram, cpu or even slow hard drive. +* Possible issue is the lack of proper indexes. +* List your MongoDB indexes: + +.. code-block:: bash + + db.analysis.getIndexes() + +* Test your query with explaination. Replace with your search patterns: + +.. code-block:: bash + + db.analysis.find({"target.file.name": ""}).explain("executionStats") + +* Pay attention to stage value: + +.. code-block:: bash + + executionStages: { + stage: 'COLLSCAN', # <--- Full collection scan instead of index usage + +If you expect it to search in index, expected output should be like this: + +.. code-block:: bash + + executionStages: { + stage: 'FETCH', + + inputStage: { + stage: 'IXSCAN', # <--- Index usage + +* How to delete index + +.. code-block:: bash + + db.collection.dropIndexes("") diff --git a/extra/libvirt_installer.sh b/extra/libvirt_installer.sh index 4f127bbcbbe..a352b7a696f 100755 --- a/extra/libvirt_installer.sh +++ b/extra/libvirt_installer.sh @@ -2,9 +2,9 @@ set -ex # run this via... 
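
A note on the "Slow web/API searches when using MongoDB as backend" section added to docs/book/src/usage/web.rst above: when explain("executionStats") reports a COLLSCAN, the usual remedy is to create an index on the field being searched. Below is a minimal pymongo sketch; the host, port, and database name are assumptions, so take the real values from the [mongodb] section of conf/reporting.conf.

```python
from pymongo import MongoClient

# Assumed host/port/database name; check conf/reporting.conf for the real values.
client = MongoClient("127.0.0.1", 27017)
analysis = client["cuckoo"]["analysis"]

# Index the field the slow query filters on (here target.file.name) so the
# query planner can switch from a full COLLSCAN to an IXSCAN.
index_name = analysis.create_index([("target.file.name", 1)])
print("Created index:", index_name)
print(analysis.index_information())
```

Re-running the find(...).explain("executionStats") query from the section above should then show an IXSCAN input stage instead of COLLSCAN.
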
-# cd /opt/CAPEv2/ ; sudo -u cape poetry run extra/libvirt_installer.sh +# cd /opt/CAPEv2/ ; sudo -u cape /etc/poetry/bin/poetry run extra/libvirt_installer.sh -LIB_VERSION=10.7.0 +LIB_VERSION=11.1.0 cd /tmp || return if [ ! -f v${LIB_VERSION}.zip ]; then diff --git a/extra/optional_dependencies.txt b/extra/optional_dependencies.txt index 86e36f7c36d..9e6b31ecff2 100644 --- a/extra/optional_dependencies.txt +++ b/extra/optional_dependencies.txt @@ -1,12 +1,15 @@ # This is aux requirements file to install all side dependencies -# Run: poetry run pip -r extra/optional_dependencies.txt +# Run: poetry run pip install -r extra/optional_dependencies.txt +# Those deps are not in pyproject due to their hardcoded dependencies and conflicts with another libraries. +# Those deps adds big value to specific tasks, but we can't satisfy all use cases. So end user MUST make it work by himself. ImageHash deepdiff flask flask-restful flask-sqlalchemy==3.0.5 -git+https://github.com/CAPESandbox/binGraph +git+https://github.com/CAPESandbox/binGraph # requires sudo apt install libgraphviz-dev git+https://github.com/CAPESandbox/httpreplay +pyasyncore git+https://github.com/CAPESandbox/socks5man git+https://github.com/DissectMalware/XLMMacroDeobfuscator.git git+https://github.com/DissectMalware/batch_deobfuscator @@ -23,4 +26,4 @@ urlextract==1.5.0 pdfminer==20191125 pg_activity python-tlsh -pyattck==7.1.2 +git+https://github.com/CAPESandbox/pyattck/ diff --git a/extra/yara_installer.sh b/extra/yara_installer.sh index ca5909bcfed..5ad6aa728ac 100755 --- a/extra/yara_installer.sh +++ b/extra/yara_installer.sh @@ -2,19 +2,15 @@ set -ex # run this via... -# cd /opt/CAPEv2/ ; sudo -u cape poetry run extra/yara_installer.sh +# cd /opt/CAPEv2/ ; sudo -u cape /etc/poetry/bin/poetry run extra/yara_installer.sh if [ ! -d /tmp/yara-python ]; then git clone --recursive https://github.com/VirusTotal/yara-python /tmp/yara-python fi -cd /tmp/yara-python +/etc/poetry/bin/poetry --directory /opt/CAPEv2 run bash -c "cd /tmp/yara-python && python setup.py build --enable-cuckoo --enable-magic --enable-profiling" +/etc/poetry/bin/poetry --directory /opt/CAPEv2 run pip install /tmp/yara-python -poetry --directory /opt/CAPEv2 run python setup.py build --enable-cuckoo --enable-magic --enable-profiling -poetry --directory /opt/CAPEv2 run pip install . - -cd .. 
- -if [ -d yara-python ]; then - rm -rf yara-python -fi \ No newline at end of file +if [ -d /tmp/yara-python ]; then + rm -rf /tmp/yara-python +fi diff --git a/installer/cape2.sh b/installer/cape2.sh old mode 100644 new mode 100755 index f514aa899a4..c7123ae986a --- a/installer/cape2.sh +++ b/installer/cape2.sh @@ -6,6 +6,9 @@ # Huge thanks to: @NaxoneZ @kevoreilly @ENZOK @wmetcalf @ClaudioWayne +# Ensure non-interactive mode for apt commands globally to prevent prompts during automated installations +export DEBIAN_FRONTEND=noninteractive + # Static values # Where to place everything # CAPE TcpDump will sniff this interface @@ -20,10 +23,12 @@ PASSWD="SuperPuperSecret" DIST_MASTER_IP="192.168.1.1" USER="cape" # https://nginx.org/en/linux_packages.html -nginx_version=1.25.3 +nginx_version=1.27.3 prometheus_version=2.20.1 grafana_version=7.1.5 node_exporter_version=1.0.1 +# https://github.com/crowdsecurity/cs-nginx-bouncer/releases/download/v$CSNB_VERSION/crowdsec-nginx-bouncer.tgz +CSNB_VERSION="1.0.8" # if set to 1, enables snmpd and other various bits to support # monitoring via LibreNMS librenms_enable=0 @@ -53,13 +58,13 @@ librenms_megaraid_enable=0 # disabling this will result in the web interface being disabled MONGO_ENABLE=1 -DIE_VERSION="3.09" +DIE_VERSION="3.10" TOR_SOCKET_TIMEOUT="60" # if a config file is present, read it in if [ -f "./cape-config.sh" ]; then - . ./cape-config.sh + . ./cape-config.sh fi UBUNTU_VERSION=$(lsb_release -rs) @@ -69,21 +74,7 @@ ARCH="$(dpkg --print-architecture)" function issues() { cat << EOI -Problems with PyOpenSSL? - sudo rm -rf /usr/local/lib/python3.8/dist-packages/OpenSSL/ - sudo rm -rf /home/${USER}/.local/lib/python3.8/site-packages/OpenSSL/ - sudo apt-get install --reinstall python-openssl - -Problem with PIP? - sudo python -m pip3 uninstall pip3 && sudo apt-get install python3-pip --reinstall - -Problem with pillow: - * ValueError: jpeg is required unless explicitly disabled using --disable-jpeg, aborting - * ValueError: zlib is required unless explicitly disabled using --disable-zlib, aborting -Solution: - # https://askubuntu.com/a/1094768 - # you may need to adjust version of libjpeg-turbo8 - sudo apt-get install zlib1g-dev libjpeg-turbo8-dev libjpeg-turbo8=1.5.2-0ubuntu5 + No known problems yet EOI } @@ -143,166 +134,167 @@ EndOfHelp function install_crowdsecurity() { echo "[+] Install crowdsecurity" - sudo apt-get install bash gettext whiptail curl wget + sudo apt-get install -y bash gettext whiptail curl wget cd /tmp || return if [ ! -d crowdsec-release.tgz ]; then curl -s https://api.github.com/repos/crowdsecurity/crowdsec/releases/latest | grep browser_download_url| cut -d '"' -f 4 | wget -i - fi tar xvzf crowdsec-release.tgz + # ToDo fix this directory=$(ls | grep "crowdsec-v*") cd "$directory" || return sudo ./wizard.sh -i sudo cscli collections install crowdsecurity/nginx sudo systemctl reload crowdsec - install_docker - sudo cscli dashboard setup -l 127.0.0.1 -p 8448 + # install_docker + # sudo cscli dashboard setup -l 127.0.0.1 -p 8448 - wget https://github.com/crowdsecurity/cs-nginx-bouncer/releases/download/v0.0.4/cs-nginx-bouncer.tgz - tar xvzf cs-nginx-bouncer.tgz - directory=$(ls | grep "cs-nginx-bouncer*") - cd "$directory" || return + if [ ! 
-f crowdsec-nginx-bouncer-v$CSNB_VERSION ]; then + wget https://github.com/crowdsecurity/cs-nginx-bouncer/releases/download/v$CSNB_VERSION/crowdsec-nginx-bouncer.tgz + tar xvzf crowdsec-nginx-bouncer.tgz + fi + cd crowdsec-nginx-bouncer-v$CSNB_VERSION || return sudo ./install.sh } function install_docker() { echo "[+] Install docker" # https://www.digitalocean.com/community/tutorials/how-to-install-and-use-docker-on-ubuntu-20-04 - sudo apt-get install apt-transport-https ca-certificates curl software-properties-common + sudo apt-get install -y apt-transport-https ca-certificates curl software-properties-common curl -fsSL https://download.docker.com/linux/ubuntu/gpg | sudo gpg --dearmor -o /etc/apt/keyrings/docker.gpg --yes echo "deb [signed-by=/etc/apt/keyrings/docker.gpg arch=amd64] https://download.docker.com/linux/ubuntu $(lsb_release -cs) stable" > /etc/apt/sources.list.d/docker.list sudo apt-get update - sudo apt-get install docker-ce + sudo apt-get install -y docker-ce sudo usermod -aG docker ${USER} } function install_jemalloc() { - # https://zapier.com/engineering/celery-python-jemalloc/ if ! $(dpkg -l "libjemalloc*" | grep -q "ii libjemalloc"); then - apt-get install -f checkinstall curl build-essential jq autoconf libjemalloc-dev -y + sudo apt-get install -y libjemalloc-dev fi } function librenms_cron_config() { - echo '*/5 * * * * root /usr/bin/env PATH=/sbin:/bin:/usr/sbin:/usr/bin:/usr/local/sbin:/usr/local/bin /usr/local/bin/sneck -u 2> /dev/null > /dev/null' - echo '*/5 * * * * root /usr/bin/env PATH=/sbin:/bin:/usr/sbin:/usr/bin:/usr/local/sbin:/usr/local/bin /etc/snmp/extends/cape | /usr/local/bin/librenms_return_optimizer 2> /dev/null > /var/cache/cape.cache' - echo '*/5 * * * * root /usr/bin/env PATH=/sbin:/bin:/usr/sbin:/usr/bin:/usr/local/sbin:/usr/local/bin /etc/snmp/extends/smart -u' - echo '*/5 * * * * root /usr/bin/env PATH=/sbin:/bin:/usr/sbin:/usr/bin:/usr/local/sbin:/usr/local/bin /usr/local/bin/hv_monitor -c 2> /dev/null > /var/cache/hv_monitor.cache' - echo '*/5 * * * * root /usr/bin/env PATH=/sbin:/bin:/usr/sbin:/usr/bin:/usr/local/sbin:/usr/local/bin /etc/snmp/extends/osupdate 2> /dev/null > /var/cache/osupdate.extend' - echo '1 1 * * * root /bin/cat /sys/devices/virtual/dmi/id/board_serial > /etc/snmp/serial' + echo '*/5 * * * * root /usr/bin/env PATH=/sbin:/bin:/usr/sbin:/usr/bin:/usr/local/sbin:/usr/local/bin /usr/local/bin/sneck -u 2> /dev/null > /dev/null' + echo '*/5 * * * * root /usr/bin/env PATH=/sbin:/bin:/usr/sbin:/usr/bin:/usr/local/sbin:/usr/local/bin /etc/snmp/extends/cape | /usr/local/bin/librenms_return_optimizer 2> /dev/null > /var/cache/cape.cache' + echo '*/5 * * * * root /usr/bin/env PATH=/sbin:/bin:/usr/sbin:/usr/bin:/usr/local/sbin:/usr/local/bin /etc/snmp/extends/smart -u' + echo '*/5 * * * * root /usr/bin/env PATH=/sbin:/bin:/usr/sbin:/usr/bin:/usr/local/sbin:/usr/local/bin /usr/local/bin/hv_monitor -c 2> /dev/null > /var/cache/hv_monitor.cache' + echo '*/5 * * * * root /usr/bin/env PATH=/sbin:/bin:/usr/sbin:/usr/bin:/usr/local/sbin:/usr/local/bin /etc/snmp/extends/osupdate 2> /dev/null > /var/cache/osupdate.extend' + echo '1 1 * * * root /bin/cat /sys/devices/virtual/dmi/id/board_serial > /etc/snmp/serial' } function librenms_sneck_config() { - if [ "$librenms_ipmi" -ge 1 ]; then - echo 'ipmi_sensor|/usr/lib/nagios/plugins/check_ipmi_sensor --nosel' - else - echo '#ipmi_sensor|/usr/lib/nagios/plugins/check_ipmi_sensor --nosel' - fi - echo 'virtqemud_procs|/usr/lib/nagios/plugins/check_procs --ereg-argument-array "^/usr/sbin/virtqemud" 
1:1' - echo 'cape_procs|/usr/lib/nagios/plugins/check_procs --ereg-argument-array "poetry.*bin/python cuckoo.py" 1:1' - echo 'cape_processor_procs|/usr/lib/nagios/plugins/check_procs --ereg-argument-array "poetry.*bin/python process.py" 1:' - echo 'cape_rooter_procs|/usr/lib/nagios/plugins/check_procs --ereg-argument-array "poetry.*bin/python rooter.py" 1' - if [ "$clamav_enable" -ge 1 ]; then - echo "clamav|/usr/lib/nagios/plugins/check_clamav -w $librenms_clamav_warn -c $librenms_clamav_crit" - else - echo "#clamav|/usr/lib/nagios/plugins/check_clamav -w $librenms_clamav_warn -c $librenms_clamav_crit" - fi - if [ "$MONGO_ENABLE" -ge 1 ]; then - echo "mongodb|/usr/lib/nagios/plugins/check_mongodb.py $librenms_mongo_args" - echo 'cape_web_procs|/usr/lib/nagios/plugins/check_procs --ereg-argument-array "poetry.*bin/python manage.py" 1:' - else - echo "#mongodb|/usr/lib/nagios/plugins/check_mongodb.py $librenms_mongo_args" - echo 'cape_web_procs|/usr/lib/nagios/plugins/check_procs --ereg-argument-array "poetry.*bin/python manage.py" 0' - fi + if [ "$librenms_ipmi" -ge 1 ]; then + echo 'ipmi_sensor|/usr/lib/nagios/plugins/check_ipmi_sensor --nosel' + else + echo '#ipmi_sensor|/usr/lib/nagios/plugins/check_ipmi_sensor --nosel' + fi + echo 'virtqemud_procs|/usr/lib/nagios/plugins/check_procs --ereg-argument-array "^/usr/sbin/virtqemud" 1:1' + echo 'cape_procs|/usr/lib/nagios/plugins/check_procs --ereg-argument-array "poetry.*bin/python cuckoo.py" 1:1' + echo 'cape_processor_procs|/usr/lib/nagios/plugins/check_procs --ereg-argument-array "poetry.*bin/python process.py" 1:' + echo 'cape_rooter_procs|/usr/lib/nagios/plugins/check_procs --ereg-argument-array "poetry.*bin/python rooter.py" 1' + if [ "$clamav_enable" -ge 1 ]; then + echo "clamav|/usr/lib/nagios/plugins/check_clamav -w $librenms_clamav_warn -c $librenms_clamav_crit" + else + echo "#clamav|/usr/lib/nagios/plugins/check_clamav -w $librenms_clamav_warn -c $librenms_clamav_crit" + fi + if [ "$MONGO_ENABLE" -ge 1 ]; then + echo "mongodb|/usr/lib/nagios/plugins/check_mongodb.py $librenms_mongo_args" + echo 'cape_web_procs|/usr/lib/nagios/plugins/check_procs --ereg-argument-array "poetry.*bin/python manage.py" 1:' + else + echo "#mongodb|/usr/lib/nagios/plugins/check_mongodb.py $librenms_mongo_args" + echo 'cape_web_procs|/usr/lib/nagios/plugins/check_procs --ereg-argument-array "poetry.*bin/python manage.py" 0' + fi } function librenms_snmpd_config() { - echo "rocommunity $snmp_community" - echo - echo "syslocation $snmp_location" - echo "syscontact $snmp_contact" - echo - if [ "$librenms_megaraid_enable" -ge 1 ]; then - echo "pass .1.3.6.1.4.1.3582 /usr/sbin/lsi_mrdsnmpmain" - else - echo "#pass .1.3.6.1.4.1.3582 /usr/sbin/lsi_mrdsnmpmain" - fi - echo - echo 'extend distro /etc/snmp/extends/distro' - echo "extend hardware '/bin/cat /sys/devices/virtual/dmi/id/product_name'" - echo "extend manufacturer '/bin/cat /sys/devices/virtual/dmi/id/sys_vendor'" - echo "extend serial '/bin/cat /etc/snmp/serial'" - echo - echo "extend cape /bin/cat /var/cache/cape.cache" - echo "extend smart /bin/cat /var/cache/smart" - echo "extend sneck /usr/bin/env PATH=/sbin:/bin:/usr/sbin:/usr/bin:/usr/local/sbin:/usr/local/bin /usr/local/bin/sneck -c -b" - echo "extend hv-monitor /bin/cat /var/cache/hv_monitor.cache" - echo "extend osupdate /bin/cat /var/cache/osupdate.extend" - if [ "$librenms_mdadm_enable" -ge 1 ]; then - echo "extend mdadm /usr/bin/env PATH=/sbin:/bin:/usr/sbin:/usr/bin:/usr/local/sbin:/usr/local/bin /etc/snmp/extends/mdadm" - else - echo 
"#extend mdadm /usr/bin/env PATH=/sbin:/bin:/usr/sbin:/usr/bin:/usr/local/sbin:/usr/local/bin /etc/snmp/extends/mdadm" - fi - echo - if [ ! -z "$snmp_agentaddress" ]; then - echo "agentaddress $snmp_agentaddress" - fi + echo "rocommunity $snmp_community" + echo + echo "syslocation $snmp_location" + echo "syscontact $snmp_contact" + echo + if [ "$librenms_megaraid_enable" -ge 1 ]; then + echo "pass .1.3.6.1.4.1.3582 /usr/sbin/lsi_mrdsnmpmain" + else + echo "#pass .1.3.6.1.4.1.3582 /usr/sbin/lsi_mrdsnmpmain" + fi + echo + echo 'extend distro /etc/snmp/extends/distro' + echo "extend hardware '/bin/cat /sys/devices/virtual/dmi/id/product_name'" + echo "extend manufacturer '/bin/cat /sys/devices/virtual/dmi/id/sys_vendor'" + echo "extend serial '/bin/cat /etc/snmp/serial'" + echo + echo "extend cape /bin/cat /var/cache/cape.cache" + echo "extend smart /bin/cat /var/cache/smart" + echo "extend sneck /usr/bin/env PATH=/sbin:/bin:/usr/sbin:/usr/bin:/usr/local/sbin:/usr/local/bin /usr/local/bin/sneck -c -b" + echo "extend hv-monitor /bin/cat /var/cache/hv_monitor.cache" + echo "extend osupdate /bin/cat /var/cache/osupdate.extend" + if [ "$librenms_mdadm_enable" -ge 1 ]; then + echo "extend mdadm /usr/bin/env PATH=/sbin:/bin:/usr/sbin:/usr/bin:/usr/local/sbin:/usr/local/bin /etc/snmp/extends/mdadm" + else + echo "#extend mdadm /usr/bin/env PATH=/sbin:/bin:/usr/sbin:/usr/bin:/usr/local/sbin:/usr/local/bin /etc/snmp/extends/mdadm" + fi + echo + if [ ! -z "$snmp_agentaddress" ]; then + echo "agentaddress $snmp_agentaddress" + fi } function install_librenms() { - echo "[+] Install librenms" - if [ "$librenms_enable" -ge 1 ]; then - echo "Enabling stuff for LibreNMS" - apt-get install -y zlib1g-dev cpanminus libjson-perl libfile-readbackwards-perl \ - libjson-perl libconfig-tiny-perl libdbi-perl libfile-slurp-perl \ - libstatistics-lite-perl libdbi-perl libdbd-pg-perl monitoring-plugins \ - monitoring-plugins-contrib monitoring-plugins-standard dmidecode wget snmpd - cpanm HV::Monitor Monitoring::Sneck - mkdir -p /etc/snmp/extends - wget https://raw.githubusercontent.com/librenms/librenms-agent/master/snmp/distro -O /etc/snmp/extends/distro - wget https://raw.githubusercontent.com/librenms/librenms-agent/master/snmp/cape -O /etc/snmp/extends/cape - wget https://raw.githubusercontent.com/librenms/librenms-agent/master/snmp/smart -O /etc/snmp/extends/smart - wget https://raw.githubusercontent.com/librenms/librenms-agent/master/snmp/osupdate -O /etc/snmp/extends/osupdate - chmod +x /etc/snmp/extends/distro /etc/snmp/extends/cape /etc/snmp/extends/smart /etc/snmp/extends/osupdate - - if [ "$librenms_mdadm_enable" -ge 1 ]; then - apt-get install -y jq - wget https://raw.githubusercontent.com/librenms/librenms-agent/master/snmp/mdadm -O /etc/snmp/extends/mdadm - chmod +x /etc/snmp/extends/mdadm - fi - - /etc/snmp/extends/smart -g > /etc/snmp/extends/smart.config - echo "You will want to check /etc/snmp/extends/smart.config to see if it looks good." 
- echo "See /etc/snmp/extends/smart for more info" - - cat /sys/devices/virtual/dmi/id/board_serial > /etc/snmp/serial - - librenms_sneck_config > /usr/local/etc/sneck.conf - librenms_cron_config > /etc/cron.d/librenms_auto - librenms_snmpd_config > /etc/snmp/snmpd.conf - - systemctl enable snmpd.service - systemctl restart snmpd.service - systemctl restart cron.service - else - echo "Skipping stuff for LibreNMS" - fi + if [ "$librenms_enable" -ge 1 ]; then + echo "[+] Install librenms" + echo "Enabling stuff for LibreNMS" + sudo apt-get install -y zlib1g-dev cpanminus libjson-perl libfile-readbackwards-perl \ + libjson-perl libconfig-tiny-perl libdbi-perl libfile-slurp-perl \ + libstatistics-lite-perl libdbi-perl libdbd-pg-perl monitoring-plugins \ + monitoring-plugins-contrib monitoring-plugins-standard dmidecode wget snmpd + cpanm HV::Monitor Monitoring::Sneck + mkdir -p /etc/snmp/extends + wget https://raw.githubusercontent.com/librenms/librenms-agent/master/snmp/distro -O /etc/snmp/extends/distro + wget https://raw.githubusercontent.com/librenms/librenms-agent/master/snmp/cape -O /etc/snmp/extends/cape + wget https://raw.githubusercontent.com/librenms/librenms-agent/master/snmp/smart -O /etc/snmp/extends/smart + wget https://raw.githubusercontent.com/librenms/librenms-agent/master/snmp/osupdate -O /etc/snmp/extends/osupdate + chmod +x /etc/snmp/extends/distro /etc/snmp/extends/cape /etc/snmp/extends/smart /etc/snmp/extends/osupdate + + if [ "$librenms_mdadm_enable" -ge 1 ]; then + sudo apt-get install -y jq + wget https://raw.githubusercontent.com/librenms/librenms-agent/master/snmp/mdadm -O /etc/snmp/extends/mdadm + chmod +x /etc/snmp/extends/mdadm + fi + + /etc/snmp/extends/smart -g > /etc/snmp/extends/smart.config + echo "You will want to check /etc/snmp/extends/smart.config to see if it looks good." + echo "See /etc/snmp/extends/smart for more info" + + cat /sys/devices/virtual/dmi/id/board_serial > /etc/snmp/serial + + librenms_sneck_config > /usr/local/etc/sneck.conf + librenms_cron_config > /etc/cron.d/librenms_auto + librenms_snmpd_config > /etc/snmp/snmpd.conf + + systemctl enable snmpd.service + systemctl restart snmpd.service + systemctl restart cron.service + else + echo "Skipping stuff for LibreNMS" + fi } function install_modsecurity() { echo "[+] Install modsecurity" # Tested on nginx 1.(16|18).X Based on https://www.nginx.com/blog/compiling-and-installing-modsecurity-for-open-source-nginx/ with fixes - apt-get install -y apt-utils autoconf automake build-essential git libcurl4-openssl-dev libgeoip-dev liblmdb-dev libpcre++-dev libtool libxml2-dev libyajl-dev pkgconf wget zlib1g-dev - git clone --depth 1 -b v3/master --single-branch https://github.com/SpiderLabs/ModSecurity + sudo apt-get install -y git g++ apt-utils autoconf automake build-essential libcurl4-openssl-dev libgeoip-dev liblmdb-dev libpcre2-dev libtool libxml2-dev libyajl-dev pkgconf zlib1g-dev + git clone --depth 500 -b v3/master --single-branch https://github.com/SpiderLabs/ModSecurity cd ModSecurity || return git submodule init git submodule update ./build.sh - ./configure + ./configure --with-pcre2 make -j"$(nproc)" - checkinstall -D --pkgname="ModSecurity" --default + make install cd .. 
|| return git clone --depth 1 https://github.com/SpiderLabs/ModSecurity-nginx.git @@ -318,6 +310,12 @@ function install_modsecurity() { cd nginx-"$nginx_version" || return ./configure --with-compat --add-dynamic-module=../ModSecurity-nginx make modules + mkdir -p /usr/lib/nginx/modules + mkdir -p /usr/share/nginx/modules + mkdir -p /var/lib/nginx/body + mkdir -p /etc/nginx/cert + + cp objs/ngx_http_modsecurity_module.so /usr/lib/nginx/modules/ngx_http_modsecurity_module.so cp objs/ngx_http_modsecurity_module.so /usr/share/nginx/modules/ngx_http_modsecurity_module.so cd .. || return @@ -328,17 +326,16 @@ function install_modsecurity() { sed -i 's/SecRuleEngine DetectionOnly/SecRuleEngine On/' /etc/nginx/modsec/modsecurity.conf echo 'Include "/etc/nginx/modsec/modsecurity.conf"' >/etc/nginx/modsec/main.conf + # ToDo echo ''' - - 1. Add next line to the top of /etc/nginx/nginx.conf - * load_module modules/ngx_http_modsecurity_module.so; - 2. Add next 2 rules to enabled-site under server section + 1. Add next 2 rules to enabled-site under server section modsecurity on; modsecurity_rules_file /etc/nginx/modsec/main.conf; ''' } + function install_nginx() { echo "[+] Install nginx" if [ ! -d nginx-$nginx_version ]; then @@ -348,15 +345,20 @@ function install_nginx() { tar xzvf nginx-$nginx_version.tar.gz fi - # PCRE version 8.42 - wget https://ftp.exim.org/pub/pcre/pcre-8.45.tar.gz && tar xzvf pcre-8.45.tar.gz - - # zlib version 1.2.11 - wget https://www.zlib.net/zlib-1.3.tar.gz && tar xzvf zlib-1.3.tar.gz + PCRE_VERSION="10.37" + OPENSSL_VERSION="3.4.0" + ZLIB_VERSION="1.3.1" + if [ ! -d pcre2-$PCRE_VERSION ]; then + wget https://ftp.exim.org/pub/pcre/pcre2-$PCRE_VERSION.tar.gz && tar xzvf pcre2-$PCRE_VERSION.tar.gz + fi - # OpenSSL version 3.2.0 - wget https://www.openssl.org/source/openssl-3.2.0.tar.gz && tar xzvf openssl-3.2.0.tar.gz + if [ ! -d zlib-$ZLIB_VERSION ]; then + wget https://www.zlib.net/zlib-$ZLIB_VERSION.tar.gz && tar xzvf zlib-$ZLIB_VERSION.tar.gz + fi + if [ !
-d openssl-$OPENSSL_VERSION ]; then + wget https://www.openssl.org/source/openssl-$OPENSSL_VERSION.tar.gz && tar xzvf openssl-$OPENSSL_VERSION.tar.gz + fi sudo add-apt-repository -y ppa:maxmind/ppa sudo apt-get update && sudo apt-get upgrade -y sudo apt-get install -y perl libperl-dev libgd3 libgd-dev libgeoip1 libgeoip-dev geoip-bin libxml2 libxml2-dev libxslt1.1 libxslt1-dev @@ -364,6 +366,7 @@ function install_nginx() { cd nginx-$nginx_version || return sudo cp man/nginx.8 /usr/share/man/man8 + # ToDo auto confirmation of overwrite sudo gzip /usr/share/man/man8/nginx.8 ls /usr/share/man/man8/ | grep nginx.8.gz @@ -383,14 +386,14 @@ function install_nginx() { --http-proxy-temp-path=/var/lib/nginx/proxy \ --http-scgi-temp-path=/var/lib/nginx/scgi \ --http-uwsgi-temp-path=/var/lib/nginx/uwsgi \ - --with-openssl=../openssl-3.2.0 \ + --with-openssl=../openssl-$OPENSSL_VERSION \ --with-openssl-opt=enable-ec_nistp_64_gcc_128 \ --with-openssl-opt=no-nextprotoneg \ --with-openssl-opt=no-weak-ssl-ciphers \ --with-openssl-opt=no-ssl3 \ - --with-pcre=../pcre-8.45 \ + --with-pcre=../pcre2-$PCRE_VERSION \ --with-pcre-jit \ - --with-zlib=../zlib-1.3 \ + --with-zlib=../zlib-$ZLIB_VERSION \ --with-compat \ --with-file-aio \ --with-threads \ @@ -434,11 +437,27 @@ function install_nginx() { sudo adduser --system --home /nonexistent --shell /bin/false --no-create-home --disabled-login --disabled-password --gecos "nginx user" --group nginx install_modsecurity + mkdir -p /var/lib/nginx/body + mkdir -p /etc/nginx/cert + + sudo openssl dhparam -out /etc/nginx/cert/dhparam.pem 2048 sudo mkdir -p /var/cache/nginx/client_temp /var/cache/nginx/fastcgi_temp /var/cache/nginx/proxy_temp /var/cache/nginx/scgi_temp /var/cache/nginx/uwsgi_temp sudo chmod 700 /var/cache/nginx/* sudo chown nginx:root /var/cache/nginx/* + sudo mkdir -p /etc/nginx/bots.d + sudo wget https://raw.githubusercontent.com/mitchellkrogza/nginx-ultimate-bad-bot-blocker/master/conf.d/globalblacklist.conf -O /etc/nginx/conf.d/globalblacklist.conf + sudo wget https://raw.githubusercontent.com/mitchellkrogza/nginx-ultimate-bad-bot-blocker/master/bots.d/blockbots.conf -O /etc/nginx/bots.d/blockbots.conf + sudo wget https://raw.githubusercontent.com/mitchellkrogza/nginx-ultimate-bad-bot-blocker/master/bots.d/ddos.conf -O /etc/nginx/bots.d/ddos.conf + sudo wget https://raw.githubusercontent.com/mitchellkrogza/nginx-ultimate-bad-bot-blocker/master/bots.d/whitelist-ips.conf -O /etc/nginx/bots.d/whitelist-ips.conf + sudo wget https://raw.githubusercontent.com/mitchellkrogza/nginx-ultimate-bad-bot-blocker/master/bots.d/whitelist-domains.conf -O /etc/nginx/bots.d/whitelist-domains.conf + sudo wget https://raw.githubusercontent.com/mitchellkrogza/nginx-ultimate-bad-bot-blocker/master/bots.d/blacklist-user-agents.conf -O /etc/nginx/bots.d/blacklist-user-agents.conf + sudo wget https://raw.githubusercontent.com/mitchellkrogza/nginx-ultimate-bad-bot-blocker/master/bots.d/custom-bad-referrers.conf -O /etc/nginx/bots.d/custom-bad-referrers.conf + sudo wget https://raw.githubusercontent.com/mitchellkrogza/nginx-ultimate-bad-bot-blocker/master/bots.d/blacklist-ips.conf -O /etc/nginx/bots.d/blacklist-ips.conf + sudo wget https://raw.githubusercontent.com/mitchellkrogza/nginx-ultimate-bad-bot-blocker/master/bots.d/bad-referrer-words.conf -O /etc/nginx/bots.d/bad-referrer-words.conf + + if [ ! 
-f /lib/systemd/system/nginx.service ]; then cat >> /lib/systemd/system/nginx.service << EOF [Unit] @@ -460,6 +479,9 @@ WantedBy=multi-user.target EOF fi + cp /etc/nginx/nginx.conf /etc/nginx/nginx.conf_backup + sed -i '1 i\load_module modules/ngx_http_modsecurity_module.so;' /etc/nginx/nginx.conf + sudo systemctl enable nginx.service sudo systemctl start nginx.service sudo systemctl is-enabled nginx.service @@ -501,7 +523,7 @@ server { } server { - if ($http_user_agent = "") { + if ($http_user_agent = "") { return 444; } # SSL configuration @@ -569,16 +591,17 @@ fi function install_letsencrypt(){ echo "[+] Install and configure letsencrypt" - sudo add-apt-repository ppa:certbot/certbot -y - sudo apt-get update - sudo apt-get install python3-certbot-nginx -y + sudo apt-get install -y python3 python3-venv libaugeas0 + sudo pip install certbot certbot-nginx --break-system-packages echo "server_name $1 www.$1;" > /etc/nginx/sites-available/"$1" + sudo ln -s /opt/certbot/bin/certbot /usr/bin/certbot sudo certbot --nginx -d "$1" -d www."$1" + } function install_fail2ban() { echo "[+] Installing fail2ban" - sudo apt-get install fail2ban -y + sudo apt-get install -y fail2ban sudo cp /etc/fail2ban/jail.conf /etc/fail2ban/jail.local sudo sed -i /etc/fail2ban/jail.local systemctl start fail2ban @@ -622,8 +645,8 @@ function redsocks2() { function distributed() { echo "[+] Configure distributed configuration" - sudo apt-get install uwsgi uwsgi-plugin-python3 nginx -y 2>/dev/null - sudo -u ${USER} bash -c 'poetry run pip install flask flask-restful flask-sqlalchemy requests' + sudo apt-get install -y uwsgi uwsgi-plugin-python3 nginx 2>/dev/null + sudo -u ${USER} bash -c '/etc/poetry/bin/poetry run pip install flask flask-restful flask-sqlalchemy requests' sudo cp /opt/CAPEv2/uwsgi/capedist.ini /etc/uwsgi/apps-available/cape_dist.ini sudo ln -s /etc/uwsgi/apps-available/cape_dist.ini /etc/uwsgi/apps-enabled @@ -667,65 +690,65 @@ EOL function install_suricata() { echo '[+] Installing Suricata' - add-apt-repository ppa:oisf/suricata-stable -y - apt-get install suricata suricata-update -y + sudo add-apt-repository -y ppa:oisf/suricata-stable + sudo apt-get -o Dpkg::Options::="--force-confold" -o Dpkg::Options::="--force-overwrite" install -y suricata touch /etc/suricata/threshold.config # Download etupdate to update Emerging Threats Open IDS rules: mkdir -p "/etc/suricata/rules" if ! 
crontab -l | grep -q -F '15 * * * * /usr/bin/suricata-update'; then - crontab -l | { cat; echo "15 * * * * /usr/bin/suricata-update --suricata /usr/bin/suricata --suricata-conf /etc/suricata/suricata.yaml -o /etc/suricata/rules/ && /usr/bin/suricatasc -c reload-rules /tmp/suricata-command.socket &>/dev/null"; } | crontab - + crontab -l | { cat; echo "15 * * * * /usr/bin/suricata-update --suricata /usr/bin/suricata --suricata-conf /etc/suricata/suricata.yaml -o /etc/suricata/rules/ &>/dev/null"; } | crontab - fi if [ -d /usr/share/suricata/rules/ ]; then - # copy files if rules folder contains files + # copy files if rules folder contains files if [ "$(ls -A /var/lib/suricata/rules/)" ]; then cp "/usr/share/suricata/rules/"* "/etc/suricata/rules/" fi fi if [ -d /var/lib/suricata/rules/ ]; then - # copy files if rules folder contains files + # copy files if rules folder contains files if [ "$(ls -A /var/lib/suricata/rules/)" ]; then cp "/var/lib/suricata/rules/"* "/etc/suricata/rules/" fi fi - # ToDo this is not the best solution but i don't have time now to investigate proper one - sed -i 's|CapabilityBoundingSet=CAP_NET_ADMIN|#CapabilityBoundingSet=CAP_NET_ADMIN|g' /lib/systemd/system/suricata.service - systemctl daemon-reload + cat > /etc/suricata/cape.yaml <=5 requires CPU AVX instruction support https://www.mongodb.com/docs/manual/administration/production-notes/#x86_64 + if [ "$MONGO_ENABLE" -ge 1 ]; then + echo "[+] Installing MongoDB" + # Mongo >=5 requires CPU AVX instruction support https://www.mongodb.com/docs/manual/administration/production-notes/#x86_64 MONGO_VERSION="8.0" if ! grep -q ' avx ' /proc/cpuinfo; then @@ -795,34 +818,41 @@ function install_mongo(){ fi fi - sudo curl -fsSL "https://pgp.mongodb.com/server-${MONGO_VERSION}.asc" | sudo gpg --dearmor -o /etc/apt/keyrings/mongo.gpg --yes - echo "deb [signed-by=/etc/apt/keyrings/mongo.gpg arch=amd64] https://repo.mongodb.org/apt/ubuntu $(lsb_release -cs)/mongodb-org/${MONGO_VERSION} multiverse" > /etc/apt/sources.list.d/mongodb.list - - apt-get update 2>/dev/null - apt-get install libpcre3-dev numactl cron -y - apt-get install -y mongodb-org - pip3 install pymongo -U --break-system-packages - - apt-get install -y ntp - systemctl start ntp.service && sudo systemctl enable ntp.service - - if ! grep -q -E '^kernel/mm/transparent_hugepage/enabled' /etc/sysfs.conf; then - sudo apt-get install sysfsutils -y - echo "kernel/mm/transparent_hugepage/enabled = never" >> /etc/sysfs.conf - echo "kernel/mm/transparent_hugepage/defrag = never" >> /etc/sysfs.conf - fi - - if [ -f /lib/systemd/system/mongod.service ]; then - systemctl stop mongod.service - systemctl disable mongod.service - rm /lib/systemd/system/mongod.service - rm /lib/systemd/system/mongod.service - systemctl daemon-reload - fi - - if [ ! -f /lib/systemd/system/mongodb.service ]; then - crontab -l | { cat; echo "@reboot /bin/mkdir -p /data/configdb && /bin/mkdir -p /data/db && /bin/chown mongodb:mongodb /data -R"; } | crontab - - cat >> /lib/systemd/system/mongodb.service < /etc/apt/sources.list.d/mongodb.list + + sudo apt-get update 2>/dev/null + sudo apt-get install -y libpcre3-dev numactl cron + sudo apt-get install -y mongodb-org + + # Check pip version. Only pip3 versions 23+ have the '--break-system-packages' flag. + PIP_VERSION=$(pip3 -V | awk '{print $2}' | cut -d'.' 
-f1) + if [ "$PIP_VERSION" -ge 23 ]; then + pip3 install pymongo -U --break-system-packages + else + pip3 install pymongo -U + fi + + sudo apt-get install -y ntp + systemctl start ntp.service && sudo systemctl enable ntp.service + + if ! grep -q -E '^kernel/mm/transparent_hugepage/enabled' /etc/sysfs.conf; then + sudo apt-get install -y sysfsutils + echo "kernel/mm/transparent_hugepage/enabled = never" >> /etc/sysfs.conf + echo "kernel/mm/transparent_hugepage/defrag = never" >> /etc/sysfs.conf + fi + + if [ -f /lib/systemd/system/mongod.service ]; then + systemctl stop mongod.service + systemctl disable mongod.service + rm /lib/systemd/system/mongod.service + rm /lib/systemd/system/mongod.service + systemctl daemon-reload + fi + + if [ ! -f /lib/systemd/system/mongodb.service ]; then + crontab -l | { cat; echo "@reboot /bin/mkdir -p /data/configdb && /bin/mkdir -p /data/db && /bin/chown mongodb:mongodb /data -R"; } | crontab - + cat >> /lib/systemd/system/mongodb.service < /etc/apt/sources.list.d/elastic-8.x.list - apt-get update && apt-get install elasticsearch - pip3 install elasticsearch --break-system-packages + sudo apt-get update && sudo apt-get install -y elasticsearch + + # Check pip version. Only pip3 versions 23+ have the '--break-system-packages' flag. + PIP_VERSION=$(pip3 -V | awk '{print $2}' | cut -d'.' -f1) + if [ "$PIP_VERSION" -ge 23 ]; then + pip3 install elasticsearch --break-system-packages + else + pip3 install elasticsearch + fi + systemctl enable elasticsearch } @@ -885,8 +923,8 @@ function install_postgresql() { curl https://www.postgresql.org/media/keys/ACCC4CF8.asc | gpg --dearmor | sudo tee /etc/apt/trusted.gpg.d/apt.postgresql.org.gpg >/dev/null echo "deb [signed-by=/etc/apt/trusted.gpg.d/apt.postgresql.org.gpg arch=amd64] http://apt.postgresql.org/pub/repos/apt/ $(lsb_release -cs)-pgdg main" > /etc/apt/sources.list.d/pgdg.list - sudo apt-get update -y - sudo apt -y install libpq-dev postgresql postgresql-client + sudo apt-get update + sudo apt-get install -y libpq-dev postgresql postgresql-client sudo systemctl enable postgresql.service sudo systemctl start postgresql.service @@ -906,7 +944,7 @@ function install_capa() { cd capa || return git pull git submodule update --init rules - poetry --directory /opt/CAPEv2/ run pip install . 
+ /etc/poetry/bin/poetry --directory /opt/CAPEv2/ run pip install /tmp/capa cd /opt/CAPEv2 if [ -d /tmp/capa ]; then sudo rm -rf /tmp/capa @@ -925,19 +963,25 @@ function dependencies() { #sudo canonical-livepatch enable APITOKEN # deps - apt-get install python3-pip build-essential libssl-dev libssl3 python3-dev cmake nfs-common -y - apt-get install innoextract msitools iptables psmisc jq sqlite3 tmux net-tools checkinstall graphviz python3-pydot git numactl python3 python3-dev python3-pip libjpeg-dev zlib1g-dev -y - apt-get install zpaq upx-ucl wget zip unzip p7zip-full lzip rar unrar unace-nonfree cabextract geoip-database libgeoip-dev libjpeg-dev mono-utils ssdeep libfuzzy-dev exiftool -y - apt-get install uthash-dev libconfig-dev libarchive-dev libtool autoconf automake privoxy software-properties-common wkhtmltopdf xvfb xfonts-100dpi tcpdump libcap2-bin wireshark-common -y - apt-get install python3-pil subversion uwsgi uwsgi-plugin-python3 python3-pyelftools git curl -y - apt-get install openvpn wireguard -y - apt-get install crudini -y + sudo apt-get install -y python3-pip build-essential libssl-dev libssl3 python3-dev cmake nfs-common crudini + sudo apt-get install -y innoextract msitools iptables psmisc jq sqlite3 tmux net-tools checkinstall graphviz python3-pydot git numactl python3 python3-dev python3-pip libjpeg-dev zlib1g-dev + sudo apt-get install -y zpaq upx-ucl wget zip unzip lzip rar unrar unace-nonfree cabextract geoip-database libgeoip-dev libjpeg-dev mono-utils ssdeep libfuzzy-dev exiftool + sudo DEBIAN_FRONTEND=noninteractive apt-get install -y uthash-dev libconfig-dev libarchive-dev libtool autoconf automake privoxy software-properties-common wkhtmltopdf xvfb xfonts-100dpi tcpdump libcap2-bin wireshark-common + sudo apt-get install -y python3-pil subversion uwsgi uwsgi-plugin-python3 python3-pyelftools git curl + sudo apt-get install -y openvpn wireguard + # for bingraph + sudo apt-get install -y libgraphviz-dev + # APT poetry is ultra outdated - curl -sSL https://install.python-poetry.org | python3 - - apt-get install locate # used by extra/libvirt_installer.sh + curl -sSL https://install.python-poetry.org | POETRY_HOME=/etc/poetry python3 - + echo "PATH=$PATH:/etc/poetry/bin/" >> /etc/bash.bashrc + source /etc/bash.bashrc + poetry self add poetry-plugin-shell + + sudo apt-get install -y locate # used by extra/libvirt_installer.sh # de4dot selfextraction - apt-get install -y libgdiplus libdnlib2.1-cil libgif7 libmono-accessibility4.0-cil libmono-ldap4.0-cil libmono-posix4.0-cil libmono-sqlite4.0-cil libmono-system-componentmodel-dataannotations4.0-cil libmono-system-data4.0-cil libmono-system-design4.0-cil libmono-system-drawing4.0-cil libmono-system-enterpriseservices4.0-cil libmono-system-ldap4.0-cil libmono-system-runtime-serialization-formatters-soap4.0-cil libmono-system-runtime4.0-cil libmono-system-transactions4.0-cil libmono-system-web-applicationservices4.0-cil libmono-system-web-services4.0-cil libmono-system-web4.0-cil libmono-system-windows-forms4.0-cil libmono-webbrowser4.0-cil + sudo apt-get install -y libgdiplus libdnlib2.1-cil libgif7 libmono-accessibility4.0-cil libmono-ldap4.0-cil libmono-posix4.0-cil libmono-sqlite4.0-cil libmono-system-componentmodel-dataannotations4.0-cil libmono-system-data4.0-cil libmono-system-design4.0-cil libmono-system-drawing4.0-cil libmono-system-enterpriseservices4.0-cil libmono-system-ldap4.0-cil libmono-system-runtime-serialization-formatters-soap4.0-cil libmono-system-runtime4.0-cil libmono-system-transactions4.0-cil 
libmono-system-web-applicationservices4.0-cil libmono-system-web-services4.0-cil libmono-system-web4.0-cil libmono-system-windows-forms4.0-cil libmono-webbrowser4.0-cil de4dot_package_name="de4dot_3.1.41592.3405-2_all.deb" # if not exist download package if [ ! -f $de4dot_package_name ]; then @@ -951,7 +995,7 @@ function dependencies() { return fi - # if broken sudo python -m pip uninstall pip && sudo apt-get install python-pip --reinstall + # if broken sudo python -m pip uninstall pip && sudo apt-get install -y --reinstall python-pip #pip3 install --upgrade pip # /usr/bin/pip # from pip import __main__ @@ -959,7 +1003,7 @@ function dependencies() { # sys.exit(__main__._main()) # re2 - dead on py3.11 - # apt-get install libre2-dev -y + # sudo apt-get install -y libre2-dev #re2 for py3 # pip3 install cython # pip3 install git+https://github.com/andreasvc/pyre2.git @@ -971,7 +1015,7 @@ function dependencies() { sudo -u postgres -H sh -c "psql -d \"${USER}\" -c \"GRANT ALL PRIVILEGES ON DATABASE ${USER} to ${USER};\"" sudo -u postgres -H sh -c "psql -d \"${USER}\" -c \"ALTER DATABASE ${USER} OWNER TO ${USER};\"" - apt-get install apparmor-utils -y + sudo apt-get install -y apparmor-utils TCPDUMP_PATH=`which tcpdump` aa-complain ${TCPDUMP_PATH} aa-disable ${TCPDUMP_PATH} @@ -989,7 +1033,7 @@ function dependencies() { usermod -a -G systemd-journal ${USER} # https://www.torproject.org/docs/debian.html.en - sudo apt-get install gnupg2 -y + sudo apt-get install -y gnupg2 wget -qO- https://deb.torproject.org/torproject.org/A3C4F0F979CAA22CDBA8F512EE8CBC9E886DDD89.asc | gpg --dearmor | sudo tee /usr/share/keyrings/deb.torproject.org-keyring.gpg >/dev/null @@ -1005,7 +1049,7 @@ function dependencies() { sudo apt-get update 2>/dev/null sudo systemctl stop tor@default.service && sudo systemctl disable tor@default.service - apt-get install tor deb.torproject.org-keyring libzstd1 -y + sudo apt-get install -y tor deb.torproject.org-keyring libzstd1 sed -i 's/#RunAsDaemon 1/RunAsDaemon 1/g' /etc/tor/torrc @@ -1069,35 +1113,11 @@ EOF sudo modprobe br_netfilter sudo sysctl -p - - ### PDNS - sudo apt-get install git binutils-dev libldns-dev libpcap-dev libdate-simple-perl libdatetime-perl libdbd-mysql-perl -y - cd /tmp || return - - # From pevious install - if [ -d /tmp/passivedns ]; then - sudo rm -rf /tmp/passivedns - fi - git clone https://github.com/gamelinux/passivedns.git - cd passivedns/ || return - autoreconf --install - ./configure - make -j"$(getconf _NPROCESSORS_ONLN)" - sudo checkinstall -D --pkgname=passivedns --default - chown ${USER}:${USER} -R /tmp/passivedns/ - sudo -u ${USER} bash -c 'poetry --directory /opt/CAPEv2/ run pip install unicorn capstone' - sudo -u ${USER} bash -c 'cd /tmp/passivedns/ ; poetry --directory /opt/CAPEv2/ run pip install unicorn capstone' - sed -i 's/APT::Periodic::Unattended-Upgrade "1";/APT::Periodic::Unattended-Upgrade "0";/g' /etc/apt/apt.conf.d/20auto-upgrades - - if [ -d /tmp/passivedns ]; then - sudo rm -rf /tmp/passivedns - fi - } function install_clamav() { echo "[+] Installing clamav" - apt-get install clamav clamav-daemon clamav-freshclam clamav-unofficial-sigs python3-pyclamd -y + sudo apt-get install -y clamav clamav-daemon clamav-freshclam clamav-unofficial-sigs python3-pyclamd cat >> /usr/share/clamav-unofficial-sigs/conf.d/00-clamav-unofficial-sigs.conf << EOF # This file contains user configuration settings for the clamav-unofficial-sigs.sh @@ -1238,17 +1258,17 @@ function install_CAPE() { #chmod -R =rwX,g=rwX,o=X /usr/var/malheur/ # Adapting owner 
permissions to the ${USER} path folder cd "/opt/CAPEv2/" || return - sudo -u ${USER} bash -c 'export PYTHON_KEYRING_BACKEND=keyring.backends.null.Keyring; CRYPTOGRAPHY_DONT_BUILD_RUST=1 poetry install' + sudo -u ${USER} bash -c 'export PYTHON_KEYRING_BACKEND=keyring.backends.null.Keyring; CRYPTOGRAPHY_DONT_BUILD_RUST=1 /etc/poetry/bin/poetry install' if [ "$DISABLE_LIBVIRT" -eq 0 ]; then - sudo -u ${USER} bash -c 'export PYTHON_KEYRING_BACKEND=keyring.backends.null.Keyring; poetry run extra/libvirt_installer.sh' + sudo -u ${USER} bash -c 'export PYTHON_KEYRING_BACKEND=keyring.backends.null.Keyring; /etc/poetry/bin/poetry run extra/libvirt_installer.sh' sudo usermod -aG kvm ${USER} sudo usermod -aG libvirt ${USER} fi #packages are needed for build options in extra/yara_installer.sh - apt-get install libjansson-dev libmagic1 libmagic-dev -y - sudo -u ${USER} bash -c 'poetry run /opt/CAPEv2/extra/yara_installer.sh' + sudo apt-get install -y libjansson-dev libmagic1 libmagic-dev + sudo -u ${USER} bash -c '/etc/poetry/bin/poetry run /opt/CAPEv2/extra/yara_installer.sh' if [ -d /tmp/yara-python ]; then sudo rm -rf /tmp/yara-python @@ -1265,14 +1285,14 @@ function install_CAPE() { chown ${USER}:${USER} -R "/opt/CAPEv2/" - if [ "$MONGO_ENABLE" -ge 1 ]; then - crudini --set conf/reporting.conf mongodb enabled yes - fi + if [ "$MONGO_ENABLE" -ge 1 ]; then + crudini --set conf/reporting.conf mongodb enabled yes + fi - if [ "$librenms_enable" -ge 1 ]; then - crudini --set conf/reporting.conf litereport enabled yes - crudini --set conf/reporting.conf runstatistics enabled yes - fi + if [ "$librenms_enable" -ge 1 ]; then + crudini --set conf/reporting.conf litereport enabled yes + crudini --set conf/reporting.conf runstatistics enabled yes + fi python3 utils/community.py -waf -cr @@ -1285,6 +1305,21 @@ Cmnd_Alias CAPE_SERVICES = /usr/bin/systemctl restart cape-rooter, /usr/bin/syst ${USER} ALL=(ALL) NOPASSWD:CAPE_SERVICES EOF fi +if [ ! -f /etc/sudoers.d/ip_netns ]; then + cat >> /etc/sudoers.d/ip_netns << EOF +${USER} ALL=NOPASSWD: /usr/sbin/ip netns exec * /usr/bin/sudo -u cape * +EOF +fi +if [ ! 
-f /opt/mitmproxy/mitmdump_wrapper.sh ]; then + mkdir -p /opt/mitmproxy/ + cat >> /opt/mitmproxy/mitmdump_wrapper.sh << EOF +#!/bin/bash +echo $$ > mitmdump.pid +# exec full args +exec $@ +EOF + chmod +x /opt/mitmproxy/mitmdump_wrapper.sh +fi } function install_systemd() { @@ -1295,10 +1330,10 @@ function install_systemd() { cp /opt/CAPEv2/systemd/cape-rooter.service /lib/systemd/system/cape-rooter.service cp /opt/CAPEv2/systemd/suricata.service /lib/systemd/system/suricata.service systemctl daemon-reload - cape_web_enable_string='' - if [ "$MONGO_ENABLE" -ge 1 ]; then - cape_web_enable_string="cape-web" - fi + cape_web_enable_string='' + if [ "$MONGO_ENABLE" -ge 1 ]; then + cape_web_enable_string="cape-web" + fi systemctl enable cape cape-rooter cape-processor "$cape_web_enable_string" suricata systemctl restart cape cape-rooter cape-processor "$cape_web_enable_string" suricata @@ -1357,9 +1392,9 @@ function install_node_exporter() { function install_volatility3() { echo "[+] Installing volatility3" - sudo apt-get install unzip - sudo -u ${USER} poetry run pip3 install git+https://github.com/volatilityfoundation/volatility3 - vol_path=$(sudo -u ${USER} poetry run python3 -c "import volatility3.plugins;print(volatility3.__file__.replace('__init__.py', 'symbols/'))") + sudo apt-get install -y unzip + sudo -u ${USER} /etc/poetry/bin/poetry run pip3 install git+https://github.com/volatilityfoundation/volatility3 + vol_path=$(sudo -u ${USER} /etc/poetry/bin/poetry run python3 -c "import volatility3.plugins;print(volatility3.__file__.replace('__init__.py', 'symbols/'))") cd $vol_path || return wget https://downloads.volatilityfoundation.org/volatility3/symbols/windows.zip -O windows.zip unzip -o windows.zip @@ -1383,9 +1418,9 @@ function install_guacamole() { echo "[+] Installing guacamole" # Kudos to @Enzok https://github.com/kevoreilly/CAPEv2/pull/1065 # https://guacamole.apache.org/doc/gug/installing-guacamole.html - sudo add-apt-repository ppa:remmina-ppa-team/remmina-next-daily + sudo add-apt-repository -y ppa:remmina-ppa-team/remmina-next-daily sudo apt-get update - sudo apt -y install libcairo2-dev libjpeg-turbo8-dev libpng-dev libossp-uuid-dev freerdp2-dev + sudo apt-get install -y libcairo2-dev libjpeg-turbo8-dev libpng-dev libossp-uuid-dev freerdp2-dev sudo apt-get install -y freerdp2-dev libssh2-1-dev libvncserver-dev libpulse-dev libssl-dev libvorbis-dev libwebp-dev libpango1.0-dev libavcodec-dev libavformat-dev libavutil-dev libswscale-dev # https://downloads.apache.org/guacamole/$guacamole_version/source/ @@ -1413,7 +1448,7 @@ function install_guacamole() { sudo ldconfig #pip3 install -U 'Twisted[tls,http2]' - sudo apt install python3-twisted -y + sudo apt-get install -y python3-twisted if [ -f "/etc/systemd/system/guacd.service" ] ; then sudo rm /etc/systemd/system/guacd.service @@ -1424,7 +1459,7 @@ function install_guacamole() { cp /opt/CAPEv2/systemd/guac-web.service /lib/systemd/system/guac-web.service fi - poetry_path=$(which poetry) + poetry_path="/etc/poetry/bin/poetry" if ! grep -q $poetry_path /lib/systemd/system/guac-web.service ; then sed -i "s|/usr/bin/poetry|$poetry_path|g" /lib/systemd/system/guac-web.service fi @@ -1437,7 +1472,7 @@ function install_guacamole() { sudo usermod www-data -G ${USER} cd /opt/CAPEv2 - sudo -u ${USER} bash -c 'export PYTHON_KEYRING_BACKEND=keyring.backends.null.Keyring; poetry install' + sudo -u ${USER} bash -c "export PYTHON_KEYRING_BACKEND=keyring.backends.null.Keyring; ${poetry_path} install" cd .. 
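# Illustrative sketch only, not part of the installer: quick checks that the POETRY_HOME=/etc/poetry installation referenced throughout this script is usable by the unprivileged user; the "cape" user name and the /opt/CAPEv2 path are the script defaults, and the --directory option assumes Poetry >= 1.2.
/etc/poetry/bin/poetry --version
sudo -u cape /etc/poetry/bin/poetry --directory /opt/CAPEv2 env info --path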
systemctl daemon-reload @@ -1447,14 +1482,14 @@ function install_guacamole() { function install_DIE() { echo "[+] Installing Detect It Easy" - apt-get install libqt5opengl5 libqt5script5 libqt5scripttools5 libqt5sql5 -y + sudo apt-get install -y libqt5opengl5 libqt5script5 libqt5scripttools5 libqt5sql5 wget "https://github.com/horsicq/DIE-engine/releases/download/${DIE_VERSION}/die_${DIE_VERSION}_Ubuntu_${UBUNTU_VERSION}_amd64.deb" -O DIE.deb && dpkg -i DIE.deb } function install_fluentd() { echo "[+] Installing fluentd" curl -sSO https://dl.google.com/cloudagents/add-logging-agent-repo.sh && sudo bash add-logging-agent-repo.sh - sudo apt-get update && sudo apt-get install google-fluentd + sudo apt-get update && sudo apt-get install -y google-fluentd sudo apt-get install -y google-fluentd-catch-all-config-structured sudo service google-fluentd start && sudo service google-fluentd status } @@ -1463,7 +1498,80 @@ function install_postgres_pg_activity() { echo "[+] Installing pg-activity" # amazing tool for monitoring https://github.com/dalibo/pg_activity # sudo -u postgres pg_activity -U postgres - apt install pg-activity -y + sudo apt-get install -y pg-activity +} + +function install_polarproxy() { + echo "[+] Installing PolarProxy" + + cd "/opt/" || return + + if [ ! -d PolarProxy ]; then + mkdir PolarProxy + fi + + cd PolarProxy + curl -o PolarProxy.tar.gz https://www.netresec.com/?download=PolarProxy + tar xf PolarProxy.tar.gz + chmod a+x PolarProxy + + local KEY_PEM=PolarProxy-key.pem + local CRT_PEM=PolarProxy-crt.pem + local CRT_P12=PolarProxy-key-crt.p12 + local CRT_CRT=PolarProxy-crt.crt + + # Generate key + openssl req -x509 \ + -newkey rsa:4096 \ + -passin pass:$PASSWD \ + -keyout $KEY_PEM \ + -subj "/C=US/ST=California/L=San Diego/O=Development/OU=Dev/CN=CAPEv2 PolarProxy" \ + -out $CRT_PEM \ + -nodes \ + -days 365 + + # Generate certificate + openssl x509 \ + -inform PEM \ + -passin pass:$PASSWD \ + -in $CRT_PEM \ + -out $CRT_CRT + + # Bundle key and cert for PolarProxy + openssl pkcs12 \ + -in $CRT_PEM \ + -inkey $KEY_PEM \ + -out $CRT_P12 \ + -export \ + -password pass:$PASSWD \ + -name PolarProxy + + chown -R $USER:$USER /opt/PolarProxy + + chmod 600 $CRT_P12 +} + +function install_passivedns() { + sudo apt-get install -y git binutils-dev libldns-dev libpcap-dev libdate-simple-perl libdatetime-perl libdbd-mysql-perl + cd /tmp || return + + # From pevious install + if [ -d /tmp/passivedns ]; then + sudo rm -rf /tmp/passivedns + fi + git clone https://github.com/gamelinux/passivedns.git + cd passivedns/ || return + autoreconf --install + ./configure + make -j"$(getconf _NPROCESSORS_ONLN)" + sudo checkinstall -D --pkgname=passivedns --default + chown ${USER}:${USER} -R /tmp/passivedns/ + sed -i 's/APT::Periodic::Unattended-Upgrade "1";/APT::Periodic::Unattended-Upgrade "0";/g' /etc/apt/apt.conf.d/20auto-upgrades + + if [ -d /tmp/passivedns ]; then + cd /tmp || return + sudo rm -rf /tmp/passivedns + fi } # Doesn't work ${$1,,} @@ -1548,12 +1656,12 @@ case "$COMMAND" in fi # Update FLARE CAPA rules once per day if ! 
crontab -l | grep -q 'community.py -waf -cr'; then - crontab -l | { cat; echo "5 0 */1 * * cd /opt/CAPEv2/utils/ && sudo -u ${USER} poetry --directory /opt/CAPEv2/ run python3 community.py -waf -cr && poetry --directory /opt/CAPEv2/ run pip install -U flare-capa && systemctl restart cape-processor 2>/dev/null"; } | crontab - + crontab -l | { cat; echo "5 0 */1 * * cd /opt/CAPEv2/utils/ && sudo -u ${USER} /etc/poetry/bin/poetry --directory /opt/CAPEv2/ run python3 community.py -waf -cr && poetry --directory /opt/CAPEv2/ run pip install -U flare-capa && systemctl restart cape-processor 2>/dev/null"; } | crontab - + fi + install_librenms + if [ "$clamav_enable" -ge 1 ]; then + install_clamav fi - install_librenms - if [ "$clamav_enable" -ge 1 ]; then - install_clamav - fi ;; 'systemd') install_systemd;; @@ -1586,15 +1694,17 @@ case "$COMMAND" in 'logrotate') install_logrotate;; 'librenms') - install_librenms;; + install_librenms;; 'librenms_cron_config') - librenms_cron_config;; + librenms_cron_config;; 'librenms_snmpd_config') - librenms_snmpd_config;; + librenms_snmpd_config;; 'librenms_sneck_config') - librenms_sneck_config;; + librenms_sneck_config;; 'mitmproxy') install_mitmproxy;; +'polarproxy') + install_polarproxy;; 'issues') issues;; 'nginx') @@ -1621,6 +1731,8 @@ case "$COMMAND" in install_DIE;; 'fluentd') install_fluentd;; +'passivedns') + install_passivedns;; *) usage;; esac diff --git a/installer/kvm-qemu.sh b/installer/kvm-qemu.sh old mode 100644 new mode 100755 index d25ac23d046..7ce6f3db773 --- a/installer/kvm-qemu.sh +++ b/installer/kvm-qemu.sh @@ -1,12 +1,12 @@ #!/bin/bash -# set -ex -# Copyright (C) 2011-2023 doomedraven. +# Copyright (C) 2011-2024 DoomedRaven. +# This file is part of Tools - https://github.com/doomedraven/Tools # See the file 'LICENSE.md' for copying permission. # https://www.doomedraven.com/2016/05/kvm.html # https://www.doomedraven.com/2020/04/how-to-create-virtual-machine-with-virt.html -# Use Ubuntu 22.04 LTS -# Update date: 22.02.2023 +# Use Ubuntu 24.04 LTS +# Update date: 22.02.2025 # Glory to Ukraine! @@ -21,6 +21,8 @@ Huge thanks to: * @wmetcalf * @ClaudioWayne * @CplNathan + * @enzok + * many others ' # ToDo investigate @@ -34,12 +36,12 @@ Huge thanks to: # https://github.com/dylanaraps/pure-bash-bible # https://www.shellcheck.net/ -# ACPI tables related -# https://wiki.archlinux.org/index.php/DSDT - # Might need update the WMI queries but you have example how to dump the information # https://github.com/SecSamDev/cancamusa/blob/main/bin/extract-info.ps1 + +# ACPI tables related +# https://wiki.archlinux.org/index.php/DSDT # Dump on linux # acpidump > acpidump.out # Dump on Windows @@ -56,10 +58,11 @@ QTARGETS="--target-list=i386-softmmu,x86_64-softmmu,i386-linux-user,x86_64-linux #https://www.qemu.org/download/#source or https://download.qemu.org/ -qemu_version=9.0.0 +qemu_version=9.2.2 # libvirt - https://libvirt.org/sources/ # changelog - https://libvirt.org/news.html -libvirt_version=10.3.0 +libvirt_version=11.1.0 +seabios_version=1.16.3 # virt-manager - https://github.com/virt-manager/virt-manager/releases # autofilled OS="" @@ -129,27 +132,41 @@ BOCHS_BLOCK_REPLACER3='' # what to use as a replacement for BXPC in bochs in ACPI info BXPC_REPLACER='' +# what to use as a replacement for seabios in config.h +BOCHS_SEABIOS_BLOCK_REPLACER='' + + +# if a config file is present, read it in +if [ -f "./kvm-config.sh" ]; then + . 
./kvm-config.sh +fi + + # ToDO add to see if cpu supports VTx # egrep '(vmx|svm)' --color=always /proc/cpuinfo #* If your CPU is Intel, you need activate in __BIOS__ VT-x # * (last letter can change, you can activate [TxT ](https://software.intel.com/en-us/blogs/2012/09/25/how-to-enable-an-intel-trusted-execution-technology-capable-server) too, and any other feature, but VT-* is very important) -# if a config file is present, read it in -if [ -f "./kvm-config.sh" ]; then - . ./kvm-config.sh +which aptitude 2>/dev/null +if [ $? -eq 1 ]; then + sudo apt-get update 2>/dev/null + sudo apt-get install aptitude -y 2>/dev/null +fi + +which pip3 2>/dev/null +if [ $? -eq 1 ]; then + sudo apt-get install -y python3-pip 2>/dev/null fi -# ToDo check if aptitude is installed if no refresh and install -sudo apt-get update 2>/dev/null -sudo apt-get install aptitude -y 2>/dev/null NC='\033[0m' RED='\033[0;31m' -echo -e "${RED}[!] ONLY for UBUNTU 20.04 and 22.04${NC}" -echo -e "${RED}\t[!] NEVER install packages from APT that installed by this script${NC}" +echo -e "${RED}[!] ONLY for UBUNTU 24.04${NC}" +echo -e "${RED}\t[!] NEVER install packages from apt-get that installed by this script${NC}" echo -e "${RED}\t[!] NEVER use 'make install' - it poison system and no easy way to upgrade/uninstall/cleanup, use dpkg-deb${NC}" -echo -e "${RED}\t[!] NEVER run 'python setup.py install' DO USE 'pip intall .' the same as APT poisoning/upgrading${NC}\n" +echo -e "${RED}\t[!] NEVER run 'python setup.py install' DO USE 'pip install .' the same as apt-get poisoning/upgrading${NC}\n" -echo -e "${RED}\t[!] NEVER FORCE system upgrade, it will ignore blacklist and mess with packages installed by APT and this scritp!${NC}\n" +echo -e "${RED}\t[!] NEVER FORCE system upgrade (apt install -f), it will ignore blacklist and mess with packages installed by apt-get and this script!${NC}\n" +echo -e "${RED}\t[!] NEVER! When upgrading ubuntu release, first uninstall qemu and libvirt, then upgrade and then install again!
As is the same as force and bypasses apt mark-hold${NC}\n" function usage() { cat << EndOfHelp @@ -171,13 +188,11 @@ cat << EndOfHelp https://wiki.qemu.org/Documentation/CreateSnapshot Libvmi - install LibVMI Virtmanager - install virt-manager - Libguestfs - install libguestfs Replace_qemu - only fix antivms in QEMU source Replace_seabios - only fix antivms in SeaBios source Issues - will give you error - solution list noip - Install No-ip deamon and enable on boot SysRQ - enable SysRQ - https://sites.google.com/site/syscookbook/rhel/rhel-sysrq-key - jemalloc - install Jemalloc google if you need details ;) Tips: * Latest kernels having some KVM features :) @@ -248,56 +263,11 @@ function _enable_tcp_bbr() { } function install_apparmor() { - aptitude install -f bison linux-generic-hwe-22.04 -y + aptitude install -f bison linux-generic-hwe-24.04 -y aptitude install -f apparmor apparmor-profiles apparmor-profiles-extra apparmor-utils libapparmor-dev libapparmor1 python3-apparmor python3-libapparmor libapparmor-perl -y } -function install_libguestfs() { - # https://libguestfs.org/guestfs-building.1.html - cd /opt || return - echo "[+] Check for previous version of LibGuestFS" - sudo dpkg --purge --force-all "libguestfs-*" 2>/dev/null - - wget -O- https://packages.erlang-solutions.com/ubuntu/erlang_solutions.asc | sudo apt-key add - - sudo add-apt-repository -y "deb https://packages.erlang-solutions.com/ubuntu $(lsb_release -sc) contrib" - sudo aptitude install -f parted libyara3 erlang-dev gperf flex bison libaugeas-dev libhivex-dev supermin ocaml-nox libhivex-ocaml genisoimage libhivex-ocaml-dev libmagic-dev libjansson-dev gnulib jq ocaml-findlib -y 2>/dev/null - sudo apt-get update - sudo aptitude install -f erlang -y - - if [ ! -d libguestfs ]; then - #ToDo move to latest release not latest code - #_info=$(curl -s https://api.github.com/repos/libguestfs/libguestfs/releases/latest) - #_version=$(echo $_info |jq .tag_name|sed "s/\"//g") - #_repo_url=$(echo $_info | jq ".zipball_url" | sed "s/\"//g") - #wget -q $_repo_url - #unzip $_version - git clone --recursive https://github.com/libguestfs/libguestfs - fi - cd libguestfs || return - git submodule update --init - autoreconf -i - ./configure CFLAGS=-fPIC - make -j"$(nproc)" - - # Install virt tools that are in a diff repo since LIBGUESTFS 1.46 split - # More Info: https://listman.redhat.com/archives/libguestfs/2021-September/msg00153.html - cd /opt || return - if [ ! -d guestfs-tools ]; then - git clone --recursive https://github.com/rwmjones/guestfs-tools.git - fi - cd guestfs-tools || return - # Following tips to compile the guestfs-tools as depicted in https://www.mail-archive.com/libguestfs@redhat.com/msg22408.html - git submodule update --init --force - autoreconf -i - ../libguestfs/run ./configure CFLAGS=-fPIC - ../libguestfs/run make -j $(getconf _NPROCESSORS_ONLN) - - echo "[+] /opt/libguestfs/run --help" - echo "[+] /opt/libguestfs/run /opt/guestfs-tools/sparsify/virt-sparsify -h" -} - - function install_libvmi() { # IMPORTANT: # 1) LibVMI will have KVM support if libvirt is available during compile time. 
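# Illustrative note, not part of the installer: PIP_BREAK_SYSTEM_PACKAGES=1, used in the LibVMI steps below, is the environment-variable form of pip's --break-system-packages flag (PEP 668); pips that predate the flag should simply ignore the variable, e.g.
PIP_BREAK_SYSTEM_PACKAGES=1 pip3 install libvmi
# is roughly equivalent, on pip >= 23, to
# pip3 install --break-system-packages libvmi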
@@ -328,7 +298,7 @@ function install_libvmi() { fi mkdir -p /tmp/libvmi_builded/DEBIAN echo -e "Package: libvmi\nVersion: 1.0-0\nArchitecture: $ARCH\nMaintainer: $MAINTAINER\nDescription: libvmi" > /tmp/libvmi_builded/DEBIAN/control - cd "libvmi-v0.14.0" || return + cd "libvmi-0.14.0" || return # install deps aptitude install -f -y cmake flex bison libglib2.0-dev libjson-c-dev libyajl-dev doxygen @@ -353,7 +323,7 @@ function install_libvmi() { # git checkout add_vmi_request_page_fault # git pull #git clone https://github.com/libvmi/python.git libvmi-python - pip3 install libvmi + PIP_BREAK_SYSTEM_PACKAGES=1 pip3 install libvmi echo "[+] Cloned LibVMI Python repo" fi cd "libvmi-python" || return @@ -362,7 +332,7 @@ function install_libvmi() { aptitude install -f -y python3-pkgconfig python3-cffi python3-future #pip3 install . python3 setup.py build - pip3 install . + PIP_BREAK_SYSTEM_PACKAGES=1 pip3 install . # Rekall cd /tmp || return @@ -432,7 +402,6 @@ function install_pyvmidbg() { # r2 -d gdb://127.0.0.1:5000 -b 64 } - function install_libvirt() { # http://ask.xmodulo.com/compile-virt-manager-debian-ubuntu.html #rm -r /usr/local/lib/python2.7/dist-packages/libvirt* @@ -481,7 +450,7 @@ EOH echo "[+] Checking/deleting old versions of Libvirt" apt-get purge libvirt0 libvirt-bin libvirt-$libvirt_version 2>/dev/null dpkg -l|grep "libvirt-[0-9]\{1,2\}\.[0-9]\{1,2\}\.[0-9]\{1,2\}"|cut -d " " -f 3|sudo xargs dpkg --purge --force-all 2>/dev/null - apt-get install meson plocate libxml2-utils gnutls-bin gnutls-dev libxml2-dev bash-completion libreadline-dev numactl libnuma-dev python3-docutils flex -y + apt-get install meson plocate libxml2-utils gnutls-bin gnutls-dev libxml2-dev bash-completion libreadline-dev numactl libnuma-dev python3-docutils flex libjson-c-dev pylint pycodestyle -y # Remove old links updatedb temp_libvirt_so_path=$(locate libvirt-qemu.so | head -n1 | awk '{print $1;}') @@ -519,7 +488,7 @@ EOH # To see whole config sudo meson configure # true now is enabled cd /tmp/libvirt-$libvirt_version || return - sudo meson build -D system=true -D driver_remote=enabled -D driver_qemu=enabled -D driver_libvirtd=enabled -D qemu_group=libvirt -D qemu_user=root -D secdriver_apparmor=enabled -D apparmor_profiles=enabled -D bash_completion=auto + sudo meson setup build -D system=true -D driver_remote=enabled -D driver_qemu=enabled -D driver_libvirtd=enabled -D qemu_group=libvirt -D qemu_user=root -D secdriver_apparmor=enabled -D apparmor_profiles=enabled -D bash_completion=auto sudo ninja -C build sudo ninja -C build install @@ -533,11 +502,11 @@ EOH updatedb # ToDo fix bad destiny on some systems, example, first arg should be destiny to link not source # /usr/lib/x86_64-linux-gnu/libvirt-qemu.so.0 -> /usr/lib64/libvirt-qemu.so - temp_libvirt_so_path=$(locate libvirt-qemu.so | head -n1 | awk '{print $1;}') + temp_libvirt_so_path=$(locate libvirt-qemu.so |grep -v "docker"| head -n1 | awk '{print $1;}') temp_export_path=$(locate libvirt.pc | head -n1 | awk '{print $1;}') libvirt_so_path="${temp_libvirt_so_path%/*}/" if [[ $libvirt_so_path == "/usr/lib/x86_64-linux-gnu/" ]]; then - temp_libvirt_so_path=$(locate libvirt-qemu.so | tail -1 | awk '{print $1;}') + temp_libvirt_so_path=$(locate libvirt-qemu.so |grep -v "docker"| tail -1 | awk '{print $1;}') libvirt_so_path="${temp_libvirt_so_path%/*}/" fi export_path="${temp_export_path%/*}/" @@ -545,7 +514,8 @@ EOH if [[ -n "$libvirt_so_path" ]]; then # #ln -s /usr/lib64/libvirt-qemu.so /lib/x86_64-linux-gnu/libvirt-qemu.so.0 - for so_path in $(ls 
"${libvirt_so_path}"libvirt*.so.0); do ln -sf "$so_path" /lib/$(uname -m)-linux-gnu/$(basename "$so_path"); done + # 24.10.2024 Observed problem with ln -sf, but works just fine with cp + for so_path in $(ls "${libvirt_so_path}"libvirt*.so.0); do echo $so_path; cp "$so_path" /lib/$(uname -m)-linux-gnu/$(basename "$so_path"); done ldconfig else echo "${RED}[!] Problem to create symlink, unknown libvirt_so_path path${NC}" @@ -597,7 +567,7 @@ EOH cd .. # Remove the $libvirt_version directory to permission errors when runing - # cd /opt/CAPEv2/ ; sudo -u cape poetry run extra/poetry_libvirt_installer.sh later + # cd /opt/CAPEv2/ ; sudo -u cape /etc/poetry/bin/poetry run extra/poetry_libvirt_installer.sh later rm -r libvirt-python-$libvirt_version if [ "$OS" = "Linux" ]; then @@ -617,10 +587,17 @@ EOH usermod -G $groupname -a "$username" fi - #check links + # check links # sudo ln -s /usr/lib64/libvirt-qemu.so /lib/x86_64-linux-gnu/libvirt-qemu.so.0 # sudo ln -s /usr/lib64/libvirt.so.0 /lib/x86_64-linux-gnu/libvirt.so.0 - systemctl enable virtqemud.service virtnetworkd.service virtstoraged.service virtqemud.socket + + # On Ubuntu 24.04 it introduces /etc/libvirt/network.conf + if [ -f /etc/libvirt/network.conf ]; then + sed -i 's/#firewall_backend = "nftables"/firewall_backend = "iptables"/g' /etc/libvirt/network.conf + fi + + systemctl enable virtqemud.service virtnetworkd.service virtstoraged.service virtqemud.socket libvirtd.service + systemctl start libvirtd.service echo "[+] You should logout and login " fi @@ -630,7 +607,7 @@ function install_virt_manager() { # pm-utils # from build-dep aptitude install -f libgirepository1.0-dev gtk-doc-tools python3 python3-pip gir1.2-govirt-1.0 libgovirt-dev \ - libgovirt-common libgovirt2 gir1.2-rest-0.7 unzip intltool augeas-doc ifupdown wodim cdrkit-doc indicator-application \ + libgovirt-common libgovirt2 unzip intltool augeas-doc ifupdown wodim cdrkit-doc indicator-application \ augeas-tools radvd auditd systemtap nfs-common zfsutils python-openssl-doc samba \ debootstrap sharutils-doc ssh-askpass gnome-keyring\ sharutils spice-client-glib-usb-acl-helper ubuntu-mono x11-common python3-gi \ @@ -641,35 +618,35 @@ function install_virt_manager() { libxkbcommon0 libusbredirhost1 libusbredirparser1 libv4l-0 libv4lconvert0 libvisual-0.4-0 libvorbis0a libvorbisenc2 \ libvte-2.91-0 libvte-2.91-common libwavpack1 libwayland-client0 libwayland-cursor0 libwayland-egl1-mesa libwayland-server0 \ libx11-xcb1 libxcb-dri2-0 libxcb-dri3-0 libsoup-gnome2.4-1 libsoup2.4-1 libspeex1 libspice-client-glib-2.0-8 \ - libspice-client-gtk-3.0-5 libspice-server1 libtag1v5 libtag1v5-vanilla libthai-data libthai0 libtheora0 libtiff5 \ - libtwolame0 libpython3-dev librados2 libraw1394-11 librbd1 librdmacm1 librest-0.7-0 \ + libspice-client-gtk-3.0-5 libspice-server1 libtag1v5 libtag1v5-vanilla libthai-data libthai0 libtheora0 libtiff5-dev \ + libtwolame0 libpython3-dev librados2 libraw1394-11 librbd1 librdmacm1 \ librsvg2-2 librsvg2-common libsamplerate0 libsdl1.2debian libshout3 libsndfile1 libpango-1.0-0 libpangocairo-1.0-0 \ - libpangoft2-1.0-0 libpangoxft-1.0-0 libpciaccess0 libphodav-2.0-0 libphodav-2.0-common libpixman-1-0 libproxy1v5 \ + libpangoft2-1.0-0 libpangoxft-1.0-0 libpciaccess0 libpixman-1-0 libproxy1v5 \ libpulse-mainloop-glib0 libpulse0 libgstreamer1.0-0 libgtk-3-0 libgtk-3-bin libgtk-3-common libgtk-vnc-2.0-0 \ libgudev-1.0-0 libgvnc-1.0-0 libharfbuzz0b libibverbs1 libiec61883-0 libindicator3-7 libiscsi7 libjack-jackd2-0 libjbig0 \ libjpeg-turbo8 libjpeg8 
libjson-glib-1.0-0 libjson-glib-1.0-common liblcms2-2 libmp3lame0 libmpg123-0 libnl-route-3-200 \ libnspr4 libnss3 libogg0 libopus0 liborc-0.4-0 libosinfo-1.0-0 libcairo-gobject2 libcairo2 libcdparanoia0 libcolord2 \ - libcups2 libdatrie1 libdbusmenu-glib4 libdbusmenu-gtk3-4 libdconf1 libdv4 libegl-mesa0 libegl1 libepoxy0 libfdt1 libflac8 \ + libcups2 libdatrie1 libdbusmenu-glib4 libdbusmenu-gtk3-4 libdconf1 libdv4 libegl-mesa0 libegl1 libepoxy0 libfdt1 \ libfontconfig1 libgbm1 libgdk-pixbuf2.0-0 libgdk-pixbuf2.0-bin libgdk-pixbuf2.0-common libglapi-mesa libglvnd0 libgraphite2-3 \ libgstreamer-plugins-base1.0-0 libgstreamer-plugins-good1.0-0 gtk-update-icon-cache hicolor-icon-theme humanity-icon-theme \ - ibverbs-providers libaa1 libaio1 libappindicator3-1 libasound2 libasound2-data libasyncns0 libatk-bridge2.0-0 libatk1.0-0 \ - libatk1.0-data libatspi2.0-0 libaugeas0 libavahi-client3 libavahi-common-data libavahi-common3 libavc1394-0 libbluetooth3 \ + ibverbs-providers libaa1 libasound2 libasound2-data libasyncns0 libatk-bridge2.0-0 libatk1.0-0 \ + libatspi2.0-0 libaugeas0 libavahi-client3 libavahi-common-data libavahi-common3 libavc1394-0 libbluetooth3 \ libcaca0 libcacard0 gir1.2-atk-1.0 gir1.2-freedesktop gir1.2-gdkpixbuf-2.0 gir1.2-gtk-3.0 gir1.2-gtk-vnc-2.0 \ gir1.2-libosinfo-1.0 gir1.2-pango-1.0 gir1.2-spiceclientglib-2.0 gir1.2-spiceclientgtk-3.0 gir1.2-vte-2.91 glib-networking \ glib-networking-common glib-networking-services gsettings-desktop-schemas gstreamer1.0-plugins-base gstreamer1.0-plugins-good \ gstreamer1.0-x adwaita-icon-theme at-spi2-core augeas-lenses cpu-checker dconf-gsettings-backend dconf-service \ fontconfig fontconfig-config fonts-dejavu-core genisoimage gir1.2-appindicator3-0.1 gir1.2-secret-1 \ gobject-introspection intltool pkg-config libxml2-dev libxslt-dev python3-dev gir1.2-gtk-vnc-2.0 gir1.2-spiceclientgtk-3.0 libgtk-3-dev \ - mlocate gir1.2-gtksource-4 libgtksourceview-4-0 libgtksourceview-4-common -y + plocate gir1.2-gtksource-4 libgtksourceview-4-0 libgtksourceview-4-common checkinstall pylint pycodestyle codespell -y # should be installed first # moved out as some 20.04 doesn't have this libs %) aptitude install -f -y python3-ntlm-auth libpython3-stdlib libbrlapi-dev libgirepository1.0-dev python3-testresources apt-get -y -o Dpkg::Options::="--force-overwrite" install ovmf - pip3 install tqdm requests six urllib3 ipaddr ipaddress idna dbus-python certifi lxml cryptography pyOpenSSL chardet asn1crypto pycairo PySocks PyGObject + PIP_BREAK_SYSTEM_PACKAGES=1 pip3 install tqdm requests six urllib3 ipaddr ipaddress idna dbus-python certifi lxml cryptography pyOpenSSL chardet asn1crypto pycairo PySocks PyGObject pylint pytest # not available in 22.04 if [ $(lsb_release -sc) != "jammy" ]; then - aptitude -f install python-enum34 libxenstore3.0 libnetcf1 libcroco3 -y + aptitude -f install python-enum34 libxenstore3.0 libnetcf1 libcroco3 libappindicator3-1 python-enum34-doc -y fi updatedb @@ -678,37 +655,22 @@ function install_virt_manager() { temp_export_path=$(locate libvirt.pc | head -n1 | awk '{print $1;}') libvirt_so_path="${temp_libvirt_so_path%/*}/" export_path="${temp_export_path%/*}/" - export PKG_CONFIG_PATH=$export_path cd /tmp || return - if [ ! 
-f libvirt-glib-3.0.0.tar.gz ]; then - wget -q https://libvirt.org/sources/glib/libvirt-glib-3.0.0.tar.gz - wget -q https://libvirt.org/sources/glib/libvirt-glib-3.0.0.tar.gz.asc - gpg --verify "libvirt-glib-3.0.0.tar.gz.asc" - - fi - # ToDo add blacklist - tar xf libvirt-glib-3.0.0.tar.gz - cd libvirt-glib-3.0.0 || return - aclocal && libtoolize --force - automake --add-missing - ./configure - mkdir -p /tmp/libvirt-glib_builded/DEBIAN - echo -e "Package: libvirt-glib-1.0-0\nVersion: 1.0.0\nArchitecture: $ARCH\nMaintainer: $MAINTAINER\nDescription: Custom libvirt-glib-1.0-0" > /tmp/libvirt-glib_builded/DEBIAN/control - make -j"$(nproc)" install DESTDIR=/tmp/libvirt-glib_builded - dpkg-deb --build --root-owner-group /tmp/libvirt-glib_builded - apt-get -y -o Dpkg::Options::="--force-overwrite" install /tmp/libvirt-glib_builded.deb - make -j"$(nproc)" - - # v4 is meson based - # sudo meson build -D system=true - cd /tmp || return - if [ ! -f gir1.2-libvirt-glib-1.0_1.0.0-1_amd64.deb ]; then - wget -q http://launchpadlibrarian.net/297448356/gir1.2-libvirt-glib-1.0_1.0.0-1_amd64.deb + if [ ! -d "libvirt-glib" ]; then + git clone https://gitlab.com/libvirt/libvirt-glib.git fi - dpkg --force-confold -i gir1.2-libvirt-glib-1.0_1.0.0-1_amd64.deb - + cd libvirt-glib + meson setup builddir + meson compile -C builddir + sudo ninja -C builddir install + # for some reason i have to run it twice + sudo ninja -C builddir install + # mkdir -p /usr/local/lib/girepository-1.0/ + # cp builddir/libvirt-glib/LibvirtGLib-1.0.typelib /usr/local/lib/girepository-1.0/ + # Namespace LibvirtGLib not available + cp builddir/libvirt-glib/LibvirtGLib-1.0.typelib /usr/lib/girepository-1.0/ /sbin/ldconfig if [ ! -d "virt-manager" ]; then @@ -716,18 +678,26 @@ function install_virt_manager() { echo "[+] Cloned Virt Manager repo" fi cd "virt-manager" || return - # py3 - #pip3 install . 
- python3 setup.py build - python3 setup.py install + # https://github.com/virt-manager/virt-manager/blob/main/INSTALL.md + meson setup build + meson install -C build if [ "$SHELL" = "/bin/zsh" ] || [ "$SHELL" = "/usr/bin/zsh" ] ; then echo "export LIBVIRT_DEFAULT_URI=qemu:///system" >> "$HOME/.zsh" + # echo "export GI_TYPELIB_PATH=/usr/local/lib/girepository-1.0:$GI_TYPELIB_PATH" >> "$HOME/.zsh" else echo "export LIBVIRT_DEFAULT_URI=qemu:///system" >> "$HOME/.bashrc" + # echo "export GI_TYPELIB_PATH=/usr/local/lib/girepository-1.0:$GI_TYPELIB_PATH" >> "$HOME/.bashrc" fi + + if [ -f /usr/share/virt-manager/local/share/glib-2.0/schemas/org.virt-manager.virt-manager.gschema.xml ]; then + cp /usr/share/virt-manager/local/share/glib-2.0/schemas/org.virt-manager.virt-manager.gschema.xml /usr/share/glib-2.0/schemas/ + elif [ -f /usr/local/share/glib-2.0/schemas/org.virt-manager.virt-manager.gschema.xml ]; then + cp /usr/local/share/glib-2.0/schemas/org.virt-manager.virt-manager.gschema.xml /usr/share/glib-2.0/schemas/ + fi + sudo glib-compile-schemas --strict /usr/share/glib-2.0/schemas/ - systemctl enable virtstoraged.service - systemctl start virtstoraged.service + systemctl enable virtstoraged.service && systemctl start virtstoraged.service + systemctl enable libvirtd.service && systemctl start libvirtd.service # i440FX-Issue Win7: Unable to complete install: 'XML error: The PCI controller with index='0' must be model='pci-root' for this machine type, but model='pcie-root' was found instead' # Workaround: Edit Overiew in XML view and delete all controller entries with type="pci" @@ -743,8 +713,7 @@ function install_kvm_linux() { aptitude install -f gtk-update-icon-cache -y 2>/dev/null # WSL support - aptitude install -f gcc make gnutls-bin - + aptitude install -f gcc make gnutls-bin -y install_libvirt systemctl enable libvirtd.service virtlogd.socket @@ -805,16 +774,17 @@ function replace_qemu_clues_public() { function replace_seabios_clues_public() { echo "[+] Generating SeaBios Kconfig" echo "[+] Fixing SeaBios antivms" - _sed_aux 's/Bochs/DELL/g' src/config.h 'Bochs was not replaced in src/config.h' + _sed_aux "s/Bochs/$BOCHS_SEABIOS_BLOCK_REPLACER/g" src/config.h 'Bochs was not replaced in src/config.h' _sed_aux "s/BOCHSCPU/$bochs_cpu_replacement/g" src/config.h 'BOCHSCPU was not replaced in src/config.h' - _sed_aux 's/"BOCHS "/"DELL"/g' src/config.h 'BOCHS was not replaced in src/config.h' - _sed_aux 's/BXPC/DELL/g' src/config.h 'BXPC was not replaced in src/config.h' + _sed_aux "s/BOCHS /$BOCHS_SEABIOS_BLOCK_REPLACER/g" src/config.h 'BOCHS was not replaced in src/config.h' + _sed_aux "s/BXPC/$BXPC_REPLACER/g" src/config.h 'BXPC was not replaced in src/config.h' _sed_aux "s/QEMU\/Bochs/$qemu_bochs_cpu/g" vgasrc/Kconfig 'QEMU\/Bochs was not replaced in vgasrc/Kconfig' _sed_aux "s/qemu /$qemu_space_replacement/g" vgasrc/Kconfig 'qemu was not replaced in vgasrc/Kconfig' _sed_aux "s/06\/23\/99/$src_misc_bios_table/g" src/misc.c 'change seabios date 1' _sed_aux "s/04\/01\/2014/$src_bios_table_date2/g" src/fw/biostables.c 'change seabios date 2' _sed_aux "s/01\/01\/2011/$src_fw_smbios_date/g" src/fw/smbios.c 'change seabios date 3' _sed_aux 's/"SeaBios"/"AMIBios"/g' src/fw/biostables.c 'change seabios to amibios' + _sed_aux 's/"SeaBIOS"/"AMIBios"/g' src/fw/biostables.c 'change seabios to amibios' FILES=( src/hw/blockcmd.c @@ -856,14 +826,6 @@ function replace_seabios_clues_public() { done } -function install_jemalloc() { - - # https://zapier.com/engineering/celery-python-jemalloc/ - if ! 
$(dpkg -l "libjemalloc*" | grep -q "ii libjemalloc"); then - aptitude install -f curl build-essential jq autoconf libjemalloc-dev -y - fi -} - function install_qemu() { cd /tmp || return @@ -949,8 +911,12 @@ function install_qemu() { mkdir -p /tmp/qemu-"$qemu_version"_builded/DEBIAN echo -e "Package: qemu\nVersion: $qemu_version\nArchitecture: $ARCH\nMaintainer: $MAINTAINER\nDescription: Custom antivm qemu" > /tmp/qemu-"$qemu_version"_builded/DEBIAN/control make -j"$(nproc)" install DESTDIR=/tmp/qemu-"$qemu_version"_builded - dpkg-deb --build --root-owner-group /tmp/qemu-"$qemu_version"_builded - apt-get -y -o Dpkg::Options::="--force-overwrite" install /tmp/qemu-"$qemu_version"_builded.deb + if [ "$OS" = "Linux" ]; then + dpkg-deb --build --root-owner-group /tmp/qemu-"$qemu_version"_builded + apt-get -y -o Dpkg::Options::="--force-overwrite" install /tmp/qemu-"$qemu_version"_builded.deb + elif [ "$OS" = "Darwin" ]; then + make -j"$(nproc)" install + fi # hack for libvirt/virt-manager if [ ! -f /usr/bin/qemu-system-x86_64-spice ]; then ln -s /usr/bin/qemu-system-x86_64 /usr/bin/qemu-system-x86_64-spice @@ -980,7 +946,7 @@ function install_qemu() { if [ "$OS" = "linux" ]; then dpkg --get-selections | grep "qemu" | xargs apt-mark hold dpkg --get-selections | grep "libvirt" | xargs apt-mark hold - apt-mark hold qemu libvirt + # apt-mark unhold qemu libvirt fi } @@ -989,11 +955,13 @@ function install_seabios() { cd /tmp || return echo '[+] Installing SeaBios dependencies' aptitude install -f git acpica-tools -y - if [ -d seabios ]; then - rm -r seabios + if [ ! -f "seabios_${seabios_version}.tar.gz" ]; then + rm "seabios_${seabios_version}" + wget https://github.com/coreboot/seabios/archive/refs/tags/rel-${seabios_version}.tar.gz -O "seabios_${seabios_version}.tar.gz" fi - if git clone https://github.com/coreboot/seabios.git; then - cd seabios || return + + if tar xf "seabios_${seabios_version}.tar.gz"; then + cd "seabios-rel-${seabios_version}" || return if declare -f -F "replace_seabios_clues"; then replace_seabios_clues else @@ -1002,12 +970,12 @@ function install_seabios() { # make help # make menuconfig -> BIOS tables -> disable Include default ACPI DSDT # get rid of this hack - make -j"$(nproc)" 2>/dev/null - # Windows 10(latest rev.) is uninstallable without ACPI_DSDT - # sed -i 's/CONFIG_ACPI_DSDT=y/CONFIG_ACPI_DSDT=n/g' .config sed -i 's/CONFIG_XEN=y/CONFIG_XEN=n/g' .config sed -i 's/PYTHON=python/PYTHON=python3/g' Makefile - if make -j "$(nproc)"; then + # PIP_BREAK_SYSTEM_PACKAGES=1 make -j"$(nproc)" 2>/dev/null + # Windows 10(latest rev.) is uninstallable without ACPI_DSDT + # sed -i 's/CONFIG_ACPI_DSDT=y/CONFIG_ACPI_DSDT=n/g' .config + if PIP_BREAK_SYSTEM_PACKAGES=1 make -j "$(nproc)"; then echo '[+] Replacing old bios.bin to new out/bios.bin' bios=0 SHA256_BIOS=$(shasum -a 256 out/bios.bin|awk '{print $1}') @@ -1175,6 +1143,7 @@ cat << EndOfHelp 5. ValueError: Namespace LibvirtGLib not available $ ./kvm-qemu.sh libvirt + Is due to missed LibvirtGLib-1.0.typelib inside of /usr/lib/girepository-1.0/ 6. ValueError: Namespace Libosinfo not available $ aptitude install -f libosinfo-1.0 @@ -1205,7 +1174,7 @@ function cloning() { exit 1 fi - which virt-manager + which virt-manager 2>/dev/null if [ $? -eq 1 ]; then echo "You need to install virt-manager. 
Run sudo $0 virtmanager" exit 1 @@ -1239,6 +1208,10 @@ function cloning() { + + + + EOF @@ -1275,6 +1248,7 @@ EOF echo "[+] Enjoy" } + # Doesn't work ${$1,,} COMMAND=$(echo "$1"|tr "[:upper:]" "[:lower:]") @@ -1298,15 +1272,15 @@ esac #check if start with root if [ "$EUID" -ne 0 ]; then - echo 'This script must be run as root' - exit 1 + echo 'This script must be run as root' + exit 1 fi OS="$(uname -s)" MAINTAINER="$(whoami)"_"$(hostname)" ARCH="$(dpkg --print-architecture)" -#add-apt-repository universe -#apt-get update && apt-get upgrade +# add-apt-repository universe +# apt-get update && apt-get upgrade #make case "$COMMAND" in @@ -1314,13 +1288,12 @@ case "$COMMAND" in issues;; 'all') configure_needreboot - aptitude install -f language-pack-UTF-8 -y + aptitude install -f language-pack-UTF-8 python3-pip -y install_qemu install_seabios install_kvm_linux # add check if server or desktop # install_virt_manager - # install_libguestfs # check if all features enabled virt-host-validate qemu systemctl daemon-reload @@ -1328,6 +1301,10 @@ case "$COMMAND" in _enable_tcp_bbr grub_iommu enable_sysrq + # check if is desktop, install virt-manager, ignore on server edition + if dpkg -l |grep -q "ii ubuntu-desktop"; then + install_virt_manager + fi ;; 'apparmor') install_apparmor;; @@ -1337,8 +1314,6 @@ case "$COMMAND" in install_seabios;; 'kvm') install_kvm_linux;; -'libguestfs') - install_libguestfs;; 'tcp_bbr') _enable_tcp_bbr;; 'replace_qemu') @@ -1385,8 +1360,6 @@ case "$COMMAND" in ;; 'grub') grub_iommu;; -'jemalloc') - install_jemalloc;; 'needreboot') configure_needreboot;; 'mosh') diff --git a/lib/cuckoo/common/abstracts.py b/lib/cuckoo/common/abstracts.py index f92105ea15c..1f830515cb9 100644 --- a/lib/cuckoo/common/abstracts.py +++ b/lib/cuckoo/common/abstracts.py @@ -13,16 +13,19 @@ import timeit import xml.etree.ElementTree as ET from builtins import NotImplementedError +from contextlib import suppress from pathlib import Path from typing import Dict, List try: import dns.resolver except ImportError: - print("Missed dependency -> pip3 install dnspython") + print("Missed dependency -> poetry install") + import PIL import requests +from data.dnsbl import dnsbl_servers from lib.cuckoo.common.config import Config from lib.cuckoo.common.constants import CUCKOO_ROOT from lib.cuckoo.common.dictionary import Dictionary @@ -60,6 +63,7 @@ HAVE_TLDEXTRACT = False repconf = Config("reporting") +integrations_conf = Config("integrations") _, categories_need_VM = load_categories() mitre, HAVE_MITRE, _ = mitre_load(repconf.mitre.enabled) @@ -644,7 +648,7 @@ def _get_snapshot(self, label): @param label: virtual machine name @return None or current snapshot @raise CuckooMachineError: if cannot find current snapshot or - when there are too many snapshots available + when there are too many snapshots available """ def _extract_creation_time(node): @@ -726,6 +730,7 @@ def set_path(self, analysis_path): self.network_path = os.path.join(self.analysis_path, "network") self.tlsmaster_path = os.path.join(self.analysis_path, "tlsmaster.txt") self.self_extracted = os.path.join(self.analysis_path, "selfextracted") + self.reports_path = os.path.join(self.analysis_path, "reports") def add_statistic_tmp(self, name, field, pretime): timediff = timeit.default_timer() - pretime @@ -833,7 +838,6 @@ def set_path(self, analysis_path): CuckooReportError(e) def yara_detected(self, name): - target = self.results.get("target", {}) if target.get("category") in ("file", "static") and target.get("file"): for keyword in ("cape_yara", 
"yara"): @@ -841,12 +845,14 @@ def yara_detected(self, name): if re.findall(name, yara_block["name"], re.I): yield "sample", self.results["target"]["file"]["path"], yara_block, self.results["target"]["file"] - for block in target["file"].get("extracted_files", []): - for keyword in ("cape_yara", "yara"): - for yara_block in block[keyword]: - if re.findall(name, yara_block["name"], re.I): - # we can't use here values from set_path - yield "sample", block["path"], yara_block, block + if target["file"].get("selfextract"): + for _, toolsblock in target["file"]["selfextract"].items(): + for block in toolsblock.get("extracted_files", []): + for keyword in ("cape_yara", "yara"): + for yara_block in block[keyword]: + if re.findall(name, yara_block["name"], re.I): + # we can't use here values from set_path + yield "sample", block["path"], yara_block, block for block in self.results.get("CAPE", {}).get("payloads", []) or []: for sub_keyword in ("cape_yara", "yara"): @@ -854,11 +860,13 @@ def yara_detected(self, name): if re.findall(name, yara_block["name"], re.I): yield sub_keyword, block["path"], yara_block, block - for subblock in block.get("extracted_files", []): - for keyword in ("cape_yara", "yara"): - for yara_block in subblock[keyword]: - if re.findall(name, yara_block["name"], re.I): - yield "sample", subblock["path"], yara_block, block + if block.get("selfextract", {}): + for _, toolsblock in block["selfextract"].items(): + for subblock in toolsblock.get("extracted_files", []): + for keyword in ("cape_yara", "yara"): + for yara_block in subblock[keyword]: + if re.findall(name, yara_block["name"], re.I): + yield "sample", subblock["path"], yara_block, block for keyword in ("procdump", "procmemory", "extracted", "dropped"): if self.results.get(keyword) is not None: @@ -878,27 +886,35 @@ def yara_detected(self, name): if re.findall(name, yara_block["name"], re.I): yield "extracted_pe", pe["path"], yara_block, block - for subblock in block.get("extracted_files", []): - for keyword in ("cape_yara", "yara"): - for yara_block in subblock[keyword]: - if re.findall(name, yara_block["name"], re.I): - yield "sample", subblock["path"], yara_block, block + if block.get("selfextract", {}): + for _, toolsblock in block["selfextract"].items(): + for subblock in toolsblock.get("extracted_files", []): + for keyword in ("cape_yara", "yara"): + for yara_block in subblock[keyword]: + if re.findall(name, yara_block["name"], re.I): + yield "sample", subblock["path"], yara_block, block macro_path = os.path.join(CUCKOO_ROOT, "storage", "analyses", str(self.results["info"]["id"]), "macros") for macroname in self.results.get("static", {}).get("office", {}).get("Macro", {}).get("info", []) or []: for yara_block in self.results["static"]["office"]["Macro"]["info"].get("macroname", []) or []: for sub_block in self.results["static"]["office"]["Macro"]["info"]["macroname"].get(yara_block, []) or []: if re.findall(name, sub_block["name"], re.I): - yield "macro", os.path.join(macro_path, macroname), sub_block, self.results["static"]["office"]["Macro"][ - "info" - ] + yield ( + "macro", + os.path.join(macro_path, macroname), + sub_block, + self.results["static"]["office"]["Macro"]["info"], + ) if self.results.get("static", {}).get("office", {}).get("XLMMacroDeobfuscator", False): for yara_block in self.results["static"]["office"]["XLMMacroDeobfuscator"].get("info", []).get("yara_macro", []) or []: if re.findall(name, yara_block["name"], re.I): - yield "macro", os.path.join(macro_path, "xlm_macro"), yara_block, 
self.results["static"]["office"][ - "XLMMacroDeobfuscator" - ]["info"] + yield ( + "macro", + os.path.join(macro_path, "xlm_macro"), + yara_block, + self.results["static"]["office"]["XLMMacroDeobfuscator"]["info"], + ) def signature_matched(self, signame: str) -> bool: # Check if signature has matched (useful for ordered signatures) @@ -964,7 +980,6 @@ def _get_ip_by_host(self, hostname): ) def _get_ip_by_host_dns(self, hostname): - ips = [] try: @@ -1085,7 +1100,7 @@ def check_file(self, pattern, regex=False, all=False): @return: depending on the value of param 'all', either a set of matched items or the first matched item """ - subject = self.results["behavior"]["summary"]["files"] + subject = self.results.get("behavior", {}).get("summary", {}).get("files", []) return self._check_value(pattern=pattern, subject=subject, regex=regex, all=all) def check_read_file(self, pattern, regex=False, all=False): @@ -1098,7 +1113,7 @@ def check_read_file(self, pattern, regex=False, all=False): @return: depending on the value of param 'all', either a set of matched items or the first matched item """ - subject = self.results["behavior"]["summary"]["read_files"] + subject = self.results.get("behavior", {}).get("summary", {}).get("read_files", []) return self._check_value(pattern=pattern, subject=subject, regex=regex, all=all) def check_write_file(self, pattern, regex=False, all=False): @@ -1111,7 +1126,7 @@ def check_write_file(self, pattern, regex=False, all=False): @return: depending on the value of param 'all', either a set of matched items or the first matched item """ - subject = self.results["behavior"]["summary"]["write_files"] + subject = self.results.get("behavior", {}).get("summary", {}).get("write_files", []) return self._check_value(pattern=pattern, subject=subject, regex=regex, all=all) def check_delete_file(self, pattern, regex=False, all=False): @@ -1124,7 +1139,7 @@ def check_delete_file(self, pattern, regex=False, all=False): @return: depending on the value of param 'all', either a set of matched items or the first matched item """ - subject = self.results["behavior"]["summary"]["delete_files"] + subject = self.results.get("behavior", {}).get("summary", {}).get("delete_files", []) return self._check_value(pattern=pattern, subject=subject, regex=regex, all=all) def check_key(self, pattern, regex=False, all=False): @@ -1137,7 +1152,7 @@ def check_key(self, pattern, regex=False, all=False): @return: depending on the value of param 'all', either a set of matched items or the first matched item """ - subject = self.results["behavior"]["summary"]["keys"] + subject = self.results.get("behavior", {}).get("summary", {}).get("keys", []) return self._check_value(pattern=pattern, subject=subject, regex=regex, all=all) def check_read_key(self, pattern, regex=False, all=False): @@ -1150,7 +1165,7 @@ def check_read_key(self, pattern, regex=False, all=False): @return: depending on the value of param 'all', either a set of matched items or the first matched item """ - subject = self.results["behavior"]["summary"]["read_keys"] + subject = self.results.get("behavior", {}).get("summary", {}).get("read_keys", []) return self._check_value(pattern=pattern, subject=subject, regex=regex, all=all) def check_write_key(self, pattern, regex=False, all=False): @@ -1163,7 +1178,7 @@ def check_write_key(self, pattern, regex=False, all=False): @return: depending on the value of param 'all', either a set of matched items or the first matched item """ - subject = self.results["behavior"]["summary"]["write_keys"] + 
subject = self.results.get("behavior", {}).get("summary", {}).get("write_keys", []) return self._check_value(pattern=pattern, subject=subject, regex=regex, all=all) def check_delete_key(self, pattern, regex=False, all=False): @@ -1176,7 +1191,7 @@ def check_delete_key(self, pattern, regex=False, all=False): @return: depending on the value of param 'all', either a set of matched items or the first matched item """ - subject = self.results["behavior"]["summary"]["delete_keys"] + subject = self.results.get("behavior", {}).get("summary", {}).get("delete_keys", []) return self._check_value(pattern=pattern, subject=subject, regex=regex, all=all) def check_mutex(self, pattern, regex=False, all=False): @@ -1189,7 +1204,7 @@ def check_mutex(self, pattern, regex=False, all=False): @return: depending on the value of param 'all', either a set of matched items or the first matched item """ - subject = self.results["behavior"]["summary"]["mutexes"] + subject = self.results.get("behavior", {}).get("summary", {}).get("mutexes", []) return self._check_value(pattern=pattern, subject=subject, regex=regex, all=all, ignorecase=False) def check_started_service(self, pattern, regex=False, all=False): @@ -1202,7 +1217,7 @@ def check_started_service(self, pattern, regex=False, all=False): @return: depending on the value of param 'all', either a set of matched items or the first matched item """ - subject = self.results["behavior"]["summary"]["started_services"] + subject = self.results.get("behavior", {}).get("summary", {}).get("started_services", []) return self._check_value(pattern=pattern, subject=subject, regex=regex, all=all) def check_created_service(self, pattern, regex=False, all=False): @@ -1215,7 +1230,7 @@ def check_created_service(self, pattern, regex=False, all=False): @return: depending on the value of param 'all', either a set of matched items or the first matched item """ - subject = self.results["behavior"]["summary"]["created_services"] + subject = self.results.get("behavior", {}).get("summary", {}).get("created_services", []) return self._check_value(pattern=pattern, subject=subject, regex=regex, all=all) def check_executed_command(self, pattern, regex=False, all=False, ignorecase=True): @@ -1230,7 +1245,7 @@ def check_executed_command(self, pattern, regex=False, all=False, ignorecase=Tru @return: depending on the value of param 'all', either a set of matched items or the first matched item """ - subject = self.results["behavior"]["summary"]["executed_commands"] + subject = self.results.get("behavior", {}).get("summary", {}).get("executed_commands", []) return self._check_value(pattern=pattern, subject=subject, regex=regex, all=all, ignorecase=ignorecase) def check_api(self, pattern, process=None, regex=False, all=False): @@ -1349,15 +1364,44 @@ def check_argument(self, pattern, name=None, api=None, category=None, process=No return None + def check_threatfox(self, searchterm: str): + if not integrations_conf.abusech.threatfox or not integrations_conf.abusech.apikey: + return + try: + response = requests.post( + "https://threatfox-api.abuse.ch/api/v1/", + json={"query": "search_ioc", "search_term": searchterm, "exact_match": True}, + headers={"Auth-Key": integrations_conf.abusech.apikey, "User-Agent": "CAPE Sandbox"}, + ) + return response.json() + except Exception as e: + log.error("ThreatFox error: %s", str(e)) + + def check_dnsbbl(self, domain: str): + """ + https://en.wikipedia.org/wiki/Domain_Name_System_blocklist + @param domain: domain to check in black list + """ + try: + ip_address = 
socket.gethostbyname(domain) + for server in dnsbl_servers: + query = ".".join(reversed(str(ip_address).split("."))) + "." + server + with suppress(socket.error): + threading.Thread(target=socket.gethostbyname, args=(query,)).start() + return True, server # Found blacklisted server + return False, None # No blacklisted server found + except socket.gaierror: + return "Invalid domain or IP address.", None + def check_ip(self, pattern, regex=False, all=False): """Checks for an IP address being contacted. @param pattern: string or expression to check for. @param regex: boolean representing if the pattern is a regular - expression or not and therefore should be compiled. + expression or not and therefore should be compiled. @param all: boolean representing if all results should be returned - in a set or not + in a set or not @return: depending on the value of param 'all', either a set of - matched items or the first matched item + matched items or the first matched item """ if all: @@ -1387,11 +1431,11 @@ def check_domain(self, pattern, regex=False, all=False): """Checks for a domain being contacted. @param pattern: string or expression to check for. @param regex: boolean representing if the pattern is a regular - expression or not and therefore should be compiled. + expression or not and therefore should be compiled. @param all: boolean representing if all results should be returned - in a set or not + in a set or not @return: depending on the value of param 'all', either a set of - matched items or the first matched item + matched items or the first matched item """ if all: @@ -1421,11 +1465,11 @@ def check_url(self, pattern, regex=False, all=False): """Checks for a URL being contacted. @param pattern: string or expression to check for. @param regex: boolean representing if the pattern is a regular - expression or not and therefore should be compiled. + expression or not and therefore should be compiled. 
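`check_dnsbbl` above implements the classic DNSBL query: reverse the IPv4 octets, append the blocklist zone, and resolve the resulting name. A standalone sketch of that lookup (the zone shown is only an example; the project's actual list comes from `data.dnsbl.dnsbl_servers`). An NXDOMAIN answer means the address is not listed:

```python
import socket

def dnsbl_listed(ip_address: str, zone: str = "zen.spamhaus.org") -> bool:
    """Return True if ip_address resolves inside the given DNS blocklist zone."""
    query = ".".join(reversed(ip_address.split("."))) + "." + zone
    try:
        socket.gethostbyname(query)  # e.g. 2.0.0.127.zen.spamhaus.org
        return True                  # any A record => listed
    except socket.gaierror:
        return False                 # NXDOMAIN => not listed
```
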
@param all: boolean representing if all results should be returned - in a set or not + in a set or not @return: depending on the value of param 'all', either a set of - matched items or the first matched item + matched items or the first matched item """ if all: @@ -1722,7 +1766,7 @@ def update(self) -> bool: try: req = requests.get(self.downloadurl, headers=headers, verify=True) except requests.exceptions.RequestException as e: - log.warn("Error downloading feed for %s: %s", self.feedname, e) + log.warning("Error downloading feed for %s: %s", self.feedname, e) return False if req.status_code == 200: self.downloaddata = req.content diff --git a/lib/cuckoo/common/admin_utils.py b/lib/cuckoo/common/admin_utils.py index a25b6c9bd96..9808bf72e80 100644 --- a/lib/cuckoo/common/admin_utils.py +++ b/lib/cuckoo/common/admin_utils.py @@ -25,9 +25,16 @@ try: from paramiko import AutoAddPolicy, ProxyCommand, SSHClient, SSHConfig - from paramiko.ssh_exception import AuthenticationException, BadHostKeyException, PasswordRequiredException, ProxyCommandFailure + from paramiko.ssh_exception import ( + AuthenticationException, + BadHostKeyException, + PasswordRequiredException, + ProxyCommandFailure, + SSHException, + ) from scp import SCPClient, SCPException + conf = SSHConfig() conf.parse(open(os.path.expanduser("~/.ssh/config"))) @@ -36,11 +43,13 @@ print("poetry run pip install -U paramiko scp") HAVE_PARAMIKO = False + from lib.cuckoo.common.colors import green, red +from lib.cuckoo.common.sshclient import SSHJumpClient from utils.community_blocklist import blocklist try: - from admin_conf import ( # POSTPROCESS, + from admin_conf import ( CAPE_DIST_URL, CAPE_PATH, EXCLUDE_CAPE_FILES, @@ -58,8 +67,6 @@ except ModuleNotFoundError: sys.exit("[-] You need to create admin_conf.py, see admin_conf.py_example") -# Only needed when jumping over nodes -from lib.cuckoo.common.sshclient import SSHJumpClient # this is bad, but getLogger doesn't work, this can be cause of duplication of log entries if used outside logging.basicConfig(level=logging.INFO) @@ -232,7 +239,6 @@ def file_recon(file, yara_category="CAPE"): if not Path(file).exists(): return - global POSTPROCESS LOCAL_SHA256 = False filename = os.path.basename(file) OWNER = "cape:cape" @@ -245,10 +251,11 @@ def file_recon(file, yara_category="CAPE"): if b"SignatureMock.run" in f: return if b"(TcrSignature):" in f or b"(Signature)" in f: - TARGET = f"{CAPE_PATH}modules/signatures/{filename}" + TARGET = f"{CAPE_PATH}custom/signatures/{filename}" elif filename in ("loader.exe", "loader_x64.exe"): TARGET = f"{CAPE_PATH}/analyzer/windows/bin/{filename}" - POSTPROCESS = False + elif "/binary/" in file or "/binaries/" in file: + TARGET = f"{CAPE_PATH}custom/yara/binaries/{filename}" elif b"def _generator(self" in f: TARGET = f"{VOL_PATH}{filename}" OWNER = "root:staff" @@ -274,10 +281,8 @@ def file_recon(file, yara_category="CAPE"): TARGET = f"{CAPE_PATH}/lib/cuckoo/common/{filename}" elif b"class Analyzer:" in f and b"class PipeHandler(Thread):" in f and b"class PipeServer(Thread):" in f: TARGET = f"{CAPE_PATH}analyzer/windows/{filename}" - POSTPROCESS = False elif filename in ("capemon.dll", "capemon_x64.dll"): TARGET = f"{CAPE_PATH}analyzer/windows/dll/{filename}" - POSTPROCESS = False # generic deployer of files elif file.startswith("CAPEv2/"): # Remove CAPEv2/ from path to build new path @@ -285,7 +290,6 @@ def file_recon(file, yara_category="CAPE"): elif filename.endswith(".service"): TARGET = "/lib/systemd/system/{filename}" OWNER = "root:root" - 
POSTPROCESS = "systemctl daemon-reload" elif "Extractors/StandAlone/" in file: TARGET = f"{CAPE_PATH}custom/parsers/" stem = "Extractors/StandAlone" @@ -331,10 +335,10 @@ def _connect_via_jump_box(server: str, ssh_proxy: SSHClient): server, username=JUMP_BOX_USERNAME, key_filename=host.get("identityfile"), - # look_for_keys=True, - # allow_agent=True, + banner_timeout=200, + look_for_keys=False, + allow_agent=True, # disabled_algorithms=dict(pubkeys=["rsa-sha2-512", "rsa-sha2-256"]), - # port=ssh_port, ) sockets[server] = ssh else: @@ -350,14 +354,14 @@ def _connect_via_jump_box(server: str, ssh_proxy: SSHClient): server, username=REMOTE_SERVER_USER, key_filename=host.get("identityfile"), - # look_for_keys=False, - # allow_agent=True, - # port=ssh_port, + banner_timeout=200, + look_for_keys=False, + allow_agent=True, sock=ProxyCommand(host.get("proxycommand")), ) except (BadHostKeyException, AuthenticationException, PasswordRequiredException) as e: sys.exit( - f"Connect error: {str(e)}. Also pay attention to this log for more details /var/log/auth.log and paramiko might need update" + f"Connect error: {str(e)}. Also pay attention to this log for more details /var/log/auth.log and paramiko might need update.\nAlso ensure that you have added your public ssh key to /root/.ssh/authorized_keys" ) except ProxyCommandFailure as e: # Todo reconnect @@ -372,17 +376,20 @@ def execute_command_on_all(remote_command, servers: list, ssh_proxy: SSHClient): _, ssh_stdout, _ = ssh.exec_command(remote_command) ssh_out = ssh_stdout.read().decode("utf-8").strip() if "Active: active (running)" in ssh_out and "systemctl status" not in remote_command: - log.info("[+] Service " + green("restarted successfully and is UP")) + log.info("[+] Service %s", green("restarted successfully and is UP")) else: + srv = str(server.split(".")[1]) if ssh_out: - log.info(green(f"[+] {server} - {ssh_out}")) + log.info(green(f"[+] {srv} - {ssh_out}")) else: - log.info(green(f"[+] {server}")) + log.info(green(f"[+] {srv}")) ssh.close() except TimeoutError as e: sys.exit(f"Did you forget to use jump box? {str(e)}") + except SSHException as e: + log.error("Can't read remote bufffer: %s", str(e)) except Exception as e: - log.error(e, exc_info=True) + log.exception(e) def bulk_deploy(files, yara_category, dry_run=False, servers: list = [], ssh_proxy: SSHClient = False): @@ -393,7 +400,7 @@ def bulk_deploy(files, yara_category, dry_run=False, servers: list = [], ssh_pro files.remove(original_name) continue - if file.endswith(("processor_tests.py", "reporter_tests.py", "admin.py")): + if file.endswith(("processor_tests.py", "reporter_tests.py", "admin.py", ".conf")): files.remove(original_name) continue @@ -462,7 +469,7 @@ def deploy_file(queue, ssh_proxy: SSHClient): _, ssh_stdout, _ = ssh.exec_command(f"sha256sum {remote_file} | cut -d' ' -f1") remote_sha256 = ssh_stdout.read().strip().decode("utf-8") if local_sha256 == remote_sha256: - log.info("[+] %s - Hashes are %s: %s - %s", server, green("correct"), local_sha256, remote_file) + log.info("[+] %s - Hashes are %s: %s - %s", server.split(".")[1], green("correct"), local_sha256, remote_file) else: log.info( "[-] %s - Hashes are %s: \n\tLocal: %s\n\tRemote: %s - %s", @@ -507,9 +514,9 @@ def delete_file(queue, ssh_proxy: SSHClient): error = 1 if not error: - log.info(green(f"Completed! {remote_file}\n")) + log.info(green("Completed! %s\n", remote_file)) else: - log.info(red(f"Completed with errors. {remote_file}\n")) + log.info(red("Completed with errors. 
%s\n", remote_file)) queue.task_done() return error_list diff --git a/lib/cuckoo/common/aplib.py b/lib/cuckoo/common/aplib.py deleted file mode 100644 index 544faf5166b..00000000000 --- a/lib/cuckoo/common/aplib.py +++ /dev/null @@ -1,162 +0,0 @@ -#!/usr/bin/env python3 -"""A pure Python module for decompressing aPLib compressed data - -Adapted from the original C source code from http://ibsensoftware.com/files/aPLib-1.1.1.zip -Approximately 20 times faster than other Python implementations. -Compatible with both Python 2 and 3. -""" -import struct -from binascii import crc32 -from io import BytesIO - -__all__ = ["APLib", "decompress"] -__version__ = "0.6" -__author__ = "Sandor Nemes" - - -class APLib: - - __slots__ = "source", "destination", "tag", "bitcount", "strict" - - def __init__(self, source, strict=True): - self.source = BytesIO(source) - self.destination = bytearray() - self.tag = 0 - self.bitcount = 0 - self.strict = bool(strict) - - def getbit(self): - # check if tag is empty - self.bitcount -= 1 - if self.bitcount < 0: - # load next tag - self.tag = ord(self.source.read(1)) - self.bitcount = 7 - - # shift bit out of tag - bit = self.tag >> 7 & 1 - self.tag <<= 1 - - return bit - - def getgamma(self): - result = 1 - - # input gamma2-encoded bits - while True: - result = (result << 1) + self.getbit() - if not self.getbit(): - break - - return result - - def depack(self): - r0 = -1 - lwm = 0 - done = False - - try: - - # first byte verbatim - self.destination += self.source.read(1) - - # main decompression loop - while not done: - if self.getbit(): - if self.getbit(): - if self.getbit(): - offs = 0 - for _ in range(4): - offs = (offs << 1) + self.getbit() - - if offs: - self.destination.append(self.destination[-offs]) - else: - self.destination.append(0) - - lwm = 0 - else: - offs = ord(self.source.read(1)) - length = 2 + (offs & 1) - offs >>= 1 - - if offs: - for _ in range(length): - self.destination.append(self.destination[-offs]) - else: - done = True - - r0 = offs - lwm = 1 - else: - offs = self.getgamma() - - if lwm == 0 and offs == 2: - offs = r0 - length = self.getgamma() - - for _ in range(length): - self.destination.append(self.destination[-offs]) - else: - if lwm == 0: - offs -= 3 - else: - offs -= 2 - - offs <<= 8 - offs += ord(self.source.read(1)) - length = self.getgamma() - - if offs >= 32000: - length += 1 - if offs >= 1280: - length += 1 - if offs < 128: - length += 2 - - for _ in range(length): - self.destination.append(self.destination[-offs]) - - r0 = offs - - lwm = 1 - else: - self.destination += self.source.read(1) - lwm = 0 - - except (TypeError, IndexError): - if self.strict: - raise RuntimeError("aPLib decompression error") - - return bytes(self.destination) - - def pack(self): - raise NotImplementedError - - -def decompress(data, strict=False): - packed_size = None - packed_crc = None - orig_size = None - orig_crc = None - - if data.startswith(b"AP32") and len(data) >= 24: - # data has an aPLib header - header_size, packed_size, packed_crc, orig_size, orig_crc = struct.unpack_from("=IIIII", data, 4) - data = data[header_size : header_size + packed_size] - - if strict: - if packed_size is not None and packed_size != len(data): - raise RuntimeError("Packed data size is incorrect") - if packed_crc is not None and packed_crc != crc32(data): - raise RuntimeError("Packed data checksum is incorrect") - - result = APLib(data, strict=strict).depack() - - if strict: - if orig_size is not None and orig_size != len(result): - raise RuntimeError("Unpacked data size 
is incorrect") - if orig_crc is not None and orig_crc != crc32(result): - raise RuntimeError("Unpacked data checksum is incorrect") - - return result diff --git a/lib/cuckoo/common/blzpack.py b/lib/cuckoo/common/blzpack.py deleted file mode 100644 index 03b6a371c25..00000000000 --- a/lib/cuckoo/common/blzpack.py +++ /dev/null @@ -1,89 +0,0 @@ -# included from https://github.com/sysopfb/brieflz - -import binascii -import os -import struct -import zlib -from ctypes import byref, c_int, cdll, create_string_buffer - -CURR_DIR = os.path.abspath(os.path.dirname(__file__)) -LIB_PATH = os.path.join(CURR_DIR, "blzpack_lib.so") -brieflz = cdll.LoadLibrary(LIB_PATH) - - -DEFAULT_BLOCK_SIZE = 1024 * 1024 - - -def compress_data(data, blocksize, level): - compressed_data = "" - while len(data) > 0: - buf = create_string_buffer(data[:blocksize]) - cb = c_int(len(buf)) - cbOut = brieflz.blz_max_packed_size(blocksize) - packed = create_string_buffer(cbOut) - workmem = create_string_buffer(brieflz.blz_workmem_size_level(blocksize, 1)) - cbOut = c_int(cbOut) - retval = brieflz.blz_pack_level(byref(buf), byref(packed), cb, byref(workmem), level) - if retval > 0: - temp = packed.raw[:retval] - tempret = ( - struct.pack( - ">IIIIII", - 1651276314, - level, - len(temp), - zlib.crc32(temp) % (1 << 32), - len(buf), - zlib.crc32(data[:blocksize]) % (1 << 32), - ) - + temp - ) - compressed_data += tempret - else: - print("Compression Error") - return None - data = data[blocksize:] - return compressed_data - - -def decompress_data(data, blocksize=DEFAULT_BLOCK_SIZE, level=1): - decompressed_data = b"" - # max_packed_size = brieflz.blz_max_packed_size(blocksize) - - (magic, level, packedsize, crc, hdr_depackedsize, crc2) = struct.unpack_from(">IIIIII", data) - data = data[24:] - while magic == 0x626C7A1A and len(data) > 0: - compressed_data = create_string_buffer(data[:packedsize]) - workdata = create_string_buffer(blocksize) - depackedsize = brieflz.blz_depack(byref(compressed_data), byref(workdata), c_int(hdr_depackedsize)) - if depackedsize != hdr_depackedsize: - print("Decompression error") - print(f"DepackedSize: {depackedsize}\nHdrVal: {hdr_depackedsize}") - return None - decompressed_data += workdata.raw[:depackedsize] - data = data[packedsize:] - if len(data) > 0: - (magic, level, packedsize, crc, hdr_depackedsize, crc2) = struct.unpack_from(">IIIIII", data) - data = data[24:] - else: - break - return decompressed_data - - -def main(): - # blocksize = DEFAULT_BLOCK_SIZE - blocksize = 100 - level = 1 - data = "This is a test of brieflz compression" * 100 - retval = compress_data(data, blocksize, level) - if retval is not None: - print("Compression SUCCESS!\nCompressed Data: ") - print(binascii.hexlify(retval)) - retval = decompress_data(retval, blocksize, level) - if retval is not None and retval == data: - print("Decompress SUCCESS!\nDecompress Data: ") - print(retval) - - -if __name__ == "__main__": - main() diff --git a/lib/cuckoo/common/blzpack_lib.so b/lib/cuckoo/common/blzpack_lib.so deleted file mode 100755 index 5c12be5afea..00000000000 Binary files a/lib/cuckoo/common/blzpack_lib.so and /dev/null differ diff --git a/lib/cuckoo/common/cape_utils.py b/lib/cuckoo/common/cape_utils.py index 060ae70588c..6299e3c4d22 100644 --- a/lib/cuckoo/common/cape_utils.py +++ b/lib/cuckoo/common/cape_utils.py @@ -1,11 +1,10 @@ import hashlib import logging -import os -import tempfile + +# import tempfile from collections.abc import Iterable, Mapping +from contextlib import suppress from pathlib import Path -from types 
import ModuleType -from typing import Dict, Tuple from lib.cuckoo.common.config import Config from lib.cuckoo.common.constants import CUCKOO_ROOT @@ -20,7 +19,18 @@ except ImportError: HAVE_PYDEEP = False +HAS_MWCP = False +HAS_MALWARECONFIGS = False +HAVE_CAPE_EXTRACTORS = False +with suppress(ImportError): + from cape_parsers import load_cape_parsers, load_malwareconfig_parsers, load_mwcp_parsers # load_malduck_parsers + + HAS_MWCP = True + HAS_MALWARECONFIGS = True + HAVE_CAPE_EXTRACTORS = True +mwcp_decoders = {} +rat_decoders = {} cape_malware_parsers = {} # Config variables @@ -44,56 +54,19 @@ HAVE_PEFILE = True except ImportError: - print("Missed pefile library. Install it with: pip3 install pefile") + print("Missed pefile library. Install it with: poetry install") HAVE_PEFILE = False +if process_cfg.mwcp.enabled and HAS_MWCP: + mwcp_decoders, mwcp = load_mwcp_parsers() + HAS_MWCP = bool(mwcp_decoders) -def load_mwcp_parsers() -> Tuple[Dict[str, str], ModuleType]: - if not process_cfg.mwcp.enabled: - return {}, False - # Import All config parsers - try: - import mwcp - - logging.getLogger("mwcp").setLevel(logging.CRITICAL) - mwcp.register_parser_directory(os.path.join(CUCKOO_ROOT, process_cfg.mwcp.modules_path)) - _malware_parsers = {block.name.rsplit(".", 1)[-1]: block.name for block in mwcp.get_parser_descriptions(config_only=False)} - assert "MWCP_TEST" in _malware_parsers - return _malware_parsers, mwcp - except ImportError as e: - log.info("Missed MWCP -> pip3 install mwcp\nDetails: %s", e) - return {}, False - - -malware_parsers, mwcp = load_mwcp_parsers() -HAS_MWCP = bool(malware_parsers) - - -def load_malwareconfig_parsers() -> Tuple[bool, dict, ModuleType]: - if not process_cfg.ratdecoders.enabled: - return False, False, False - try: - from malwareconfig import fileparser - from malwareconfig.modules import __decoders__ - - if process_cfg.ratdecoders.modules_path: - from lib.cuckoo.common.load_extra_modules import ratdecodedr_load_decoders - - ratdecoders_local_modules = ratdecodedr_load_decoders([os.path.join(CUCKOO_ROOT, process_cfg.ratdecoders.modules_path)]) - if ratdecoders_local_modules: - __decoders__.update(ratdecoders_local_modules) - assert "TestRats" in __decoders__ - return True, __decoders__, fileparser - except ImportError: - log.info("Missed RATDecoders -> pip3 install malwareconfig") - except Exception as e: - log.error(e, exc_info=True) - return False, False, False - - -HAS_MALWARECONFIGS, __decoders__, fileparser = load_malwareconfig_parsers() +if not process_cfg.ratdecoders.enabled and HAS_MALWARECONFIGS: + HAS_MALWARECONFIGS, rat_decoders, fileparser = load_malwareconfig_parsers() HAVE_MALDUCK = False +""" +# ToDo move if process_cfg.malduck.enabled: try: # from malduck.extractor.loaders import load_modules @@ -114,15 +87,24 @@ def load_malwareconfig_parsers() -> Tuple[bool, dict, ModuleType]: assert "test_malduck" in malduck_modules_names except ImportError: log.info("Missed MalDuck -> pip3 install git+https://github.com/CERT-Polska/malduck/") +""" -HAVE_CAPE_EXTRACTORS = False if process_cfg.CAPE_extractors.enabled: - from lib.cuckoo.common.load_extra_modules import cape_load_decoders - - cape_malware_parsers = cape_load_decoders(CUCKOO_ROOT) + from lib.cuckoo.common.load_extra_modules import cape_load_custom_decoders + + cape_malware_parsers = {} + if HAVE_CAPE_EXTRACTORS: + exclude_parsers = [] + if process_cfg.CAPE_extractors.parsers: + exclude_parsers = [parser_name.strip() for parser_name in process_cfg.CAPE_extractors.parsers.split(",")] + 
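cape_utils now imports the external `cape_parsers` package inside `contextlib.suppress(ImportError)` and flips capability flags instead of crashing when the package is absent. The guard pattern on its own (a sketch; flag and function names as in the hunk):

```python
from contextlib import suppress

HAVE_CAPE_EXTRACTORS = False
with suppress(ImportError):
    from cape_parsers import load_cape_parsers  # optional dependency
    HAVE_CAPE_EXTRACTORS = True

# Later code only calls load_cape_parsers() when HAVE_CAPE_EXTRACTORS is True.
```
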
cape_malware_parsers = load_cape_parsers(load=process_cfg.CAPE_extractors.parsers, exclude_parsers=exclude_parsers) + # Custom overwrites core + cape_malware_parsers.update(cape_load_custom_decoders(CUCKOO_ROOT)) if cape_malware_parsers: HAVE_CAPE_EXTRACTORS = True - assert "test cape" in cape_malware_parsers + if "test cape" not in cape_malware_parsers: + log.info("Missed cape-parsers! Run: poetry install") + suppress_parsing_list = ["Cerber", "Emotet_Payload", "Ursnif", "QakBot"] @@ -134,7 +116,7 @@ def load_malwareconfig_parsers() -> Tuple[bool, dict, ModuleType]: BUFSIZE = int(cfg.processing.analysis_size_limit) -def hash_file(method, path): +def hash_file(method, path: str) -> str: """Calculates an hash on a file by path. @param method: callable hashing method @param path: file path @@ -161,6 +143,17 @@ def convert(data): def is_duplicated_binary(file_info: dict, cape_file: dict, append_file: bool) -> bool: + """ + Determines if a binary file is a duplicate based on various criteria. + + Args: + file_info (dict): Information about the file being checked. + cape_file (dict): Information about the existing CAPE file. + append_file (bool): Flag indicating whether to append the file. + + Returns: + bool: False if the file is determined to be a duplicate, otherwise returns the value of append_file. + """ if HAVE_PYDEEP: ssdeep_grade = pydeep.compare(file_info["ssdeep"].encode(), cape_file["ssdeep"].encode()) if ssdeep_grade >= ssdeep_threshold: @@ -180,9 +173,25 @@ def is_duplicated_binary(file_info: dict, cape_file: dict, append_file: bool) -> return append_file -def static_config_parsers(cape_name, file_path, file_data): +def static_config_parsers(cape_name: str, file_path: str, file_data: bytes) -> dict: + """ + Process CAPE Yara hits and extract configuration data using various parsers. + + This function attempts to extract configuration data from a given file using different parsers + such as CAPE extractors, DC3-MWCP, and Malwareconfigs. The function returns a dictionary containing + the extracted configuration data. + + Args: + cape_name (str): The name of the CAPE parser to use. + file_path (str): The path to the file being analyzed. + file_data (bytes): The binary data of the file being analyzed. + + Returns: + dict: A dictionary containing the extracted configuration data. If no configuration data is + extracted, an empty dictionary is returned. + """ """Process CAPE Yara hits""" - cape_config = {cape_name: {}} + cape_config = {} parser_loaded = False # CAPE - pure python parsers # MWCP @@ -202,23 +211,23 @@ def static_config_parsers(cape_name, file_path, file_data): # python3 map object returns iterator by default, not list and not serializeable in JSON. if isinstance(value, map): value = list(value) - cape_config[cape_name].update({key: [value]}) + cape_config.setdefault(cape_name, {}).update({key: [value]}) parser_loaded = True elif isinstance(cape_configraw, dict): for key, value in cape_configraw.items(): # python3 map object returns iterator by default, not list and not serializeable in JSON. 
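The loop at this point normalizes whatever a CAPE extractor returns: `map` objects are materialized into lists (they are not JSON-serializable), every value is wrapped in a list, and results are merged under the parser name via `setdefault`. Condensed into one helper (names are illustrative, not the module's API):

```python
def merge_parser_output(cape_config: dict, cape_name: str, raw: dict) -> None:
    """Fold one parser's raw output into the shared cape_config mapping."""
    for key, value in raw.items():
        if isinstance(value, map):  # map objects are lazy iterators, not JSON-serializable
            value = list(value)
        cape_config.setdefault(cape_name, {}).update({key: [value]})
```
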
if isinstance(value, map): value = list(value) - cape_config[cape_name].update({key: [value]}) + cape_config.setdefault(cape_name, {}).update({key: [value]}) parser_loaded = True except Exception as e: - log.error("CAPE: parsing error on %s with %s: %s", file_path, cape_name, e, exc_info=True) + log.exception("CAPE: parsing error on %s with %s: %s", file_path, cape_name, e) # DC3-MWCP - if HAS_MWCP and not parser_loaded and cape_name and cape_name in malware_parsers: + if HAS_MWCP and not parser_loaded and cape_name and cape_name in mwcp_decoders: log.debug("Running MWCP on %s", file_path) try: - report = mwcp.run(malware_parsers[cape_name], data=file_data) + report = mwcp.run(mwcp_decoders[cape_name], data=file_data) reportmeta = report.as_dict_legacy() if not report.errors: parser_loaded = True @@ -233,7 +242,7 @@ def static_config_parsers(cape_name, file_path, file_data): del reportmeta["other"] tmp_dict.update(reportmeta) - cape_config[cape_name] = convert(tmp_dict) + cape_config.setdefault(cape_name, {}).update(convert(tmp_dict)) log.debug("CAPE: DC3-MWCP parser for %s completed", cape_name) else: error_lines = report.errors[0].split("\n") @@ -250,16 +259,16 @@ def static_config_parsers(cape_name, file_path, file_data): str(e), ) - elif HAS_MALWARECONFIGS and not parser_loaded and cape_name in __decoders__: + elif HAS_MALWARECONFIGS and not parser_loaded and cape_name in rat_decoders: log.debug("Running Malwareconfigs on %s", file_path) try: module = False file_info = fileparser.FileParser(rawdata=file_data) # Detects name by embed yara - if file_info.malware_name in __decoders__: - module = __decoders__[file_info.malware_name]["obj"]() - elif cape_name in __decoders__: - module = __decoders__[cape_name]["obj"]() + if file_info.malware_name in rat_decoders: + module = rat_decoders[file_info.malware_name]["obj"]() + elif cape_name in rat_decoders: + module = rat_decoders[cape_name]["obj"]() else: log.warning("%s: %s wasn't matched by plugin's yara", file_path, cape_name) @@ -270,25 +279,22 @@ def static_config_parsers(cape_name, file_path, file_data): # ToDo remove if isinstance(malwareconfig_config, list): for key, value in malwareconfig_config[0].items(): - cape_config[cape_name].update({key: [value]}) + cape_config.setdefault(cape_name, {}).update({key: [value]}) elif isinstance(malwareconfig_config, dict): for key, value in malwareconfig_config.items(): - cape_config[cape_name].update({key: [value]}) + cape_config.setdefault(cape_name, {}).update({key: [value]}) except Exception as e: if "rules" in str(e): log.warning("You probably need to compile yara-python with dotnet support") else: - log.error(e, exc_info=True) + log.exception(e) log.warning( "malwareconfig parsing error for %s with %s: %s, you should submit issue/fix to https://github.com/kevthehermit/RATDecoders/", file_path, cape_name, str(e), ) - - if cape_config.get(cape_name) == {}: - return {} - + """ elif HAVE_MALDUCK and not parser_loaded and cape_name.lower() in malduck_modules_names: log.debug("Running Malduck on %s", file_path) if not File.yara_initialized: @@ -308,15 +314,26 @@ def static_config_parsers(cape_name, file_path, file_data): del ext if tmp_config: for key, value in tmp_config[0].items(): - cape_config[cape_name].update({key: [value]}) - - if not cape_config[cape_name]: - return {} + cape_config.setdefault(cape_name, {}).update({key: [value]}) + """ return cape_config -def static_config_lookup(file_path, sha256=False): +def static_config_lookup(file_path: str, sha256: str = False) -> dict: + """ + Look 
up static configuration information for a given file based on its SHA-256 hash. + + This function calculates the SHA-256 hash of the file at the specified path if not provided, + and then queries either a MongoDB or Elasticsearch database to retrieve configuration information. + + Args: + file_path (str): The path to the file for which to look up configuration information. + sha256 (str, optional): The SHA-256 hash of the file. If not provided, it will be calculated. + + Returns: + dict or None: A dictionary containing the configuration information if found, otherwise None. + """ if not sha256: sha256 = hashlib.sha256(open(file_path, "rb").read()).hexdigest() @@ -346,13 +363,26 @@ def static_config_lookup(file_path, sha256=False): named_static_extractors = [] -def static_extraction(path): - config = False +def static_extraction(path: str) -> dict: + """ + Extracts static configuration from a file using YARA rules and named static extractors. + + Args: + path (str): The file path to be analyzed. + + Returns: + dict or bool: The extracted configuration as a dictionary if successful, + False if no configuration is found or an error occurs. + + Raises: + Exception: Logs any exceptions that occur during the extraction process. + """ + config = {} try: hits = File(path).get_yara(category="CAPE") path_name = Path(path).name if not hits and path_name not in named_static_extractors: - return False + return config file_data = path_read_file(path) if path_name in named_static_extractors: config = static_config_parsers(path_name, path, file_data) @@ -368,7 +398,18 @@ def static_extraction(path): return config -def cape_name_from_yara(details, pid, results): +def cape_name_from_yara(details: dict, pid: int, results: dict) -> str: + """ + Extracts the CAPE name from YARA hit details and associates it with a process ID (pid) in the results dictionary. + + Args: + details (dict): A dictionary containing YARA hit details, expected to have a key "cape_yara" with a list of hits. + pid (int): The process ID to associate the CAPE name with. + results (dict): A dictionary to store the association between detections and process IDs. + + Returns: + str: The CAPE name extracted from the YARA hit, or None if no CAPE name is found. + """ for hit in details.get("cape_yara", []) or []: if File.yara_hit_provides_detection(hit): if "detections2pid" not in results: diff --git a/lib/cuckoo/common/cleaners_utils.py b/lib/cuckoo/common/cleaners_utils.py index 15be23a6bd9..14953df9bf6 100644 --- a/lib/cuckoo/common/cleaners_utils.py +++ b/lib/cuckoo/common/cleaners_utils.py @@ -44,7 +44,7 @@ # Initialize the database connection. db: _Database = Database() if repconf.mongodb.enabled: - mdb = repconf.mongodb.get("db", "cuckoo") + # mdb = repconf.mongodb.get("db", "cuckoo") from dev_utils.mongo_hooks import delete_unused_file_docs from dev_utils.mongodb import ( connect_to_mongo, @@ -54,11 +54,58 @@ mongo_find, mongo_is_cluster, mongo_update_one, + mongo_update_many, + mongo_delete_calls_by_task_id_in_range, + mongo_delete_data_range, ) elif repconf.elasticsearchdb.enabled: from dev_utils.elasticsearchdb import all_docs, delete_analysis_and_related_calls, get_analysis_index +def convert_into_time(time_range: str) -> datetime: + """ + Converts a string representing a time range (e.g., '12h', '1d', '5m') + into a datetime object representing the time in the past. + + Args: + time_range: A string in the format of a number followed by a unit + ('d' for days, 'h' for hours, 'm' for minutes, 's' for seconds). 
+ + Returns: + A datetime object representing the time in the past based on the + provided time range. + + Raises: + ValueError: If the input string is in an invalid format or the unit + is not recognized. + """ + if not isinstance(time_range, str) or len(time_range) < 2: + raise ValueError("Invalid time range format.") + + unit = time_range[-1].lower() + value_str = time_range[:-1] + + if not value_str.isdigit(): + raise ValueError("Invalid numeric value.") + + value = int(value_str) + now = datetime.now() + + time_units = { + "d": "days", + "h": "hours", + "m": "minutes", + "s": "seconds", + } + + if unit in time_units: + kwargs = {time_units[unit]: value} + return now - timedelta(**kwargs) + else: + raise ValueError(f"Invalid time unit '{unit}'.") + + +# todo rewrite for time range def free_space_monitor(path=False, return_value=False, processing=False, analysis=False): """ @param path: path to check @@ -70,12 +117,12 @@ def free_space_monitor(path=False, return_value=False, processing=False, analysi cleanup_dict = { "delete_mongo": config.cleaner.mongo, } - if config.cleaner.binaries_days: - cleanup_dict["delete_binaries_items_older_than_days"] = int(config.cleaner.binaries_days) - if config.cleaner.tmp_days: - cleanup_dict["delete_tmp_items_older_than_days"] = int(config.cleaner.tmp_days) - if config.cleaner.analysis_days: - cleanup_dict["delete_older_than_days"] = int(config.cleaner.analysis_days) + if config.cleaner.binaries: + cleanup_dict["delete_binaries_items_older_than"] = config.cleaner.binaries + if config.cleaner.tmp: + cleanup_dict["delete_tmp_items_older_than"] = config.cleaner.tmp + if config.cleaner.analysis: + cleanup_dict["delete_older_than"] = config.cleaner.analysis if config.cleaner.unused_files_in_mongodb: cleanup_dict["delete_unused_file_data_in_mongo"] = 1 @@ -119,13 +166,15 @@ def free_space_monitor(path=False, return_value=False, processing=False, analysi # prepare dict on startup execute_cleanup(cleanup_dict) - # rest 1 day - if config.cleaner.binaries_days and cleanup_dict["delete_binaries_items_older_than_days"]: - cleanup_dict["delete_binaries_items_older_than_days"] -= 1 - if config.cleaner.tmp_days and cleanup_dict["delete_tmp_items_older_than_days"]: - cleanup_dict["delete_tmp_items_older_than_days"] -= 1 - if config.cleaner.analysis_days and cleanup_dict["delete_older_than_days"]: - cleanup_dict["delete_older_than_days"] -= 1 + # ToDo timedelta + """ + if config.cleaner.binaries and cleanup_dict["delete_binaries_items_older_than"]: + cleanup_dict["delete_binaries_items_older_than"] -= 1 + if config.cleaner.tmp and cleanup_dict["delete_tmp_items_older_than"]: + cleanup_dict["delete_tmp_items_older_than"] -= 1 + if config.cleaner.analysis and cleanup_dict["delete_older_than"]: + cleanup_dict["delete_older_than"] -= 1 + """ time.sleep(5) else: @@ -167,77 +216,57 @@ def is_reporting_db_connected(): connect_to_es() return True except Exception as e: - log.error(f"Can't connect to reporting db {e}") + log.error("Can't connect to reporting db %s", str(e)) return False -def delete_bulk_tasks_n_folders(tids: list, delete_mongo: bool): - ids = [tid["info.id"] for tid in tids] +def delete_bulk_tasks_n_folders(ids: list, delete_mongo: bool, delete_db_tasks=False): for i in range(0, len(ids), 10): ids_tmp = ids[i : i + 10] + for id in ids_tmp: + try: + path = os.path.join(CUCKOO_ROOT, "storage", "analyses", str(id)) + if path_is_dir(path): + delete_folder(path) + except Exception as e: + log.error(e) + if delete_mongo: if mongo_is_cluster(): response = input("You 
are deleting mongo data in cluster, are you sure you want to continue? y/n") if response.lower() in ("n", "not"): sys.exit() mongo_delete_data(ids_tmp) - - for id in ids_tmp: - if db.delete_task(id): - try: - path = os.path.join(CUCKOO_ROOT, "storage", "analyses", "%s" % str(id)) - if path_is_dir(path): - delete_folder(path) - except Exception as e: - log.error(e) - else: - # If we don't remove from mongo we should keep in db to be able to show task in webgui - for id in ids_tmp: + if delete_db_tasks: try: - path = os.path.join(CUCKOO_ROOT, "storage", "analyses", "%s" % str(id)) - if path_is_dir(path): - delete_folder(path) + db.delete_tasks(task_ids=ids_tmp) except Exception as e: - log.error(e) + log.error("Failed to delete tasks from DB: %s", str(e)) def fail_job(tid): - if isinstance(tid, dict): - if "info.id" in tid: - tid = tid["info.id"] - elif tid.get("info", {}).get("id", 0): - tid = tid["info"]["id"] - elif "id" in tid: - tid = tid["id"] - log.info("set %s job to failed" % (tid)) - + log.info("set %s job to failed", tid) db.set_status(tid, TASK_FAILED_ANALYSIS) def delete_data(tid): - if isinstance(tid, dict): - if "info.id" in tid: - tid = tid["info.id"] - elif tid.get("info", {}).get("id", 0): - tid = tid["info"]["id"] - elif "id" in tid: - tid = tid["id"] try: - log.info("removing %s from analysis db" % (tid)) + log.info("removing %s from analysis db", tid) if repconf.mongodb.enabled: mongo_delete_data(tid) elif repconf.elasticsearchdb.enabled: delete_analysis_and_related_calls(tid) except Exception as e: - log.error("failed to remove analysis info (may not exist) %s due to %s" % (tid, e), exc_info=True) + log.exception("failed to remove analysis info (may not exist) %s due to %s", tid, e) with db.session.begin(): if db.delete_task(tid): - delete_folder(os.path.join(CUCKOO_ROOT, "storage", "analyses", "%s" % tid)) + delete_folder(os.path.join(CUCKOO_ROOT, "storage", "analyses", str(tid))) else: - log.info("failed to remove faile task %s from DB" % (tid)) + log.info("failed to remove faile task %s from DB", tid) def dist_delete_data(data, dist_db): + id_list = [] for id, file in data: try: if path_exists(file): @@ -245,14 +274,16 @@ def dist_delete_data(data, dist_db): path_delete(file) except Exception as e: log.info(e) - db.delete_task(id) # clean dist_db dist_task = dist_db.query(Task).filter(DTask.main_task.id == id).first() if dist_task: dist_db.delete(dist_task.id) + id_list.append(id) except Exception as e: log.info(e) + db.delete_tasks(id_list) + def cuckoo_clean(): """Clean up cuckoo setup. @@ -284,6 +315,7 @@ def cuckoo_clean(): os.path.join(CUCKOO_ROOT, "db"), os.path.join(CUCKOO_ROOT, "log"), os.path.join(CUCKOO_ROOT, "storage"), + # ToDo temp cleanup ] # Delete various directories. @@ -318,14 +350,12 @@ def cuckoo_clean_failed_tasks(): # logger (init_logging()) logs to a file which will be deleted. 
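`delete_bulk_tasks_n_folders` above removes each analysis folder first and then deletes Mongo documents and, optionally, SQL task rows in batches of ten, which keeps individual database delete operations small. The batching idiom in isolation (a sketch; the root path is a placeholder for the value from `lib.cuckoo.common.constants`):

```python
import os
import shutil

CUCKOO_ROOT = "/opt/CAPEv2"  # placeholder for the real constant

def delete_analysis_folders(ids: list, batch_size: int = 10):
    """Yield ids in batches after removing their analysis folders."""
    for i in range(0, len(ids), batch_size):
        batch = ids[i : i + batch_size]
        for task_id in batch:
            path = os.path.join(CUCKOO_ROOT, "storage", "analyses", str(task_id))
            if os.path.isdir(path):
                shutil.rmtree(path, ignore_errors=True)  # delete_folder() in the real code
        yield batch  # caller runs one mongo_delete_data() / db.delete_tasks() per batch
```
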
create_structure() - failed_tasks_a = db.list_tasks(status=TASK_FAILED_ANALYSIS) - failed_tasks_p = db.list_tasks(status=TASK_FAILED_PROCESSING) - failed_tasks_r = db.list_tasks(status=TASK_FAILED_REPORTING) - failed_tasks_rc = db.list_tasks(status=TASK_RECOVERED) - resolver_pool.map(lambda tid: delete_data(tid.to_dict()["id"]), failed_tasks_a) - resolver_pool.map(lambda tid: delete_data(tid.to_dict()["id"]), failed_tasks_p) - resolver_pool.map(lambda tid: delete_data(tid.to_dict()["id"]), failed_tasks_r) - resolver_pool.map(lambda tid: delete_data(tid.to_dict()["id"]), failed_tasks_rc) + # ToDo multi status + tasks_list = db.list_tasks(status=f"{TASK_FAILED_ANALYSIS}|{TASK_FAILED_PROCESSING}|{TASK_FAILED_REPORTING}|{TASK_RECOVERED}") + # ToDo rewrite for bulk delete + ids = [task.id for task in tasks_list] + delete_bulk_tasks_n_folders(ids, delete_mongo=True) + tasks_list = db.delete_tasks(status=f"{TASK_FAILED_ANALYSIS}|{TASK_FAILED_PROCESSING}|{TASK_FAILED_REPORTING}|{TASK_RECOVERED}") def cuckoo_clean_bson_suri_logs(): @@ -336,27 +366,22 @@ def cuckoo_clean_bson_suri_logs(): create_structure() from glob import glob - failed_tasks_a = db.list_tasks(status=TASK_FAILED_ANALYSIS) - failed_tasks_p = db.list_tasks(status=TASK_FAILED_PROCESSING) - failed_tasks_r = db.list_tasks(status=TASK_FAILED_REPORTING) - failed_tasks_rc = db.list_tasks(status=TASK_RECOVERED) - tasks_rp = db.list_tasks(status=TASK_REPORTED) - for e in failed_tasks_a, failed_tasks_p, failed_tasks_r, failed_tasks_rc, tasks_rp: - for el2 in e: - new = el2.to_dict() - id = new["id"] - path = os.path.join(CUCKOO_ROOT, "storage", "analyses", "%s" % id) - if path_exists(path): - jsonlogs = glob("%s/logs/*json*" % (path)) - bsondata = glob("%s/logs/*.bson" % (path)) - filesmeta = glob("%s/logs/files/*.meta" % (path)) - for f in jsonlogs, bsondata, filesmeta: - for fe in f: - try: - log.info(("removing %s" % (fe))) - path_delete(fe) - except Exception as Err: - log.info(("failed to remove sorted_pcap from disk %s" % (Err))) + tasks_list = db.list_tasks( + status=f"{TASK_FAILED_ANALYSIS}|{TASK_FAILED_PROCESSING}|{TASK_FAILED_REPORTING}|{TASK_RECOVERED}|{TASK_REPORTED}" + ) + for task in tasks_list: + path = os.path.join(CUCKOO_ROOT, "storage", "analyses", str(task.id)) + if path_exists(path): + jsonlogs = glob(f"{path}/logs/*json*") + bsondata = glob(f"{path}/logs/*.bson") + filesmeta = glob(f"{path}/logs/files/*.meta") + for f in jsonlogs, bsondata, filesmeta: + for fe in f: + try: + # log.info("removing %s", fe) + path_delete(fe) + except Exception as Err: + log.info("failed to remove sorted_pcap from disk %s", Err) def cuckoo_clean_failed_url_tasks(): @@ -374,6 +399,8 @@ def cuckoo_clean_failed_url_tasks(): if repconf.mongodb.enabled: query = {"info.category": "url", "network.http.0": {"$exists": False}} rtmp = mongo_find("analysis", query, projection={"info.id": 1}, sort=[("_id", -1)], limit=100) + ids = [task["info"]["id"] for task in rtmp] + delete_bulk_tasks_n_folders(ids, delete_mongo=True, delete_db_tasks=True) elif repconf.elasticsearchdb.enabled: rtmp = [ d["_source"] @@ -383,11 +410,8 @@ def cuckoo_clean_failed_url_tasks(): _source=["info.id"], ) ] - else: - rtmp = [] - - if rtmp and len(rtmp) > 0: - resolver_pool.map(lambda tid: delete_data(tid), rtmp) + if rtmp and len(rtmp) > 0: + resolver_pool.map(lambda tid: delete_data(tid), rtmp) def cuckoo_clean_lower_score(malscore: int): @@ -407,6 +431,7 @@ def cuckoo_clean_lower_score(malscore: int): if repconf.mongodb.enabled: result = list(mongo_find("analysis", 
{"malscore": {"$lte": malscore}})) id_arr = [entry["info"]["id"] for entry in result] + elif repconf.elasticsearchdb.enabled: id_arr = [ d["_source"]["info"]["id"] @@ -414,26 +439,25 @@ def cuckoo_clean_lower_score(malscore: int): index=get_analysis_index(), query={"query": {"range": {"malscore": {"lte": malscore}}}}, _source=["info.id"] ) ] - log.info(("number of matching records %s" % len(id_arr))) - resolver_pool.map(lambda tid: delete_data(tid), id_arr) + log.info("number of matching records %s", len(id_arr)) + # resolver_pool.map(lambda tid: delete_data(tid), id_arr) + if id_arr: + delete_bulk_tasks_n_folders(id_arr, delete_mongo=True, delete_db_tasks=True) -def tmp_clean_before_day(days: int): +def tmp_clean_before(timerange: str): """Clean up tmp folder - It deletes all items in tmp folder before now - days. + It deletes all items in tmp folder before now - timerange. """ - - today = datetime.today() + older_than = convert_into_time(timerange) tmp_folder_path = config.cuckoo.get("tmppath") - - for folder in ("cuckoo-tmp", "cape-external"): + # 3rd party? + for folder in ("cuckoo-tmp", "cape-external", "cuckoo-sflock"): for root, directories, files in os.walk(os.path.join(tmp_folder_path, folder), topdown=True): for name in files + directories: path = os.path.join(root, name) path_ctime = path_get_date(os.path.join(root, path)) - file_time = today - datetime.fromtimestamp(path_ctime) - # ToDo add check for hours, as 1 day and 23h is still just 1 day - if file_time.days > days: + if datetime.fromtimestamp(path_ctime) > older_than: try: if path_is_dir(path): log.info("Delete folder: %s", path) @@ -445,10 +469,10 @@ def tmp_clean_before_day(days: int): log.error(e) -def cuckoo_clean_before_day(args: dict): +def cuckoo_clean_before(args: dict): """Clean up failed tasks It deletes all stored data from file system and configured databases (SQL - and MongoDB for tasks completed before now - days. + and MongoDB for tasks completed before now - time range. """ # Init logging. 
# This need to init a console logger handler, because the standard @@ -460,25 +484,25 @@ def cuckoo_clean_before_day(args: dict): if not is_reporting_db_connected(): return - days = args.get("delete_older_than_days") - if not days: + timerange = args.get("delete_older_than") + if not timerange: log.info("No days argument provided bailing") return - added_before = datetime.now() - timedelta(days=int(days)) + category = None + added_before = convert_into_time(timerange) if args.get("files_only_filter"): log.info("file filter applied") - old_tasks = db.list_tasks(added_before=added_before, category="file") + category = "file" elif args.get("urls_only_filter"): log.info("url filter applied") - old_tasks = db.list_tasks(added_before=added_before, category="url") - else: - old_tasks = db.list_tasks(added_before=added_before) + category = "url" - for e in old_tasks: - id_arr.append({"info.id": (int(e.to_dict()["id"]))}) + old_tasks = db.list_tasks(added_before=added_before, category=category, not_status=TASK_PENDING) - log.info(("number of matching records %s before suri/custom filter " % len(id_arr))) + # We need this to cleanup file system and MongoDB calls collection + id_arr = [e.id for e in old_tasks] + log.info("number of matching records %s before suri/custom filter", len(id_arr)) if id_arr and args.get("suricata_zero_alert_filter"): result = list( mongo_find("analysis", {"suricata.alerts.alert": {"$exists": False}, "$or": id_arr}, {"info.id": 1, "_id": 0}) @@ -491,9 +515,30 @@ def cuckoo_clean_before_day(args: dict): ) ) id_arr = [entry["info"]["id"] for entry in result] - log.info("number of matching records %s" % len(id_arr)) - delete_bulk_tasks_n_folders(id_arr, args.get("delete_mongo")) + highest_id = max(id_arr, default=0) + log.info("number of matching records %s. Highest id: %d", len(id_arr), highest_id) + # delete_bulk_tasks_n_folders(id_arr, args.get("delete_mongo"), db_delete_before=1) # resolver_pool.map(lambda tid: delete_data(tid), id_arr) + # ids = [tid["info.id"] for tid in tids] + for i in range(0, len(id_arr), 100): + ids_tmp = id_arr[i : i + 100] + for id in ids_tmp: + try: + path = os.path.join(CUCKOO_ROOT, "storage", "analyses", str(id)) + if path_is_dir(path): + delete_folder(path) + except Exception as e: + log.error(e) + + if args.get("delete_mongo"): + if mongo_is_cluster(): + response = input("You are deleting mongo data in cluster, are you sure you want to continue? 
y/n") + if response.lower() in ("n", "not"): + sys.exit() + mongo_delete_data_range(range_end=highest_id) + # cleanup_files_collection_by_id(highest_id) + + db.delete_tasks(added_before=added_before, category=category) def cuckoo_clean_sorted_pcap_dump(): @@ -533,8 +578,8 @@ def cuckoo_clean_sorted_pcap_dump(): if rtmp and len(rtmp) > 0: for e in rtmp: if e["info"]["id"]: - log.info((e["info"]["id"])) try: + # ToDo replace to $in if repconf.mongodb.enabled: mongo_update_one( "analysis", {"info.id": int(e["info"]["id"])}, {"$unset": {"network.sorted_pcap_id": ""}} @@ -542,26 +587,23 @@ def cuckoo_clean_sorted_pcap_dump(): elif repconf.elasticsearchdb.enabled: es.update(index=e["index"], id=e["info"]["id"], body={"network.sorted_pcap_id": ""}) except Exception: - log.info(("failed to remove sorted pcap from db for id %s" % (e["info"]["id"]))) + log.info(("failed to remove sorted pcap from db for id %s", e["info"]["id"])) try: - path = os.path.join(CUCKOO_ROOT, "storage", "analyses", "%s" % (e["info"]["id"]), "dump_sorted.pcap") + path = os.path.join(CUCKOO_ROOT, "storage", "analyses", str(e["info"]["id"]), "dump_sorted.pcap") path_delete(path) except Exception as e: - log.info(("failed to remove sorted_pcap from disk %s" % (e))) + log.info(("failed to remove sorted_pcap from disk %s", e)) else: done = True else: done = True -def cuckoo_clean_pending_tasks(before_time: int = None, delete: bool = False): +def cuckoo_clean_pending_tasks(timerange: str = None, delete: bool = False): """Clean up pending tasks It deletes all stored data from file system and configured databases (SQL and MongoDB for pending tasks. """ - - from datetime import timedelta - # Init logging. # This need to init a console logger handler, because the standard # logger (init_logging()) logs to a file which will be deleted. @@ -569,16 +611,21 @@ def cuckoo_clean_pending_tasks(before_time: int = None, delete: bool = False): if not is_reporting_db_connected(): return - if before_time: - before_time = datetime.now() - timedelta(hours=before_time) - pending_tasks = db.list_tasks(status=TASK_PENDING, added_before=before_time) - clean_handler = delete_data if delete else fail_job - resolver_pool.map(lambda tid: clean_handler(tid.to_dict()["id"]), pending_tasks) + if timerange: + before_time = convert_into_time(timerange) + + pending_tasks = [task.id for task in db.list_tasks(status=TASK_PENDING, added_before=before_time)] + # clean_handler = delete_data if delete else fail_job + # resolver_pool.map(lambda tid: clean_handler(pending_tasks), pending_tasks) + if delete: + db.delete_tasks(status=TASK_PENDING, added_before=before_time) + else: + resolver_pool.map(lambda tid: fail_job(pending_tasks), pending_tasks) -def cuckoo_clean_range_tasks(start, end): - """Clean up tasks between start and end +def cuckoo_clean_range_tasks(range_: str): + """Clean up tasks between range: 1-5 It deletes all stored data from file system and configured databases (SQL and MongoDB for selected tasks. """ @@ -586,8 +633,14 @@ def cuckoo_clean_range_tasks(start, end): # This need to init a console logger handler, because the standard # logger (init_logging()) logs to a file which will be deleted. 
create_structure() - pending_tasks = db.list_tasks(id_after=start - 1, id_before=end + 1) - resolver_pool.map(lambda tid: delete_data(tid.to_dict()["id"]), pending_tasks) + start_str, end_str = range_.split("-") + start = int(start_str.strip()) + end = int(end_str.strip()) + pending_tasks = db.list_tasks(id_after=(start - 1), id_before=(end + 1)) + ids: list[int] = [task.id for task in pending_tasks] + delete_bulk_tasks_n_folders(ids, delete_mongo=False) + mongo_delete_data(ids) + db.delete_tasks(id_after=(start - 1), id_before=(end + 1)) def delete_unused_file_data_in_mongo(): @@ -627,14 +680,14 @@ def cape_clean_tlp(): return tlp_tasks = db.get_tlp_tasks() - resolver_pool.map(lambda tid: delete_data(tid), tlp_tasks) + delete_bulk_tasks_n_folders(tlp_tasks, False) -def binaries_clean_before_day(days: int): +def binaries_clean_before(timerange: str): # In case if "delete_bin_copy = off" we might need to clean binaries # find storage/binaries/ -name "*" -type f -mtime 5 -delete - today = datetime.today() + olden_than = convert_into_time(timerange) binaries_folder = os.path.join(CUCKOO_ROOT, "storage", "binaries") if not path_exists(binaries_folder): log.error("Binaries folder doesn't exist") @@ -646,15 +699,51 @@ def binaries_clean_before_day(days: int): if not os.path.exists(bin_path): continue st_ctime = path_get_date(bin_path) - file_time = today - datetime.fromtimestamp(st_ctime) - if file_time.days > days: + if datetime.fromtimestamp(st_ctime) > olden_than: # ToDo check database here to ensure that file is not used if path_exists(bin_path) and not db.sample_still_used(sha256, 0): path_delete(bin_path) -def execute_cleanup(args: dict, init_log=True): +def cleanup_mongodb_calls_collection(args: dict): + if not is_reporting_db_connected(): + return + + timerange = args.get("cleanup_mongo_calls") + if not timerange: + log.info("No time range argument provided bailing") + return + + added_before = convert_into_time(timerange) + highest_id = db.list_tasks(added_before=added_before, limit=1) + if highest_id: + mongo_delete_calls_by_task_id_in_range(range_end=highest_id[0].id) + + +def cleanup_files_collection_by_id(task_id: int): + """ + This function scans and pull out task_ids lower than task_id argument. 
+ It useful when admin had to do emergency cleanup directly using postgres/mongodb without cleaner + """ + if not is_reporting_db_connected(): + return + """ + db.files.updateMany({},{$pull: {"_task_ids": {$lt: 1280759}}}) + { + acknowledged: true, + insertedId: null, + matchedCount: 1212950, + modifiedCount: 1177791, + upsertedCount: 0 + } + db.runCommand({ compact: "files"}) + { bytesFreed: Long('107198922752'), ok: 1 } + """ + mongo_update_many({}, {"$pull": {"_task_ids": {"$lt": task_id}}}) + + +def execute_cleanup(args: dict, init_log=True): if init_log: init_console_logging() @@ -670,8 +759,8 @@ def execute_cleanup(args: dict, init_log=True): if args.get("failed_url_clean"): cuckoo_clean_failed_url_tasks() - if args.get("delete_older_than_days"): - cuckoo_clean_before_day(args) + if args.get("delete_older_than"): + cuckoo_clean_before(args) if args.get("pcap_sorted_clean"): cuckoo_clean_sorted_pcap_dump() @@ -680,22 +769,49 @@ def execute_cleanup(args: dict, init_log=True): cuckoo_clean_bson_suri_logs() if args.get("pending_clean"): - cuckoo_clean_pending_tasks(args["before_time"]) + cuckoo_clean_pending_tasks(args["time_range"]) if args.get("malscore"): cuckoo_clean_lower_score(args["malscore"]) - if args.get("delete_range_start") and args.get("delete_range_end"): - cuckoo_clean_range_tasks(args["delete_range_start"], args["delete_range_end"]) + if args.get("delete_range"): + cuckoo_clean_range_tasks(args["delete_range"]) if args.get("deduplicated_cluster_queue"): cuckoo_dedup_cluster_queue() - if args.get("delete_tmp_items_older_than_days"): - tmp_clean_before_day(args["delete_tmp_items_older_than_days"]) + if args.get("delete_tmp_items_older_than"): + tmp_clean_before(args["delete_tmp_items_older_than"]) - if args.get("delete_binaries_items_older_than_days"): - binaries_clean_before_day(args["delete_binaries_items_older_than_days"]) + if args.get("delete_binaries_items_older_than"): + binaries_clean_before(args["delete_binaries_items_older_than"]) if args.get("delete_unused_file_data_in_mongo"): delete_unused_file_data_in_mongo() + + if args.get("cleanup_files_collection_by_id"): + cleanup_files_collection_by_id(args["cleanup_files_collection_by_id"]) + + if args.get("cleanup_mongo_calls"): + cleanup_mongodb_calls_collection(args) + + +# Example Usage: +if __name__ == "__main__": + try: + past_time_1 = convert_into_time("12h") + print(f"12 hours ago: {past_time_1}") + + past_time_2 = convert_into_time("1d") + print(f"1 day ago: {past_time_2}") + + past_time_3 = convert_into_time("5m") + print(f"5 minutes ago: {past_time_3}") + + past_time_4 = convert_into_time("30s") + print(f"30 seconds ago: {past_time_4}") + + # Example of invalid input + convert_into_time("invalid") + except ValueError as e: + print(f"Error: {e}") diff --git a/lib/cuckoo/common/demux.py b/lib/cuckoo/common/demux.py index fa42507f698..81a9d320366 100644 --- a/lib/cuckoo/common/demux.py +++ b/lib/cuckoo/common/demux.py @@ -34,6 +34,12 @@ tmp_path = cuckoo_conf.cuckoo.get("tmppath", "/tmp") linux_enabled = web_cfg.linux.get("enabled", False) or web_cfg.linux.get("static_only", False) +try: + demux_files_limit = int(web_cfg.general.demux_files_limit) +except ValueError: + log.error("Invalid value for demux_files_limit in web.conf, defaulting to 10") + demux_files_limit = 10 # Default value + demux_extensions_list = { b".accdr", b".exe", @@ -184,8 +190,8 @@ def _sf_children(child: sfFile): # -> bytes: path_to_extract = os.path.join(tmp_dir, sanitize_filename((child.filename).decode())) _ = 
path_write_file(path_to_extract, child.contents) except Exception as e: - log.error(e, exc_info=True) - return (path_to_extract.encode(), child.platform, child.get_type(), child.get_size()) + log.exception(e) + return (path_to_extract.encode(), child.platform, child.magic, child.filesize) # ToDo fix typing need to add str as error msg @@ -213,14 +219,25 @@ def demux_sflock(filename: bytes, options: str, check_shellcode: bool = True): return [], "blacklisted package" for sf_child in unpacked.children: if sf_child.to_dict().get("children"): - retlist.extend(_sf_children(ch) for ch in sf_child.children) + for ch in sf_child.children: + tmp_child = _sf_children(ch) + # check if path is not empty + if tmp_child and tmp_child[0]: + retlist.append(tmp_child) + # child is not available, the original file should be put into the list - if filter(None, retlist): - retlist.append(_sf_children(sf_child)) + if not retlist: + tmp_child = _sf_children(sf_child) + # check if path is not empty + if tmp_child and tmp_child[0]: + retlist.append(tmp_child) else: - retlist.append(_sf_children(sf_child)) + tmp_child = _sf_children(sf_child) + # check if path is not empty + if tmp_child and tmp_child[0]: + retlist.append(tmp_child) except Exception as e: - log.error(e, exc_info=True) + log.exception(e) return list(filter(None, retlist)), "" @@ -253,7 +270,7 @@ def demux_sample(filename: bytes, package: str, options: str, use_sflock: bool = { os.path.basename( filename - ): "File too big, enable 'allow_ignore_size' in web.conf or use 'ignore_size_check' option" + ).decode(): "File too big, enable 'allow_ignore_size' in web.conf or use 'ignore_size_check' option" } ) return retlist, error_list @@ -281,7 +298,7 @@ def demux_sample(filename: bytes, package: str, options: str, use_sflock: bool = { os.path.basename( filename - ): "Detected password protected office file, but no sflock is installed or correct password provided" + ).decode(): "Detected password protected office file, but no sflock is installed or correct password provided" } ) @@ -304,8 +321,9 @@ def demux_sample(filename: bytes, package: str, options: str, use_sflock: bool = else: error_list.append( { - os.path.basename(filename), - "File too big, enable 'allow_ignore_size' in web.conf or use 'ignore_size_check' option", + os.path.basename( + filename + ).decode(): "File too big, enable 'allow_ignore_size' in web.conf or use 'ignore_size_check' option", } ) return retlist, error_list @@ -322,13 +340,13 @@ def demux_sample(filename: bytes, package: str, options: str, use_sflock: bool = # original file if not retlist: if error_msg: - error_list.append({os.path.basename(filename), error_msg}) + error_list.append({os.path.basename(filename).decode(), error_msg}) new_retlist.append((filename, platform)) else: for filename, platform, magic_type, file_size in retlist: # verify not Windows binaries here: if platform == "linux" and not linux_enabled and "Python" not in magic_type: - error_list.append({os.path.basename(filename): "Linux processing is disabled"}) + error_list.append({os.path.basename(filename).decode(): "Linux processing is disabled"}) continue if file_size > web_cfg.general.max_sample_size: @@ -340,10 +358,11 @@ def demux_sample(filename: bytes, package: str, options: str, use_sflock: bool = else: error_list.append( { - os.path.basename(filename), - "File too big, enable 'allow_ignore_size' in web.conf or use 'ignore_size_check' option", + os.path.basename( + filename + ).decode(): "File too big, enable 'allow_ignore_size' in web.conf or use 
'ignore_size_check' option", } ) new_retlist.append((filename, platform)) - return new_retlist[:10], error_list + return new_retlist[:demux_files_limit], error_list diff --git a/lib/cuckoo/common/dotnet_utils.py b/lib/cuckoo/common/dotnet_utils.py index d2ba9be05a9..7573707f745 100644 --- a/lib/cuckoo/common/dotnet_utils.py +++ b/lib/cuckoo/common/dotnet_utils.py @@ -13,7 +13,21 @@ log = logging.getLogger("dotnet_utils") -def dotnet_user_strings(file: str = False, data: bytes = False, dn_whitelisting: list = []): +def dotnet_user_strings(file: str = False, data: bytes = False, dn_whitelisting: list = []) -> list: + """ + Extracts user strings from a .NET file or data blob using dnfile. + + Args: + file (str): Path to the .NET file. Default is False. + data (bytes): Byte data of the .NET file. Default is False. + dn_whitelisting (list): List of string patterns to whitelist. Default is an empty list. + + Returns: + list: A list of extracted user strings that are not in the whitelist. + + Raises: + Exception: If there is an error processing the .NET file or data. + """ if not HAVE_DNFILE: return [] diff --git a/lib/cuckoo/common/email_utils.py b/lib/cuckoo/common/email_utils.py index 46f0066ff9a..ca212ba19c3 100644 --- a/lib/cuckoo/common/email_utils.py +++ b/lib/cuckoo/common/email_utils.py @@ -28,7 +28,6 @@ def find_attachments_in_email(s, expand_attachment): def _find_attachments_in_email(mesg, expand_attachment, atts): - # MHTML detection if mesg.get_content_maintype() == "multipart" and mesg.get_content_subtype() == "related": for part in mesg.walk(): diff --git a/lib/cuckoo/common/extractor_utils.py b/lib/cuckoo/common/extractor_utils.py index 69ae3428a00..9964d3d5f01 100644 --- a/lib/cuckoo/common/extractor_utils.py +++ b/lib/cuckoo/common/extractor_utils.py @@ -5,6 +5,19 @@ # dotnet def get_mdtoken(data: bytes) -> int: + """ + Extracts a metadata token from the given byte data. + + The function interprets the first 4 bytes of the input data as an unsigned + integer in little-endian format and then masks it with 0xFFFFFF to obtain + the metadata token. + + Args: + data (bytes): The byte data from which to extract the metadata token. + + Returns: + int: The extracted metadata token. + """ return struct.unpack_from(" int: def calc_section_alignment(pe: pefile.PE, offset: int, addr: int) -> int: """ + Calculate the alignment between two sections in a PE file. + + Args: + pe (pefile.PE): The PE file object. + offset (int): The offset value, typically calculated as + struct.unpack("i", blob[0x43:0x47])[0] + 0x47. + addr (int): The address where data starts, which can be a YARA address match. + + Returns: + int: The calculated alignment between the sections. Returns 0 if sections are not found or an error occurs. + + Raises: + Exception: If an error occurs during the calculation, it will be caught and printed. + offset is: Ex struct.unpack("i", blob[0x43:0x47])[0] + 0x47 addr is where data starts, can be YARA address match """ @@ -31,7 +58,20 @@ def calc_section_alignment(pe: pefile.PE, offset: int, addr: int) -> int: return alignment -def function_offset_from_VA(addr, blob, pe): +def function_offset_from_VA(addr: int, blob: bytes, pe: pefile.PE): + """ + Calculate the function offset from a given virtual address (VA) in a PE file. + + Args: + addr (int): The virtual address to start from. + blob (bytes): The binary data blob containing the instructions. + pe (PE): The PE file object, typically from the pefile module. 
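+
+    Note:
+        The call is located by scanning `blob` for the first 0xE8 (CALL rel32) opcode.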
+ + Returns: + tuple: A tuple containing: + - function_addr (int): The calculated function address. + - offset (int): The offset of the next instruction after the function call. + """ shift_pos = blob.find(b"\xE8") + 1 function_addr = pe.get_rva_from_offset(addr + shift_pos) + pe.OPTIONAL_HEADER.ImageBase # print(f"Getting offset for function: {hex(function_addr)}") @@ -41,6 +81,19 @@ def function_offset_from_VA(addr, blob, pe): def function_offset_from_offset(addr: int, binary: bytes, pe: pefile.PE): + """ + Calculates the virtual address and file offset of a subfunction call within a binary. + + Args: + addr (int): The starting address to search for the CALL instruction. + binary (bytes): The binary data of the executable. + pe (pefile.PE): The PE file object representing the executable. + + Returns: + tuple: A tuple containing: + - call_virtual_address (int): The virtual address of the CALL instruction. + - subfunc_file_offset (int): The file offset of the subfunction being called. + """ # where our subcall starts - example: 8 shift_pos = binary[addr:].find(b"\xE8") call_file_offset = addr + shift_pos @@ -56,6 +109,18 @@ def function_offset_from_offset(addr: int, binary: bytes, pe: pefile.PE): def find_function_xrefs(data, start, end): + """ + Finds function cross-references (xrefs) within a specified range in the given binary data. + + Args: + data (bytes): The binary data to search for function xrefs. + start (int): The starting address (inclusive) of the range to search. + end (int): The ending address (exclusive) of the range to search. + + Returns: + dict: A dictionary where keys are target addresses of CALL instructions and values are lists of addresses + where these CALL instructions are located. + """ function_xrefs = {} # The re.finditer function only finds *non-overlapping* matches, which fails to find some CALL instructions for rva in range(start, end): diff --git a/lib/cuckoo/common/fraunhofer_helper.py b/lib/cuckoo/common/fraunhofer_helper.py index 57f1f25e7fb..c9eabe18165 100644 --- a/lib/cuckoo/common/fraunhofer_helper.py +++ b/lib/cuckoo/common/fraunhofer_helper.py @@ -22,6 +22,16 @@ def get_dga_lookup_dict(): + """ + Retrieves the DGA (Domain Generation Algorithm) lookup dictionary from a gzipped JSON file. + + The function constructs the file path to the DGA lookup dictionary, checks if the file exists, + and if it does, reads and decompresses the file, then loads its contents as a JSON object. + If the file does not exist, it returns an empty dictionary. + + Returns: + dict: The DGA lookup dictionary if the file exists, otherwise an empty dictionary. + """ dga_lookup_path = os.path.join(CUCKOO_ROOT, "data", "dga_lookup_dict.json.gz") if path_exists(dga_lookup_path): with gzip.GzipFile(dga_lookup_path, "r") as fin: diff --git a/lib/cuckoo/common/gcp.py b/lib/cuckoo/common/gcp.py index 4accf6643a3..204f1c6c625 100644 --- a/lib/cuckoo/common/gcp.py +++ b/lib/cuckoo/common/gcp.py @@ -63,7 +63,7 @@ def list_instances(self) -> dict: ] servers.setdefault(instance["name"], ips) except Exception as e: - log.error(e, exc_info=True) + log.exception(e) elif HAVE_GCP: try: instance_client = compute_v1.InstancesClient() @@ -88,7 +88,6 @@ def list_instances(self) -> dict: return servers def autodiscovery(self): - while True: servers = self.list_instances() if not servers: @@ -109,9 +108,9 @@ def autodiscovery(self): if not r.ok: log.error("Can't registger worker with IP: %s. 
status_code: %d ", ip, r.status_code) except Exception as e: - log.error(e, exc_info=True) + log.exception(e) break except Exception as e: - log.error(e, exc_info=True) + log.exception(e) time.sleep(int(self.dist_cfg.GCP.autodiscovery)) diff --git a/lib/cuckoo/common/hypervisor_config.py b/lib/cuckoo/common/hypervisor_config.py index 1f649177d76..53a36949bf5 100644 --- a/lib/cuckoo/common/hypervisor_config.py +++ b/lib/cuckoo/common/hypervisor_config.py @@ -8,6 +8,24 @@ def proxmox_shutdown_vm(machineName: str): + """ + Shuts down a virtual machine on a Proxmox server. + + Args: + machineName (str): The name of the virtual machine to shut down. + + Raises: + Exception: If there is an error during the shutdown process. + + Notes: + - This function does not support multiple Proxmox servers. + - The Proxmox server configuration is expected to be available in the `proxmox_conf` object. + - The function retrieves the VM ID from the `proxmox_conf.Node_1` configuration using the provided machine name. + - The function sends a POST request to the Proxmox API to obtain an authentication ticket and CSRF prevention token. + - The function then sends another POST request to shut down the specified virtual machine. + - If the shutdown is successful, a message is printed to indicate success. + - If an error occurs, it is caught and printed. + """ proxmox_server = proxmox_conf.proxmox.hostname # Not supporting multiple servers diff --git a/lib/cuckoo/common/icon.py b/lib/cuckoo/common/icon.py index 754b6628bca..cfc88ff5017 100644 --- a/lib/cuckoo/common/icon.py +++ b/lib/cuckoo/common/icon.py @@ -2,11 +2,10 @@ # This file is part of Cuckoo Sandbox - http://www.cuckoosandbox.org # See the file 'docs/LICENSE' for copying permission. -from ctypes import POINTER, Structure, byref +from ctypes import POINTER, Structure, byref, cast, create_string_buffer, pointer, sizeof, string_at from ctypes import c_ubyte as BYTE from ctypes import c_uint as DWORD from ctypes import c_ushort as WORD -from ctypes import cast, create_string_buffer, pointer, sizeof, string_at class GRPICONDIR(Structure): diff --git a/lib/cuckoo/common/integrations/XLMMacroDeobfuscator.py b/lib/cuckoo/common/integrations/XLMMacroDeobfuscator.py index 0da287a8401..a9c32e39723 100644 --- a/lib/cuckoo/common/integrations/XLMMacroDeobfuscator.py +++ b/lib/cuckoo/common/integrations/XLMMacroDeobfuscator.py @@ -45,7 +45,6 @@ def xlmdeobfuscate(filepath: str, task_id: str, password: str = "", on_demand: bool = False): - if not HAVE_XLM_DEOBF or processing_conf.xlsdeobf.on_demand and not on_demand: return xlm_kwargs["file"] = filepath @@ -68,4 +67,4 @@ def xlmdeobfuscate(filepath: str, task_id: str, password: str = "", on_demand: b if "no attribute 'workbook'" in str(e) or "Can't find workbook" in str(e): log.info("Workbook not found. 
Probably not an Excel file") else: - log.error(e, exc_info=True) + log.exception(e) diff --git a/lib/cuckoo/common/integrations/capa.py b/lib/cuckoo/common/integrations/capa.py index 13dd1c97858..b3699bf33fd 100644 --- a/lib/cuckoo/common/integrations/capa.py +++ b/lib/cuckoo/common/integrations/capa.py @@ -5,7 +5,7 @@ import collections import logging import os -from contextlib import suppress +# from contextlib import suppress from typing import Any, Dict, Set from lib.cuckoo.common.config import Config @@ -14,23 +14,23 @@ log = logging.getLogger(__name__) -processing_conf = Config("processing") reporting_conf = Config("reporting") - - -""" -from lib.cuckoo.common.integrations.capa import flare_capa_details, HAVE_FLARE_CAPA -path = "storage/binaries/8c4111e5ec6ec033ea32e7d40f3c36e16ad50146240dacfc3de6cf8df19e6531" -details = flare_capa_details(path, "static", on_demand=True) -""" +integrations_conf = Config("integrations") rules = False HAVE_FLARE_CAPA = False -if processing_conf.flare_capa.enabled or reporting_conf.flare_capa_summary.enabled: +if integrations_conf.flare_capa.enabled: try: + # from platform import python_version + from capa.version import __version__ as capa_version + from packaging import version - if capa_version[0] != "7": + # if version.parse(python_version()) >= version.parse("3.10.0"): + capa_compatible_version = "9" + + # ToDo use major? + if version.parse(capa_version).base_version.split(".")[0] != capa_compatible_version: print("FLARE-CAPA missed or incompatible version. Run: poetry install") else: import capa.capabilities.common @@ -49,6 +49,11 @@ from capa.rules import InvalidRule, InvalidRuleSet, InvalidRuleWithPath from pydantic_core._pydantic_core import ValidationError + # Disable vivisect logging + logging.getLogger("vivisect").setLevel(logging.NOTSET) + logging.getLogger("vivisect.base").setLevel(logging.NOTSET) + logging.getLogger("vivisect.impemu").setLevel(logging.NOTSET) + rules_path = os.path.join(CUCKOO_ROOT, "data", "capa-rules") if path_exists(rules_path): try: @@ -162,8 +167,8 @@ def render_attack(doc, result): 'Virtualization/Sandbox Evasion::System Checks ' '[T1497.001]'], 'DISCOVERY': ['File and Directory Discovery [T1083]', - 'Query Registry [T1012]', - 'System Information Discovery [T1082]'], + 'Query Registry [T1012]', + 'System Information Discovery [T1082]'], 'EXECUTION': ['Shared Modules [T1129]']} } """ @@ -188,16 +193,17 @@ def render_attack(doc, result): def render_mbc(doc, result): """ example:: - {'MBC': {'ANTI-BEHAVIORAL ANALYSIS': ['Debugger Detection::Timing/Delay Check ' - 'GetTickCount [B0001.032]', - 'Emulator Detection [B0004]', - 'Virtual Machine Detection::Instruction ' - 'Testing [B0009.029]', - 'Virtual Machine Detection [B0009]'], - 'COLLECTION': ['Keylogging::Polling [F0002.002]'], - 'CRYPTOGRAPHY': ['Encrypt Data::RC4 [C0027.009]', - 'Generate Pseudo-random Sequence::RC4 PRGA ' - '[C0021.004]']} + {'MBC': {'ANTI-BEHAVIORAL ANALYSIS': [ + 'Debugger Detection::Timing/Delay Check ' + 'GetTickCount [B0001.032]', + 'Emulator Detection [B0004]', + 'Virtual Machine Detection::Instruction ' + 'Testing [B0009.029]', + 'Virtual Machine Detection [B0009]'], + 'COLLECTION': ['Keylogging::Polling [F0002.002]'], + 'CRYPTOGRAPHY': [ + 'Encrypt Data::RC4 [C0027.009]', + 'Generate Pseudo-random Sequence::RC4 PRGA [C0021.004]']} } """ result["MBC"] = {} @@ -227,7 +233,6 @@ def render_dictionary(doc) -> Dict[str, Any]: render_capabilities(doc, result) return result - # ===== CAPA END @@ -244,11 +249,12 @@ def flare_capa_details( 
capa_output = {} if ( HAVE_FLARE_CAPA - and processing_conf.flare_capa.enabled - and processing_conf.flare_capa.get(category, False) - and not processing_conf.flare_capa.on_demand + and integrations_conf.flare_capa.enabled + and integrations_conf.flare_capa.get(category, False) + and not integrations_conf.flare_capa.on_demand or on_demand ): + # ToDo check if PE file in TYPE try: file_path_object = path_object(file_path) # extract features and find capabilities @@ -260,38 +266,37 @@ def flare_capa_details( try: extractor = capa.features.extractors.cape.extractor.CapeExtractor.from_report(results) except ValidationError as e: - log.error("CAPA ValidationError %s", e) + log.debug("CAPA ValidationError %s", e) return {} else: log.error("CAPA: Missed results probably") return {} - capabilities, counts = capa.capabilities.common.find_capabilities(rules, extractor, disable_progress=disable_progress) - + capabilities = capa.capabilities.common.find_capabilities(rules, extractor, disable_progress=disable_progress) # collect metadata (used only to make rendering more complete) - meta = capa.loader.collect_metadata( - [], file_path_object, FORMAT_AUTO, OS_AUTO, [path_object(rules_path)], extractor, counts - ) - meta.analysis.feature_counts = counts.get("feature_counts", 0) - with suppress(ValueError): - meta.analysis.library_functions = counts.get("library_functions", 0) - meta.analysis.layout = capa.loader.compute_layout(rules, extractor, capabilities) - + meta = capa.loader.collect_metadata([], file_path_object, FORMAT_AUTO, OS_AUTO, [path_object(rules_path)], extractor, capabilities) + meta.analysis.layout = capa.loader.compute_layout(rules, extractor, capabilities.matches) capa_output: Any = False # ...as python dictionary, simplified as textable but in dictionary - doc = rd.ResultDocument.from_capa(meta, rules, capabilities) + doc = rd.ResultDocument.from_capa(meta, rules, capabilities.matches) capa_output = render_dictionary(doc) - except MemoryError: log.warning("FLARE CAPA -> MemoryError") - except AttributeError: + except AttributeError as e: + log.exception(e) log.warning("FLARE CAPA -> Use GitHub's version. 
poetry install") except UnsupportedFormatError: log.error("FLARE CAPA -> UnsupportedFormatError") except EmptyReportError: log.info("FLARE CAPA -> No process data available") except Exception as e: - log.error(e, exc_info=True) + log.exception(e) return capa_output + + +if __name__ == "__main__": + import sys + from lib.cuckoo.common.integrations.capa import flare_capa_details, HAVE_FLARE_CAPA + details = flare_capa_details(sys.argv[1], "static", on_demand=True) diff --git a/lib/cuckoo/common/integrations/file_extra_info.py b/lib/cuckoo/common/integrations/file_extra_info.py index b49d5d31415..79aa46db6d2 100644 --- a/lib/cuckoo/common/integrations/file_extra_info.py +++ b/lib/cuckoo/common/integrations/file_extra_info.py @@ -4,12 +4,14 @@ import json import logging import os +import re import shlex import shutil import signal import subprocess -from contextlib import suppress -from typing import DefaultDict, List, Optional, Set, Union + +# from contextlib import suppress +from typing import Any, DefaultDict, List, Optional, Set, Union import pebble @@ -28,6 +30,7 @@ from lib.cuckoo.common.integrations.parse_office import HAVE_OLETOOLS, Office from lib.cuckoo.common.integrations.parse_pdf import PDF from lib.cuckoo.common.integrations.parse_pe import HAVE_PEFILE, PortableExecutable +from lib.cuckoo.common.integrations.parse_rdp import parse_rdp_file from lib.cuckoo.common.integrations.parse_wsf import WindowsScriptFile # EncodedScriptFile # from lib.cuckoo.common.integrations.parse_elf import ELF @@ -62,27 +65,20 @@ cfg = Config() processing_conf = Config("processing") -selfextract_conf = Config("selfextract") +integration_conf = Config("integrations") try: from modules.signatures.recon_checkip import dns_indicators except ImportError: dns_indicators = () -HAVE_DIE = False -with suppress(ImportError): - import die - - HAVE_DIE = True - - HAVE_FLARE_CAPA = False # required to not load not enabled dependencies -if processing_conf.flare_capa.enabled and not processing_conf.flare_capa.on_demand: +if integration_conf.flare_capa.enabled and not integration_conf.flare_capa.on_demand: from lib.cuckoo.common.integrations.capa import HAVE_FLARE_CAPA, flare_capa_details HAVE_FLOSS = False -if processing_conf.floss.enabled and not processing_conf.floss.on_demand: +if integration_conf.floss.enabled and not integration_conf.floss.on_demand: from lib.cuckoo.common.integrations.floss import HAVE_FLOSS, Floss log = logging.getLogger(__name__) @@ -112,7 +108,17 @@ HAVE_BAT_DECODER = False print("OPTIONAL! 
Missed dependency: poetry run pip install -U git+https://github.com/DissectMalware/batch_deobfuscator") -unautoit_binary = os.path.join(CUCKOO_ROOT, selfextract_conf.UnAutoIt_extract.binary) +unautoit_binary = "" +innoextact_binary = "" +if integration_conf.UnAutoIt_extract.binary: + unautoit_binary = os.path.join(CUCKOO_ROOT, integration_conf.UnAutoIt_extract.binary) +if integration_conf.Inno_extract.binary: + innoextact_binary = os.path.join(CUCKOO_ROOT, integration_conf.Inno_extract.binary) +sevenzip_binary = "/usr/bin/7z" +if integration_conf.SevenZip_unpack.binary: + tmp_sevenzip_binary = os.path.join(CUCKOO_ROOT, integration_conf.SevenZip_unpack.binary) + if path_exists(tmp_sevenzip_binary): + sevenzip_binary = tmp_sevenzip_binary if processing_conf.trid.enabled: trid_binary = os.path.join(CUCKOO_ROOT, processing_conf.trid.identifier) @@ -135,6 +141,12 @@ HAVE_VIRUSTOTAL = True +HAVE_MANDIANT_INTEL = False +if integration_conf.mandiant_intel.enabled: + from lib.cuckoo.common.integrations.mandiant_intel import mandiant_lookup + + HAVE_MANDIANT_INTEL = True + exclude_startswith = ("parti_",) excluded_extensions = (".parti",) tools_folder = os.path.join(cfg.cuckoo.get("tmppath", "/tmp"), "cape-external") @@ -166,53 +178,56 @@ def static_file_info( ): log.info("Missed dependencies: pip3 install oletools") + # ToDo we need type checking as it wont work for most of static jobs if HAVE_PEFILE and ("PE32" in data_dictionary["type"] or "MS-DOS executable" in data_dictionary["type"]): data_dictionary["pe"] = PortableExecutable(file_path).run(task_id) if HAVE_FLARE_CAPA: + # https://github.com/mandiant/capa/issues/2620 capa_details = flare_capa_details(file_path, "static") if capa_details: data_dictionary["flare_capa"] = capa_details - if HAVE_FLOSS: + if HAVE_FLOSS and integration_conf.floss.enabled and "Mono" not in data_dictionary["type"]: floss_strings = Floss(file_path, "static", "pe").run() if floss_strings: data_dictionary["floss"] = floss_strings if "Mono" in data_dictionary["type"]: - if selfextract_conf.general.dotnet: + if integration_conf.general.dotnet: data_dictionary["dotnet"] = DotNETExecutable(file_path).run() if processing_conf.strings.dotnet: dotnet_strings = dotnet_user_strings(file_path) if dotnet_strings: data_dictionary.setdefault("dotnet_strings", dotnet_strings) - elif HAVE_OLETOOLS and package in {"doc", "ppt", "xls", "pub"} and selfextract_conf.general.office: + elif (HAVE_OLETOOLS and package in {"doc", "ppt", "xls", "pub"} and integration_conf.general.office) or data_dictionary.get("name", "").endswith((".doc", ".ppt", ".xls", ".pub")): # options is dict where we need to get pass get_options data_dictionary["office"] = Office(file_path, task_id, data_dictionary["sha256"], options_dict).run() - elif ("PDF" in data_dictionary["type"] or file_path.endswith(".pdf")) and selfextract_conf.general.pdf: + elif ("PDF" in data_dictionary["type"] or file_path.endswith(".pdf")) and integration_conf.general.pdf: data_dictionary["pdf"] = PDF(file_path).run() elif ( package in {"wsf", "hta"} or data_dictionary["type"] == "XML document text" or file_path.endswith(".wsf") - ) and selfextract_conf.general.windows_script: + ) and integration_conf.general.windows_script: data_dictionary["wsf"] = WindowsScriptFile(file_path).run() # elif package in {"js", "vbs"}: # data_dictionary["js"] = EncodedScriptFile(file_path).run() - elif (package == "lnk" or "MS Windows shortcut" in data_dictionary["type"]) and selfextract_conf.general.lnk: + elif (package == "lnk" or "MS Windows shortcut" in 
data_dictionary["type"]) and integration_conf.general.lnk: data_dictionary["lnk"] = LnkShortcut(file_path).run() - elif ("Java Jar" in data_dictionary["type"] or file_path.endswith(".jar")) and selfextract_conf.general.java: - if selfextract_conf.procyon.binary and not path_exists(selfextract_conf.procyon.binary): + elif ("Java Jar" in data_dictionary["type"] or file_path.endswith(".jar")) and integration_conf.general.java: + if integration_conf.procyon.binary and not path_exists(integration_conf.procyon.binary): log.error("procyon_path specified in processing.conf but the file does not exist") else: - data_dictionary["java"] = Java(file_path, selfextract_conf.procyon.binary).run() - + data_dictionary["java"] = Java(file_path, integration_conf.procyon.binary).run() + elif file_path.endswith(".rdp") or data_dictionary.get("name", {}).endswith(".rdp"): + data_dictionary["rdp"] = parse_rdp_file(file_path) # It's possible to fool libmagic into thinking our 2007+ file is a zip. # So until we have static analysis for zip files, we can use oleid to fail us out silently, # yeilding no static analysis results for actual zip files. - # elif ("ELF" in data_dictionary["type"] or file_path.endswith(".elf")) and selfextract_conf.general.elf: + # elif ("ELF" in data_dictionary["type"] or file_path.endswith(".elf")) and integration_conf.general.elf: # data_dictionary["elf"] = ELF(file_path).run() # data_dictionary["keys"] = f.get_keys() - # elif HAVE_OLETOOLS and package == "hwp" and selfextract_conf.general.hwp: + # elif HAVE_OLETOOLS and package == "hwp" and integration_conf.general.hwp: # data_dictionary["hwp"] = HwpDocument(file_path).run() data = path_read_file(file_path) @@ -223,10 +238,10 @@ def static_file_info( if processing_conf.trid.enabled: data_dictionary["trid"] = trid_info(file_path) - if processing_conf.die.enabled and HAVE_DIE: + if processing_conf.die.enabled: data_dictionary["die"] = detect_it_easy_info(file_path) - if HAVE_FLOSS and processing_conf.floss.enabled: + if HAVE_FLOSS and processing_conf.floss.enabled and "Mono" not in data_dictionary["type"]: floss_strings = Floss(file_path, package).run() if floss_strings: data_dictionary["floss"] = floss_strings @@ -250,6 +265,11 @@ def static_file_info( if vt_details: data_dictionary["virustotal"] = vt_details + if HAVE_MANDIANT_INTEL and processing_conf.mandiant_intel.enabled: + mandiant_intel_details = mandiant_lookup("file", file_path, results) + if mandiant_intel_details: + data_dictionary["mandiant_intel"] = mandiant_intel_details + generic_file_extractors( file_path, destination_folder, @@ -261,24 +281,38 @@ def static_file_info( def detect_it_easy_info(file_path: str): + if not path_exists(processing_conf.die.binary): + log.warning("detect-it-easy binary not found at path %s", processing_conf.die.binary) + return [] + try: - try: - result_json = die.scan_file(file_path, die.ScanFlags.RESULT_AS_JSON, str(die.database_path / "db")) - except Exception as e: - log.error("DIE error: %s", str(e)) + die_output = subprocess.check_output( + [processing_conf.die.binary, "-j", file_path], + stderr=subprocess.STDOUT, + universal_newlines=True, + ) - if "detects" not in result_json: - return [] + def get_json() -> dict[str, Any]: + """Get the JSON element from the detect it easy output. - if "Invalid signature" in result_json and "{" in result_json: - start = result_json.find("{") - if start != -1: - result_json = result_json[start:] + This is required due to non-JSON output in JSON mode. 
+ https://github.com/horsicq/Detect-It-Easy/issues/242 + """ + matches = re.findall(r"\{.*\}", die_output, re.S) + return json.loads(matches[0]) if matches else {} - strings = [sub["string"] for block in json.loads(result_json).get("detects", []) for sub in block.get("values", [])] + def get_matches() -> list[str]: + """Get the string values from the detect it easy output.""" + return [sub["string"] for block in get_json().get("detects", []) for sub in block.get("values", [])] - if strings: - return strings + return [] if "detects" not in die_output else get_matches() + except subprocess.CalledProcessError as err: + log.error( + "Detect-It-Easy: Failed to execute cmd=`%s`, stdout=`%s`, stderr=`%s`", + shlex.join(err.cmd), + err.stdout, + err.stderr, + ) except json.decoder.JSONDecodeError as e: log.debug("DIE results are not in json format: %s", str(e)) except Exception as e: @@ -341,7 +375,7 @@ def _extracted_files_metadata( if processing_conf.trid.enabled: file_info["trid"] = trid_info(full_path) - if processing_conf.die.enabled and HAVE_DIE: + if processing_conf.die.enabled: file_info["die"] = detect_it_easy_info(full_path) dest_path = os.path.join(destination_folder, file_info["sha256"]) @@ -437,10 +471,11 @@ def generic_file_extractors( eziriz_deobfuscate, office_one, msix_extract, + UnGPG_extract, ] futures = {} - with pebble.ProcessPool(max_workers=int(selfextract_conf.general.max_workers)) as pool: + with pebble.ProcessPool(max_workers=int(integration_conf.general.max_workers)) as pool: # Prefer custom modules over the built-in ones, since only 1 is allowed # to be the extracted_files_tool. if extra_info_modules: @@ -452,12 +487,12 @@ def generic_file_extractors( for extraction_func in file_info_funcs: funcname = extraction_func.__name__.split(".")[-1] if ( - not getattr(selfextract_conf, funcname, {}).get("enabled", False) + not getattr(integration_conf, funcname, {}).get("enabled", False) and getattr(extraction_func, "enabled", False) is False ): continue - func_timeout = int(getattr(selfextract_conf, funcname, {}).get("timeout", 60)) + func_timeout = int(getattr(integration_conf, funcname, {}).get("timeout", 60)) futures[funcname] = pool.schedule(extraction_func, args=args, kwargs=kwargs, timeout=func_timeout) pool.join() @@ -499,13 +534,12 @@ def generic_file_extractors( log.debug("Files already extracted from %s by %s. 
Also extracted with %s", file, old_tool_name, new_tool_name) continue metadata = _extracted_files_metadata(tempdir, destination_folder, files=extracted_files, results=results) - data_dictionary.update( - { - "extracted_files": metadata, - "extracted_files_tool": new_tool_name, - "extracted_files_time": func_result["took_seconds"], - } - ) + data_dictionary.setdefault("selfextract", {}) + data_dictionary["selfextract"][new_tool_name] = { + "extracted_files": metadata, + "extracted_files_time": func_result["took_seconds"], + "password": extraction_result.get("password", ""), + } finally: if tempdir: # ToDo doesn't work @@ -559,7 +593,7 @@ def vbe_extract(file: str, **_) -> ExtractorReturnType: try: decoded = vbe_decode_file(file, data) except Exception as e: - log.error(e, exc_info=True) + log.exception(e) if not decoded: log.debug("VBE content wasn't decoded") @@ -576,7 +610,7 @@ def eziriz_deobfuscate(file: str, *, data_dictionary: dict, **_) -> ExtractorRet if all(".NET Reactor" not in string for string in data_dictionary.get("die", [])): return - binary = shlex.split(selfextract_conf.eziriz_deobfuscate.binary.strip())[0] + binary = shlex.split(integration_conf.eziriz_deobfuscate.binary.strip())[0] binary = os.path.join(CUCKOO_ROOT, binary) if not binary: log.warning("eziriz_deobfuscate.binary is not defined in the configuration.") @@ -599,7 +633,7 @@ def eziriz_deobfuscate(file: str, *, data_dictionary: dict, **_) -> ExtractorRet _ = run_tool( [ os.path.join(CUCKOO_ROOT, binary), - *shlex.split(selfextract_conf.eziriz_deobfuscate.extra_args.strip()), + *shlex.split(integration_conf.eziriz_deobfuscate.extra_args.strip()), file, ], universal_newlines=True, @@ -618,7 +652,7 @@ def de4dot_deobfuscate(file: str, *, filetype: str, **_) -> ExtractorReturnType: if "Mono" not in filetype: return - binary = shlex.split(selfextract_conf.de4dot_deobfuscate.binary.strip())[0] + binary = shlex.split(integration_conf.de4dot_deobfuscate.binary.strip())[0] if not binary: log.warning("de4dot_deobfuscate.binary is not defined in the configuration.") return @@ -632,7 +666,7 @@ def de4dot_deobfuscate(file: str, *, filetype: str, **_) -> ExtractorReturnType: _ = run_tool( [ binary, - *shlex.split(selfextract_conf.de4dot_deobfuscate.extra_args.strip()), + *shlex.split(integration_conf.de4dot_deobfuscate.extra_args.strip()), "-f", file, "-o", @@ -661,7 +695,7 @@ def msi_extract(file: str, *, filetype: str, **kwargs) -> ExtractorReturnType: if not kwargs.get("tests"): # msiextract in different way that 7z, we need to add subfolder support output = run_tool( - [selfextract_conf.msi_extract.binary, file, "--directory", tempdir], + [integration_conf.msi_extract.binary, file, "--directory", tempdir], universal_newlines=True, stderr=subprocess.PIPE, ) @@ -699,17 +733,36 @@ def Inno_extract(file: str, *, data_dictionary: dict, **_) -> ExtractorReturnTyp if all("Inno Setup" not in string for string in data_dictionary.get("die", [])): return - if not path_exists(selfextract_conf.Inno_extract.binary): - log.error("Missed dependency: sudo apt install innoextract") + if not path_exists(innoextact_binary): + log.error("Missed dependency: Get a release from https://github.com/gdesmar/innoextract") return + password = "" with extractor_ctx(file, "InnoExtract", prefix="innoextract_", folder=tools_folder) as ctx: tempdir = ctx["tempdir"] - run_tool( - [selfextract_conf.Inno_extract.binary, file, "--output-dir", tempdir], + output = run_tool( + [innoextact_binary, file, "--output-dir", tempdir], universal_newlines=True, 
stderr=subprocess.PIPE, ) + if ( + "Warning: Setup contains encrypted files, use the --password option to extract them" in output + or "- encrypted" in output + ): + output = run_tool( + [innoextact_binary, "--crack", file], + universal_newlines=True, + stderr=subprocess.PIPE, + ) + if "Password found: " in output: + password = output.split("\n")[0].split(": ")[1] + if password: + _ = run_tool( + [innoextact_binary, file, "--output-dir", tempdir, "--password", password], + universal_newlines=True, + stderr=subprocess.PIPE, + ) + ctx["password"] = password ctx["extracted_files"] = collect_extracted_filenames(tempdir) return ctx @@ -745,7 +798,8 @@ def kixtart_extract(file: str, **_) -> ExtractorReturnType: @time_tracker def UnAutoIt_extract(file: str, *, data_dictionary: dict, **_) -> ExtractorReturnType: global UN_AUTOIT_NOTIF - if all(block.get("name") not in ("AutoIT_Compiled", "AutoIT_Script") for block in data_dictionary.get("yara", {})): + merged_lists = data_dictionary.get("yara", []) + data_dictionary.get("cape_yara", []) + if all(not block.get("name", "").lower().startswith("autoit") for block in merged_lists): return # this is useless to notify in each iteration @@ -800,8 +854,8 @@ def UPX_unpack(file: str, *, filetype: str, data_dictionary: dict, **_) -> Extra def SevenZip_unpack(file: str, *, filetype: str, data_dictionary: dict, options: dict, **_) -> ExtractorReturnType: tool = False - if not path_exists("/usr/bin/7z"): - logging.error("Missed 7z package: apt install p7zip-full") + if not path_exists(sevenzip_binary): + logging.error("Missed 7zip executable. Run: poetry run python utils/community.py -waf") return # Check for msix file since it's a zip @@ -820,14 +874,13 @@ def SevenZip_unpack(file: str, *, filetype: str, data_dictionary: dict, options: if all([pattern in file_data for pattern in (b"AndroidManifest.xml", b"classes.dex")]): return - password = "" # Only for real 7zip, breaks others password = options.get("password", "infected") if any( "7-zip Installer data" in string for string in data_dictionary.get("die", []) ) or "Zip archive data" in data_dictionary.get("type", ""): - tool = "7Zip" - prefix = "7zip_" + tool = "SevenZip" + prefix = "SevenZip_" password = options.get("password", "infected") password = f"-p{password}" @@ -838,9 +891,12 @@ def SevenZip_unpack(file: str, *, filetype: str, data_dictionary: dict, options: prefix = "cab_" password = "" - elif "Nullsoft Installer self-extracting archive" in filetype: + elif "Nullsoft Installer self-extracting archive" in filetype or any( + "Nullsoft Scriptable Install System" in string for string in data_dictionary.get("die", []) + ): tool = "UnNSIS" prefix = "unnsis_" + password = "" """ elif ( any("SFX: WinRAR" in string for string in data_dictionary.get("die", [{}])) @@ -856,24 +912,16 @@ def SevenZip_unpack(file: str, *, filetype: str, data_dictionary: dict, options: with extractor_ctx(file, tool, prefix=prefix, folder=tools_folder) as ctx: tempdir = ctx["tempdir"] HAVE_SFLOCK = False - if HAVE_SFLOCK: + if HAVE_SFLOCK and tool not in ("UnNSIS",): unpacked = unpack(file.encode(), password=password) for child in unpacked.children: _ = path_write_file(os.path.join(tempdir, child.filename.decode()), child.contents) else: - _ = run_tool( - [ - "7z", - "e", - file, - password, - f"-o{tempdir}", - "-y", - ], - universal_newlines=True, - stderr=subprocess.PIPE, - ) - + sevenzip_args = [sevenzip_binary, "e", file, f"-o{tempdir}", "-y"] + # Need this, otherwie NSIS fails + if password: + sevenzip_args.append(password) + 
_ = run_tool(sevenzip_args, universal_newlines=True, stderr=subprocess.PIPE) ctx["extracted_files"] = collect_extracted_filenames(tempdir) return ctx @@ -948,3 +996,24 @@ def msix_extract(file: str, *, data_dictionary: dict, **_) -> ExtractorReturnTyp ctx["extracted_files"] = collect_extracted_filenames(tempdir) return ctx + + +@time_tracker +def UnGPG_extract(file: str, filetype: str, data_dictionary: dict, options: dict, **_) -> ExtractorReturnType: + + if "PGP symmetric key encrypted data" not in data_dictionary.get("type", ""): + return + + password = options.get("password", "infected") + filename = os.path.basename(file) + with extractor_ctx(file, "UnGPG", prefix="unpgp", folder=tools_folder) as ctx: + tempdir = ctx["tempdir"] + output = run_tool( + ["gpg", "--passphrase", password, "--batch", "--quiet", "--yes", "-o", os.path.join(tempdir, filename), "-d", file], + universal_newlines=True, + stderr=subprocess.PIPE, + ) + if output: + ctx["extracted_files"] = collect_extracted_filenames(tempdir) + + return ctx diff --git a/lib/cuckoo/common/integrations/file_extra_info_modules/overlay.py b/lib/cuckoo/common/integrations/file_extra_info_modules/overlay.py index 540a97d2e22..31bcf4136fc 100644 --- a/lib/cuckoo/common/integrations/file_extra_info_modules/overlay.py +++ b/lib/cuckoo/common/integrations/file_extra_info_modules/overlay.py @@ -17,7 +17,6 @@ @time_tracker def extract_details(file, *, data_dictionary, **_) -> ExtractorReturnType: - if not data_dictionary.get("pe", {}).get("overlay"): return {} diff --git a/lib/cuckoo/common/integrations/file_extra_info_modules/pyinstaller.py b/lib/cuckoo/common/integrations/file_extra_info_modules/pyinstaller.py new file mode 100644 index 00000000000..c68cf054322 --- /dev/null +++ b/lib/cuckoo/common/integrations/file_extra_info_modules/pyinstaller.py @@ -0,0 +1,32 @@ +import logging + +from lib.cuckoo.common.integrations.file_extra_info_modules import ( + ExtractorReturnType, + collect_extracted_filenames, + extractor_ctx, + time_tracker, +) +from lib.cuckoo.common.integrations.pyinstxtractor import PyInstArchive + +log = logging.getLogger(__name__) + + +@time_tracker +def extract_details(file, *, data_dictionary, **_) -> ExtractorReturnType: + if all("PyInstaller" not in string for string in data_dictionary.get("die", [])): + return {} + + with extractor_ctx(file, "PyInstaller", prefix="PyInstaller") as ctx: + tempdir = ctx["tempdir"] + arch = PyInstArchive({"file": file, "destination_folder": tempdir, "entry_points": True}) + if arch.open() and arch.checkFile() and arch.getCArchiveInfo(): + arch.parseTOC() + arch.extractFiles() + arch.close() + log.debug( + "[+] Successfully extracted pyinstaller archive: %s\nYou can now use a python decompiler on the pyc files within the extracted directory", + ) + arch.close() + + ctx["extracted_files"] = collect_extracted_filenames(tempdir) + return ctx diff --git a/lib/cuckoo/common/integrations/floss.py b/lib/cuckoo/common/integrations/floss.py index 5bc55331822..7fa43aa2274 100644 --- a/lib/cuckoo/common/integrations/floss.py +++ b/lib/cuckoo/common/integrations/floss.py @@ -8,13 +8,14 @@ from lib.cuckoo.common.constants import CUCKOO_ROOT from lib.cuckoo.common.path_utils import path_exists -processing_cfg = Config("processing") +integrations_conf = Config("integrations") HAVE_FLOSS = False try: - HAVE_FLOSS = True import floss.main as fm from floss.strings import extract_ascii_unicode_strings + + HAVE_FLOSS = True except ImportError: print("Missed dependency flare-floss: poetry run pip install -U 
flare-floss") @@ -37,7 +38,7 @@ def run(self): if not HAVE_FLOSS: return - if processing_cfg.floss.on_demand and not self.on_demand: + if integrations_conf.floss.on_demand and not self.on_demand: return results = {} @@ -57,17 +58,17 @@ def run(self): else: fileformat = "pe" - min_length = processing_cfg.floss.min_length + min_length = integrations_conf.floss.min_length fm.set_log_config(fm.DebugLevel.NONE, True) tmpres = {} results = {} - if processing_cfg.floss.static_strings: + if integrations_conf.floss.static_strings: with open(self.file_path, "rb") as f: with contextlib.closing(mmap.mmap(f.fileno(), 0, access=mmap.ACCESS_READ)) as buf: tmpres["static_strings"] = list(extract_ascii_unicode_strings(buf, min_length)) - sigspath = fm.get_signatures(Path(os.path.join(CUCKOO_ROOT, processing_cfg.floss.sigs_path))) + sigspath = fm.get_signatures(Path(os.path.join(CUCKOO_ROOT, integrations_conf.floss.sigs_path))) vw = fm.load_vw(Path(self.file_path), fileformat, sigspath, False) try: @@ -83,7 +84,7 @@ def run(self): True, ) - if processing_cfg.floss.stack_strings: + if integrations_conf.floss.stack_strings: selected_functions = fm.get_functions_without_tightloops(decoding_function_features) tmpres["stack_strings"] = fm.extract_stackstrings( vw, @@ -93,7 +94,7 @@ def run(self): disable_progress=True, ) - if processing_cfg.floss.tight_strings: + if integrations_conf.floss.tight_strings: tightloop_functions = fm.get_functions_with_tightloops(decoding_function_features) tmpres["tight_strings"] = fm.extract_tightstrings( vw, @@ -103,7 +104,7 @@ def run(self): disable_progress=True, ) - if processing_cfg.floss.decoded_strings: + if integrations_conf.floss.decoded_strings: top_functions = fm.get_top_functions(decoding_function_features, 20) fvas_to_emulate = fm.get_function_fvas(top_functions) fvas_tight_functions = fm.get_tight_function_fvas(decoding_function_features) @@ -124,7 +125,7 @@ def run(self): results[stype].append(sval.string) except Exception as e: - log.error(e, exc_info=True) + log.exception(e) fm.set_log_config(fm.DebugLevel.DEFAULT, False) diff --git a/lib/cuckoo/common/integrations/lznt1.py b/lib/cuckoo/common/integrations/lznt1.py deleted file mode 100644 index 2c11586a480..00000000000 --- a/lib/cuckoo/common/integrations/lznt1.py +++ /dev/null @@ -1,132 +0,0 @@ -# Rekall Memory Forensics -# Copyright 2014 Google Inc. All Rights Reserved. -# -# Author: Michael Cohen scudette@google.com. -# -# This program is free software; you can redistribute it and/or modify -# it under the terms of the GNU General Public License as published by -# the Free Software Foundation; either version 2 of the License, or (at -# your option) any later version. -# -# This program is distributed in the hope that it will be useful, but -# WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU -# General Public License for more details. -# -# You should have received a copy of the GNU General Public License -# along with this program; if not, write to the Free Software -# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA -# - -"""Decompression support for the LZNT1 compression algorithm. 
- -Reference: -http://msdn.microsoft.com/en-us/library/jj665697.aspx -(2.5 LZNT1 Algorithm Details) - -https://github.com/libyal/reviveit/ -https://github.com/sleuthkit/sleuthkit/blob/develop/tsk/fs/ntfs.c -""" -import array -import struct -from io import BytesIO - -__all__ = ["Lznt1", "lznt1"] - - -def get_displacement(offset: int) -> int: - """Calculate the displacement.""" - result = 0 - while offset >= 0x10: - offset >>= 1 - result += 1 - - return result - - -DISPLACEMENT_TABLE = array.array("B", [get_displacement(x) for x in range(8192)]) - -COMPRESSED_MASK = 1 << 15 -SIGNATURE_MASK = 3 << 12 -SIZE_MASK = (1 << 12) - 1 -TAG_MASKS = [(1 << i) for i in range(0, 8)] - - -def decompress_data(cdata: bytes) -> bytes: - """Decompresses the data.""" - block_end = 0 - - with BytesIO(cdata) as in_fd, BytesIO() as output_fd: - while in_fd.tell() < len(cdata): - block_offset = in_fd.tell() - uncompressed_chunk_offset = output_fd.tell() - - block_header = struct.unpack("= block_end: - break - - if header & mask: - pointer = struct.unpack("> (12 - displacement)) + 1 - symbol_length = (pointer & (0xFFF >> displacement)) + 3 - - output_fd.seek(-symbol_offset, 2) - data = output_fd.read(symbol_length) - - # Pad the data to make it fit. - if 0 < len(data) < symbol_length: - data = data * (symbol_length // len(data) + 1) - data = data[:symbol_length] - - output_fd.seek(0, 2) - - output_fd.write(data) - - else: - data = in_fd.read(1) - - output_fd.write(data) - - else: - # Block is not compressed - data = in_fd.read(size + 1) - output_fd.write(data) - - result = output_fd.getvalue() - - return result - - -class Lznt1: - """ - Implementation of LZNT1 decompression. Allows to decompress data compressed by RtlCompressBuffer - .. code-block:: python - from malduck import lznt1 - lznt1(b"\x1a\xb0\x00compress\x00edtestda\x04ta\x07\x88alot") - :param buf: Buffer to decompress - :type buf: bytes - :rtype: bytes - """ - - def decompress(self, buf: bytes) -> bytes: - return decompress_data(buf) - - __call__ = decompress - - -lznt1 = Lznt1() diff --git a/lib/cuckoo/common/integrations/mandiant_intel.py b/lib/cuckoo/common/integrations/mandiant_intel.py new file mode 100644 index 00000000000..53f4fca393f --- /dev/null +++ b/lib/cuckoo/common/integrations/mandiant_intel.py @@ -0,0 +1,154 @@ +import base64 +import json +import logging + +import requests +from urllib3.exceptions import InsecureRequestWarning + +from lib.cuckoo.common.config import Config +from lib.cuckoo.common.objects import File +from lib.cuckoo.common.path_utils import path_exists +from lib.cuckoo.common.utils import add_family_detection + +requests.packages.urllib3.disable_warnings(category=InsecureRequestWarning) + +log = logging.getLogger(__name__) + +MANDIANT_AUTH_URL = "https://api.intelligence.mandiant.com/token" +MANDIANT_SEARCH_URL = "https://api.intelligence.mandiant.com/v4/search" + +integragrations_conf = Config("integrations") + +api_access = integragrations_conf.mandiant_intel.api_access +api_secret = integragrations_conf.mandiant_intel.api_secret + + +class MandiantAPIClient: + def __init__(self): + self.api_access = api_access + self.api_secret = api_secret + self.auth_url = MANDIANT_AUTH_URL + self.search_url = MANDIANT_SEARCH_URL + self.token = None + + def _generate_auth_header(self): + auth_token_bytes = f"{self.api_access}:{self.api_secret}".encode("ascii") + base64_auth_token_bytes = base64.b64encode(auth_token_bytes) + return base64_auth_token_bytes.decode("ascii") + + def get_new_token(self): + if self.token: + return + + 
headers = { + "Authorization": f"Basic {self._generate_auth_header()}", + "Content-Type": "application/x-www-form-urlencoded", + "Accept": "application/json", + "X-App-Name": "get-indicator-infos", + } + params = {"grant_type": "client_credentials"} + + try: + response = requests.post(url=self.auth_url, headers=headers, verify=False, allow_redirects=False, data=params) + except Exception as e: + return {"error": True, "msg": f"Error during token request: {e}"} + + if response.status_code == 200: + self.token = response.json().get("access_token") + return self.token + else: + return {"error": True, "msg": f"Failed to obtain token from server: {response.status_code}"} + + def search(self, indicator): + if not self.token: + log.error("No valid token available. Please authenticate first.") + return + + headers = { + "Authorization": f"Bearer {self.token}", + "Content-Type": "application/json", + "Accept": "application/json", + "X-App-Name": "get-indicator-infos", + } + + body = { + "search": f"{indicator}", + "type": "all", + "limit": 50, + "sort_by": ["relevance"], + "sort_order": "asc", + "next": "", + } + + try: + response = requests.post( + url=self.search_url, headers=headers, verify=False, allow_redirects=False, data=json.dumps(body) + ) + except Exception as e: + return {"error": True, "msg": f"Error during search request: {e}"} + + if response.status_code == 200: + return self.parse_response(response.json()) + elif response.status_code == 401: + self.token = None + if self.get_new_token(): + return self.search(indicator) + else: + log.error("Failed to refresh token.") + return + else: + return {"error": True, "msg": f"Search failed: {response.status_code}"} + + def parse_response(self, response): + actors = [] + malwares = [] + objects = response.get("objects") + if not objects: + return + + for obj in objects: + if "actors" in obj: + actors.extend(actor.get("name") for actor in obj["actors"] if "name" in actor) + if "malwares" in obj: + malwares.extend(malware.get("name") for malware in obj["malwares"] if "name" in malware) + + return {"actor": actors, "malware": malwares} + + +def mandiant_lookup(category: str, target: str, results: dict = {}): + if not integragrations_conf.mandiant_intel.enabled: + return results + + mandiant_intel = {} + + if category == "file": + sha256 = False + if not path_exists(target) and len(target) != 64: + return {"error": True, "msg": "File doesn't exist"} + + sha256 = target if len(target) == 64 else File(target).get_sha256() + client = MandiantAPIClient() + mandiant_intel = {} + mandiant_intel["sha256"] = sha256 + if client.get_new_token(): + result = client.search(sha256) + if result: + mandiant_intel["detections"] = result + names = result.get("malware", []) + for name in names: + add_family_detection(results, name, "Mandiant", sha256) + + return mandiant_intel + + +if __name__ == "__main__": + import sys + + indicator = sys.argv[1] + + client = MandiantAPIClient() + + if client.get_new_token(): + result = client.search(indicator) + if result: + print(json.dumps(result, indent=4)) diff --git a/lib/cuckoo/common/integrations/mitre.py b/lib/cuckoo/common/integrations/mitre.py index 4f56a8e9077..918efc1b8b4 100644 --- a/lib/cuckoo/common/integrations/mitre.py +++ b/lib/cuckoo/common/integrations/mitre.py @@ -13,6 +13,11 @@ def mitre_generate_attck(results, mitre): attck = {} ttp_dict = {} + + if not mitre or not hasattr(mitre, "enterprise"): + print("Missed dependency: poetry run pip install git+https://github.com/CAPESandbox/pyattck") + return attck + # 
[{'signature': 'http_request', 'ttps': ['T1071']}, {'signature': 'modify_proxy', 'ttps': ['T1112']}, {'signature': 'recon_fingerprint', 'ttps': ['T1012', 'T1082']}] for ttp_block in results["ttps"]: for ttp in ttp_block.get("ttps", []): @@ -31,7 +36,9 @@ def mitre_generate_attck(results, mitre): } ) except FileNotFoundError: - print("MITRE Att&ck data missed, execute: 'python3 utils/community.py -waf --mitre'") + print("MITRE Att&ck data missed, execute: 'poetry run python utils/community.py -waf --mitre'") + except AttributeError: + print("Missed dependency: poetry run pip install git+https://github.com/CAPESandbox/pyattck") except Exception as e: # simplejson.errors.JSONDecodeError log.error(("Mitre", e)) diff --git a/lib/cuckoo/common/integrations/office_one.py b/lib/cuckoo/common/integrations/office_one.py index 4594a7bd0da..bd7d20604da 100644 --- a/lib/cuckoo/common/integrations/office_one.py +++ b/lib/cuckoo/common/integrations/office_one.py @@ -115,10 +115,10 @@ def extract_files(self) -> Iterator[bytes]: size = self.data[size_offset : size_offset + 4] i_size = struct.unpack(" Iterator[OneNoteMetadataObject]: creation_date=h_createDate, last_modification_date=h_LastDate, ) - except Exception as e: - logger.error(f"Error while parsing object {cpt}") - logger.error(f"Error: {e}.") + logger.error("Error while parsing object %s - Error: %s", cpt, str(e)) return ret diff --git a/lib/cuckoo/common/integrations/parse_dotnet.py b/lib/cuckoo/common/integrations/parse_dotnet.py index c88c1b936f6..6a86fb30eb4 100644 --- a/lib/cuckoo/common/integrations/parse_dotnet.py +++ b/lib/cuckoo/common/integrations/parse_dotnet.py @@ -56,7 +56,7 @@ def _get_custom_attrs(self) -> List[Dict[str, str]]: except subprocess.CalledProcessError as e: log.error("Monodis: %s", str(e)) except Exception as e: - log.error(e, exc_info=True) + log.exception(e) return None def _get_assembly_refs(self) -> List[Dict[str, str]]: @@ -84,7 +84,7 @@ def _get_assembly_refs(self) -> List[Dict[str, str]]: except subprocess.CalledProcessError as e: log.error("Monodis: %s", str(e)) except Exception as e: - log.error(e, exc_info=True) + log.exception(e) return None def _get_assembly_info(self) -> Dict[str, str]: @@ -103,7 +103,7 @@ def _get_assembly_info(self) -> Dict[str, str]: except subprocess.CalledProcessError as e: log.error("Monodis: %s", str(e)) except Exception as e: - log.error(e, exc_info=True) + log.exception(e) return None def _get_type_refs(self) -> List[Dict[str, str]]: @@ -128,7 +128,7 @@ def _get_type_refs(self) -> List[Dict[str, str]]: except subprocess.CalledProcessError as e: log.error("Monodis: %s", str(e)) except Exception as e: - log.error(e, exc_info=True) + log.exception(e) return None def run(self) -> Dict[str, Any]: @@ -151,5 +151,5 @@ def run(self) -> Dict[str, Any]: else: return except Exception as e: - log.error(e, exc_info=True) + log.exception(e) return None diff --git a/lib/cuckoo/common/integrations/parse_hwp.py b/lib/cuckoo/common/integrations/parse_hwp.py index 2b037bdf227..5d64bd24ba5 100644 --- a/lib/cuckoo/common/integrations/parse_hwp.py +++ b/lib/cuckoo/common/integrations/parse_hwp.py @@ -38,7 +38,7 @@ def unpack_hwp(self): stream_content = zlib.decompress(contents, -15) self.files[stream_name] = stream_content except Exception as e: - log.error(e, exc_info=True) + log.exception(e) def extract_eps(self) -> List[bytes]: """Extract some information from Encapsulated Post Script files.""" diff --git a/lib/cuckoo/common/integrations/parse_java.py b/lib/cuckoo/common/integrations/parse_java.py 
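The handlers in this patch replace log.error(e, exc_info=True) with log.exception(e); both log at ERROR level and include the active traceback, as this small standalone example shows:

```python
# Standalone illustration: Logger.exception() records the message plus the
# current traceback, matching log.error(e, exc_info=True).
import logging

logging.basicConfig(level=logging.DEBUG)
log = logging.getLogger(__name__)

try:
    int("not a number")
except Exception as e:
    log.exception(e)  # same output as log.error(e, exc_info=True)
```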
index 733ed296342..4d45863c3f9 100644 --- a/lib/cuckoo/common/integrations/parse_java.py +++ b/lib/cuckoo/common/integrations/parse_java.py @@ -41,7 +41,7 @@ def run(self) -> Dict[str, Any]: p = Popen([self.decomp_jar, jar_file], stdout=PIPE) results["decompiled"] = convert_to_printable(p.stdout.read()) except Exception as e: - log.error(e, exc_info=True) + log.exception(e) with contextlib.suppress(Exception): Path(jar_file.decode()).unlink() diff --git a/lib/cuckoo/common/integrations/parse_office.py b/lib/cuckoo/common/integrations/parse_office.py index 1c7de942cca..f2b907027a2 100644 --- a/lib/cuckoo/common/integrations/parse_office.py +++ b/lib/cuckoo/common/integrations/parse_office.py @@ -78,6 +78,34 @@ class Office: - Word/Excel Single File Web Page / MHTML (.mht) - Publisher (.pub) - Rich Text Format (.rtf) + + Office Document Static Analysis + + This class provides methods to perform static analysis on various Office document formats. + Supported formats include: + + Attributes: + file_path (str): Path to the file to be analyzed. + options (Dict[str, str]): Analysis options. + task_id (str): Task identifier. + sha256 (str): SHA-256 hash of the file. + + Methods: + _get_meta(meta) -> Dict[str, Dict[str, str]]: + Extracts metadata from OLE files. + + _get_xml_meta(filepath) -> Dict[str, Dict[str, str]]: + Extracts metadata from XML files within Office documents. + + _parse_rtf(data: bytes) -> Dict[str, list]: + Parses RTF data and extracts embedded objects. + + _parse(filepath: str) -> Dict[str, Any]: + Parses an Office document for static information. + + run() -> Dict[str, Any]: + Runs the analysis and returns the results. + """ def __init__(self, file_path: str, task_id: str, sha256: str, options: Dict[str, str]): @@ -129,7 +157,7 @@ def _get_xml_meta(self, filepath) -> Dict[str, Dict[str, str]]: continue metares["SummaryInformation"][n.split(":")[1]] = convert_to_printable(data[0].data) except (IndexError, AttributeError) as e: - log.error(e, exc_info=True) + log.exception(e) for elem in app._get_documentElement().childNodes: try: @@ -146,7 +174,7 @@ def _get_xml_meta(self, filepath) -> Dict[str, Dict[str, str]]: continue metares["DocumentSummaryInformation"][n] = convert_to_printable(data[0].data) except (IndexError, AttributeError) as e: - log.error(e, exc_info=True) + log.exception(e) return metares @@ -156,7 +184,7 @@ def _parse_rtf(self, data: bytes) -> Dict[str, list]: rtfp.parse() save_dir = os.path.join(CUCKOO_ROOT, "storage", "analyses", self.task_id, "rtf_objects") if rtfp.objects and not path_exists(save_dir): - path_mkdir(save_dir) + path_mkdir(save_dir, exist_ok=True) for rtfobj in rtfp.objects: results.setdefault(str(rtfobj.format_id), []) temp_dict = {"class_name": "", "size": "", "filename": "", "type_embed": "", "CVE": "", "sha256": "", "index": ""} @@ -224,9 +252,9 @@ def _parse(self, filepath: str) -> Dict[str, Any]: @return: results dict or None """ - results = {} + officeresults = {} if not HAVE_OLETOOLS: - return results + return officeresults vba = False if is_rtf(filepath): @@ -234,29 +262,30 @@ def _parse(self, filepath: str) -> Dict[str, Any]: contents = path_read_file(filepath) temp_results = self._parse_rtf(contents) if temp_results: - results["office_rtf"] = temp_results + officeresults["rtf"] = temp_results except Exception as e: - log.error(e, exc_info=True) + log.exception(e) else: try: vba = VBA_Parser(filepath) except ValueError as e: log.error("Error VBA_Parser: %s", str(e)) except Exception: - return results + # ToDo really return + return 
officeresults try: # extract DDE dde = extract_dde(filepath) if dde: - results["office_dde"] = convert_to_printable(dde) + officeresults["dde"] = convert_to_printable(dde) except (csv_error, UnicodeDecodeError): pass except AttributeError: log.warning("OleFile library bug: AttributeError! fix: poetry run pip install olefile") except Exception as e: - log.error(e, exc_info=True) + log.exception(e) - officeresults = {"Metadata": {}} + officeresults["Metadata"] = {} macro_folder = os.path.join(CUCKOO_ROOT, "storage", "analyses", self.task_id, "macros") if olefile.isOleFile(filepath): try: @@ -301,7 +330,7 @@ def _parse(self, filepath: str) -> Dict[str, Any]: except ValueError as e: log.error("Can't parse macros for %s - %s ", filepath, str(e)) except Exception as e: - log.error(e, exc_info=True) + log.exception(e) for keyword, description in detect_autoexec(vba_code): officeresults["Macro"]["Analysis"].setdefault("AutoExec", []).append( (keyword.replace(".", "_"), description) @@ -328,7 +357,7 @@ def _parse(self, filepath: str) -> Dict[str, Any]: if indicator.value and indicator.name in {"Word Document", "Excel Workbook", "PowerPoint Presentation"}: officeresults["Metadata"]["DocumentType"] = indicator.name except Exception as e: - log.error(e, exc_info=True) + log.exception(e) if HAVE_XLM_DEOBF: tmp_xlmmacro = xlmdeobfuscate(filepath, self.task_id, self.options.get("password", "")) @@ -341,4 +370,7 @@ def run(self) -> Dict[str, Any]: """Run analysis. @return: analysis results dict or None. """ - return self._parse(self.file_path) if path_exists(self.file_path) else None + if not path_exists(self.file_path): + log.error("parse_office File not found: %s", self.file_path) + return {} + return self._parse(self.file_path) diff --git a/lib/cuckoo/common/integrations/parse_pe.py b/lib/cuckoo/common/integrations/parse_pe.py index f24c0397fa7..652c0020268 100644 --- a/lib/cuckoo/common/integrations/parse_pe.py +++ b/lib/cuckoo/common/integrations/parse_pe.py @@ -161,7 +161,7 @@ def __init__(self, file_path: str = False, data: bytes = False): self.pe = pefile.PE(self.file_path) self.HAVE_PE = True except Exception as e: - log.error("PE type not recognised: %s", e) + log.debug("PE type not recognised: %s", e) # self.results = results @property @@ -219,7 +219,7 @@ def get_peid_signatures(self, pe: pefile.PE) -> list: if result: return list(result) except Exception as e: - log.error(e, exc_info=True) + log.exception(e) return None @@ -344,7 +344,7 @@ def get_resources(self, pe: pefile.PE) -> List[Dict[str, str]]: except pefile.PEFormatError as e: log.debug("get_resources error: %s", str(e)) except Exception as e: - log.error(e, exc_info=True) + log.exception(e) continue return resources @@ -368,7 +368,7 @@ def get_pdb_path(self, pe: pefile.PE) -> str: length = struct.unpack_from("IIB", dbgdata)[1] return dbgdata[12:length].decode("latin-1").rstrip("\0") except Exception as e: - log.error(e, exc_info=True) + log.exception(e) return None @@ -401,7 +401,7 @@ def get_imported_symbols(self, pe: pefile.PE) -> Dict[str, dict]: "imports": symbols, } except Exception as e: - log.error(e, exc_info=True) + log.exception(e) continue return imports @@ -538,7 +538,7 @@ def get_sections(self, pe: pefile.PE) -> List[Dict[str, str]]: } ) except Exception as e: - log.error(e, exc_info=True) + log.exception(e) continue return sections @@ -650,7 +650,7 @@ def get_icon_info(self, pe: pefile.PE) -> Tuple[str, str, str, str]: return None, None, None, None return icon, fullhash, simphash, dhash except Exception as e: - 
log.error(e, exc_info=True) + log.exception(e) return None, None, None, None @@ -693,7 +693,7 @@ def get_versioninfo(self, pe: pefile.PE) -> List[dict]: entry["value"] = f"0x0{entry['value'][2:5]} 0x0{entry['value'][7:10]}" peresults.append(entry) except Exception as e: - log.error(e, exc_info=True) + log.exception(e) continue return peresults @@ -853,12 +853,12 @@ def get_dll_exports(self) -> str: else: exports.append(re.sub("[^A-Za-z0-9_?@-]", "", exported_symbol.name)) except Exception as e: - log.error(e, exc_info=True) + log.exception(e) return ",".join(exports) except Exception as e: log.error("PE type not recognised") - log.error(e, exc_info=True) + log.exception(e) return "" @@ -873,7 +873,7 @@ def choose_dll_export(self) -> str: if exp.name.decode() in ("DllInstall", "DllRegisterServer", "xlAutoOpen"): return exp.name.decode() except Exception as e: - log.error(e, exc_info=True) + log.exception(e) return None def get_entrypoint(self, pe: pefile.PE) -> str: diff --git a/lib/cuckoo/common/integrations/parse_rdp.py b/lib/cuckoo/common/integrations/parse_rdp.py new file mode 100644 index 00000000000..b900e01a568 --- /dev/null +++ b/lib/cuckoo/common/integrations/parse_rdp.py @@ -0,0 +1,240 @@ +import argparse +import base64 +import datetime +import json +import logging +import sys +from contextlib import suppress +from hashlib import sha1, sha256 +from struct import unpack + +MISSED_DEPS = False +try: + import mscerts + from asn1crypto import pem, x509 + from certvalidator import CertificateValidator, ValidationContext + from cryptography import x509 as cx509 + from cryptography.hazmat.primitives import hashes, serialization + from cryptography.hazmat.primitives.serialization import pkcs7 + from cryptography.x509.oid import ExtensionOID +except ImportError: + print("pip3 install certvalidator asn1crypto mscerts") + MISSED_DEPS = True +try: + import re2 as re +except ImportError: + import re + +property_patterns = { + "alternate_full_address": re.compile(r"alternate\s+full\s+address\s*:\s*s\s*:\s*(.*)", re.I), + "alternate_shell": re.compile(r"alternate\s+shell\s*:\s*s\s*:\s*(.*)", re.I), + "authentication_level": re.compile(r"authentication\s+level\s*:\s*i\s*:\s*(\d+)", re.I), + "autoreconnection_enabled": re.compile(r"autoreconnection\s*enabled\s*:\s*i\s*:\s*(\d+)", re.I), + "audiocapturemode": re.compile(r"audiocapturemode\s*:\s*i\s*:\s*(\d+)", re.I), + "audiomode": re.compile(r"audiomode\s*:\s*i\s*:\s*(\d+)", re.I), + "bandwidthautodetect": re.compile(r"bandwidthautodetect\s*:\s*i\s*:\s*(\d+)", re.I), + "camerastoredirect": re.compile(r"camerastoredirect\s*:\s*s\s*:\s*(.*)", re.I), + "compression": re.compile(r"compression\s*:\s*i\s*:\s*(\d+)", re.I), + "disableconnectionsharing": re.compile(r"disableconnectionsharing\s*:\s*i\s*:\s*(\d+)", re.I), + "drivestoredirect": re.compile(r"drivestoredirect\s*:\s*s\s*:\s*(.*)", re.I), + "enablecredsspsupport": re.compile(r"enablecredsspsupport\s*:\s*i\s*:\s*(\d+)", re.I), + "enablerdsaadauth": re.compile(r"enablerdsaadauth\s*:\s*i\s*:\s*(\d+)", re.I), + "full_address": re.compile(r"full\s+address\s*:\s*s\s*:\s*(.*)", re.I), + "gatewaycredentialssource": re.compile(r"gatewaycredentialssource\s*:\s*i\s*:\s*(\d+)", re.I), + "gatewayhostname": re.compile(r"gatewayhostname\s*:\s*s\s*:\s*(.*)", re.I), + "gatewayprofileusagemethod": re.compile(r"gatewayprofileusagemethod\s*:\s*i\s*:\s*(\d+)", re.I), + "gatewayusagemethod": re.compile(r"gatewayusagemethod\s*:\s*i\s*:\s*(\d+)", re.I), + "keyboardhook": re.compile(r"keyboardhook\s*:\s*i\s*:\s*(\d+)", 
re.I), + "networkautodetect": re.compile(r"networkautodetect\s*:\s*i\s*:\s*(\d+)", re.I), + "promptcredentialonce": re.compile(r"promptcredentialonce\s*:\s*i\s*:\s*(\d+)", re.I), + "redirectclipboard": re.compile(r"redirectclipboard\s*:\s*i\s*:\s*(\d+)", re.I), + "redirectcomports": re.compile(r"redirectcomports\s*:\s*i\s*:\s*(\d+)", re.I), + "redirectlocation": re.compile(r"redirectlocation\s*:\s*i\s*:\s*(\d+)", re.I), + "redirectprinters": re.compile(r"redirectprinters\s*:\s*i\s*:\s*(\d+)", re.I), + "redirectsmartcards": re.compile(r"redirectsmartcards\s*:\s*i\s*:\s*(\d+)", re.I), + "redirectwebauthn": re.compile(r"redirectwebauthn\s*:\s*i\s*:\s*(\d+)", re.I), + "screen_mode_id": re.compile(r"screen mode id\s*:\s*i\s*:\s*(\d+)", re.I), + "usbdevicestoredirect": re.compile(r"usbdevicestoredirect\s*:\s*s\s*:\s*(.*)", re.I), + "videoplaybackmode": re.compile(r"videoplaybackmode\s*:\s*i\s*:\s*(\d+)", re.I), + "remoteapplicationcmdline": re.compile(r"remoteapplicationcmdline\s*:\s*s\s*:\s*(.*)", re.I), + "remoteapplicationexpandcmdline": re.compile(r"remoteapplicationexpandcmdline\s*:\s*i\s*:\s*(\d+)", re.I), + "remoteapplicationexpandworkingdir": re.compile(r"remoteapplicationexpandworkingdir\s*:\s*i\s*:\s*(\d+)", re.I), + "remoteapplicationfile": re.compile(r"remoteapplicationfile\s*:\s*s\s*:\s*(.*)", re.I), + "remoteapplicationicon": re.compile(r"remoteapplicationicon\s*:\s*s\s*:\s*(.*)", re.I), + "remoteapplicationmode": re.compile(r"remoteapplicationmode\s*:\s*i\s*:\s*(\d+)", re.I), + "remoteapplicationname": re.compile(r"remoteapplicationname\s*:\s*s\s*:\s*(.*)", re.I), + "remoteapplicationprogram": re.compile(r"remoteapplicationprogram\s*:\s*s\s*:\s*(.*)", re.I), + "devicestoredirect": re.compile(r"devicestoredirect\s*:\s*s\s*:\s*(.*)", re.I), + "signature": re.compile(r"signature\s*:\s*s\s*:\s*([A-Za-z0-9+/=\s]+)", re.I), + "signscope": re.compile(r"signscope\s*:\s*s\s*:\s*(.*)", re.I), +} + +log = logging.getLogger() + + +def validate_sig(hostname, alternate_faddress, pkcs7_certificates): + sign_data = { + "can_sign": False, + "usage_error": "", + "valid": False, + "validation_errors": [], + "general_error": "", + "main_valid": False, + "alt_valid": False, + } + if MISSED_DEPS: + return sign_data + try: + ca_bundle_path = mscerts.where() + trust_roots = [] + with open(ca_bundle_path, "rb") as f: + pem_data = f.read() + for _, _, der_bytes in pem.unarmor(pem_data, multiple=True): + trust_roots.append(der_bytes) + certs = [x509.Certificate.load(cert.public_bytes(serialization.Encoding.DER)) for cert in pkcs7_certificates] + context = ValidationContext(trust_roots=trust_roots) + main_cert = certs[0] + validator = CertificateValidator(main_cert, intermediate_certs=certs[1:], validation_context=context) + try: + validator.validate_usage({"digital_signature"}) + sign_data["can_sign"] = True + except Exception as e: + sign_data["usage_error"] = f"{e}" + try: + validator.validate_tls(hostname) + sign_data["main_valid"] = True + except Exception as e: + sign_data["validation_errors"].append(f"{e}") + if alternate_faddress: + try: + validator.validate_tls(alternate_faddress) + sign_data["alt_valid"] = True + except Exception as e: + sign_data["validation_errors"].append(f"{e}") + if sign_data["main_valid"] or sign_data["alt_valid"]: + sign_data["valid"] = True + except Exception as e: + sign_data["general_error"] = f"{e}" + return sign_data + + +def parse_rdp_file(file_path): + rdp_properties = {} + try: + content = "" + encoding = "utf-8" + with open(file_path, "rb") as f: + raw = f.read(4) + 
if raw.startswith(b"\xff\xfe\x00\x00"): + encoding = "utf-32-le" + elif raw.startswith(b"\x00\x00\xfe\xff"): + encoding = "utf-32-be" + elif raw.startswith(b"\xfe\xff"): + encoding = "utf-16-be" + elif raw.startswith(b"\xff\xfe"): + encoding = "utf-16-le" + elif raw.startswith(b"\xef\xbb\xbf"): + encoding = "utf-8-sig" + + with open(file_path, "r", encoding=encoding, errors="ignore") as f: + content = f.read() + if content and re.search(r"full\s+address\s*:\s*s\s*:", content, re.I): + for line in content.splitlines(): + for prop, pattern in property_patterns.items(): + match = pattern.search(line) + if match: + value = match.group(1).strip() + if value != "": + rdp_properties[prop] = value + else: + print("full_address is a required field... what sort of nonsense are you trying to feed me?") + return rdp_properties + if "full_address" not in rdp_properties: + print( + "full_address is a required field but is not in parsed Properties what sort of nonsense are you trying to feed me?" + ) + return rdp_properties + rdp_properties["signscope_but_missing_sig"] = False + if "signscope" in rdp_properties and "signature" not in rdp_properties: + rdp_properties["signscope_but_missing_sig"] = True + elif "signature" in rdp_properties and "signscope" in rdp_properties: + rdp_properties["certificate_truncated_or_invalid"] = False + signature_base64 = rdp_properties["signature"] + rdp_properties["certificates"] = [] + rdp_properties["certificate_chain_len"] = 0 + signature_bytes = b"" + rdp_properties["certificate_truncated_or_invalid"] = True + with suppress(Exception): + signature_bytes = base64.b64decode(signature_base64.replace("\n", "").replace("\r", "")) + size_bytes = signature_bytes[8:12] + data_size = 0 + with suppress(Exception): + data_size = unpack(" Set[str]: try: obj = doc.getobj(object_id) urls.update(_search_for_url(obj)) - except Exception as ex: - log.error(ex, exc_info=True) - except Exception as ex: - log.error(ex, exc_info=True) + except Exception as e: + log.exception(e) + except Exception as e: + log.exception(e) return urls diff --git a/lib/cuckoo/common/integrations/peepdf.py b/lib/cuckoo/common/integrations/peepdf.py index 27357ecb646..8c6770d3989 100644 --- a/lib/cuckoo/common/integrations/peepdf.py +++ b/lib/cuckoo/common/integrations/peepdf.py @@ -44,8 +44,8 @@ def _set_base_uri(pdf): try: for version in range(pdf.updates + 1): trailer, _ = pdf.trailer[version] - if trailer is not None: - elem = trailer.dict.getElementByName("/Root") + if trailer: + elem = trailer.getTrailerDictionary().getElementByName("/Root") if elem: elem = _get_obj_val(pdf, version, elem) if elem: @@ -59,8 +59,8 @@ def _set_base_uri(pdf): if elem: return elem.getValue() except Exception as e: - log.error(e, exc_info=True) - return "" + log.exception(e) + return def peepdf_parse(filepath: str, pdfresult: Dict[str, Any]) -> Dict[str, Any]: @@ -74,7 +74,7 @@ def peepdf_parse(filepath: str, pdfresult: Dict[str, Any]) -> Dict[str, Any]: try: _, pdf = parser.parse(filepath, forceMode=True, looseMode=True, manualAnalysis=False) except Exception as e: - log.debug("Error parsing pdf: {}".format(e)) + log.debug("Error parsing pdf: %s", str(e)) return pdfresult urlset = set() annoturiset = set() @@ -92,16 +92,16 @@ def peepdf_parse(filepath: str, pdfresult: Dict[str, Any]) -> Dict[str, Any]: metadata = metatmp objects = body.objects for index in objects: - oid = objects[index].id + oid = objects[index].thisId offset = objects[index].offset size = objects[index].size - details = objects[index].object + details = 
objects[index].obj obj_data = { "Object ID": oid, "Offset": offset, "Size": size, } - if details.type == "stream": + if details.objType == "stream": decoded_stream = details.decodedStream if isJavascript(decoded_stream.strip()): jsdata = None @@ -109,7 +109,7 @@ def peepdf_parse(filepath: str, pdfresult: Dict[str, Any]) -> Dict[str, Any]: jslist, unescapedbytes, urlsfound, errors, ctxdummy = analyseJS(decoded_stream.strip()) jsdata = jslist[0] except Exception as e: - log.error(e, exc_info=True) + log.exception(e) continue if errors or jsdata is None: continue @@ -129,7 +129,7 @@ def peepdf_parse(filepath: str, pdfresult: Dict[str, Any]) -> Dict[str, Any]: ret_data += tmp obj_data["Data"] = ret_data retobjects.append(obj_data) - elif details.type == "dictionary" and details.containsJScode: + elif details.objType == "dictionary" and details.containsJScode: js_elem = details.getElementByName("/JS") if js_elem: jsdata = None @@ -137,7 +137,7 @@ def peepdf_parse(filepath: str, pdfresult: Dict[str, Any]) -> Dict[str, Any]: jslist, unescapedbytes, urlsfound, errors, ctxdummy = analyseJS(js_elem.value) jsdata = jslist[0] except Exception as e: - log.error(e, exc_info=True) + log.exception(e) continue if errors or not jsdata: continue @@ -157,7 +157,7 @@ def peepdf_parse(filepath: str, pdfresult: Dict[str, Any]) -> Dict[str, Any]: ret_data += tmp obj_data["Data"] = ret_data retobjects.append(obj_data) - elif details.type == "dictionary" and details.hasElement("/A"): + elif details.objType == "dictionary" and details.hasElement("/A"): # verify it to be a link type annotation subtype_elem = details.getElementByName("/Subtype") type_elem = details.getElementByName("/Type") @@ -169,7 +169,7 @@ def peepdf_parse(filepath: str, pdfresult: Dict[str, Any]) -> Dict[str, Any]: continue a_elem = details.getElementByName("/A") a_elem = _get_obj_val(pdf, i, a_elem) - if a_elem and a_elem.type == "dictionary" and a_elem.hasElement("/URI"): + if a_elem and a_elem.getType() == "dictionary" and a_elem.hasElement("/URI"): uri_elem = a_elem.getElementByName("/URI") if uri_elem: uri_elem = _get_obj_val(pdf, i, uri_elem) diff --git a/lib/cuckoo/common/integrations/pyinstxtractor.py b/lib/cuckoo/common/integrations/pyinstxtractor.py new file mode 100644 index 00000000000..497fdee37ef --- /dev/null +++ b/lib/cuckoo/common/integrations/pyinstxtractor.py @@ -0,0 +1,457 @@ +""" +PyInstaller Extractor v2.0 (Supports pyinstaller 6.11.1, 6.11.0, 6.10.0, 6.9.0, 6.8.0, 6.7.0, 6.6.0, 6.5.0, 6.4.0, 6.3.0, 6.2.0, 6.1.0, 6.0.0, 5.13.2, 5.13.1, 5.13.0, 5.12.0, 5.11.0, 5.10.1, 5.10.0, 5.9.0, 5.8.0, 5.7.0, 5.6.2, 5.6.1, 5.6, 5.5, 5.4.1, 5.4, 5.3, 5.2, 5.1, 5.0.1, 5.0, 4.10, 4.9, 4.8, 4.7, 4.6, 4.5.1, 4.5, 4.4, 4.3, 4.2, 4.1, 4.0, 3.6, 3.5, 3.4, 3.3, 3.2, 3.1, 3.0, 2.1, 2.0) +Author : Extreme Coders +E-mail : extremecoders(at)hotmail(dot)com +Web : https://0xec.blogspot.com +Date : 26-March-2020 +Url : https://github.com/extremecoders-re/pyinstxtractor + +For any suggestions, leave a comment on +https://forum.tuts4you.com/topic/34455-pyinstaller-extractor/ + +This script extracts a pyinstaller generated executable file. +Pyinstaller installation is not needed. The script has it all. + +For best results, it is recommended to run this script in the +same version of python as was used to create the executable. +This is just to prevent unmarshalling errors(if any) while +extracting the PYZ archive. 
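For orientation, the archive class defined below is driven the same way as in the file_extra_info_modules/pyinstaller.py module added earlier in this patch; the paths here are illustrative:

```python
# Illustrative driver for PyInstArchive; paths are placeholders. This mirrors
# the call sequence used by the pyinstaller file_extra_info module.
from lib.cuckoo.common.integrations.pyinstxtractor import PyInstArchive

arch = PyInstArchive({"file": "/tmp/sample.exe", "destination_folder": "/tmp/extracted", "entry_points": False})
if arch.open() and arch.checkFile() and arch.getCArchiveInfo():
    arch.parseTOC()
    arch.extractFiles()
arch.close()
```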
+ +Usage : Just copy this script to the directory where your exe resides + and run the script with the exe file name as a parameter + +C:\\path\\to\\exe\\>python pyinstxtractor.py +$ /path/to/exe/python pyinstxtractor.py + +Licensed under GNU General Public License (GPL) v3. +You are free to modify this source. + +CHANGELOG +================================================ + +Version 1.1 (Jan 28, 2014) +------------------------------------------------- +- First Release +- Supports only pyinstaller 2.0 + +Version 1.2 (Sept 12, 2015) +------------------------------------------------- +- Added support for pyinstaller 2.1 and 3.0 dev +- Cleaned up code +- Script is now more verbose +- Executable extracted within a dedicated sub-directory + +(Support for pyinstaller 3.0 dev is experimental) + +Version 1.3 (Dec 12, 2015) +------------------------------------------------- +- Added support for pyinstaller 3.0 final +- Script is compatible with both python 2.x & 3.x (Thanks to Moritz Kroll @ Avira Operations GmbH & Co. KG) + +Version 1.4 (Jan 19, 2016) +------------------------------------------------- +- Fixed a bug when writing pyc files >= version 3.3 (Thanks to Daniello Alto: https://github.com/Djamana) + +Version 1.5 (March 1, 2016) +------------------------------------------------- +- Added support for pyinstaller 3.1 (Thanks to Berwyn Hoyt for reporting) + +Version 1.6 (Sept 5, 2016) +------------------------------------------------- +- Added support for pyinstaller 3.2 +- Extractor will use a random name while extracting unnamed files. +- For encrypted pyz archives it will dump the contents as is. Previously, the tool would fail. + +Version 1.7 (March 13, 2017) +------------------------------------------------- +- Made the script compatible with python 2.6 (Thanks to Ross for reporting) + +Version 1.8 (April 28, 2017) +------------------------------------------------- +- Support for sub-directories in .pyz files (Thanks to Moritz Kroll @ Avira Operations GmbH & Co. 
KG) + +Version 1.9 (November 29, 2017) +------------------------------------------------- +- Added support for pyinstaller 3.3 +- Display the scripts which are run at entry (Thanks to Michael Gillespie @ malwarehunterteam for the feature request) + +Version 2.0 (March 26, 2020) +------------------------------------------------- +- Project migrated to github +- Supports pyinstaller 3.6 +- Added support for Python 3.7, 3.8 +- The header of all extracted pyc's are now automatically fixed +""" + +import logging +import marshal +import os +import struct +import sys +import zlib +from contextlib import suppress +from uuid import uuid4 as uniquename + +log = logging.getLogger() + + +class CTOCEntry: + def __init__(self, position, cmprsdDataSize, uncmprsdDataSize, cmprsFlag, typeCmprsData, name): + self.position = position + self.cmprsdDataSize = cmprsdDataSize + self.uncmprsdDataSize = uncmprsdDataSize + self.cmprsFlag = cmprsFlag + self.typeCmprsData = typeCmprsData + self.name = name + + +class PyInstArchive: + PYINST20_COOKIE_SIZE = 24 # For pyinstaller 2.0 + PYINST21_COOKIE_SIZE = 24 + 64 # For pyinstaller 2.1+ + MAGIC = b"MEI\014\013\012\013\016" # Magic number which identifies pyinstaller + + def __init__(self, kwargs): + self.filePath = kwargs["file"] + self.destination_folder = kwargs["destination_folder"] + self.only_entrypoints = kwargs["entry_points"] + self.pycMagic = b"\0" * 4 + self.barePycList = [] # List of pyc's whose headers have to be fixed + + def open(self): + try: + self.fPtr = open(self.filePath, "rb") + self.fileSize = os.stat(self.filePath).st_size + except Exception as e: + log.error("[!] Could not open: %s. Error: %s", self.filePath, str(e)) + return False + return True + + def close(self): + with suppress(Exception): + self.fPtr.close() + + def checkFile(self): + log.debug("[+] Processing %s", self.filePath) + searchChunkSize = 8192 + endPos = self.fileSize + self.cookiePos = -1 + + if endPos < len(self.MAGIC): + log.error("[!] File is too short or truncated") + return False + + while True: + startPos = endPos - searchChunkSize if endPos >= searchChunkSize else 0 + chunkSize = endPos - startPos + if chunkSize < len(self.MAGIC): + break + + self.fPtr.seek(startPos, os.SEEK_SET) + data = self.fPtr.read(chunkSize) + offs = data.rfind(self.MAGIC) + if offs != -1: + self.cookiePos = startPos + offs + break + + endPos = startPos + len(self.MAGIC) - 1 + if startPos == 0: + break + + if self.cookiePos == -1: + log.error("[!] Missing cookie, unsupported pyinstaller version or not a pyinstaller archive") + return False + + self.fPtr.seek(self.cookiePos + self.PYINST20_COOKIE_SIZE, os.SEEK_SET) + if b"python" in self.fPtr.read(64).lower(): + log.debug("[+] Pyinstaller version: 2.1+") + self.pyinstVer = 21 # pyinstaller 2.1+ + else: + self.pyinstVer = 20 # pyinstaller 2.0 + log.debug("[+] Pyinstaller version: 2.0") + return True + + def getCArchiveInfo(self): + try: + if self.pyinstVer == 20: + self.fPtr.seek(self.cookiePos, os.SEEK_SET) + # Read CArchive cookie + (magic, lengthofPackage, toc, tocLen, pyver) = struct.unpack("!8siiii", self.fPtr.read(self.PYINST20_COOKIE_SIZE)) + elif self.pyinstVer == 21: + self.fPtr.seek(self.cookiePos, os.SEEK_SET) + # Read CArchive cookie + (magic, lengthofPackage, toc, tocLen, pyver, pylibname) = struct.unpack( + "!8sIIii64s", self.fPtr.read(self.PYINST21_COOKIE_SIZE) + ) + except Exception as e: + log.error("[!] 
The file is not a pyinstaller archive: %s", str(e)) + return False + + self.pymaj, self.pymin = (pyver // 100, pyver % 100) if pyver >= 100 else (pyver // 10, pyver % 10) + log.debug("[+] Python version: %d.%d", self.pymaj, self.pymin) + + # Additional data after the cookie + tailBytes = ( + self.fileSize - self.cookiePos - (self.PYINST20_COOKIE_SIZE if self.pyinstVer == 20 else self.PYINST21_COOKIE_SIZE) + ) + + # Overlay is the data appended at the end of the PE + self.overlaySize = lengthofPackage + tailBytes + self.overlayPos = self.fileSize - self.overlaySize + self.tableOfContentsPos = self.overlayPos + toc + self.tableOfContentsSize = tocLen + + log.debug("[+] Length of package: %d bytes", lengthofPackage) + return True + + def parseTOC(self): + # Go to the table of contents + self.fPtr.seek(self.tableOfContentsPos, os.SEEK_SET) + + self.tocList = [] + parsedLen = 0 + + # Parse table of contents + while parsedLen < self.tableOfContentsSize: + (entrySize,) = struct.unpack("!i", self.fPtr.read(4)) + nameLen = struct.calcsize("!iIIIBc") + + (entryPos, cmprsdDataSize, uncmprsdDataSize, cmprsFlag, typeCmprsData, name) = struct.unpack( + "!IIIBc{0}s".format(entrySize - nameLen), self.fPtr.read(entrySize - 4) + ) + + try: + name = name.decode("utf-8").rstrip("\0") + except UnicodeDecodeError: + newName = str(uniquename()) + log.warning("[!] File name %s contains invalid bytes. Using random name %s", name, newName) + name = newName + + # Prevent writing outside the extraction directory + if name.startswith("/"): + name = name.lstrip("/") + + if len(name) == 0: + name = str(uniquename()) + log.warning("[!] Found an unamed file in CArchive. Using random name %s", name) + + self.tocList.append( + CTOCEntry(self.overlayPos + entryPos, cmprsdDataSize, uncmprsdDataSize, cmprsFlag, typeCmprsData, name) + ) + + parsedLen += entrySize + log.info("[+] Found %d files in CArchive", len(self.tocList)) + + def _writeRawData(self, filepath, data): + nm = filepath.replace("\\", os.path.sep).replace("/", os.path.sep).replace("..", "__") + nmDir = os.path.dirname(nm) + if nmDir != "" and not os.path.exists(nmDir): # Check if path exists, create if not + os.makedirs(nmDir) + + with open(nm, "wb") as f: + f.write(data) + + def extractFiles(self): + log.debug("[+] Beginning extraction...please standby") + # extractionDir = os.path.join(os.getcwd(), os.path.basename(self.filePath) + "_extracted") + extractionDir = self.destination_folder + if not os.path.exists(extractionDir): + os.mkdir(extractionDir) + + # os.chdir(extractionDir) + + for entry in self.tocList: + destination_entry = os.path.join(self.destination_folder, entry.name) + self.fPtr.seek(entry.position, os.SEEK_SET) + data = self.fPtr.read(entry.cmprsdDataSize) + + if entry.cmprsFlag == 1: + try: + data = zlib.decompress(data) + except zlib.error: + log.error("[!] 
Failed to decompress %s", entry.name) + continue + # Malware may tamper with the uncompressed size + # Comment out the assertion in such a case + assert len(data) == entry.uncmprsdDataSize # Sanity Check + + if entry.typeCmprsData in (b"d", b"o"): + # d -> ARCHIVE_ITEM_DEPENDENCY + # o -> ARCHIVE_ITEM_RUNTIME_OPTION + # These are runtime options, not files + continue + + basePath = os.path.dirname(entry.name) + if basePath != "": + # Check if path exists, create if not + if not os.path.exists(basePath): + os.makedirs(basePath) + + if entry.typeCmprsData == b"s": + # s -> ARCHIVE_ITEM_PYSOURCE + # Entry point are expected to be python scripts + log.info("[+] Possible entry point: %s.pyc", entry.name) + + if self.pycMagic == b"\0" * 4: + # if we don't have the pyc header yet, fix them in a later pass + self.barePycList.append(destination_entry + ".pyc") + self._writePyc(destination_entry + ".pyc", data) + + elif entry.typeCmprsData == (b"M", b"m") and not self.only_entrypoints: + # M -> ARCHIVE_ITEM_PYPACKAGE + # m -> ARCHIVE_ITEM_PYMODULE + # packages and modules are pyc files with their header intact + + # From PyInstaller 5.3 and above pyc headers are no longer stored + # https://github.com/pyinstaller/pyinstaller/commit/a97fdf + if data[2:4] == b"\r\n": + # < pyinstaller 5.3 + if self.pycMagic == b"\0" * 4: + self.pycMagic = data[0:4] + self._writeRawData(destination_entry + ".pyc", data) + else: + # >= pyinstaller 5.3 + if self.pycMagic == b"\0" * 4: + # if we don't have the pyc header yet, fix them in a later pass + self.barePycList.append(destination_entry + ".pyc") + self._writePyc(destination_entry + ".pyc", data) + else: + if not self.only_entrypoints: + self._writeRawData(destination_entry, data) + + if entry.typeCmprsData in (b"z", b"Z"): + self._extractPyz(destination_entry) + + # Fix bare pyc's if any + self._fixBarePycs() + + def _fixBarePycs(self): + for pycFile in self.barePycList: + with open(pycFile, "r+b") as pycFile: + # Overwrite the first four bytes + pycFile.write(self.pycMagic) + + def _writePyc(self, filename, data): + with open(filename, "wb") as pycFile: + pycFile.write(self.pycMagic) # pyc magic + + if self.pymaj >= 3 and self.pymin >= 7: # PEP 552 -- Deterministic pycs + pycFile.write(b"\0" * 4) # Bitfield + pycFile.write(b"\0" * 8) # (Timestamp + size) || hash + + else: + pycFile.write(b"\0" * 4) # Timestamp + if self.pymaj >= 3 and self.pymin >= 3: + pycFile.write(b"\0" * 4) # Size parameter added in Python 3.3 + + pycFile.write(data) + + def _extractPyz(self, name): + dirName = name + "_extracted" + # Create a directory for the contents of the pyz + if not os.path.exists(dirName): + os.mkdir(dirName) + + with open(name, "rb") as f: + pyzMagic = f.read(4) + assert pyzMagic == b"PYZ\0" # Sanity Check + + pyzPycMagic = f.read(4) # Python magic value + if self.pycMagic == b"\0" * 4: + self.pycMagic = pyzPycMagic + elif self.pycMagic != pyzPycMagic: + self.pycMagic = pyzPycMagic + log.warning("[!] pyc magic of files inside PYZ archive are different from those in CArchive") + + # Skip PYZ extraction if not running under the same python version + if self.pymaj != sys.version_info.major or self.pymin != sys.version_info.minor: + log.warning( + "[!] Warning: This script is running in a different Python version than the one used to build the executable." + ) + log.info( + "[!] 
Please run this script in Python %d.%d to prevent extraction errors during unmarshalling.\nSkipping pyz extraction", + self.pymaj, + self.pymin, + ) + return + + (tocPosition,) = struct.unpack("!i", f.read(4)) + f.seek(tocPosition, os.SEEK_SET) + + try: + toc = marshal.load(f) + except Exception as e: + log.error("[!] Unmarshalling FAILED. Cannot extract %s. Extracting remaining files. Error: %s", name, str(e)) + return + + log.debug("[+] Found %d files in PYZ archive", len(toc)) + + # From pyinstaller 3.1+ toc is a list of tuples + if isinstance(toc, list): + toc = dict(toc) + + for key in toc.keys(): + (ispkg, pos, length) = toc[key] + f.seek(pos, os.SEEK_SET) + fileName = key + + with suppress(Exception): + # for Python > 3.3 some keys are bytes object some are str object + fileName = fileName.decode("utf-8") + + # Prevent writing outside dirName + fileName = fileName.replace("..", "__").replace(".", os.path.sep) + if ispkg == 1: + filePath = os.path.join(dirName, fileName, "__init__.pyc") + else: + filePath = os.path.join(dirName, fileName + ".pyc") + + fileDir = os.path.dirname(filePath) + if not os.path.exists(fileDir): + os.makedirs(fileDir) + + try: + data = f.read(length) + data = zlib.decompress(data) + except Exception as e: + print("[!] Error: Failed to decompress %s, probably encrypted. Extracting as is. Error: %s", filePath, str(e)) + open(filePath + ".encrypted", "wb").write(data) + else: + self._writePyc(filePath, data) + + +def main(kwargs): + # kwargs = {"file": "path_to_file", "destination_folder": "path_where_to_extract", "entry_points": False/True} + arch = PyInstArchive(kwargs) + if arch.open() and arch.checkFile() and arch.getCArchiveInfo(): + arch.parseTOC() + arch.extractFiles() + arch.close() + log.debug( + "[+] Successfully extracted pyinstaller archive: %s\nYou can now use a python decompiler on the pyc files within the extracted directory", + kwargs["file"], + ) + return + arch.close() + + +if __name__ == "__main__": + import argparse + + parser = argparse.ArgumentParser( + prog="PyInstaller Extractor", + description="PyInstaller Extractor is a Python script to extract the contents of a PyInstaller generated executable file.", + ) + parser.add_argument("-f", "--file", action="store") + parser.add_argument("-d", "--destination-folder", action="store", help="Folder to store extracted files") + parser.add_argument("-e", "--entry-points", action="store_true", help="Extract only possible entry points") + + options = parser.parse_args() + if not options.file or not os.path.exists(options.file): + parser.print_help() + sys.exit() + + # Convert to dict/kwargs + options = vars(options) + logging.basicConfig() + log.setLevel(logging.DEBUG) + main(options) diff --git a/lib/cuckoo/common/integrations/vba2graph.py b/lib/cuckoo/common/integrations/vba2graph.py index fcaaf64708a..7b73b2d0b74 100644 --- a/lib/cuckoo/common/integrations/vba2graph.py +++ b/lib/cuckoo/common/integrations/vba2graph.py @@ -599,7 +599,6 @@ def vba_clean_whitespace(vba_content_lines): # process lines one by one for vba_line in vba_content_lines: - # remove leading and trailing whitespace # & reduce multiple whitespaces into one space vba_line = " ".join(vba_line.split()) @@ -801,7 +800,6 @@ def vba_extract_properties(vba_content_lines): # process lines one by one for vba_line in vba_content_lines: - # look for property start keywords prop_start_pos = max(vba_line.find("Property Let "), vba_line.find("Property Get ")) @@ -856,7 +854,6 @@ def create_call_graph(vba_func_dict): DG.add_node(func_name, 
keywords="") # analyze function calls for func_name in vba_func_dict: - func_code = vba_func_dict[func_name] # split function code into tokens func_code_tokens = list(filter(None, re.split(r'["(, \\-!?:\\r\\n)&=.><]+', func_code))) @@ -898,7 +895,6 @@ def find_keywords_in_graph(vba_func_dict, DG): """ # analyze function calls for func_name in vba_func_dict: - func_code = vba_func_dict[func_name] # split function code into lines func_code_lines = filter(None, re.split("\n", func_code)) diff --git a/lib/cuckoo/common/integrations/virustotal.py b/lib/cuckoo/common/integrations/virustotal.py index c56f8fb4e66..cb7068f5e7c 100644 --- a/lib/cuckoo/common/integrations/virustotal.py +++ b/lib/cuckoo/common/integrations/virustotal.py @@ -156,7 +156,6 @@ def get_vt_consensus(namelist: list): - finaltoks = defaultdict(int) for name in namelist: toks = re.findall(r"[A-Za-z0-9]+", name) @@ -210,7 +209,7 @@ def vt_lookup(category: str, target: str, results: dict = {}, on_demand: bool = try: urlscrub_compiled_re = re.compile(urlscrub) except Exception as e: - log.error(f"Failed to compile urlscrub regex: {e}") + log.error("Failed to compile urlscrub regex:", str(e)) return {} try: target = re.sub(urlscrub_compiled_re, "", target) diff --git a/lib/cuckoo/common/irc.py b/lib/cuckoo/common/irc.py index 5345829aefa..cc93aa28350 100644 --- a/lib/cuckoo/common/irc.py +++ b/lib/cuckoo/common/irc.py @@ -6,15 +6,11 @@ """IRC Protocol""" import logging +import re from io import BytesIO from lib.cuckoo.common.utils import convert_to_printable -try: - import re2 as re -except ImportError: - import re - log = logging.getLogger("Processing.Pcap.irc.protocol") diff --git a/lib/cuckoo/common/load_extra_modules.py b/lib/cuckoo/common/load_extra_modules.py index 35ce80f423e..1a4e3985a3f 100644 --- a/lib/cuckoo/common/load_extra_modules.py +++ b/lib/cuckoo/common/load_extra_modules.py @@ -1,12 +1,38 @@ import glob import importlib import inspect +import logging import os import pkgutil from pathlib import Path +from lib.cuckoo.common.config import Config + +log = logging.getLogger(__name__) + +integrations_conf = Config("integrations") + def ratdecodedr_load_decoders(path: str): + """ + Loads and returns a dictionary of RAT decoder modules from the specified path. + + This function walks recursively through all modules and packages in the given path, + imports them, and collects classes that are subclasses of the `Decoder` class from + the `malwareconfig.common` module. It skips packages and handles import errors gracefully. + + Args: + path (str): The path to the directory containing the RAT decoder modules. + + Returns: + dict: A dictionary where the keys are decoder names and the values are dictionaries + containing the following information about each decoder: + - obj: The decoder class object. + - decoder_name: The name of the decoder. + - decoder_description: A description of the decoder. + - decoder_version: The version of the decoder. + - decoder_author: The author of the decoder. 
+ """ from malwareconfig.common import Decoder dec_modules = {} @@ -19,7 +45,7 @@ def ratdecodedr_load_decoders(path: str): try: module = importlib.import_module(module_name) except ImportError as e: - print(f"Unable to import Module {module_name}: {e}") + log.error("Unable to import Module %s - %s", module_name, e) continue for mod_name, mod_object in inspect.getmembers(module): @@ -34,7 +60,29 @@ def ratdecodedr_load_decoders(path: str): return dec_modules -def cape_load_decoders(CUCKOO_ROOT: str): +def cape_load_custom_decoders(CUCKOO_ROOT: str): + """ + Loads custom decoders for CAPE from specified directories within the CUCKOO_ROOT path. + + This function searches for Python modules in the "modules/processing/parsers/CAPE" and + "custom/parsers" directories within the CUCKOO_ROOT path. It imports these modules and + stores them in a dictionary where the keys are the module names with spaces replaced by + underscores, and the values are the imported modules. + + Args: + CUCKOO_ROOT (str): The root directory of the CUCKOO installation. + + Returns: + dict: A dictionary where the keys are the names of the decoders and the values are + the imported modules. + + Raises: + ImportError: If a module cannot be imported. + IndexError: If there is an indexing error during module import. + AttributeError: If there is an attribute error during module import. + SyntaxError: If there is a syntax error in the module code. + Exception: For any other exceptions that occur during module import. + """ cape_modules = {} cape_decoders = os.path.join(CUCKOO_ROOT, "modules", "processing", "parsers", "CAPE") @@ -58,16 +106,29 @@ def cape_load_decoders(CUCKOO_ROOT: str): # For example, a cape_type of "Emotet Payload" would trigger a config parser named "Emotet.py". cape_modules[name.replace("_", " ")] = importlib.import_module(f"{versions[version]}.{name}") except (ImportError, IndexError, AttributeError) as e: - print(f"CAPE parser: No module named {name} - {e}") + log.error("CAPE parser: No module named %s - %s", name, e) except SyntaxError as e: - print(f"CAPE parser: Fix your code in {name} - {e}") + log.error("CAPE parser: Fix your code in %s - %s", name, e) except Exception as e: - print(f"CAPE parser: Fix your code in {name} - {e}") + log.error("CAPE parser: Fix your code in %s - %s", name, e) return cape_modules def malduck_load_decoders(CUCKOO_ROOT: str): + """ + Loads and imports malduck decoder modules from the specified CUCKOO_ROOT directory. + + Args: + CUCKOO_ROOT (str): The root directory of the CUCKOO installation. + + Returns: + dict: A dictionary where the keys are the names of the decoder modules and the values are the imported module objects. + + Raises: + ImportError: If a module cannot be imported. + IndexError: If there is an issue with the module name. + """ malduck_modules = {} malduck_decoders = os.path.join(CUCKOO_ROOT, "modules", "processing", "parsers", "malduck") @@ -77,12 +138,31 @@ def malduck_load_decoders(CUCKOO_ROOT: str): try: malduck_modules[name] = importlib.import_module(f"modules.processing.parsers.malduck.{name}") except (ImportError, IndexError) as e: - print(f"malduck parser: No module named {name} - {e}") + log.error("malduck parser: No module named %s - %s", name, e) return malduck_modules def file_extra_info_load_modules(CUCKOO_ROOT: str): + """ + Loads extra file information modules from the specified CUCKOO_ROOT directory. + + This function searches for Python modules in the "file_extra_info_modules" directory + within the given CUCKOO_ROOT path. 
It imports and returns a list of modules that are + enabled based on their internal configuration or the integrations_conf settings. + + Args: + CUCKOO_ROOT (str): The root directory of the CUCKOO installation. + + Returns: + list: A list of imported modules that are enabled. If the directory does not exist, + an empty list is returned. + + Raises: + ImportError: If a module cannot be imported. + IndexError: If there is an indexing error during module import. + AttributeError: If an attribute is missing during module import. + """ file_extra_modules = [] extra_modules = os.path.join(CUCKOO_ROOT, "lib", "cuckoo", "common", "integrations", "file_extra_info_modules") if not Path(extra_modules).exists(): @@ -93,10 +173,63 @@ def file_extra_info_load_modules(CUCKOO_ROOT: str): for name in EXTRA_MODULES: try: module = importlib.import_module(f"lib.cuckoo.common.integrations.file_extra_info_modules.{name}") - if not getattr(module, "enabled", False): + if not getattr(module, "enabled", False) and not integrations_conf.__dict__.get(name, {}).get("enabled", False): continue file_extra_modules.append(module) except (ImportError, IndexError, AttributeError) as e: - print(f"file_extra_info module: No module named {name} - {e}") + log.error("file_extra_info module: No module named %s - %s", name, e) return file_extra_modules + + +def load_downloaders(CUCKOO_ROOT: str): + """ + Loads and returns a dictionary of downloader modules from the specified CUCKOO_ROOT directory. + + This function searches for Python modules in the "downloaders" directory within the given + CUCKOO_ROOT path. It imports these modules and stores them in a dictionary where the keys + are the module names and the values are the imported modules. + + Args: + CUCKOO_ROOT (str): The root directory of the CUCKOO installation. + + Returns: + dict: A dictionary where the keys are the names of the downloader modules and the values + are the imported module objects. + + Raises: + ImportError: If a module cannot be imported. + IndexError: If there is an indexing error during module import. + AttributeError: If an attribute is missing during module import. 
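A minimal sketch of the module contract this loader expects: a module-level enabled flag (or a matching stanza in integrations.conf) plus an extract_details entry point. The module name and body are illustrative:

```python
# Hypothetical file_extra_info module skeleton; the loader keeps it only if
# `enabled` is True here or integrations.conf enables a stanza of the same name.
from lib.cuckoo.common.integrations.file_extra_info_modules import ExtractorReturnType, time_tracker

enabled = False  # flipped on here or via integrations.conf

@time_tracker
def extract_details(file, *, data_dictionary, **_) -> ExtractorReturnType:
    # Real modules populate "extracted_files" on the ctx dict obtained from extractor_ctx.
    return {}
```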
+ """ + downloaders = {} + downloaders_modules = {} + versions = {} + custom_downloaders = os.path.join(CUCKOO_ROOT, "custom", "downloaders") + if os.path.exists(custom_downloaders): + downloaders_modules.setdefault("custom", []).extend( + [os.path.basename(decoder)[:-3] for decoder in glob.glob(f"{custom_downloaders}/[!_]*.py")] + ) + versions["custom"] = "custom.downloaders" + + # breakpoint() + downloaders_dir = os.path.join(CUCKOO_ROOT, "lib", "downloaders") + downloaders_modules = {"cape": [os.path.basename(downloader)[:-3] for downloader in glob.glob(f"{downloaders_dir}/[!_]*.py")]} + versions["cape"] = "lib.downloaders" + + for version, names in downloaders_modules.items(): + for name in names: + try: + module = importlib.import_module(f"{versions[version]}.{name}") + if name == "malwarebazaar": + # config under [abusech] + if not integrations_conf.__dict__.get("abusech", {}).get("malwarebazaar", False): + continue + else: + if not getattr(module, "enabled", False) and not integrations_conf.__dict__.get(name, {}).get("enabled", False): + continue + downloaders[name] = module + except (ImportError, IndexError, AttributeError) as e: + log.error("Downloader: No module named %s - %s", name, e) + + return downloaders diff --git a/lib/cuckoo/common/logtbl.py b/lib/cuckoo/common/logtbl.py index 8bba49c920b..0c1dbd8e8ec 100644 --- a/lib/cuckoo/common/logtbl.py +++ b/lib/cuckoo/common/logtbl.py @@ -8,6 +8,7 @@ by hand. """ + table = ( ("__process__", "__init__", ("",)), ("__thread__", "__init__", ("",)), diff --git a/lib/cuckoo/common/mapTTPs.py b/lib/cuckoo/common/mapTTPs.py index 0aecf218413..186f8b94e9b 100644 --- a/lib/cuckoo/common/mapTTPs.py +++ b/lib/cuckoo/common/mapTTPs.py @@ -15,6 +15,19 @@ # Read the config file def mapTTP(oldTTPs: list, mbcs: list): + """ + Maps old TTPs (Tactics, Techniques, and Procedures) to a new format and groups them by signature. + + Args: + oldTTPs (list): A list of dictionaries containing old TTPs. Each dictionary should have a "ttp" key. + mbcs (list): A list of MBCs (Malware Behavior Catalog) mapped by signature. + + Returns: + list: A list of dictionaries where each dictionary contains: + - "signature" (str): The signature of the TTP. + - "ttps" (list): A list of unique TTPs associated with the signature. + - "mbcs" (list): A list of MBCs associated with the signature. + """ ttpsList = [] grouped_ttps = defaultdict(list) diff --git a/lib/cuckoo/common/objects.py b/lib/cuckoo/common/objects.py index d8a3814013f..e31c3273556 100644 --- a/lib/cuckoo/common/objects.py +++ b/lib/cuckoo/common/objects.py @@ -31,6 +31,7 @@ from lib.cuckoo.common.path_utils import path_exists try: + # python-magic, not file-magic! import magic HAVE_MAGIC = True @@ -74,13 +75,16 @@ print("Missed library. Run: poetry install") HAVE_YARA = False +HAVE_YARA_X = False +yara_x = False +""" try: import yara_x HAVE_YARA_X = True except ImportError: # print("Missed library. 
Run: poetry install pip3 install yara-x") - HAVE_YARA_X = False +""" log = logging.getLogger(__name__) @@ -163,7 +167,8 @@ def __init__(self, url): class File: """Basic file object class with all useful utilities.""" - LINUX_TYPES = {"Bourne-Again", "POSIX shell script", "ELF", "Python"} + # ToDo python can be executed on windows too + LINUX_TYPES = {"Bourne-Again", "POSIX shell script", "ELF"} # , "Python" DARWIN_TYPES = {"Mach-O"} # The yara rules should not change during one Cuckoo run and as such we're @@ -338,18 +343,12 @@ def get_content_type(self): file_type = None if self.path_object.exists(): if HAVE_MAGIC: - fn = False - if hasattr(magic, "detect_from_filename"): - fn = magic.detect_from_filename - if hasattr(magic, "from_file"): - fn = magic.from_file - if fn: - try: - file_type = fn(self.file_path_ansii) - except magic.MagicException as e: - log.error("Magic error: %s", str(e)) - except Exception as e: - log.error(e, exc_info=True) + try: + file_type = magic.from_file(self.file_path_ansii) + except magic.MagicException as e: + log.error("Magic error: %s", str(e)) + except Exception as e: + log.exception(e) if not file_type and hasattr(magic, "open"): try: ms = magic.open(magic.MAGIC_MIME | magic.MAGIC_SYMLINK) @@ -357,7 +356,7 @@ def get_content_type(self): file_type = ms.file(self.file_path) ms.close() except Exception as e: - log.error(e, exc_info=True) + log.exception(e) if file_type is None: try: @@ -366,7 +365,7 @@ def get_content_type(self): ) file_type = p.stdout.read().strip() except Exception as e: - log.error(e, exc_info=True) + log.exception(e) return file_type @@ -415,7 +414,7 @@ def get_type(self): File.notified_pefile = True log.warning("Unable to import pefile (install with `pip3 install pefile`)") except Exception as e: - log.error(e, exc_info=True) + log.exception(e) if not self.file_type: self.file_type = self.get_content_type() @@ -455,12 +454,15 @@ def init_yara(self, raise_exception: bool = False): log.warning("Missing Yara directory: %s?", category_root) continue - for filename in os.listdir(category_root): - if not filename.endswith((".yar", ".yara")): + for category_root, _, filenames in os.walk(category_root, followlinks=True): + if category_root.endswith("deprecated"): continue - filepath = os.path.join(category_root, filename) - rules[f"rule_{category}_{len(rules)}"] = filepath - indexed.append(filename) + for filename in filenames: + if not filename.endswith((".yar", ".yara")): + continue + filepath = os.path.join(category_root, filename) + rules[f"rule_{category}_{len(rules)}"] = filepath + indexed.append(filename) # Need to define each external variable that will be used in the # future. Otherwise Yara will complain. @@ -623,7 +625,10 @@ def get_cape_name_from_cape_type(cls, cape_type: str) -> str: """Return the part of the cape_type (e.g. 
"SocGholish Payload") preceding " Payload", " Config", " Loader", or " Strings" """ - return cls.cape_name_regex.sub("", cape_type) + if bool(cls.cape_name_regex.search(cape_type)): + return cls.cape_name_regex.sub("", cape_type) + else: + return "" def get_tlsh(self): """ @@ -729,6 +734,7 @@ def get_platform(self): retval = "windows" ftype = self.get_type() if isinstance(ftype, str): + # ToDo check if linux enabled if any(x in ftype for x in File.LINUX_TYPES): retval = "linux" elif any(x in ftype for x in File.DARWIN_TYPES): diff --git a/lib/cuckoo/common/quarantine.py b/lib/cuckoo/common/quarantine.py index e8745318f57..1d39dadcb06 100644 --- a/lib/cuckoo/common/quarantine.py +++ b/lib/cuckoo/common/quarantine.py @@ -20,7 +20,7 @@ HAVE_OLEFILE = True except ImportError: HAVE_OLEFILE = False - print("Missed olefile dependency: pip3 install olefile") + print("Missed olefile dependency: poetry run pip install olefile") def bytearray_xor(data, key): @@ -558,12 +558,15 @@ def kav_unquarantine(file): def trend_unquarantine(f): - qdata = Path(f).read_bytes() - data = bytearray_xor(bytearray(qdata), 0xFF) + # Read first 10 bytes + with open(f, "rb") as fil: + qheader = fil.read(10) + header = bytearray_xor(bytearray(qheader), 0xFF) - magic, dataoffset, numtags = struct.unpack(" 15: return None + # If file looks like a quarantine file, then read it all + qdata = Path(f).read_bytes() + data = bytearray_xor(bytearray(qdata), 0xFF) + dataoffset += 10 offset = 10 for _ in range(numtags): code, tagdata = read_trend_tag(data, offset) if code == 2: # original filename - origname = str(tagdata).encode("utf16").decode(error="ignore").rstrip("\0") + origname = str(tagdata).encode("utf16").decode(errors="ignore").rstrip("\0") elif code == 6: # base key basekey = struct.unpack(" float: + """Calculates a generic score based on a list of matched signatures.""" + score = 0.0 + for match in matched: + # We apply the 'maximum' attribute if present in the signature. + # Check for key existence and that the value is not None to handle the case where maximum could be 0. + if "maximum" in match and match["maximum"] is not None: + score = max(score, match["maximum"]) + continue # Skip to next signature + + if match["severity"] == 1: + score += match["weight"] * 0.5 * (match["confidence"] / 100.0) + else: + score += match["weight"] * (match["severity"] - 1) * (match["confidence"] / 100.0) + + # Clamp the score between 0.0 and 10.0 using a common Python idiom. + score = max(0.0, min(score, 10.0)) + + return score + + +# ============================================================================= +# Main scoring function. +# ============================================================================= def calc_scoring(results: dict, matched: list): + """ + Calculate the final malware score and status based on the analysis results and matched signatures. + + The scoring is determined by the type of file and the categories of signatures it triggers. The methodology is as follows: + 1. Malicious-Known: The file is detected by YARA. + - Score: 10/10 (Malicious) + 2. Malicious-Unknown: The file triggers signatures with specific malicious categories. + - Categories: ["malware", "ransomware", "infostealer", "rat", "trojan", "rootkit", "bootkit", "wiper", "banker", "bypass", "anti-sandbox", "keylogger"] + - Score: 7-9/10 (Malicious) + 3. Suspicious-Unknown: The file triggers signatures with specific suspicious categories. 
+ - Categories: ["network", "encryption", "anti-vm", "anti-analysis", "anti-av", "anti-debug", "anti-emulation", "persistence", "stealth", "discovery", "injection", "generic", "account", "bot", "browser", "allocation", "command", "execution"] + - Score: 4-6/10 (Suspicious) + 4. Benign: The file is likely trusted and digitally signed. + - Score: 0-3/10 (Benign) + 5. Undetected/Failed: The file does not trigger any signatures. + - Score: 0/10 (Undetected/Failed) + + Parameters: + results (dict): The analysis results containing details about the file and its behavior. + matched (list): A list of matched signatures with their categories, severity, confidence, and weight. + + Returns: + tuple: A tuple containing the final malware score (float) and the status (str). + """ finalMalscore = 0.0 status = None + # Identify the analysis category (file or url). + category = results.get("target", {}).get("category") fileType = results.get("target", {}).get("file", {}).get("type") + # IF THE ANALYSIS IS OF URL TYPE, we use the generic scoring logic + if category == "url": + # Calculate score using the helper function + finalMalscore = _calculate_generic_score(matched) + + # We assign a status based on the score + if finalMalscore >= 7.0: + status = "Malicious" + elif finalMalscore >= 4.0: + status = "Suspicious" + elif finalMalscore > 0.0: + status = "Clean" + else: + status = "Undetected" + + return finalMalscore, status + if not fileType: return finalMalscore, status if "executable" in fileType: # We have 5 methodologies # 1. The file is Malicious-Known (The sample is detected by YARA) - ## score 10/10 (Malicious) - # =======================================================================================================# - # 2. If the file is Malicious-Unknown - ## triggered some signatures that has specific malicious categories such as: - ## ["malware", "ransomware", "infostealer", "rat", "trojan", "rootkit", "bootkit", "wiper", "banker", - ## "bypass", "anti-sandbox", "keylogger"] - ## score [7-9]/10 (Malicious) - # =======================================================================================================# - # 3. If the file is Suspicious-Unknown - ## triggered some signatures that has specific suspicious categories such as: - ## ["network", "encryption", "anti-vm", "anti-analysis", "anti-av", "anti-debug", "anti-emulation", - ## "persistence", "stealth", "discovery", "injection", "generic", "account", "bot", "browser", - # "allocation", "command"] - ## score[4-6]/10 (Suspicious) - # =======================================================================================================# - # 4. If the file is benign - ## Likely all trusted files are digitally signed. - ## score [0-3]/10 (benign) - # =======================================================================================================# - # 5. If the file doesn't trigger any signatures - ## The file is undetected/failed + # ... 
(and so on, this logic is specific to executables) tempScore1 = 0.0 tempScore2 = 0.0 is_maliciousCategoryHit = False @@ -42,39 +92,14 @@ def calc_scoring(results: dict, matched: list): ) maliciousCategories = [ - "malware", - "ransomware", - "infostealer", - "rat", - "trojan", - "rootkit", - "bootkit", - "wiper", - "banker", - "bypass", - "anti-sandbox", - "keylogger", + "malware", "ransomware", "infostealer", "rat", "trojan", "rootkit", "bootkit", "wiper", "banker", + "bypass", "anti-sandbox", "keylogger", ] suspiciousCategories = [ - "network", - "encryption", - "anti-vm", - "anti-analysis", - "anti-av", - "anti-debug", - "anti-emulation", - "persistence", - "stealth", - "discovery", - "injection", - "generic", - "account", - "bot", - "browser", - "allocation", - "command", - "execution", + "network", "encryption", "anti-vm", "anti-analysis", "anti-av", "anti-debug", "anti-emulation", + "persistence", "stealth", "discovery", "injection", "generic", "account", "bot", "browser", + "allocation", "command", "execution", ] for detection in results.get("detections", []): @@ -98,21 +123,15 @@ def calc_scoring(results: dict, matched: list): else: tempScore2 += matchedSig["weight"] * (matchedSig["severity"] - 1) * (matchedSig["confidence"] / 100.0) - # 1. The file is Malicious-Known (The sample is detected by YARA) - ## score 10/10 (Malicious) + # 1. Malicious-Known if is_detected: status = "Malicious" finalMalscore = 10.0 - # 2. If the file is Malicious-Unknown - ## triggered some signatures that has specific malicious categories such as: - ## ["malware", "ransomware", "infostealer", "rat", "trojan", "rootkit", "bootkit", "wiper", "banker", - ## "bypass", "anti-sandbox", "keylogger"] - ## score [7-9]/10 (Malicious) + # 2. Malicious-Unknown elif is_maliciousCategoryHit: finalMalscore = tempScore1 status = "Malicious" - ## Include numbers between that range if 7.0 < finalMalscore < 9.0: pass elif finalMalscore >= 9.0: @@ -120,37 +139,21 @@ def calc_scoring(results: dict, matched: list): elif finalMalscore < 7.0: finalMalscore = 7.0 - # 3. If the file is Suspicious-Unknown - ## triggered some signatures that has specific suspicious categories such as: - ## ["network", "encryption", "anti-vm", "anti-analysis", "anti-av", "anti-debug", "anti-emulation", - ## "persistence", "stealth", "discovery", "injection", "generic", "account", "bot", "browser", - # "allocation", "command"] - ## score[4-6]/10 (Suspicious) + # 3. Suspicious-Unknown elif is_suspiciousCategoryHit: finalMalscore = tempScore2 - - # 4. If the file is benign - ## Likely all trusted files are digitally signed. - ## score [0-3]/10 (benign) if is_digital_signauture_verified: finalMalscore = 0.0 status = "Clean" - elif finalMalscore < 4.0: status = "Clean" - - ## Include numbers between that range - elif 4.0 < finalMalscore < 6.0: - status = "Suspicious" - elif finalMalscore == 4: - finalMalscore = 4 - status = "Suspicious" elif finalMalscore >= 6.0: finalMalscore = 6.0 status = "Suspicious" + elif 4.0 <= finalMalscore < 6.0: + status = "Suspicious" - # 5. If the file doesn't trigger any signatures - ## The file is undetected/failed + # 5. 
Undetected/Failed else: finalMalscore = 0 if results.get("behavior", {}).get("processtree", []): @@ -158,14 +161,8 @@ def calc_scoring(results: dict, matched: list): else: status = "Failed" else: - for match in matched: - if match["severity"] == 1: - finalMalscore += match["weight"] * 0.5 * (match["confidence"] / 100.0) - else: - finalMalscore += match["weight"] * (match["severity"] - 1) * (match["confidence"] / 100.0) - if finalMalscore > 10.0: - finalMalscore = 10.0 - if finalMalscore < 0.0: - finalMalscore = 0.0 + # For all other non-executable file types, use the generic scoring logic + finalMalscore = _calculate_generic_score(matched) + # Note: The original logic did not assign a status here, so we keep that behavior. return finalMalscore, status diff --git a/lib/cuckoo/common/socket_utils.py b/lib/cuckoo/common/socket_utils.py index 73613aa9122..610a4e4c706 100644 --- a/lib/cuckoo/common/socket_utils.py +++ b/lib/cuckoo/common/socket_utils.py @@ -12,6 +12,22 @@ def send_socket_command(socket_path: str, command: str, *args, **kwargs): + """ + Sends a command via a Unix domain socket to a root-executed component. + + Args: + socket_path (str): The path to the Unix domain socket. + command (str): The command to send. + args: Additional positional arguments to include in the command. + kwargs: Additional keyword arguments to include in the command. + + Returns: + dict: The response from the socket, parsed from JSON. If there is a timeout or connection error, + a dictionary with an "exception" key will be returned. + + Logs: + Critical errors if the socket path does not exist or if unable to connect to the Unix socket. + """ """Aux function to send commands via socket to root executed components""" if not path_exists(socket_path): log.critical("Unable to passthrough root command (%s) as the rooter unix socket: %s doesn't exist", socket_path, command) diff --git a/lib/cuckoo/common/suricata_detection.py b/lib/cuckoo/common/suricata_detection.py index fd5c7521e37..4d42c1aae54 100644 --- a/lib/cuckoo/common/suricata_detection.py +++ b/lib/cuckoo/common/suricata_detection.py @@ -1,7 +1,4 @@ -try: - import re2 as re -except ImportError: - import re +import re suricata_passlist = ( "agenttesla", @@ -83,6 +80,7 @@ def get_suricata_family(signature): """ + Extracts the family name from a Suricata alert string. Args: signature: suricata alert string Return @@ -120,8 +118,8 @@ def get_suricata_family(signature): break isbad = any(block in famchecklower for block in suricata_blocklist) if not isbad and len(famcheck) >= 4: - family = famcheck.title() + family = famcheck isgood = any(allow in famchecklower for allow in suricata_passlist) if isgood and len(famcheck) >= 4: - family = famcheck.title() + family = famcheck return family diff --git a/lib/cuckoo/common/url_validate.py b/lib/cuckoo/common/url_validate.py index 9df8de2a461..8ce2b626509 100644 --- a/lib/cuckoo/common/url_validate.py +++ b/lib/cuckoo/common/url_validate.py @@ -17,7 +17,10 @@ # protocol identifier r"(?:(?:https?|ftp|tcp|udp)://)" # user:pass authentication - r"(?:[-a-z\u00a1-\uffff0-9._~%!$&'()*+,;=:]+" r"(?::[-a-z0-9._~%!$&'()*+,;=:]*)?@)?" r"(?:" r"(?P" + r"(?:[-a-z\u00a1-\uffff0-9._~%!$&'()*+,;=:]+" + r"(?::[-a-z0-9._~%!$&'()*+,;=:]*)?@)?" 
+ r"(?:" + r"(?P" # IP address exclusion # private & local networks r"(?:(?:10|127)" + ip_middle_octet + r"{2}" + ip_last_octet + r")|" @@ -25,13 +28,19 @@ r"(?:172\.(?:1[6-9]|2\d|3[0-1])" + ip_middle_octet + ip_last_octet + r"))" r"|" # private & local hosts - r"(?P" r"(?:localhost))" r"|" + r"(?P" + r"(?:localhost))" + r"|" # IP address dotted notation octets # excludes loopback network 0.0.0.0 # excludes reserved space >= 224.0.0.0 # excludes network & broadcast addresses # (first & last IP address of each class) - r"(?P" r"(?:[1-9]\d?|1\d\d|2[01]\d|22[0-3])" r"" + ip_middle_octet + r"{2}" r"" + ip_last_octet + r")" r"|" + r"(?P" + r"(?:[1-9]\d?|1\d\d|2[01]\d|22[0-3])" + r"" + ip_middle_octet + r"{2}" + r"" + ip_last_octet + r")" + r"|" # IPv6 RegEx from https://stackoverflow.com/a/17871737 r"\[(" # 1:2:3:4:5:6:7:8 @@ -59,16 +68,23 @@ r"((25[0-5]|(2[0-4]|1{0,1}[0-9]){0,1}[0-9])\.){3,3}" # ::255.255.255.255 ::ffff:255.255.255.255 ::ffff:0:255.255.255.255 # (IPv4-mapped IPv6 addresses and IPv4-translated addresses) - r"(25[0-5]|(2[0-4]|1{0,1}[0-9]){0,1}[0-9])|" r"([0-9a-fA-F]{1,4}:){1,4}:" r"((25[0-5]|(2[0-4]|1{0,1}[0-9]){0,1}[0-9])\.){3,3}" + r"(25[0-5]|(2[0-4]|1{0,1}[0-9]){0,1}[0-9])|" + r"([0-9a-fA-F]{1,4}:){1,4}:" + r"((25[0-5]|(2[0-4]|1{0,1}[0-9]){0,1}[0-9])\.){3,3}" # 2001:db8:3:4::192.0.2.33 64:ff9b::192.0.2.33 # (IPv4-Embedded IPv6 Address) - r"(25[0-5]|(2[0-4]|1{0,1}[0-9]){0,1}[0-9])" r")\]|" + r"(25[0-5]|(2[0-4]|1{0,1}[0-9]){0,1}[0-9])" + r")\]|" # host name - r"(?:(?:(?:xn--[-]{0,2})|[a-z\u00a1-\uffff\U00010000-\U0010ffff0-9]-?)*" r"[a-z\u00a1-\uffff\U00010000-\U0010ffff0-9]+)" + r"(?:(?:(?:xn--[-]{0,2})|[a-z\u00a1-\uffff\U00010000-\U0010ffff0-9]-?)*" + r"[a-z\u00a1-\uffff\U00010000-\U0010ffff0-9]+)" # domain name - r"(?:\.(?:(?:xn--[-]{0,2})|[a-z\u00a1-\uffff\U00010000-\U0010ffff0-9]-?)*" r"[a-z\u00a1-\uffff\U00010000-\U0010ffff0-9]+)*" + r"(?:\.(?:(?:xn--[-]{0,2})|[a-z\u00a1-\uffff\U00010000-\U0010ffff0-9]-?)*" + r"[a-z\u00a1-\uffff\U00010000-\U0010ffff0-9]+)*" # TLD identifier - r"(?:\.(?:(?:xn--[-]{0,2}[a-z\u00a1-\uffff\U00010000-\U0010ffff0-9]{2,})|" r"[a-z\u00a1-\uffff\U00010000-\U0010ffff]{2,}))" r")" + r"(?:\.(?:(?:xn--[-]{0,2}[a-z\u00a1-\uffff\U00010000-\U0010ffff0-9]{2,})|" + r"[a-z\u00a1-\uffff\U00010000-\U0010ffff]{2,}))" + r")" # port number r"(?::\d{2,5})?" # resource path @@ -76,7 +92,8 @@ # query string r"(?:\?\S*)?" # fragment - r"(?:#\S*)?" r"$", + r"(?:#\S*)?" + r"$", re.UNICODE | re.IGNORECASE, ) diff --git a/lib/cuckoo/common/utils.py b/lib/cuckoo/common/utils.py index 829aa5006a4..2b6f09d79d7 100644 --- a/lib/cuckoo/common/utils.py +++ b/lib/cuckoo/common/utils.py @@ -571,12 +571,17 @@ def datetime_to_iso(timestamp): return datetime.strptime(timestamp, "%Y-%m-%d %H:%M:%S").isoformat() -def store_temp_file(filedata, filename, path=None): - """Store a temporary file. - @param filedata: content of the original file. - @param filename: name of the original file. - @param path: optional path for temp directory. - @return: path to the temporary file. +def store_temp_file(filedata: bytes, filename: str, path=None) -> bytes: + """ + Store a temporary file. + + Args: + filedata (bytes or file-like object): Content of the original file. + filename (str): Name of the original file. + path (str, optional): Optional path for the temporary directory. Defaults to None. + + Returns: + bytes: Path to the temporary file. 
""" filename = path_get_filename(filename).encode("utf-8", "replace") @@ -604,6 +609,7 @@ def store_temp_file(filedata, filename, path=None): else: tmp_file.write(filedata) + # ToDo consider change from bytes to str return tmp_file_path @@ -769,9 +775,21 @@ def truncate_filename(x): return truncated -def sanitize_filename(x): - """Kind of awful but necessary sanitizing of filenames to - get rid of unicode problems.""" +def sanitize_filename(x: str): + """ + Sanitizes a given filename to remove problematic characters and ensure it is safe for use. + + This function performs the following operations: + 1. Strips leading spaces from the filename. + 2. Replaces any character that is not an ASCII letter, digit, space, underscore, hyphen, or period with an underscore. + 3. Truncates the filename if it exceeds a certain length to prevent issues with overly long filenames. + + Args: + x (str): The filename to sanitize. + + Returns: + str: The sanitized filename. + """ while x.startswith(" "): x = x.lstrip() out = "".join(c if c in string.ascii_letters + string.digits + " _-." else "_" for c in x) diff --git a/lib/cuckoo/common/web_utils.py b/lib/cuckoo/common/web_utils.py index eb10064548c..53a51ac6959 100644 --- a/lib/cuckoo/common/web_utils.py +++ b/lib/cuckoo/common/web_utils.py @@ -1,8 +1,8 @@ import hashlib -import io import json import logging import os +import re import sys import tempfile import threading @@ -18,12 +18,6 @@ import requests from django.http import HttpResponse -HAVE_PYZIPPER = False -with suppress(ImportError): - import pyzipper - - HAVE_PYZIPPER = True - from dev_utils.mongo_hooks import FILE_REF_KEY, FILES_COLL, NORMALIZED_FILE_FIELDS from lib.cuckoo.common.config import Config from lib.cuckoo.common.integrations.parse_pe import HAVE_PEFILE, IsPEImage, pefile @@ -51,6 +45,7 @@ Task, ) from lib.cuckoo.core.rooter import _load_socks5_operational, vpns +from lib.downloaders import Downloaders _current_dir = os.path.abspath(os.path.dirname(__file__)) CUCKOO_ROOT = os.path.normpath(os.path.join(_current_dir, "..", "..", "..")) @@ -63,20 +58,16 @@ routing_conf = Config("routing") machinery = Config(cfg.cuckoo.machinery) disable_x64 = cfg.cuckoo.get("disable_x64", False) - apiconf = Config("api") +db = Database() +downloader_services = Downloaders() + linux_enabled = web_cfg.linux.get("enabled", False) rateblock = web_cfg.ratelimit.get("enabled", False) rps = web_cfg.ratelimit.get("rps", "1/rps") rpm = web_cfg.ratelimit.get("rpm", "5/rpm") -db = Database() - -try: - import re2 as re -except ImportError: - import re DYNAMIC_PLATFORM_DETERMINATION = web_cfg.general.dynamic_platform_determination @@ -85,9 +76,8 @@ if dist_conf.distributed.enabled: try: # Tags - from lib.cuckoo.common.dist_db import Machine, Node + from lib.cuckoo.common.dist_db import Machine, Node, create_session from lib.cuckoo.common.dist_db import Task as DTask - from lib.cuckoo.common.dist_db import create_session HAVE_DIST = True dist_session = create_session(dist_conf.distributed.db) @@ -181,7 +171,23 @@ def my_rate_minutes(group, request): _load_vms_exits_lock = threading.Lock() -def load_vms_exits(force=False): +def load_vms_exits(force: bool = False): + """ + Load the VM exits information. + + This function loads the VM exit nodes information and stores it in the global + variable `_all_nodes_exits`. If the information is already loaded and the + `force` parameter is not set to True, it returns the cached information. + Otherwise, it reloads the information. 
+ + Args: + force (bool): If set to True, forces the reloading of the VM exits + information even if it is already loaded. Default is False. + + Returns: + dict: A dictionary where the keys are exit node names and the values are + lists of node names associated with each exit node. + """ global _all_nodes_exits with _load_vms_exits_lock: if _all_nodes_exits is not None and not force: @@ -205,7 +211,22 @@ def load_vms_exits(force=False): _load_vms_tags_lock = threading.Lock() -def load_vms_tags(force=False): +def load_vms_tags(force: bool = False): + """ + Load and return the tags associated with all virtual machines (VMs). + + This function retrieves tags from both a distributed database (if enabled) + and a local database, combines them, and returns a sorted list of unique tags. + The result is cached globally and can be forced to refresh by setting the + `force` parameter to True. + + Args: + force (bool): If True, forces the function to reload the tags from the + databases even if they are already cached. Default is False. + + Returns: + list: A sorted list of unique tags associated with all VMs. + """ global _all_vms_tags with _load_vms_tags_lock: if _all_vms_tags is not None and not force: @@ -229,6 +250,19 @@ def load_vms_tags(force=False): def top_asn(date_since: datetime = False, results_limit: int = 20) -> dict: + """ + Retrieves the top Autonomous System Numbers (ASNs) based on the number of occurrences in the database. + + This function queries a MongoDB collection to aggregate and count the occurrences of ASNs in the network hosts. + The results are cached for 10 minutes to improve performance. + + Args: + date_since (datetime, optional): A datetime object to filter results starting from this date. Defaults to False. + results_limit (int, optional): The maximum number of ASNs to return. Defaults to 20. + + Returns: + dict: A dictionary containing the top ASNs and their counts. Returns False if the MongoDB is not enabled or if the "top_asn" configuration is disabled. + """ if web_cfg.general.get("top_asn", False) is False: return False @@ -274,6 +308,17 @@ def top_asn(date_since: datetime = False, results_limit: int = 20) -> dict: def top_detections(date_since: datetime = False, results_limit: int = 20) -> dict: + """ + Retrieves the top detections from the database, either from MongoDB or Elasticsearch, + and caches the results for 10 minutes. + + Args: + date_since (datetime, optional): The starting date to filter detections. Defaults to False. + results_limit (int, optional): The maximum number of results to return. Defaults to 20. + + Returns: + dict: A dictionary containing the top detections with their counts, or False if the feature is disabled. + """ if web_cfg.general.get("top_detections", False) is False: return False @@ -332,7 +377,24 @@ def top_detections(date_since: datetime = False, results_limit: int = 20) -> dic # ToDo extend this to directly extract per day -def get_stats_per_category(category: str, date_since): +def get_stats_per_category(category: str, date_since: datetime) -> List[Dict[str, int]]: + """ + Retrieves statistical data for a given category from the MongoDB collection "analysis" + starting from a specified date. + + Args: + category (str): The category to retrieve statistics for. + date_since (datetime): The starting date to filter the data. + + Returns: + list: A list of dictionaries containing the aggregated statistics per category. + Each dictionary contains the following keys: + - name (str): The name of the category. 
+ - successful (int): The count of successful extractions. + - runs (int): The total number of runs. + - total (float): The total time in minutes, rounded to 2 decimal places. + - average (float): The average time per run in minutes, rounded to 2 decimal places. + """ aggregation_command = [ { "$match": { @@ -370,6 +432,26 @@ def get_stats_per_category(category: str, date_since): def statistics(s_days: int) -> dict: + """ + Generate statistics for the given number of days. + + Args: + s_days (int): The number of days to generate statistics for. + + Returns: + dict: A dictionary containing various statistics including: + - signatures: Statistics related to signatures. + - processing: Statistics related to processing. + - reporting: Statistics related to reporting. + - top_samples: Top samples per day seen more than once. + - detections: Top detections. + - custom_statistics: Custom statistics. + - total: Total number of tasks completed. + - average: Average number of tasks completed per day. + - tasks: Detailed task statistics per day. + - distributed_tasks: Statistics related to distributed tasks (if applicable). + - asns: Top Autonomous System Numbers (ASNs). + """ date_since = datetime.now().replace(hour=0, minute=0, second=0, microsecond=0) - timedelta(days=s_days) date_till = datetime.now().replace(hour=0, minute=0, second=0, microsecond=0) @@ -492,7 +574,7 @@ def statistics(s_days: int) -> dict: # Same jsonize function from api.py except we can now return Django # HttpResponse objects as well. (Shortcut to return errors) -def jsonize(data, response=False): +def jsonize(data: dict, response: bool = False): """Converts data dict to JSON. @param data: data dict @return: JSON formatted data or HttpResponse object with json data @@ -503,7 +585,99 @@ def jsonize(data, response=False): return json.dumps(data, sort_keys=False, indent=4) -def get_file_content(paths): +def get_hash_list(hashes: str) -> list: + """ + Parses a string of hashes separated by commas or spaces and returns a list of cleaned hash values. + Args: + hashes (str): A string containing hash values separated by commas or spaces. + Returns: + list: A list of cleaned hash values. If a hash value is a URL ending with a slash, the hash is extracted from the URL. + """ + hashlist = [] + if "," in hashes: + hashlist = list(filter(None, hashes.replace(" ", "").strip().split(","))) + else: + hashlist = hashes.split() + for i in range(len(hashlist)): + if hashlist[i].startswith("http") and hashlist[i].endswith("/"): + hash = hashlist[i].split("/")[-2] + if len(hash) in (32, 40, 64): + hashlist[i] = hash + return hashlist + + +def download_from_3rdparty(samples: str, opt_filename: str, details: dict) -> dict: + """ + Processes the given sample hashes by downloading them from the configured third-party services or retrieving their content from local storage, + and updates the details dictionary with the file path, hash, and other relevant information. + + Args: + samples (str): A string of sample hashes, separated by commas or spaces, to process. + opt_filename (str): An optional filename to use for the downloaded files. If not provided, the hash will be used as the filename. + details (dict): A dictionary to store details about the processed samples, including path, hash, content, errors, etc.
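# Quick illustration of the get_hash_list helper defined above; the MalwareBazaar-style URL
# and its hostname are made up, and the two hashes are the well-known EICAR values.
from lib.cuckoo.common.web_utils import get_hash_list

mixed = "44d88612fea8a8f36de82e1278abb02f, https://bazaar.example.com/sample/275a021bbfb6489e54d471899f7db9d1663fc695ec2fe2a2c4538aabf651fd0f/"
print(get_hash_list(mixed))
# ['44d88612fea8a8f36de82e1278abb02f', '275a021bbfb6489e54d471899f7db9d1663fc695ec2fe2a2c4538aabf651fd0f']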
+ + Returns: + dict: The updated details dictionary with information about the processed samples, including any errors encountered. + """ + folder = os.path.join(cfg.cuckoo.tmppath, "cape-external") + if not path_exists(folder): + path_mkdir(folder, exist_ok=True) + for h in get_hash_list(samples): + base_dir = tempfile.mkdtemp(prefix="third_party", dir=folder) + if opt_filename: + filename = f"{base_dir}/{opt_filename}" + else: + filename = f"{base_dir}/{sanitize_filename(h)}" + + content = False + details["path"] = filename + details["fhash"] = h + # clean old content + if "content" in details: + del details["content"] + paths = db.sample_path_by_hash(h) + if paths: + details["content"] = get_file_content(paths) + details["service"] = "Local" + + if not details.get("content", False): + content, service = downloader_services.download(h, details.get("apikey")) + if not content: + details["errors"].append({h: "Can't download sample from external services"}) + continue + details["service"] = service + + if content: + details["content"] = content + + errors = {} + if not details.get("content", False): + status, tasks_details = download_file(**details) + else: + status, tasks_details = download_file(**details) + if status == "error": + details["errors"].append({h: tasks_details}) + else: + details["task_ids"] = tasks_details.get("task_ids", []) + errors = tasks_details.get("errors") + if errors: + details["errors"].extend(errors) + + return details + + +def get_file_content(paths: list) -> bytes: + """ + Retrieves the content of the first existing file from a list of file paths. + + Args: + paths (str or list of str): A single file path or a list of file paths to check. + + Returns: + bytes or bool: The content of the first existing file as bytes, or False if no file exists. + """ content = False if not isinstance(paths, list): paths = [paths] @@ -515,7 +689,17 @@ def get_file_content(paths): return content -def fix_section_permission(path): +def fix_section_permission(path: str): + """ + Adjusts the permissions of the .rdata section in a PE file to include write permissions. + + This function checks if the 'pefile' module is available and if the given file is a PE image. + If the .rdata section of the PE file has read-only permissions, it modifies the section + characteristics to include write permissions. + + Args: + path (str): The file path to the PE file. + """ if not HAVE_PEFILE: log.info("[-] Missed dependency pefile") return @@ -534,7 +718,22 @@ def fix_section_permission(path): log.info(e) -def get_magic_type(data): +def get_magic_type(data: bytes) -> str: + """ + Determine the MIME type of the given data using the `magic` library. + + This function attempts to identify the MIME type of the provided data. If the data + represents a file path and the file exists, it uses `magic.from_file` to determine + the MIME type. Otherwise, it uses `magic.from_buffer` to determine the MIME type + from the data buffer. + + Args: + data (bytes): The data to analyze, which can be a file path or a data buffer. + + Returns: + str: The MIME type of the data if successfully determined. + bool: False if an error occurs during MIME type determination. + """ try: if path_exists(data): return magic.from_file(data) @@ -547,7 +746,30 @@ def get_magic_type(data): def download_file(**kwargs): - """Example of kwargs + """ + Downloads a file based on the provided arguments and handles various conditions and errors. + + Keyword Arguments: + errors (list): List to store error messages. 
+ content (bytes): Content of the file to be downloaded. + request (object): Request object containing details of the request. + task_id (list): List to store task IDs. + url (str): URL to download the file from. + params (dict): Parameters to be sent in the request. + headers (dict): Headers to be sent in the request. + service (str): Name of the service to download the file from. + path (str): Path to save the downloaded file. + fhash (str): Expected hash of the file to verify integrity. + options (str): Additional options for the download. + only_extraction (bool): Flag to indicate if only extraction is needed. + user_id (int): ID of the user requesting the download. + source_url (str): Source URL of the file. + + Returns: + tuple: A tuple containing the status ("ok" or "error") and a dictionary with task IDs and errors. + """ + """ + Example of kwargs { "errors": [], "content": content, @@ -769,8 +991,21 @@ def download_file(**kwargs): return "ok", {"task_ids": kwargs["task_ids"], "errors": extra_details.get("errors", [])} -def save_script_to_storage(task_ids, kwargs): +def save_script_to_storage(task_ids: list, kwargs): """ + Save pre_script and during_script contents to a temporary storage. + + Parameters: + task_ids (list): List of task IDs for which the scripts need to be saved. + kwargs (dict): Dictionary containing script names and contents. Expected keys are: + - "pre_script_name" (str): Name of the pre-script file. + - "pre_script_content" (bytes): Content of the pre-script file. + - "during_script_name" (str): Name of the during-script file. + - "during_script_content" (bytes): Content of the during-script file. + + Raises: + ValueError: If the file extension of the script is not one of ".py", ".ps1", or ".exe". + Parameters: task_ids, kwargs Retrieve pre_script and during_script contents and save it to a temp storage """ @@ -795,14 +1030,46 @@ def save_script_to_storage(task_ids, kwargs): _ = Path(os.path.join(script_temp_path, f"during_script{file_ext}")).write_bytes(kwargs["during_script_content"]) -def url_defang(url): +def url_defang(url: str): + """ + Defangs a given URL by replacing common defanged components with their original counterparts. + + This function performs the following replacements: + - "[.]" with "." + - "[." with "." + - ".]" with "." + - "hxxp" with "http" + - "hxtp" with "http" + + Additionally, if the URL does not start with "http", it prepends "http://" to the URL. + + Args: + url (str): The defanged URL to be processed. + + Returns: + str: The refanged URL. + """ url = url.replace("[.]", ".").replace("[.", ".").replace(".]", ".").replace("hxxp", "http").replace("hxtp", "http") if not url.startswith("http"): url = f"http://{url}" return url -def _download_file(route, url, options): +def _download_file(route: str, url: str, options: str): + """ + Downloads a file from the specified URL using 3rd party proxy settings and custom headers. + + Args: + route (str): The route to determine proxy settings. If "tor", uses Tor network. + If in socks5s, uses the specified SOCKS5 proxy settings. + url (str): The URL of the file to download. + options (str): Comma-separated string of options to customize headers. + Options starting with "dne_" will be added to headers. + + Returns: + bytes: The content of the downloaded file if the request is successful. + bool: False if the request fails or an exception occurs. 
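# Small example of the url_defang helper documented above: it converts a defanged indicator
# back into a fetchable URL. The domain below is made up.
from lib.cuckoo.common.web_utils import url_defang

print(url_defang("hxxp://bad[.]example[.]com/payload.bin"))
# -> "http://bad.example.com/payload.bin"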
+ """ socks5s = _load_socks5_operational() proxies = {} response = False @@ -839,7 +1106,23 @@ def _download_file(route, url, options): return response -def category_all_files(task_id, category, base_path): +def category_all_files(task_id: str, category: str, base_path: str): + """ + Retrieve all file paths for a given task and category. + + Args: + task_id (str): The ID of the task to retrieve files for. + category (str): The category of files to retrieve. Special handling for "CAPE" category. + base_path (str): The base path to prepend to the file paths. + + Returns: + list: A list of file paths corresponding to the given task and category. + + Notes: + - If the category is "CAPE", it will be internally mapped to "CAPE.payloads". + - The function currently supports MongoDB as the database backend. + - Elasticsearch support is mentioned but not implemented. + """ analysis = False query_category = category if category == "CAPE": @@ -919,7 +1202,7 @@ def validate_task_by_path(tid): "md5": "md5", "sha1": "sha1", "sha3": "sha3_384", - "sha256": "sha256", + "sha256": "_id", "sha512": "sha512", } @@ -971,7 +1254,7 @@ def validate_task_by_path(tid): "shrikesid": "info.shrike_sid", "custom": "info.custom", # initial binary - "target_sha256": ("target.file.sha256", f"target.file.{FILE_REF_KEY}"), + "target_sha256": f"target.file.{FILE_REF_KEY}", "tlp": "info.tlp", "ja3_hash": "suricata.tls.ja3.hash", "ja3_string": "suricata.tls.ja3.string", @@ -986,13 +1269,14 @@ def validate_task_by_path(tid): "network.udp.dport", "network.smtp_ex.dport", ), + # ToDo update schema # File_extra_info "extracted_tool": ( - "info.parent_sample.extracted_files_tool", - "target.file.extracted_files_tool", - "dropped.extracted_files_tool", - "procdump.extracted_files_tool", - "CAPE.payloads.extracted_files_tool", + "info.parent_sample.selfextract", + "target.file.selfextract", + "dropped.selfextract", + "procdump.selfextract", + "CAPE.payloads.selfextract", ), } @@ -1013,6 +1297,7 @@ def validate_task_by_path(tid): "imphash": "imphash", } +# ToDo review extracted_files key still the same search_term_map_base_naming = ( ("info.parent_sample",) + NORMALIZED_FILE_FIELDS + tuple(f"{category}.extracted_files" for category in NORMALIZED_FILE_FIELDS) ) @@ -1044,7 +1329,24 @@ def validate_task_by_path(tid): ) -def perform_search(term, value, search_limit=False, user_id=False, privs=False, web=True, projection=None): +def perform_search( + term: str, value: str, search_limit: int = 0, user_id: int = 0, privs: bool = False, web: bool = True, projection: dict = None +): + """ + Perform a search based on the provided term and value. + + Args: + term (str): The search term to use. + value (str): The value to search for. + search_limit (int, optional): The maximum number of search results to return. Defaults to 0. + user_id (int, optional): The user ID to filter tasks by. Defaults to 0. + privs (bool, optional): Indicates if the user has privileges. Defaults to False. + web (bool, optional): Indicates if the search is performed via the web interface. Defaults to True. + projection (dict, optional): Fields to include or exclude in the search results. Defaults to None. + + Returns: + list: A list of search results matching the criteria. 
+ """ if repconf.mongodb.enabled and repconf.elasticsearchdb.enabled and essearch and not term: multi_match_search = {"query": {"multi_match": {"query": value, "fields": ["*"]}}} numhits = es.search(index=get_analysis_index(), body=multi_match_search, size=0)["hits"]["total"] @@ -1093,6 +1395,9 @@ def perform_search(term, value, search_limit=False, user_id=False, privs=False, elif term == "configs": # check if family name is string only maybe? query_val = {f"{search_term_map[term]}.{value}": {"$exist": True}, "$options": "i"} + # ToDo proper implementation here + # elif term == "extracted_tool": + # query_val = {"$exist": True} elif term == "ttp": if validate_ttp(value): query_val = value.upper() @@ -1115,29 +1420,21 @@ def perform_search(term, value, search_limit=False, user_id=False, privs=False, query_val = {"$exists": True} if repconf.mongodb.enabled and query_val: - if isinstance(search_term_map[term], str): + if term in hash_searches: + # The file details are uniq, and we store 1 to many. So where hash type is uniq, IDs are list + file_docs = list(mongo_find(FILES_COLL, {hash_searches[term]: query_val}, {"_task_ids": 1})) + if not file_docs: + return [] + ids = sorted(list(set(file_docs[0]["_task_ids"])), reverse=True)[:search_limit] + term = "ids" + mongo_search_query = {"info.id": {"$in": ids}} + elif isinstance(search_term_map[term], str): mongo_search_query = {search_term_map[term]: query_val} + elif isinstance(search_term_map[term], list): + mongo_search_query = {search_term:query_val for search_term in search_term_map[term]} else: - search_terms = [{search_term: query_val} for search_term in search_term_map[term]] - if term in hash_searches: - # For analyses where files have been stored in the "files" collection, search - # there for the _id (i.e. sha256) of documents matching the given hash. As a - # special case, we don't need to do that query if the requested hash type is - # "sha256" since that's what's stored in the "file_refs" key. - # We do all this in addition to search the old keys for backwards-compatibility - # with documents that do not use this mechanism for storing file data. - if term == "sha256": - file_refs = [query_val] - else: - file_docs = mongo_find(FILES_COLL, {hash_searches[term]: query_val}, {"_id": 1}) - file_refs = [doc["_id"] for doc in file_docs] - if file_refs: - if len(file_refs) > 1: - query = {"$in": file_refs} - else: - query = file_refs[0] - search_terms.extend([{f"{pfx}.{FILE_REF_KEY}": query} for pfx in NORMALIZED_FILE_FIELDS]) - mongo_search_query = {"$or": search_terms} + print(f"Unknown search {term}:{value}") + return [] # Allow to overwrite perform_search_filters for custom results if not projection: @@ -1173,6 +1470,20 @@ def force_int(value): def force_bool(value): + """ + Converts a given value to a boolean. + + Args: + value: The value to be converted. It can be of any type. + + Returns: + bool: The boolean representation of the input value. Returns True if the value is one of + ("true", "yes", "on", "1") (case insensitive). Returns False if the value is one of + ("false", "no", "off", "0") (case insensitive), or if the value is None or empty. + + Logs: + A warning is logged if the value cannot be converted from string to bool. + """ if isinstance(value, bool): return value @@ -1189,6 +1500,38 @@ def force_bool(value): def parse_request_arguments(request, keyword="POST"): + """ + Parses request arguments from a Django or API request object. + + Args: + request (HttpRequest): The request object containing the arguments. 
+ keyword (str, optional): The attribute of the request object to extract arguments from. Defaults to "POST". + + Returns: + tuple: A tuple containing the following parsed arguments: + - static (str): Static argument. + - package (str): Package argument. + - timeout (int): Timeout argument. + - priority (int): Priority argument. + - options (str): Options argument. + - machine (str): Machine argument. + - platform (str): Platform argument. + - tags (str): Tags argument. + - custom (str): Custom argument. + - memory (bool): Memory argument. + - clock (str): Clock argument. + - enforce_timeout (bool): Enforce timeout argument. + - shrike_url (str): Shrike URL argument. + - shrike_msg (str): Shrike message argument. + - shrike_sid (str): Shrike SID argument. + - shrike_refer (str): Shrike refer argument. + - unique (bool): Unique argument. + - referrer (str): Referrer argument. + - tlp (str): TLP argument. + - tags_tasks (str): Tags tasks argument. + - route (str): Route argument. + - cape (str): CAPE argument. + """ # Django uses request.POST and API uses request.data static = getattr(request, keyword).get("static", "") referrer = validate_referrer(getattr(request, keyword).get("referrer")) @@ -1253,117 +1596,31 @@ def parse_request_arguments(request, keyword="POST"): ) -def get_hash_list(hashes): - hashlist = [] - if "," in hashes: - hashlist = list(filter(None, hashes.replace(" ", "").strip().split(","))) - else: - hashlist = hashes.split() - - for i in range(len(hashlist)): - if hashlist[i].startswith("http") and hashlist[i].endswith("/"): - hash = hashlist[i].split("/")[-2] - if len(hash) in (32, 40, 64): - hashlist[i] = hash - - return hashlist - - -_bazaar_map = { - 32: "md5_hash", - 40: "sha1_hash", - 64: "sha256_hash", -} - - -def _malwarebazaar_dl(hash): - sample = None - if len(hash) not in _bazaar_map: - return False - - try: - data = requests.post("https://mb-api.abuse.ch/api/v1/", data={"query": "get_file", _bazaar_map[len(hash)]: hash}) - if data.ok and b"file_not_found" not in data.content: - try: - with pyzipper.AESZipFile(io.BytesIO(data.content)) as zf: - zf.setpassword(b"infected") - sample = zf.read(zf.namelist()[0]) - except pyzipper.zipfile.BadZipFile: - print(data.content) - except Exception as e: - logging.error(e, exc_info=True) - - return sample - - -def thirdpart_aux(samples, prefix, opt_filename, details, settings): - folder = os.path.join(settings.TEMP_PATH, "cape-external") - if not path_exists(folder): - path_mkdir(folder, exist_ok=True) - for h in get_hash_list(samples): - base_dir = tempfile.mkdtemp(prefix=prefix, dir=folder) - if opt_filename: - filename = f"{base_dir}/{opt_filename}" - else: - filename = f"{base_dir}/{sanitize_filename(h)}" - details["path"] = filename - details["fhash"] = h - paths = db.sample_path_by_hash(h) - - # clean old content - if "content" in details: - del details["content"] - - if paths: - details["content"] = get_file_content(paths) - - if prefix == "vt": - details["url"] = f"https://www.virustotal.com/api/v3/files/{h.lower()}/download" - elif prefix == "bazaar": - content = _malwarebazaar_dl(h) - if content: - details["content"] = content - - errors = {} - if not details.get("content", False): - status, tasks_details = download_file(**details) - else: - details["service"] = "Local" - status, tasks_details = download_file(**details) - if status == "error": - details["errors"].append({h: tasks_details}) - else: - details["task_ids"] = tasks_details.get("task_ids", []) - errors = tasks_details.get("errors") - if errors: - 
details["errors"].extend(errors) - - return details - - -def download_from_vt(samples, details, opt_filename, settings): - if settings.VTDL_KEY: - details["headers"] = {"x-apikey": settings.VTDL_KEY} - elif details.get("apikey", False): - details["headers"] = {"x-apikey": details["apikey"]} - else: - details["errors"].append({"error": "Apikey not configured, neither passed as opt_apikey"}) - return details - - details["service"] = "VirusTotal" - return thirdpart_aux(samples, "vt", opt_filename, details, settings) - - -def download_from_bazaar(samples, details, opt_filename, settings): - if not HAVE_PYZIPPER: - print("Malware Bazaar download: Missed pyzipper dependency: pip3 install pyzipper -U") - return - - details["service"] = "MalwareBazaar" - return thirdpart_aux(samples, "bazaar", opt_filename, details, settings) - - -def process_new_task_files(request, samples, details, opt_filename, unique): +def process_new_task_files(request, samples: list, details: dict, opt_filename: str, unique: bool = False) -> tuple: + """ + Processes new task files by validating and storing them. + + Args: + request: The HTTP request object containing user information. + samples (list): A list of sample files to be processed. + details (dict): A dictionary to store error messages and other details. + opt_filename (str): An optional filename to use for the stored files. + unique (bool, optional): A flag to enforce unique file submission. Defaults to False. + + Returns: + tuple: A tuple containing a list of processed files and the updated details dictionary. + + The function performs the following steps: + 1. Checks if each sample file is empty and logs an error if so. + 2. Validates the size of each sample file against the configured maximum size. + 3. Reads the data from each sample file. + 4. Sanitizes the filename or uses the optional filename provided. + 5. Stores the sample file in temporary storage and calculates its SHA-256 hash. + 6. Checks for duplicate file submissions if the unique flag is set. + 7. Appends the processed file data, path, and SHA-256 hash to the list of files. + + Errors encountered during processing are appended to the details dictionary. + """ list_of_files = [] for sample in samples: # Error if there was only one submitted sample, and it's empty. @@ -1417,7 +1674,26 @@ def process_new_task_files(request, samples, details, opt_filename, unique): return list_of_files, details -def process_new_dlnexec_task(url, route, options, custom): +def process_new_dlnexec_task(url: str, route: str, options: str, custom: str): + """ + Processes a new download and execute task by downloading a file from a given URL, + sanitizing the URL, and storing the file temporarily. + + Args: + url (str): The URL of the file to download. The URL may use obfuscation techniques + such as "hxxp" instead of "http" and "[.]" instead of ".". + route (str): The route or path where the file should be downloaded. + options (dict): Additional options for downloading the file. + custom (str): Custom parameters or settings for the task. + + Returns: + tuple: A tuple containing: + - path (str): The temporary file path where the downloaded file is stored. + - response (bytes): The content of the downloaded file. + - str: An empty string (reserved for future use or additional information). + + If the download fails, returns (False, False, False). 
+ """ url = url.replace("hxxps://", "https://").replace("hxxp://", "http://").replace("[.]", ".") response = _download_file(route, url, options) if not response: @@ -1497,7 +1773,7 @@ def submit_task( filename=filename, ) if not task_id: - log.warn("Error adding CAPE task to database: %s", package) + log.warning("Error adding CAPE task to database: %s", package) return task_id log.info('CAPE detection on file "%s": %s - added as CAPE task with ID %s', target, package, task_id) @@ -1506,6 +1782,16 @@ def submit_task( # https://stackoverflow.com/questions/14989858/get-the-current-git-hash-in-a-python-script/68215738#68215738 def get_running_commit() -> str: + """ + Retrieves the current Git commit hash of the repository. + + This function reads the HEAD file in the .git directory to determine the + current branch or commit reference, then reads the corresponding file to + get the commit hash. + + Returns: + str: The current Git commit hash as a string. + """ git_folder = Path(CUCKOO_ROOT, ".git") head_name = Path(git_folder, "HEAD").read_text().split("\n")[0].split(" ")[-1] return Path(git_folder, head_name).read_text().replace("\n", "") diff --git a/lib/cuckoo/common/webadmin_utils.py b/lib/cuckoo/common/webadmin_utils.py index 308a56bb65e..9b27dfb1692 100644 --- a/lib/cuckoo/common/webadmin_utils.py +++ b/lib/cuckoo/common/webadmin_utils.py @@ -7,6 +7,15 @@ # admin utils def disable_user(user_id: int) -> bool: + """ + Disables a user by setting their 'is_active' status to False. + + Args: + user_id (int): The ID of the user to disable. + + Returns: + bool: True if the user was successfully disabled, False otherwise. + """ user = User.objects.get(id=user_id) if user: user.is_active = False diff --git a/lib/cuckoo/core/analysis_manager.py b/lib/cuckoo/core/analysis_manager.py index ce81b520eb6..fcd0ea8f120 100644 --- a/lib/cuckoo/core/analysis_manager.py +++ b/lib/cuckoo/core/analysis_manager.py @@ -32,17 +32,24 @@ # os.listdir('/sys/class/net/') HAVE_NETWORKIFACES = False + try: import psutil network_interfaces = list(psutil.net_if_addrs().keys()) HAVE_NETWORKIFACES = True except ImportError: - print("Missed dependency: pip3 install psutil") + print("Missed dependency: poetry run pip install psutil") latest_symlink_lock = threading.Lock() +def is_network_interface(intf: str): + global network_interfaces + network_interfaces = list(psutil.net_if_addrs().keys()) + return intf in network_interfaces + + class CuckooDeadMachine(Exception): """Exception thrown when a machine turns dead. @@ -261,7 +268,12 @@ def build_options(self): def category_checks(self) -> Optional[bool]: if self.task.category in ("file", "pcap", "static"): - sha256 = File(self.task.target).get_sha256() + try: + sha256 = File(self.task.target).get_sha256() + except FileNotFoundError: + # Happens when cleaner deleted target file + self.log.error("File %s missed", self.task.target) + return False # Check whether the file has been changed for some unknown reason. # And fail this analysis if it has been modified. if not self.check_file(sha256): @@ -313,6 +325,7 @@ def machine_running(self) -> Generator[None, None, None]: with self.db.session.begin(): self.db.guest_remove(self.guest.id) self.db.assign_machine_to_task(self.task, None) + # ToDo do we really need to delete machine here? 
self.machinery_manager.machinery.delete_machine(self.machine.name) # Remove the analysis directory that has been created so @@ -459,21 +472,22 @@ def launch_analysis(self) -> None: success = self.perform_analysis() except CuckooDeadMachine: with self.db.session.begin(): - # Put the task back in pending so that the schedule can attempt to - # choose a new machine. + # Put the task back in pending so that the schedule can attempt to choose a new machine. self.db.set_status(self.task.id, TASK_PENDING) raise else: with self.db.session.begin(): self.db.set_status(self.task.id, TASK_COMPLETED) self.log.info("Completed analysis %ssuccessfully.", "" if success else "un") + # Need to be release on unsucess + if not success and hasattr(self, "machine") and self.machine: + self.db.unlock_machine(self.machine) self.update_latest_symlink() def update_latest_symlink(self): - # We make a symbolic link ("latest") which links to the latest - # analysis - this is useful for debugging purposes. This is only - # supported under systems that support symbolic links. + # We make a symbolic link ("latest") which links to the latest analysis this is useful for debugging purposes. + # This is only supported under systems that support symbolic links. if not hasattr(os, "symlink"): return @@ -536,6 +550,9 @@ def route_network(self): self.rt_table = vpns[self.route].rt_table elif self.route in self.socks5s: self.interface = "" + elif self.route[:3] == "tun" and is_network_interface(self.route): + # tunnel interface starts with "tun" and interface exists on machine + self.interface = self.route else: self.log.warning("Unknown network routing destination specified, ignoring routing for this analysis: %s", self.route) self.interface = None @@ -583,12 +600,15 @@ def route_network(self): elif self.route in ("none", "None", "drop"): self.rooter_response = rooter("drop_enable", self.machine.ip, str(self.cfg.resultserver.port)) + elif self.route[:3] == "tun" and is_network_interface(self.route): + self.log.info("Network interface %s is tunnel", self.interface) + self.rooter_response = rooter("interface_route_tun_enable", self.machine.ip, self.route, str(self.task.id)) self._rooter_response_check() # check if the interface is up if HAVE_NETWORKIFACES and routing.routing.verify_interface and self.interface and self.interface not in network_interfaces: - self.log.info("Network interface {} not found, falling back to dropping network traffic", self.interface) + self.log.info("Network interface %s not found, falling back to dropping network traffic", self.interface) self.interface = None self.rt_table = None self.route = "drop" @@ -714,6 +734,9 @@ def unroute_network(self): elif self.route in ("none", "None", "drop"): self.rooter_response = rooter("drop_disable", self.machine.ip, str(self.cfg.resultserver.port)) + elif self.route[:3] == "tun": + self.log.info("Disable tunnel interface: %s", self.interface) + self.rooter_response = rooter("interface_route_tun_disable", self.machine.ip, self.route, str(self.task.id)) self._rooter_response_check() diff --git a/lib/cuckoo/core/database.py b/lib/cuckoo/core/database.py index 20c66e1b738..110bc3c944f 100644 --- a/lib/cuckoo/core/database.py +++ b/lib/cuckoo/core/database.py @@ -125,11 +125,11 @@ if repconf.mongodb.enabled: from dev_utils.mongodb import mongo_find if repconf.elasticsearchdb.enabled: - from dev_utils.elasticsearchdb import elastic_handler, get_analysis_index + from dev_utils.elasticsearchdb import elastic_handler # , get_analysis_index es = elastic_handler -SCHEMA_VERSION 
= "c2bd0eb5e69d" +SCHEMA_VERSION = "4e000e02a409" TASK_BANNED = "banned" TASK_PENDING = "pending" TASK_RUNNING = "running" @@ -170,8 +170,8 @@ tasks_tags = Table( "tasks_tags", Base.metadata, - Column("task_id", Integer, ForeignKey("tasks.id")), - Column("tag_id", Integer, ForeignKey("tags.id")), + Column("task_id", Integer, ForeignKey("tasks.id", ondelete="cascade")), + Column("tag_id", Integer, ForeignKey("tags.id", ondelete="cascade")), ) @@ -269,7 +269,7 @@ class Guest(Base): manager = Column(String(255), nullable=False) started_on = Column(DateTime(timezone=False), default=datetime.now, nullable=False) shutdown_on = Column(DateTime(timezone=False), nullable=True) - task_id = Column(Integer, ForeignKey("tasks.id"), nullable=False, unique=True) + task_id = Column(Integer, ForeignKey("tasks.id", ondelete="cascade"), nullable=False, unique=True) def __repr__(self): return f"" @@ -465,7 +465,7 @@ class Task(Base): timedout = Column(Boolean, nullable=False, default=False) sample_id = Column(Integer, ForeignKey("samples.id"), nullable=True) - sample = relationship("Sample", backref=backref("tasks", lazy="subquery")) + sample = relationship("Sample", backref=backref("tasks", lazy="subquery", cascade="save-update, delete")) machine_id = Column(Integer, nullable=True) guest = relationship("Guest", uselist=False, backref=backref("tasks"), cascade="save-update, delete") errors = relationship("Error", backref=backref("tasks"), cascade="save-update, delete") @@ -661,7 +661,7 @@ def delete_machine(self, name) -> bool: self.session.delete(machine) return True else: - log.warning(f"{name} does not exist in the database.") + log.warning("%s does not exist in the database.", name) return False def add_machine( @@ -1170,7 +1170,10 @@ def add( elif isinstance(obj, URL): task = Task(obj.url) - tags = "x64,x86" + _tags = tags.split(",") if isinstance(tags, str) else [] + _tags.append("x64") + _tags.append("x86") + tags = ",".join(set(_tags)) else: return None @@ -1326,7 +1329,7 @@ def _identify_aux_func(self, file: bytes, package: str, check_shellcode: bool = try: tmp_package = sflock_identify(f, check_shellcode=check_shellcode) except Exception as e: - log.error(f"Failed to sflock_ident due to {e}") + log.error("Failed to sflock_ident due to %s", str(e)) tmp_package = "generic" if tmp_package and tmp_package in sandbox_packages: @@ -1366,7 +1369,6 @@ def recon( cape=False, category=None, ): - # Get file filetype to ensure self extracting archives run longer if not isinstance(filename, str): filename = bytes2str(filename) @@ -1395,7 +1397,8 @@ def recon( tags = "," + parsed_options["tags"] if tags else parsed_options["tags"] del parsed_options["tags"] # custom packages should be added to lib/cuckoo/core/database.py -> sandbox_packages list - if "package" in parsed_options: + # Do not overwrite user provided package + if not package and "package" in parsed_options: package = parsed_options["package"] del parsed_options["package"] @@ -1547,6 +1550,9 @@ def demux_sample_and_add_to_db( # create tasks for each file in the archive for file, platform in extracted_files: + if not path_exists(file): + log.error("Extracted file doesn't exist: %s", file) + continue # ToDo we lose package here and send APKs to windows if platform in ("linux", "darwin") and LINUX_STATIC: task_ids += self.add_static( @@ -1713,9 +1719,12 @@ def add_static( # check if len is 1 and the same file, if diff register file, and set parent if not isinstance(file_path, bytes): file_path = file_path.encode() - if extracted_files and (file_path, 
platform) not in extracted_files: + + if extracted_files and ((file_path, platform) not in extracted_files and (file_path, "") not in extracted_files): sample_parent_id = self.register_sample(File(file_path)) if conf.cuckoo.delete_archive: + # ToDo keep as info for now + log.info("Deleting archive: %s. conf.cuckoo.delete_archive is enabled. %s", file_path, str(extracted_files)) path_delete(file_path) task_ids = [] @@ -2078,6 +2087,7 @@ def list_tasks( search = search.filter(Task.id.in_(task_ids)) if user_id is not None: search = search.filter(Task.user_id == user_id) + if order_by is not None and isinstance(order_by, tuple): search = search.order_by(*order_by) elif order_by is not None: @@ -2092,6 +2102,135 @@ def list_tasks( return tasks + def delete_task(self, task_id): + """Delete information on a task. + @param task_id: ID of the task to query. + @return: operation status. + """ + task = self.session.get(Task, task_id) + if task is None: + return False + self.session.delete(task) + return True + + def delete_tasks( + self, + category=None, + status=None, + sample_id=None, + not_status=None, + completed_after=None, + added_before=None, + id_before=None, + id_after=None, + options_like=False, + options_not_like=False, + tags_tasks_like=False, + task_ids=False, + user_id=None, + ): + """Delete tasks based on parameters. If no filters are provided, no tasks will be deleted. + + Args: + category: filter by category + status: filter by task status + sample_id: filter tasks for a sample + not_status: exclude this task status from filter + completed_after: only list tasks completed after this timestamp + added_before: tasks added before a specific timestamp + id_before: filter by tasks which is less than this value + id_after: filter by tasks which is greater than this value + options_like: filter tasks by specific option inside of the options + options_not_like: filter tasks by specific option not inside of the options + tags_tasks_like: filter tasks by specific tag + task_ids: list of task_id + user_id: list of tasks submitted by user X + + Returns: + bool: True if the operation was successful (including no tasks to delete), False otherwise. 
+ """ + filters_applied = False + search = self.session.query(Task) + + if status: + if "|" in status: + search = search.filter(Task.status.in_(status.split("|"))) + else: + search = search.filter(Task.status == status) + filters_applied = True + if not_status: + search = search.filter(Task.status != not_status) + filters_applied = True + if category: + search = search.filter(Task.category.in_([category] if isinstance(category, str) else category)) + filters_applied = True + if sample_id is not None: + search = search.filter(Task.sample_id == sample_id) + filters_applied = True + if id_before is not None: + search = search.filter(Task.id < id_before) + filters_applied = True + if id_after is not None: + search = search.filter(Task.id > id_after) + filters_applied = True + if completed_after: + search = search.filter(Task.completed_on > completed_after) + filters_applied = True + if added_before: + search = search.filter(Task.added_on < added_before) + filters_applied = True + if options_like: + # Replace '*' wildcards with wildcard for sql + options_like = options_like.replace("*", "%") + search = search.filter(Task.options.like(f"%{options_like}%")) + filters_applied = True + if options_not_like: + # Replace '*' wildcards with wildcard for sql + options_not_like = options_not_like.replace("*", "%") + search = search.filter(Task.options.notlike(f"%{options_not_like}%")) + filters_applied = True + if tags_tasks_like: + search = search.filter(Task.tags_tasks.like(f"%{tags_tasks_like}%")) + filters_applied = True + if task_ids: + search = search.filter(Task.id.in_(task_ids)) + filters_applied = True + if user_id is not None: + search = search.filter(Task.user_id == user_id) + filters_applied = True + + if not filters_applied: + log.warning("No filters provided for delete_tasks. No tasks will be deleted.") + return True # Indicate success as no deletion was requested/needed + + try: + # Perform the deletion and get the count of deleted rows + deleted_count = search.delete(synchronize_session=False) + log.info("Deleted %d tasks matching the criteria.", deleted_count) + self.session.commit() + return True + except Exception as e: + log.error("Error deleting tasks: %s", str(e)) + # Rollback might be needed if this function is called outside a `with db.session.begin():` + # but typically it should be called within one. + self.session.rollback() + return False + + + def check_tasks_timeout(self, timeout): + """Find tasks which were added_on more than timeout ago and clean""" + tasks: List[Task] = [] + ids_to_delete = [] + if timeout == 0: + return + search = self.session.query(Task).filter(Task.status == TASK_PENDING).order_by(Task.added_on.desc()) + tasks = search.all() + for task in tasks: + if task.added_on + timedelta(seconds=timeout) < datetime.now(): + ids_to_delete.append(task.id) + if len(ids_to_delete) > 0: + self.session.query(Task).filter(Task.id.in_(ids_to_delete)).delete(synchronize_session=False) + def minmax_tasks(self): """Find tasks minimum and maximum @return: unix timestamps of minimum and maximum @@ -2182,21 +2321,6 @@ def add_statistics_to_task(self, task_id, details): # pragma: no cover task.anti_issues = details["anti_issues"] return True - def delete_task(self, task_id): - """Delete information on a task. - @param task_id: ID of the task to query. - @return: operation status. 
- """ - task = self.session.get(Task, task_id) - if task is None: - return False - self.session.delete(task) - return True - - def delete_tasks(self, ids): - self.session.query(Task).filter(Task.id.in_(ids)).delete(synchronize_session=False) - return True - def view_sample(self, sample_id): """Retrieve information on a sample given a sample id. @param sample_id: ID of the sample to query. @@ -2281,12 +2405,6 @@ def sample_path_by_hash(self, sample_hash: str = False, task_id: int = False): 128: "sha512", } - folders = { - "dropped": "files", - "CAPE": "CAPE", - "procdump": "procdump", - } - if task_id: file_path = os.path.join(CUCKOO_ROOT, "storage", "analyses", str(task_id), "binary") if path_exists(file_path): @@ -2322,12 +2440,14 @@ def sample_path_by_hash(self, sample_hash: str = False, task_id: int = False): sample = [path] if not sample: - if repconf.mongodb.enabled: + tasks = [] + if repconf.mongodb.enabled and web_conf.general.check_sample_in_mongodb: tasks = mongo_find( - "analysis", - {f"CAPE.payloads.{sizes_mongo.get(len(sample_hash), '')}": sample_hash}, - {"CAPE.payloads": 1, "_id": 0, "info.id": 1}, + "files", + {sizes_mongo.get(len(sample_hash), ""): sample_hash}, + {"_info_ids": 1, "sha256": 1}, ) + """ deprecated code elif repconf.elasticsearchdb.enabled: tasks = [ d["_source"] @@ -2337,65 +2457,19 @@ def sample_path_by_hash(self, sample_hash: str = False, task_id: int = False): _source=["CAPE.payloads", "info.id"], )["hits"]["hits"] ] - else: - tasks = [] - + """ if tasks: for task in tasks: - for block in task.get("CAPE", {}).get("payloads", []) or []: - if block[sizes_mongo.get(len(sample_hash), "")] == sample_hash: - file_path = os.path.join( - CUCKOO_ROOT, - "storage", - "analyses", - str(task["info"]["id"]), - folders.get("CAPE"), - block["sha256"], - ) + for id in task.get("_task_ids", []): + # ToDo suricata path - "suricata.files.file_info.path + for category in ("files", "procdump", "CAPE"): + file_path = os.path.join(CUCKOO_ROOT, "storage", "analyses", str(id), category, task["sha256"]) if path_exists(file_path): sample = [file_path] break if sample: break - for category in ("dropped", "procdump"): - # we can't filter more if query isn't sha256 - if repconf.mongodb.enabled: - tasks = mongo_find( - "analysis", - {f"{category}.{sizes_mongo.get(len(sample_hash), '')}": sample_hash}, - {category: 1, "_id": 0, "info.id": 1}, - ) - elif repconf.elasticsearchdb.enabled: - tasks = [ - d["_source"] - for d in es.search( - index=get_analysis_index(), - body={"query": {"match": {f"{category}.{sizes_mongo.get(len(sample_hash), '')}": sample_hash}}}, - _source=["info.id", category], - )["hits"]["hits"] - ] - else: - tasks = [] - - if tasks: - for task in tasks: - for block in task.get(category, []) or []: - if block[sizes_mongo.get(len(sample_hash), "")] == sample_hash: - file_path = os.path.join( - CUCKOO_ROOT, - "storage", - "analyses", - str(task["info"]["id"]), - folders.get(category), - block["sha256"], - ) - if path_exists(file_path): - sample = [file_path] - break - if sample: - break - if not sample: # search in temp folder if not found in binaries db_sample = ( @@ -2411,34 +2485,6 @@ def sample_path_by_hash(self, sample_hash: str = False, task_id: int = False): if sample_hash == hashlib_sizes[len(sample_hash)](f.read()).hexdigest(): sample = [path] break - - if not sample: - # search in Suricata files folder - if repconf.mongodb.enabled: - tasks = mongo_find( - "analysis", {"suricata.files.sha256": sample_hash}, {"suricata.files.file_info.path": 1, "_id": 0} - ) - 
elif repconf.elasticsearchdb.enabled: - tasks = [ - d["_source"] - for d in es.search( - index=get_analysis_index(), - body={"query": {"match": {"suricata.files.sha256": sample_hash}}}, - _source="suricata.files.file_info.path", - )["hits"]["hits"] - ] - else: - tasks = [] - - if tasks: - for task in tasks: - for item in task["suricata"]["files"] or []: - file_path = item.get("file_info", {}).get("path", "") - if sample_hash in file_path: - if path_exists(file_path): - sample = [file_path] - break - return sample def count_samples(self) -> int: diff --git a/lib/cuckoo/core/guest.py b/lib/cuckoo/core/guest.py index 8c268b72ecd..4614d658681 100644 --- a/lib/cuckoo/core/guest.py +++ b/lib/cuckoo/core/guest.py @@ -388,7 +388,7 @@ def wait_for_completion(self): ) continue except Exception as e: - log.error("Task #%s: Virtual machine %s /status failed. %s", self.task_id, self.vmid, e, exc_info=True) + log.exception("Task #%s: Virtual machine %s /status failed. %s", self.task_id, self.vmid, e) continue if status["status"] in ("complete", "failed"): diff --git a/lib/cuckoo/core/log.py b/lib/cuckoo/core/log.py index cbe1593fa39..8e983750109 100644 --- a/lib/cuckoo/core/log.py +++ b/lib/cuckoo/core/log.py @@ -70,7 +70,7 @@ def emit(self, record): colored.msg = red(record.msg) else: # Hack for pymongo.logger.LogMessage - if type(record.msg) != "str": + if not isinstance(record.msg, str): record.msg = str(record.msg) if "analysis procedure completed" in record.msg: diff --git a/lib/cuckoo/core/plugins.py b/lib/cuckoo/core/plugins.py index 86aa0a040d5..f510bac78a5 100644 --- a/lib/cuckoo/core/plugins.py +++ b/lib/cuckoo/core/plugins.py @@ -29,6 +29,7 @@ from lib.cuckoo.common.scoring import calc_scoring from lib.cuckoo.common.utils import add_family_detection from lib.cuckoo.core.database import Database +from utils.community_blocklist import blocklist log = logging.getLogger(__name__) db = Database() @@ -42,6 +43,10 @@ "reporting": reporting_cfg, } +banned_signatures = [] +if blocklist.get("signatures"): + banned_signatures = [os.path.basename(sig).replace(".py", "") for sig in blocklist["signatures"]] + def import_plugin(name): try: @@ -58,6 +63,22 @@ def import_plugin(name): def import_package(package): + """ + Imports all modules from a given package, excluding disabled plugins and banned signatures. + + Args: + package (module): The package from which to import modules. + + The function iterates over all modules in the specified package and imports them unless: + - The module is a package itself. + - The module's name is in the list of banned signatures. + - The module is disabled according to the configuration. + + If an error occurs during the import of a module, it catches the exception and prints the error message. + + Raises: + Exception: If an error occurs during the import of a module. + """ prefix = f"{package.__name__}." 
for _, name, ispkg in pkgutil.iter_modules(package.__path__, prefix): if ispkg: @@ -65,6 +86,9 @@ def import_package(package): # Disable initialization of disabled plugins, performance++ _, category, *_, module_name = name.split(".") + if module_name in banned_signatures: + log.debug("Ignoring signature: %s", module_name) + continue if ( category in config_mapper and module_name in config_mapper[category].fullconfig @@ -75,7 +99,7 @@ def import_package(package): try: import_plugin(name) except Exception as e: - print(e) + log.exception("import_package: %s - error: %s", name, str(e)) def load_plugins(module): @@ -109,6 +133,25 @@ def list_plugins(group=None): class RunAuxiliary: + """ + Auxiliary modules manager. + + Attributes: + task (dict): The task information. + machine (dict): The machine information. + cfg (Config): Configuration for auxiliary modules. + enabled (list): List of enabled auxiliary modules. + + Methods: + start(): + Starts all enabled auxiliary modules. + + callback(name, *args, **kwargs): + Executes the callback function for each enabled auxiliary module. + + stop(): + Stops all enabled auxiliary modules. + """ """Auxiliary modules manager.""" def __init__(self, task, machine): @@ -192,6 +235,27 @@ def stop(self): class RunProcessing: """Analysis Results Processing Engine. + + Attributes: + task (dict): Task dictionary of the analysis to process. + analysis_path (str): Path to the analysis results. + cfg (Config): Configuration for processing modules. + cuckoo_cfg (Config): General Cuckoo configuration. + results (dict): Dictionary to store the results of the processing. + + Methods: + process(module): + Run a processing module. + Args: + module: Processing module to run. + Returns: + dict: Results generated by the module or None if an error occurred. + + run(): + Run all processing modules and all signatures. + Returns: + dict: Processing results. + This class handles the loading and execution of the processing modules. It executes the enabled ones sequentially and generates a dictionary which is then passed over the reporting engine. @@ -323,7 +387,36 @@ def run(self): class RunSignatures: - """Run Signatures.""" + """ + RunSignatures is responsible for executing and managing the lifecycle of signatures during an analysis task. + It initializes, filters, and runs both evented and non-evented signatures, applying overlays and handling + signature-specific logic. + + Attributes: + task (dict): The analysis task information. + results (dict): The results of the analysis. + ttps (list): List of TTPs (Tactics, Techniques, and Procedures) identified. + mbcs (dict): Dictionary of MBCs (Malware Behavior Catalog) identified. + cfg_processing (Config): Configuration for processing. + analysis_path (str): Path to the analysis results. + signatures (list): List of initialized signature instances. + evented_list (list): List of evented signatures. + non_evented_list (list): List of non-evented signatures. + api_sigs (dict): Cache of signatures to call per API name. + call_always (set): Set of signatures that should always be called. + call_for_api (defaultdict): Signatures interested in specific API calls. + call_for_cat (defaultdict): Signatures interested in specific categories. + call_for_processname (defaultdict): Signatures interested in specific process names. + + Methods: + _should_load_signature(signature): Determines if a signature should be loaded. + _load_overlay(): Loads overlay data from a JSON file. 
+ _apply_overlay(signature, overlay): Applies overlay attributes to a signature. + _check_signature_version(current): Checks if the signature version is compatible. + _check_signature_platform(signature): Checks if the signature is compatible with the platform. + process(signature): Runs a single signature and returns the matched result. + run(test_signature=False): Runs all evented and non-evented signatures, optionally testing a specific signature. + """ def __init__(self, task, results): self.task = task @@ -338,7 +431,10 @@ def __init__(self, task, results): for signature in list_plugins(group="signatures"): if self._should_load_signature(signature): # Initialize them all - self.signatures.append(signature(self.results)) + try: + self.signatures.append(signature(self.results)) + except Exception as exc: + log.error("failed to initialize signature %s: %s", signature.__name__, exc) overlay = self._load_overlay() log.debug("Applying signature overlays for signatures: %s", ", ".join(overlay)) @@ -362,7 +458,7 @@ def __init__(self, task, results): if sig not in self.evented_list: self.non_evented_list.append(sig) except Exception as e: - print(e) + print("RunSignatures: ", e) # Cache of signatures to call per API name. self.api_sigs = {} @@ -466,7 +562,7 @@ def _check_signature_version(self, current): def _check_signature_platform(self, signature): module = inspect.getmodule(signature).__name__ - platform = self.task.get("platform", "") + platform = self.task.get("platform") or "" if platform in module: return True @@ -509,8 +605,8 @@ def process(self, signature): log.debug('Analysis matched signature "%s"', signature.name) # Return information on the matched signature. return signature.as_result() - except KeyError as e: - log.error('Failed to run signature "%s": %s', signature.name, e) + except (KeyError, TypeError, AttributeError) as e: + log.debug('Failed to run signature "%s": %s', signature.name, e) except NotImplementedError: return None except Exception as e: @@ -593,6 +689,9 @@ def run(self, test_signature: str = False): for sig in self.evented_list: if sig.matched: continue + + # Give it the path to the analysis results folder. + sig.set_path(self.analysis_path) try: pretime = timeit.default_timer() result = sig.on_complete() @@ -600,6 +699,8 @@ def run(self, test_signature: str = False): stats[sig.name] += timediff except NotImplementedError: continue + except (KeyError, TypeError, AttributeError) as e: + log.debug('Failed to run signature "%s": %s', sig.name, e) except Exception as e: log.exception('Failed run on_complete() method for signature "%s": %s', sig.name, e) continue @@ -678,6 +779,26 @@ def run(self, test_signature: str = False): class RunReporting: + """ + Reporting Engine. + Engine and passes it over to the reporting modules before executing them. + Attributes: + task (dict): The analysis task object. + results (dict): The analysis results dictionary. + analysis_path (str): The path to the analysis results folder. + cfg (dict): The reporting configuration. + reprocess (bool): Flag indicating if reprocessing is required. + reporting_errors (int): Counter for reporting module errors. + Methods: + process(module): + Runs a single reporting module. + Args: + module (module): The reporting module to run. + run(): + Generates all reports. + Returns: + int: A count of the reporting module errors. + """ """Reporting Engine. 
This class handles the loading and execution of the enabled reporting @@ -694,6 +815,9 @@ def __init__(self, task, results, reprocess=False): # remove unwanted/duplicate information from reporting for process in results["behavior"]["processes"]: + # Reprocessing and Behavior set from json file + if isinstance(process["calls"], list) and type(process["calls"]).__name__ != "ParseProcessLog": + break process["calls"].begin_reporting() # required to convert object to list process["calls"] = list(process["calls"]) @@ -702,11 +826,11 @@ def __init__(self, task, results, reprocess=False): self.analysis_path = os.path.join(CUCKOO_ROOT, "storage", "analyses", str(task["id"])) self.cfg = reporting_cfg self.reprocess = reprocess + self.reporting_errors = 0 def process(self, module): """Run a single reporting module. @param module: reporting module. - @param results: results results from analysis. """ # Initialize current reporting module. try: @@ -734,7 +858,7 @@ def process(self, module): current.set_path(self.analysis_path) # Give it the analysis task object. current.set_task(self.task) - # Give it the the relevant reporting.conf section. + # Give it the relevant reporting.conf section. current.set_options(options) # Load the content of the analysis.conf file. current.cfg = AnalysisConfig(current.conf_path) @@ -753,14 +877,18 @@ def process(self, module): except CuckooDependencyError as e: log.warning('The reporting module "%s" has missing dependencies: %s', current.__class__.__name__, e) + self.reporting_errors += 1 except CuckooReportError as e: log.warning('The reporting module "%s" returned the following error: %s', current.__class__.__name__, e) + self.reporting_errors += 1 except Exception as e: log.exception('Failed to run the reporting module "%s": %s', current.__class__.__name__, e) + self.reporting_errors += 1 def run(self): """Generates all reports. - @raise CuckooReportError: if a report module fails. + + @return a count of the reporting module errors. """ # In every reporting module you can specify a numeric value that # represents at which position that module should be executed among @@ -778,9 +906,31 @@ def run(self): self.process(module) else: log.info("No reporting modules loaded") + return self.reporting_errors class GetFeeds: + """ + Feed Download and Parsing Engine + + It then saves the parsed feed data to CUCKOO_ROOT/feeds/. + + Attributes: + results (dict): A dictionary to store the results of the feed processing. + + Methods: + process(feed): + Processes a feed module by downloading data, modifying, and parsing it. + Args: + feed: The feed module to update and process. + Returns: + None + + run(): + Runs all enabled feed modules. + Returns: + None + """ """Feed Download and Parsing Engine This class handles the downloading and modification of feed modules. 
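The import_package change above skips any signature whose module basename appears in the community blocklist before it is imported. A minimal, self-contained sketch of that filtering idea follows, assuming a blocklist dict shaped like {"signatures": ["modules/signatures/foo.py", ...]}; the helper name iter_allowed_modules and that dict shape are illustrative, not CAPE's actual API:

```python
import os
import pkgutil


def iter_allowed_modules(package, blocklist: dict):
    """Yield importable (non-package) module names from `package`,
    skipping any module whose basename is blocklisted."""
    banned = {os.path.basename(p).removesuffix(".py") for p in blocklist.get("signatures", [])}
    prefix = f"{package.__name__}."
    for _, name, ispkg in pkgutil.iter_modules(package.__path__, prefix):
        if ispkg:
            # sub-packages are handled separately, as in import_package
            continue
        if name.rsplit(".", 1)[-1] in banned:
            # blocklisted signature: skip the import entirely
            continue
        yield name
```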
diff --git a/lib/cuckoo/core/resultserver.py b/lib/cuckoo/core/resultserver.py index 4d952001a9e..ae560bc050b 100644 --- a/lib/cuckoo/core/resultserver.py +++ b/lib/cuckoo/core/resultserver.py @@ -172,6 +172,10 @@ def discard(self): while _ := self.read(): pass + def __del__(self): + if self.sock: + self.sock.close() + class WriteLimiter: def __init__(self, fd, remain): @@ -197,6 +201,10 @@ def write(self, buf): def flush(self): self.fd.flush() + def __del__(self): + if self.fd: + self.fd.close() + class FileUpload(ProtocolHandler): def init(self): @@ -205,6 +213,10 @@ def init(self): self.fd = None self.filelog = os.path.join(self.handler.storagepath, "files.json") + def __del__(self): + if self.fd: + self.fd.close() + def handle(self): # Read until newline for file path, e.g., # shots/0001.jpg or files/9498687557/libcurl-4.dll.bin @@ -299,6 +311,9 @@ def handle(self): if self.fd: return self.handler.copy_to_fd(self.fd) + def __del__(self): + if self.fd: + self.fd.close() TYPECONVERTERS = {"h": lambda v: f"0x{default_converter(v):08x}", "p": lambda v: f"0x{default_converter(v):08x}"} @@ -383,7 +398,6 @@ def parse_message(self, buffer): argdict = {argnames[i]: converters[i](arg) for i, arg in enumerate(args)} if apiname == "__process__": - # pid = argdict["ProcessIdentifier"] ppid = argdict["ParentProcessIdentifier"] modulepath = argdict["ModulePath"] @@ -406,6 +420,10 @@ def handle(self): self.handler.sock.settimeout(None) return self.handler.copy_to_fd(self.fd) + def __del__(self): + if self.fd: + self.fd.close() + class GeventResultServerWorker(gevent.server.StreamServer): """The new ResultServer, providing a huge performance boost as well as @@ -435,6 +453,9 @@ def __init__(self, *args, **kwargs): # Store running handlers for task_id self.handlers = {} + self.log_start_count = 0 + self.log_stop_count = 0 + self.task_id = None def do_run(self): self.serve_forever() @@ -458,13 +479,17 @@ def del_task(self, task_id, ipaddr): for ctx in ctxs: log.debug("Task #%s: Cancel %s", task_id, ctx) ctx.cancel() + # ToDo just reinforce cleanup + task_log_stop(task_id) + + task_log_stop(task_id) def create_folders(self): for folder in list(RESULT_UPLOADABLE) + [b"logs"]: try: create_folder(self.storagepath, folder=folder.decode()) except Exception as e: - log.error(e, exc_info=True) + log.exception(e) # ToDo # except CuckooOperationalError as e: # log.error("Unable to create folder %s", folder) @@ -481,6 +506,7 @@ def handle(self, sock, addr): log.warning("ResultServer did not have a task for IP %s", ipaddr) return + self.task_id = task_id self.storagepath = os.path.join(CUCKOO_ROOT, "storage", "analyses", str(task_id)) # Create all missing folders for this analysis. 
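The __del__ hooks added to the resultserver protocol handlers above all follow the same defensive pattern: close the socket or file descriptor only if it is still set. A minimal standalone sketch of that pattern (the FileOwner class below is illustrative, not part of CAPE):

```python
class FileOwner:
    """Wraps a file handle and releases it even if the owner is garbage collected."""

    def __init__(self, path: str):
        self.fd = open(path, "ab")

    def close(self):
        if self.fd:
            self.fd.close()
            self.fd = None

    def __del__(self):
        # __del__ is only a best-effort safety net; callers should still close
        # explicitly, since finalizers may run late or not at all.
        self.close()
```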
@@ -488,10 +514,13 @@ def handle(self, sock, addr): ctx = HandlerContext(task_id, self.storagepath, sock) task_log_start(task_id) + self.log_start_count += 1 try: try: + log.debug("Task #%s: Negotiation started", task_id) protocol = self.negotiate_protocol(task_id, ctx) except EOFError: + log.debug("Task #%s: Negotiation failed on start", task_id) return # Registering the context allows us to abort the handler by @@ -503,15 +532,18 @@ def handle(self, sock, addr): # been registered if self.tasks.get(ipaddr) != task_id: log.warning("Task #%s for IP %s was cancelled during negotiation", task_id, ipaddr) + log.debug("Task #%s: Negotiation failed inside task_mgmt_lock manager", task_id) return s = self.handlers.setdefault(task_id, set()) s.add(ctx) try: with protocol: + log.debug("Task #%s: Negotiation succeeded", task_id) protocol.handle() + log.debug("Task #%s: Negotiation succeeded handled", task_id) except CuckooOperationalError as e: - log.error(e, exc_info=True) + log.exception(e) finally: with self.task_mgmt_lock: s.discard(ctx) @@ -520,7 +552,11 @@ def handle(self, sock, addr): # This is usually not a good sign log.warning("Task #%s with protocol %s has unprocessed data before getting disconnected", task_id, protocol) finally: + log.debug("Task #%s: Negotiation finished", task_id) task_log_stop(task_id) + self.log_stop_count += 1 + + log.debug("Task #%s: Connection closed. Start count %d, stop count %d", task_id, self.log_start_count, self.log_stop_count) def negotiate_protocol(self, task_id, ctx): header = ctx.read_newline() diff --git a/lib/cuckoo/core/scheduler.py b/lib/cuckoo/core/scheduler.py index 8698ec52a86..aa00a7f5d64 100644 --- a/lib/cuckoo/core/scheduler.py +++ b/lib/cuckoo/core/scheduler.py @@ -8,6 +8,7 @@ import os import queue import signal +import sys import threading import time from collections import defaultdict @@ -64,6 +65,8 @@ def __init__(self, maxcount=0): self.analysis_threads: List[AnalysisManager] = [] self.analyzing_categories, categories_need_VM = load_categories() self.machinery_manager = MachineryManager() if categories_need_VM else None + if self.cfg.cuckoo.get("task_timeout", False): + self.next_timeout_time = time.time() + self.cfg.cuckoo.get("task_timeout_scan_interval", 30) log.info("Creating scheduler with max_analysis_count=%s", self.max_analysis_count or "unlimited") @property @@ -98,6 +101,12 @@ def do_main_loop_work(self, error_queue: queue.Queue) -> SchedulerCycleDelay: if self.is_short_on_disk_space(): return SchedulerCycleDelay.LOW_DISK_SPACE + if self.cfg.cuckoo.get("task_timeout", False): + if self.next_timeout_time < time.time(): + self.next_timeout_time = time.time() + self.cfg.cuckoo.get("task_timeout_scan_interval", 30) + with self.db.session.begin(): + self.db.check_tasks_timeout(self.cfg.cuckoo.get("task_pending_timeout", 0)) + analysis_manager: Optional[AnalysisManager] = None with self.db.session.begin(): max_machines_reached = False @@ -242,12 +251,7 @@ def is_short_on_disk_space(self): # Resolve the full base path to the analysis folder, just in # case somebody decides to make a symbolic link out of it. dir_path = os.path.join(CUCKOO_ROOT, "storage", "analyses") - need_space, space_available = free_space_monitor(dir_path, return_value=True, analysis=True) - if need_space: - log.error( - "Not enough free disk space! (Only %d MB!). 
You can change limits it in cuckoo.conf -> freespace", space_available - ) - return need_space + free_space_monitor(dir_path, analysis=True) @contextlib.contextmanager def loop_signals(self): @@ -275,6 +279,8 @@ def signal_handler(self, signum, frame): elif sig == signal.SIGUSR1: log.info("received signal '%s', pausing new detonations, running detonations will continue until completion", sig.name) self.loop_state = LoopState.PAUSED + if self.cfg.cuckoo.ignore_signals: + sys.exit() elif sig == signal.SIGUSR2: log.info("received signal '%s', resuming detonations", sig.name) self.loop_state = LoopState.RUNNING @@ -311,6 +317,8 @@ def start(self): def stop(self): """Set loop state to stopping.""" self.loop_state = LoopState.STOPPING + if self.cfg.cuckoo.ignore_signals: + sys.exit() def thr_periodic_log(self, oneshot=False): # Ordinarily, this is the entry-point for a child thread. The oneshot parameter makes diff --git a/lib/cuckoo/core/startup.py b/lib/cuckoo/core/startup.py index 0cef0f86ed6..71d29016fe1 100644 --- a/lib/cuckoo/core/startup.py +++ b/lib/cuckoo/core/startup.py @@ -17,6 +17,25 @@ # Private import custom.signatures + +try: + import custom.signatures.all +except ImportError: + HAS_CUSTOM_SIGNATURES_ALL = False +else: + HAS_CUSTOM_SIGNATURES_ALL = True +try: + import custom.signatures.linux +except ImportError: + HAS_CUSTOM_SIGNATURES_LINUX = False +else: + HAS_CUSTOM_SIGNATURES_LINUX = True +try: + import custom.signatures.windows +except ImportError: + HAS_CUSTOM_SIGNATURES_WINDOWS = False +else: + HAS_CUSTOM_SIGNATURES_WINDOWS = True import modules.auxiliary import modules.feeds import modules.processing @@ -99,15 +118,16 @@ def check_webgui_mongo(): # with large amounts of data. # Note: Silently ignores the creation if the index already exists. mongo_create_index("analysis", "info.id", name="info.id_1") - # mongo_create_index([("target.file.sha256", TEXT)], name="target_sha256") - # We performs a lot of SHA256 hash lookup so we need this index - # mongo_create_index( - # "analysis", - # [("target.file.sha256", TEXT), ("dropped.sha256", TEXT), ("procdump.sha256", TEXT), ("CAPE.payloads.sha256", TEXT)], - # name="ALL_SHA256", - # ) + # Some indexes that can be useful for some users + mongo_create_index("files", "md5", name="file_md5") mongo_create_index("files", [("_task_ids", 1)]) + # side indexes as ideas + """ + mongo_create_index("analysis", "detections", name="detections_1") + mongo_create_index("analysis", "target.file.name", name="name_1") + """ + elif repconf.elasticsearchdb.enabled: # ToDo add check pass @@ -164,7 +184,7 @@ def emit(self, record): colored.msg = red(record.msg) else: # Hack for pymongo.logger.LogMessage - if type(record.msg) != "str": + if not isinstance(record.msg, str): record.msg = str(record.msg) if "analysis procedure completed" in record.msg: @@ -180,9 +200,7 @@ def check_linux_dist(): with suppress(AttributeError): platform_details = platform.dist() if platform_details[0] != "Ubuntu" and platform_details[1] not in ubuntu_versions: - log.info( - f"[!] You are using NOT supported Linux distribution by devs! Any issue report is invalid! We only support Ubuntu LTS {ubuntu_versions}" - ) + log.info("[!] You are using NOT supported Linux distribution by devs! Any issue report is invalid! 
We only support Ubuntu LTS %s", ubuntu_versions) def init_logging(level: int): @@ -272,6 +290,12 @@ def init_modules(): import_package(modules.signatures.linux) # Import all private signatures import_package(custom.signatures) + if HAS_CUSTOM_SIGNATURES_ALL: + import_package(custom.signatures.all) + if HAS_CUSTOM_SIGNATURES_LINUX: + import_package(custom.signatures.linux) + if HAS_CUSTOM_SIGNATURES_WINDOWS: + import_package(custom.signatures.windows) if len(os.listdir(os.path.join(CUCKOO_ROOT, "modules", "signatures"))) < 5: log.warning("Suggestion: looks like you didn't install community, execute: poetry run python utils/community.py -h") # Import all reporting modules. @@ -537,4 +561,4 @@ def check_vms_n_resultserver_networking(): vm_ip, vm_rs = network # is there are better way to check networkrange without range CIDR? if not resultserver_block.startswith(vm_ip) or (vm_rs and not vm_rs.startswith(vm_ip)): - log.error(f"Your resultserver and VM:{vm} are in different nework ranges. This might give you: CuckooDeadMachine") + log.error("Your resultserver and VM: %s are in different nework ranges. This might give you: CuckooDeadMachine", vm) diff --git a/lib/downloaders/__init__.py b/lib/downloaders/__init__.py new file mode 100644 index 00000000000..c81c3f7bde1 --- /dev/null +++ b/lib/downloaders/__init__.py @@ -0,0 +1,83 @@ + +import logging +import os + +from lib.cuckoo.common.config import Config +from lib.cuckoo.common.constants import CUCKOO_ROOT +from lib.cuckoo.common.load_extra_modules import load_downloaders +from lib.cuckoo.common.path_utils import path_exists, path_mkdir + +cfg = Config() +integrations_cfg = Config("integrations") +log = logging.getLogger(__name__) + + +class Downloaders(object): + """ + A class to manage and utilize various downloaders for downloading samples. + + Attributes: + downloaders (dict): A dictionary of available downloaders. + downloaders_order (list): A list of downloaders in the order specified by the configuration. + destination_folder (str): The folder where downloaded samples will be stored. + + Methods: + __init__(destination_folder=None): + Initializes the Downloaders class with the specified destination folder. + + download(hash, apikey=None): + Attempts to download a sample using the available downloaders in the specified order. + Returns the sample and the downloader's name if successful, otherwise returns False, False. + """ + def __init__(self, destination_folder=None): + self.downloaders = load_downloaders(CUCKOO_ROOT) + if integrations_cfg.downloaders.order: + order_list = [item.strip() for item in integrations_cfg.downloaders.order.split(',')] + self.downloaders_order = [k for k in order_list if k in self.downloaders.keys()] + else: + self.downloaders_order = list(self.downloaders.keys()) + + if destination_folder: + self.destination_folder = destination_folder + else: + self.destination_folder = os.path.join(cfg.cuckoo.tmppath, "cape-external") + if not path_exists(self.destination_folder): + path_mkdir(self.destination_folder, exist_ok=True) + + def download(self, hash, apikey: str = None): + """ + Attempts to download a sample using the available downloaders in the specified order. + + Args: + hash (str): The hash of the sample to be downloaded. + apikey (str, optional): The API key to be used for the downloaders that require authentication. Defaults to None. + + Returns: + tuple: A tuple containing the downloaded sample and the name of the downloader service used. + If no sample is downloaded, returns (False, False). 
+ + Raises: + Exception: If an error occurs during the download process, it is logged and the next downloader is attempted. + """ + sample = False + for service in self.downloaders_order: + try: + if self.downloaders[service].is_supported(hash, apikey): + sample = self.downloaders[service].download(hash, apikey) + if sample: + return sample, self.downloaders[service].__name__ + else: + log.error("%s is not a valid hash for %s", hash, service) + except Exception as e: + log.error("Error downloading sample from %s: %s", service, e) + if not sample: + return False, False + +if __name__ == "__main__": + import sys + dl = Downloaders() + sample, service = dl.download(sys.argv[1]) + if sample: + print("Downloaded sample from %s" % service) + with open(sys.argv[1], "wb") as f: + f.write(sample) diff --git a/lib/downloaders/malwarebazaar.py b/lib/downloaders/malwarebazaar.py new file mode 100644 index 00000000000..63915bef584 --- /dev/null +++ b/lib/downloaders/malwarebazaar.py @@ -0,0 +1,77 @@ +import io +import logging + +import pyzipper +import requests + +from lib.cuckoo.common.config import Config + +integrations_cfg = Config("integrations") + +log = logging.getLogger(__name__) + +_bazaar_map = { + 32: "md5_hash", + 40: "sha1_hash", + 64: "sha256_hash", +} + +def is_supported(hash: str, apikey: str = "") -> bool: + """ + Checks if the hash is supported by the MalwareBazaar service. + + Args: + hash (str): The hash to check. + + Returns: + bool: True if the hash is supported by MalwareBazaar, False otherwise. + """ + + if len(hash) not in (32, 40, 64): + log.error("%s is not a valid hash for MalwareBazaar", hash) + return False + elif not integrations_cfg.abusech.apikey and not apikey: + log.error("MalwareBazaar API key not configured. Configure it in integrations.conf") + return False + + return True + + +def download(hash: str, apikey: str = "") -> bytes: + """ + Downloads a malware sample from MalwareBazaar using the provided hash and API key. + + Args: + hash (str): The hash of the malware sample to download. Can be MD5, SHA1, or SHA256. + apikey (str, optional): The API key for accessing MalwareBazaar. Defaults to an empty string. + + Returns: + bytes: The downloaded malware sample as bytes. Returns an empty byte string if the download fails or the file is not found. 
+ """ + + sample = b"" + try: + data = requests.post( + "https://mb-api.abuse.ch/api/v1/", + data={"query": "get_file", _bazaar_map[len(hash)]: hash}, + headers={"API-KEY": integrations_cfg.abusech.apikey or apikey, "User-Agent": "CAPE Sandbox"}, + ) + if data.ok: + try: + if isinstance(data.content, bytes): + if b"file_not_found" in data.content[:50]: + return sample + tmp_sample = io.BytesIO(data.content) + elif isinstance(data.content, io.BytesIO): + tmp_sample = data.content + else: + return sample + with pyzipper.AESZipFile(tmp_sample) as zf: + zf.setpassword(b"infected") + sample = zf.read(zf.namelist()[0]) + except pyzipper.zipfile.BadZipFile: + log.error("malwarebazaar_dl: %s", str(data.content[:100])) + except Exception as e: + log.exception(e) + + return sample diff --git a/lib/downloaders/virustotal.py b/lib/downloaders/virustotal.py new file mode 100644 index 00000000000..5eae9995f72 --- /dev/null +++ b/lib/downloaders/virustotal.py @@ -0,0 +1,72 @@ +import logging + +import requests + +from lib.cuckoo.common.config import Config + +integrations_cfg = Config("integrations") +log = logging.getLogger(__name__) + + +def is_supported(hash: str, apikey: str) -> bool: + """ + Checks if the hash is supported by the VirusTotal service. + + Args: + hash (str): The hash to check. + + Returns: + bool: True if the hash is supported by VirusTotal, False otherwise. + """ + if len(hash) not in (32, 40, 64): + log.error("%s is not a valid hash for VirusTotal", hash) + return False + elif not integrations_cfg.virustotal.apikey and not apikey: + log.error("VirusTotal API key not configured. Configure it in integrations.conf") + return False + + return True + + +def download(hash: str, apikey: str=None) -> bytes: + """ + Downloads a file from VirusTotal using the provided hash and API key. + + Args: + hash (str): The hash of the file to download. + apikey (str, optional): The VirusTotal API key. If not provided, it will use the key from the integrations configuration. + + Returns: + bytes: The content of the downloaded file as bytes. Returns an empty byte string if the download fails. + + Raises: + requests.exceptions.RequestException: If there is an issue with the HTTP request. + + Logs: + Various error messages depending on the HTTP response status code: + - 403: Invalid or unauthorized API key. + - 404: Hash not found on VirusTotal. + - Other: General download failure. 
+ """ + + url = f"https://www.virustotal.com/api/v3/files/{hash.lower()}/download" + sample = b"" + try: + r = requests.get(url, headers={"x-apikey": integrations_cfg.virustotal.apikey or apikey}, verify=False) + except requests.exceptions.RequestException as e: + logging.error(e) + return + if ( + r.status_code == 200 + and r.content != b"Hash Not Present" + and b"The request requires higher privileges than provided by the access token" not in r.content + ): + sample = r.content + elif r.status_code == 403: + log.error("API key provided is not a valid VirusTotal key or is not authorized for downloads") + elif r.status_code == 404: + log.error("Hash not found on VirusTotal") + else: + log.error("Was impossible to download from VirusTotal") + + return sample diff --git a/modules/auxiliary/AzSniffer.py b/modules/auxiliary/AzSniffer.py index 9f14eafebd1..68538834813 100644 --- a/modules/auxiliary/AzSniffer.py +++ b/modules/auxiliary/AzSniffer.py @@ -95,13 +95,13 @@ def create_packet_capture(self, custom_filters): result = poller.result() self.blob_url = result.storage_location.storage_path - log.info(f"Started Azure Network Watcher packet capture: {self.capture_name}") - log.debug(f"Blob URL for packet capture: {self.blob_url}") + log.info("Started Azure Network Watcher packet capture: %s",self.capture_name) + log.debug("Blob URL for packet capture: %s", self.blob_url) except AzureError as e: - log.error(f"Azure error occurred while creating packet capture: {str(e)}") + log.error("Azure error occurred while creating packet capture: %s", str(e)) raise except Exception as e: - log.error(f"Unexpected error occurred while creating packet capture: {str(e)}") + log.error("Unexpected error occurred while creating packet capture: %s", str(e)) raise def stop(self): @@ -124,11 +124,11 @@ def stop_packet_capture(self): packet_capture_name=self.capture_name, ) poller.result() - log.info(f"Stopped Azure Network Watcher packet capture: {self.capture_name}") + log.info("Stopped Azure Network Watcher packet capture: %s", self.capture_name) except AzureError as e: - log.error(f"Azure error occurred while stopping packet capture: {str(e)}") + log.error("Azure error occurred while stopping packet capture: %s", str(e)) except Exception as e: - log.error(f"Unexpected error occurred while stopping packet capture: {str(e)}") + log.error("Unexpected error occurred while stopping packet capture: %s", str(e)) def download_packet_capture(self): if not self.blob_url: @@ -147,22 +147,22 @@ def download_packet_capture(self): blob_client = self.blob_service_client.get_blob_client(container=container_name, blob=blob_name) self._download_to_file(blob_client, primary_output_file) - log.info(f"Downloaded packet capture for task {self.task.id} to {primary_output_file}") + log.info("Downloaded packet capture for task %s to %s", str(self.task.id), primary_output_file) self.convert_cap_to_pcap(primary_output_file) except AzureError as e: - log.error(f"Azure error occurred while downloading packet capture: {str(e)}") + log.error("Azure error occurred while downloading packet capture: %s", str(e)) self._try_fallback_download(blob_client, fallback_output_file) except Exception as e: - log.error(f"Unexpected error occurred while downloading packet capture: {str(e)}") + log.error("Unexpected error occurred while downloading packet capture: %s", str(e)) self._try_fallback_download(blob_client, fallback_output_file) def _try_fallback_download(self, blob_client, fallback_output_file): try: self._download_to_file(blob_client, 
fallback_output_file) - log.info(f"Downloaded packet capture for task {self.task.id} to fallback location {fallback_output_file}") + log.info("Downloaded packet capture for task %s to fallback location %s", self.task.id, fallback_output_file) self.convert_cap_to_pcap(fallback_output_file) except Exception as e: - log.error(f"Failed to download packet capture to fallback location: {str(e)}") + log.error("Failed to download packet capture to fallback location: %s", str(e)) def _download_to_file(self, blob_client, output_file): os.makedirs(os.path.dirname(output_file), exist_ok=True) @@ -178,12 +178,12 @@ def convert_cap_to_pcap(self, cap_file_path): try: os.makedirs(output_dir, exist_ok=True) subprocess.run(convert_cmd, check=True, capture_output=True, text=True) - log.info(f"Converted .cap file to .pcap: {pcap_file_path}") + log.info("Converted .cap file to .pcap: %s", pcap_file_path) os.remove(cap_file_path) # Remove the original .cap file except subprocess.CalledProcessError as e: - log.error(f"Failed to convert .cap file to .pcap: {e.stderr}") + log.error("Failed to convert .cap file to .pcap: %s", str(e.stderr)) except OSError as e: - log.error(f"Failed to create directory or remove .cap file: {e}") + log.error("Failed to create directory or remove .cap file: %s", str(e)) def delete_packet_capture(self): try: @@ -193,11 +193,11 @@ def delete_packet_capture(self): packet_capture_name=self.capture_name, ) poller.result() - log.info(f"Deleted Azure Network Watcher packet capture: {self.capture_name}") + log.info("Deleted Azure Network Watcher packet capture: %s", self.capture_name) except AzureError as e: - log.error(f"Azure error occurred while deleting packet capture: {str(e)}") + log.error("Azure error occurred while deleting packet capture: %s", str(e)) except Exception as e: - log.error(f"Unexpected error occurred while deleting packet capture: {str(e)}") + log.error("Unexpected error occurred while deleting packet capture: %s", str(e)) def set_task(self, task): self.task = task diff --git a/modules/auxiliary/Mitmdump.py b/modules/auxiliary/Mitmdump.py index 68cf1ecc12c..6c232392a7f 100644 --- a/modules/auxiliary/Mitmdump.py +++ b/modules/auxiliary/Mitmdump.py @@ -4,9 +4,31 @@ # This module runs mitmdump to get a HAR file # mitmdump is behind mitmproxy project https://mitmproxy.org/ +# NOTE /opt/mitmproxy/mitmdump_wrapper.sh +# is needed to write the mitmdump pidfile so the process can be terminated on exit. +""" +#!/bin/bash +echo $$ > mitmdump.pid +# exec full args +exec $@ +""" + +# NOTE mitmdump is launched in a net namespace, +# root access is needed to run in another namespace +# the workaround for now is to allow sudo, which is a security issue +# an alternative could be to use the rooter module +# in /etc/sudoers.d/ip_netns, add a line like +""" +cape ALL=NOPASSWD: /usr/sbin/ip netns exec * /usr/bin/sudo -u cape * +""" + + + import logging import os import socket +import time +import signal import subprocess from threading import Thread @@ -19,6 +41,52 @@ log = logging.getLogger(__name__) +def read_pid_from_file(pid_file_path): + """ + Reads a process ID (PID) from a given file. + + Args: + pid_file_path (str): The path to the PID file. + + Returns: + int or None: The PID if successfully read, or None if an error occurs.
+ """ + try: + with open(pid_file_path, 'r') as f: + pid_str = f.read().strip() + pid = int(pid_str) + return pid + except FileNotFoundError: + log.error("Error: PID file not found at: %s", pid_file_path) + return None + except ValueError: + log.error("Error: Invalid PID format in: %s", pid_file_path) + return None + except Exception as e: + log.error("An unexpected error occurred: %s", e) + return None + +def wait_for_pid_exit(pid, timeout=None, poll_interval=1): + """ + Waits for a process with the given PID to exit. + + Args: + pid (int): The process ID to wait for. + timeout (int, optional): The maximum time to wait in seconds. Defaults to None (wait indefinitely). + poll_interval (int, optional): The interval in seconds to poll for the process status. Defaults to 1 second. + + Returns: + bool: True if the process exited within the timeout, False otherwise. + """ + start_time = time.time() + while True: + try: + os.kill(pid, 0) # Send signal 0 to check if the process exists + if timeout is not None and time.time() - start_time > timeout: + return False # Timeout reached + time.sleep(poll_interval) + except OSError: + return True # Process does not exist (exited) class Mitmdump(Auxiliary): """Module for generating HAR with Mitmdump.""" @@ -57,19 +125,33 @@ def __init__(self, task, machine): self.host_port = self._get_unused_port() self.mitmdump_path = os.path.join(CUCKOO_ROOT, "storage", "analyses", str(self.task.id), "mitmdump") + def _get_netns(self): + for option in self.task.options.split(","): + if option.startswith("netns="): + _key, value = option.split("=") + return value + return '' + def stop(self): """Set stop mitmdump capture.""" self.do_run = False - if self.proc and self.proc.poll() is None: - self.proc.terminate() - self.proc.wait() - log.info("Stopping mitmdump") + log.info("MitmdumpThread.stop task.options %s", self.task.options) + + pidfile_path = os.path.join(self.mitmdump_path, "mitmdump.pid") + pid = read_pid_from_file(pidfile_path) + if pid: + log.info("MitmdumpThread.stop pid %s", pid) + # must directly kill subprocess since popen does sudo. + os.kill(pid, signal.SIGTERM) + wait_for_pid_exit(pid, 15, 1) try: - rooter("disable_mitmdump", self.host_iface, self.machine.ip, self.host_port) + netns = self._get_netns() + rooter("disable_mitmdump", self.host_iface, self.machine.ip, self.host_port, netns) except subprocess.CalledProcessError as e: log.error("Failed to execute firewall rules: %s", e) + log.info("MitmdumpThread.stop finished") def run(self): """Core function to the manage the module""" @@ -77,48 +159,63 @@ def run(self): log.info("Exiting mitmdump. No parameter received.") return - if self.do_run: - if not self.host_port: - log.exception("All ports in range are in use") - return - - try: - rooter("enable_mitmdump", self.host_iface, self.machine.ip, self.host_port) - except subprocess.CalledProcessError as e: - log.error("Failed to execute firewall rules: %s", e) - - try: - mitmdump_args = [] - os.makedirs(self.mitmdump_path, exist_ok=True) - file_path = os.path.join(self.mitmdump_path, "dump.har") - mitmdump_args.extend( - [ - self.mitmdump_bin, - "-q", - "--listen-host", - self.host_ip, - "-p", - str(self.host_port), - "--set", - "hardump=", - file_path, - ] - ) - mitmdump_args[-2:] = [ - "".join(mitmdump_args[-2:]) - ] # concatenate the last two arguments, otherwise the HAR file will not be created. 
- self.proc = subprocess.Popen(mitmdump_args, stdout=None, stderr=None, shell=False) - except (OSError, ValueError): - log.exception("Failed to mitmdump (host=%s, port=%s, dump_path=%s)", self.host_ip, self.host_port, file_path) - return - - log.info( - "Started mitmdump with PID %d (host=%s, port=%s, dump_path=%s)", - self.proc.pid, - self.host_ip, - self.host_port, - file_path, + log.info("MitmdumpThread.run task.options %s", self.task.options) + + if not self.do_run: + return + + if not self.host_port: + log.exception("All ports in range are in use") + return + + netns = self._get_netns() + + try: + rooter("enable_mitmdump", self.host_iface, self.machine.ip, self.host_port, netns) + except subprocess.CalledProcessError as e: + log.error("Failed to execute firewall rules: %s", e) + + try: + mitmdump_args = [] + listen_host = self.host_ip + if netns: + log.info("has netns: %s", netns) + listen_host = "0.0.0.0" # listen in net namespace + # sudo for ip netns exec, then sudo back to cape + mitmdump_args.extend([ + "/usr/bin/sudo", "ip", "netns", "exec", netns, + "/usr/bin/sudo", "-u", "cape"]) + + os.makedirs(self.mitmdump_path, exist_ok=True) + file_path = os.path.join(self.mitmdump_path, "dump.har") + mitmdump_args.extend( + [ "/opt/mitmproxy/mitmdump_wrapper.sh", + self.mitmdump_bin, + "-q", + "--listen-host", + listen_host, + "-p", + str(self.host_port), + "--set", + "hardump=", + file_path, + ] ) + mitmdump_args[-2:] = [ + "".join(mitmdump_args[-2:]) + ] # concatenate the last two arguments, otherwise the HAR file will not be created. + self.proc = subprocess.Popen(mitmdump_args, stdout=None, stderr=None, shell=False, cwd=self.mitmdump_path) + except (OSError, ValueError): + log.exception("Failed to mitmdump (host=%s, port=%s, dump_path=%s)", self.host_ip, self.host_port, file_path) + return + + log.info( + "Started mitmdump with PID %d (host=%s, port=%s, dump_path=%s)", + self.proc.pid, + self.host_ip, + self.host_port, + file_path, + ) def _get_unused_port(self) -> str | None: """Return the first unused TCP port from the set.""" diff --git a/modules/auxiliary/PolarProxy.py b/modules/auxiliary/PolarProxy.py new file mode 100644 index 00000000000..cfbc3d93afa --- /dev/null +++ b/modules/auxiliary/PolarProxy.py @@ -0,0 +1,242 @@ +import json +import logging +import os +import socket +import subprocess +import re + +from contextlib import closing +from threading import Thread + +from lib.cuckoo.common.abstracts import Auxiliary +from lib.cuckoo.common.config import Config +from lib.cuckoo.common.constants import CUCKOO_ROOT +from lib.cuckoo.core.rooter import rooter + +log = logging.getLogger(__name__) + +polarproxy = Config("polarproxy") +routing = Config("routing") + +class PolarProxy(Auxiliary): + """Module for generating PCAP with PolarProxy.""" + + def __init__(self): + Auxiliary.__init__(self) + Thread.__init__(self) + log.info("PolarProxy module loaded") + self.polarproxy_thread = None + + def start(self): + """Start PolarProxy in a separate thread.""" + + self.polarproxy_thread = PolarProxyThread(self.task, self.machine) + self.polarproxy_thread.start() + return True + + def stop(self): + """Stop PolarProxy capture thread.""" + if self.polarproxy_thread: + self.polarproxy_thread.stop() + + +class PolarProxyThread(Thread): + """Thread responsible for control PolarProxy service for each analysis.""" + + def __init__(self, task, machine): + Thread.__init__(self) + self.task = task + self.storage_dir = os.path.join(CUCKOO_ROOT, "storage", "analyses", str(self.task.id), "polarproxy") 
+ self.machine = machine + self.proc = None + self.log_file = None + self.pcap = None + self.do_run = True + self.host_ip = polarproxy.cfg.get("host") + self.host_iface = polarproxy.cfg.get("interface") + self.polar_path = polarproxy.cfg.get("bin") + self.cert = polarproxy.cfg.get("cert") + self.password = polarproxy.cfg.get("password") + self.bypass_domains = polarproxy.cfg.get("bypass_list") + self.block_domains = polarproxy.cfg.get("block_list") + self.ruleset = os.path.join(self.storage_dir, "ruleset.json") + self.tlsport = 443 + self.listen_port = self._get_unused_port() + + def _get_unused_port(self) -> int | None: + """Return an unused TCP port assigned by the OS.""" + with closing(socket.socket(socket.AF_INET, socket.SOCK_STREAM)) as s: + s.bind(("", 0)) + s.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1) + return s.getsockname()[1] + return None + + def generate_ruleset(self): + """Generate PolarProxy TLS firewall ruleset JSON file.""" + ruleset_json = { + "name": "PolarProxy ruleset for CAPEv2.", + "version": "1.0", + "description": "A curated ruleset generated on the fly to block/bypass specific domain patterns AND handle termination proxying to InetSim.", + "rules": [], + } + if self.task.route == "inetsim": + # It does not appear feasible to redirect packets from client destined for port 443 to + # a local service listening on port XYZ _AND_ have iptables DNAT that same packet to + # inetsim. After PREROUTING to localhost:XYZ, iptables briefly "loses track" of the + # packet, so when it comes back out of PolarProxy and hits the OUTPUT table, the + # source IP is localhost and iptables cannot distinguish if the packet came from the + # host or has been proxied. This means the packet also cannot be masqueraded because + # it has not been forwarded, it has been proxied. Redirecting all 443 from localhost + # to inetsim would be very unpleasant for the host's HTTPS stack. So, PolarProxy is + # made a termination proxy and forwards the decrypted HTTP to inetsim. + # + # Using this ruleset approach instead of `--terminate --connect` is safer because the + # default action type "inspect" will clash with these flags and try to decrypt already + # decrypted traffic. + ruleset_json["default"] = { + "action": {"type": "terminate", "target": f"{routing.inetsim.server}:80"}, + "description": "Terminate TLS and forward to InetSim server.", + } + else: + ruleset_json["default"] = { + "action": {"type": "inspect"}, + "description": "Inspect any traffic that is not bypassed or blocked.", + } + + # If block domains are specified in polarproxy.conf, add a block rule for each domain within. + if self.block_domains: + with open(self.block_domains, "r") as fh: + domain_regexes = [line.strip() for line in fh.readlines() if line.strip()] + for domain_regex in domain_regexes: + ruleset_json["rules"].append( + {"active": True, "match": {"type": "domain_regex", "expression": domain_regex}, "action": {"type": "block"}} + ) + + # If bypass domains are specified in polarproxy.conf, add a bypass rule for each domain within.
+ if self.bypass_domains: + with open(self.bypass_domains, "r") as fh: + domain_regexes = [line.strip() for line in fh.readlines() if line.strip()] + for domain_regex in domain_regexes: + ruleset_json["rules"].append( + {"active": True, "match": {"type": "domain_regex", "expression": domain_regex}, "action": {"type": "bypass"}} + ) + + with open(self.ruleset, "w") as fh: + json.dump(ruleset_json, fh, indent=2) + + def run(self): + if "polarproxy=" not in self.task.options: + log.info("Exiting polarproxy. No parameter received.") + return + + if self.do_run: + if not self.listen_port: + log.exception("PolarProxy failed to find an available bind port. Bailing...") + return + + # See if user specified a different TLS port to intercept on. + if "tlsport" in self.task.options: + match = re.search(r"tlsport=(\d+)", self.task.options) + if not match: + log.warning("Failed to parse 'tlsport' out of options (%s). Defaulting to %d.", self.task.options, self.tlsport) + else: + self.tlsport = int(match.groups()[0]) + + try: + rooter("polarproxy_enable", self.host_iface, self.machine.ip, str(self.tlsport), str(self.listen_port)) + except subprocess.CalledProcessError as e: + log.exception("Failed to execute firewall rules: %s. Bailing...", e) + return + + log.info("Starting PolarProxy process") + + # Create directory to store pcap and logs. + os.makedirs(self.storage_dir, exist_ok=True) + + # Create ruleset file to bypass/block domains AND terminate proxy to InetSim if applicable + self.generate_ruleset() + + # Specify where to dump decrypted traffic PCAP + self.pcap = os.path.join(self.storage_dir, "tls.pcap") + + # Craft polarproxy command. + polarproxy_args = [ + self.polar_path, + # Provide debugging output incase TLS MITMing fails for some reason. + "-d", + # PCAP to write to. + "-w", + self.pcap, + # Write data to PCAP once a second so it's always there when the proc gets killed. + "--autoflush", + "1", + # Specify CA cert that client VM will be expecting. + "--cacert", + f"load:{self.cert}:{self.password}", + # Always sign generated certs with PP's root CA, even when original server cert isn't trusted. + "--leafcert", + "sign", + "--ruleset", + self.ruleset, + # Allow clients to not provide an SNI + "--nosni", + "nosni.example.org", + # LISTEN-IP IPv4 or IPv6 address to bind proxy to. + # LISTEN-PORT TCP port to bind proxy to. + # DECRYPTED-PORT TCP server port to use for decrypted traffic in PCAP. + # EXTERNAL-PORT TCP port for proxy to connect to. Default value is same as LISTEN-PORT. + "-p", + f"{self.host_ip},{self.listen_port},80,{self.tlsport}", + ] + + # Open up log file handle + self.log_file = open(os.path.join(self.storage_dir, "polarproxy.log"), "w") + + # Log PolarProxy command for safe keeping + self.log_file.write(f"{' '.join(polarproxy_args)}\n") + self.log_file.flush() + + try: + self.proc = subprocess.Popen(polarproxy_args, stdout=self.log_file, stderr=self.log_file, shell=False) + except (OSError, subprocess.SubprocessError) as e: + log.info( + "Failed to start PolarProxy (host=%s, port=%s, dump_path=%s, log=%s). 
Error(%s)", + self.host_ip, + self.listen_port, + self.pcap, + self.log_file, + str(e) + ) + self.log_file.close() + self.log_file = None + return + + log.info( + "Started PolarProxy with PID %d (host=%s, port=%s, dump_path=%s, log=%s)", + self.proc.pid, + self.host_ip, + self.listen_port, + self.pcap, + self.log_file, + ) + + def stop(self): + """Set stop PolarProxy capture.""" + self.do_run = False + + if self.log_file: + self.log_file.close() + self.log_file = None + + try: + if self.proc and self.proc.poll() is None: + log.info("Stopping PolarProxy") + self.proc.terminate() + self.proc.wait() + + except subprocess.SubprocessError as e: + log.error("Failed to shutdown PolarProxy module: %s", e) + finally: + self.proc = None + log.info("Cleaning up PolarProxy iptables rules") + rooter("polarproxy_disable", self.host_iface, self.machine.ip, str(self.tlsport), str(self.listen_port)) diff --git a/modules/auxiliary/QemuScreenshots.py b/modules/auxiliary/QemuScreenshots.py index 5f5499af537..2874ae51e13 100644 --- a/modules/auxiliary/QemuScreenshots.py +++ b/modules/auxiliary/QemuScreenshots.py @@ -12,8 +12,11 @@ from threading import Thread from lib.cuckoo.common.abstracts import Auxiliary +from lib.cuckoo.common.config import Config from lib.cuckoo.common.constants import CUCKOO_ROOT +cfg = Config("auxiliary").get("QemuScreenshots") + log = logging.getLogger(__name__) try: @@ -36,9 +39,9 @@ import libvirt HAVE_LIBVIRT = True -except ImportError as e: +except ImportError: HAVE_LIBVIRT = False - log.error(e) + # log.error(e) SHOT_DELAY = 1 @@ -56,10 +59,12 @@ def __init__(self): Thread.__init__(self) log.info("QEMU screenshots module loaded") self.screenshot_thread = None + self.enabled = cfg.get("enabled") + self.do_run = self.enabled def start(self): """Start capture in a separate thread.""" - self.screenshot_thread = ScreenshotThread(self.task, self.machine) + self.screenshot_thread = ScreenshotThread(self.task, self.machine, self.do_run) self.screenshot_thread.start() return True @@ -72,11 +77,11 @@ def stop(self): class ScreenshotThread(Thread): """Thread responsible for taking screenshots.""" - def __init__(self, task, machine): + def __init__(self, task, machine, do_run): Thread.__init__(self) self.task = task self.machine = machine - self.do_run = True + self.do_run = do_run self.screenshots_path = os.path.join(CUCKOO_ROOT, "storage", "analyses", str(self.task.id), "shots") os.makedirs(self.screenshots_path, exist_ok=True) @@ -102,7 +107,7 @@ def run(self): # log.info(f'Screenshot saved to {file_path}') img_counter += 1 except (IOError, libvirt.libvirtError) as e: - log.error(f"Cannot take screenshot: {e}") + log.error("Cannot take screenshot: %s", str(e)) continue def _take_screenshot(self): diff --git a/modules/machinery/aws.py b/modules/machinery/aws.py index 2bb3fe12cfc..c247a473bfc 100644 --- a/modules/machinery/aws.py +++ b/modules/machinery/aws.py @@ -52,7 +52,7 @@ def _initialize_check(self): self.ec2_machines = {} self.dynamic_machines_sequence = 0 self.dynamic_machines_count = 0 - log.info("connecting to AWS:{}".format(self.options.aws.region_name)) + log.info("connecting to AWS: %s", self.options.aws.region_name) # Performing a check to see if the access and secret keys were passed through the configuration file access_key = getattr(self.options.aws, "aws_access_key_id", None) @@ -81,7 +81,7 @@ def _initialize_check(self): ] ): if self._is_autoscaled(instance): - log.info("Terminating autoscaled instance %s" % instance.id) + log.info("Terminating autoscaled instance %s", 
instance.id) instance.terminate() instance_ids = self._list() @@ -165,7 +165,7 @@ def _allocate_new_machine(self): break except Exception as e: attempts += 1 - log.warning(f"Failed while creating new instance {e}. Trying again.") + log.warning("Failed while creating new instance %s. Trying again.", str(e)) instance = None if instance is None: @@ -200,7 +200,7 @@ def _start_or_create_machines(self): # if no sufficient machines left -> launch a new machines while autoscale_options["autoscale"] and current_available_machines < running_machines_gap: if self.dynamic_machines_count >= dynamic_machines_limit: - log.debug("Reached dynamic machines limit - %d machines" % dynamic_machines_limit) + log.debug("Reached dynamic machines limit - %d machines", dynamic_machines_limit) break if not self._allocate_new_machine(): break @@ -245,10 +245,10 @@ def _status(self, label): status = AWS.ERROR else: status = AWS.ERROR - log.info("instance state: {}".format(status)) + log.info("instance state: %s", status) return status except Exception as e: - log.exception("can't retrieve the status: {}".format(e)) + log.exception("can't retrieve the status: %s", e) return AWS.ERROR """override Machinery method""" @@ -259,8 +259,7 @@ def start(self, label): @param label: virtual machine label. @raise CuckooMachineError: if unable to start. """ - log.debug("Starting vm {}".format(label)) - + log.debug("Starting vm %s", label) if not self._is_autoscaled(self.ec2_machines[label]): self.ec2_machines[label].start() self._wait_status(label, AWS.RUNNING) @@ -274,7 +273,7 @@ def stop(self, label): @param label: virtual machine label. @raise CuckooMachineError: if unable to stop. """ - log.debug("Stopping vm %s" % label) + log.debug("Stopping vm %s", label) status = self._status(label) @@ -364,7 +363,7 @@ def _restore(self, label): This method detaches and deletes the current volume, then creates a new one and attaches it. 
:param label: machine label """ - log.info("restoring machine: {}".format(label)) + log.info("restoring machine: %s", label) vm_info = self.db.view_machine_by_label(label) snap_id = vm_info.snapshot instance = self.ec2_machines[label] @@ -378,7 +377,7 @@ def _restore(self, label): log.debug("Detaching %s", old_volume.id) resp = instance.detach_volume(VolumeId=old_volume.id, Force=True) - log.debug("response: {}".format(resp)) + log.debug("response: %s", resp) while True: old_volume.reload() if old_volume.state != "in-use": @@ -412,7 +411,7 @@ def _restore(self, label): log.debug("Attaching new volume") resp = instance.attach_volume(VolumeId=new_volume.id, Device="/dev/sda1") - log.debug("response {}".format(resp)) + log.debug("response %s", resp) while True: new_volume.reload() if new_volume.state != "available": @@ -421,4 +420,4 @@ def _restore(self, label): log.debug("new volume %s in state %s", new_volume.id, new_volume.state) if new_volume.state != "in-use": new_volume.delete() - raise CuckooMachineError("New volume turned into state %s instead of 'in-use'" % old_volume.state) + raise CuckooMachineError("New volume turned into state %s instead of 'in-use'", old_volume.state) diff --git a/modules/machinery/az.py b/modules/machinery/az.py index fba0172b769..4c51e888034 100644 --- a/modules/machinery/az.py +++ b/modules/machinery/az.py @@ -24,7 +24,6 @@ HAVE_AZURE = True except ImportError: - print("Missing machinery-required libraries.") print("poetry run pip install azure-identity msrest msrestazure azure-mgmt-compute azure-mgmt-network") @@ -158,14 +157,14 @@ def _initialize(self): # scale set, which is bad for Cuckoo logic if scale_set_opts["initial_pool_size"] <= 0: raise CuckooCriticalError( - f"The initial pool size for VMSS '{scale_set_id}' is 0. Please set it to a positive integer." + "The initial pool size for VMSS '%s' is 0. Please set it to a positive integer.", scale_set_id ) # Insert the scale_set_opts into the module.scale_sets attribute mmanager_opts["scale_sets"][scale_set_id] = scale_set_opts except (AttributeError, CuckooCriticalError) as e: - log.warning(f"Configuration details about scale set {scale_set_id.strip()} are missing: {e}") + log.warning("Configuration details about scale set %s are missing: %s", str(scale_set_id.strip()), str(e)) continue def _initialize_check(self): @@ -243,7 +242,7 @@ def _thr_refresh_clients(self): and compute clients using an updated ClientSecretCredential object. 
""" - log.debug(f"Connecting to Azure for the region '{self.options.az.region_name}'.") + log.debug("Connecting to Azure for the region '%s'.", self.options.az.region_name) # Getting an updated ClientSecretCredential credentials = self._get_credentials() @@ -287,7 +286,7 @@ def _set_vmss_stage(self): operation=self.compute_client.gallery_images.get, ) except CuckooMachineError: - raise CuckooCriticalError(f"Gallery image '{scale_set_values.gallery_image_name}' does not exist") + raise CuckooCriticalError("Gallery image '%s' does not exist", scale_set_values.gallery_image_name) # Map the Image Reference to the VMSS self.required_vmsss[scale_set_id]["platform"] = scale_set_values.platform.capitalize() @@ -298,13 +297,13 @@ def _set_vmss_stage(self): # All required VMSSs must have an image reference, tag and os for required_vmss_name, required_vmss_values in self.required_vmsss.items(): if required_vmss_values["image"] is None: - raise CuckooCriticalError(f"The VMSS '{required_vmss_name}' does not have an image reference.") + raise CuckooCriticalError("The VMSS '5s' does not have an image reference.", required_vmss_name) elif required_vmss_values["tag"] is None: - raise CuckooCriticalError(f"The VMSS '{required_vmss_name}' does not have an tag.") + raise CuckooCriticalError("The VMSS '%s' does not have an tag.", required_vmss_name) elif required_vmss_values["platform"] is None: - raise CuckooCriticalError(f"The VMSS '{required_vmss_name}' does not have an OS value.") + raise CuckooCriticalError("The VMSS '%s' does not have an OS value.", required_vmss_name) elif required_vmss_values["initial_pool_size"] is None: - raise CuckooCriticalError(f"The VMSS '{required_vmss_name}' does not have an initial pool size.") + raise CuckooCriticalError("The VMSS '%s' does not have an initial pool size.", required_vmss_name) self._process_pre_existing_vmsss() self._check_cpu_cores() @@ -329,11 +328,9 @@ def _process_pre_existing_vmsss(self): # Delete incorrectly named VMSSs or mark them as existing for vmss in existing_vmsss: - # If a VMSS does not have any tags or does not have the tag that we use to indicate that it is used for # Cuckoo (AUTO_SCALE_CAPE key-value pair), ignore if not vmss.tags or not vmss.tags.get(Azure.AUTO_SCALE_CAPE_KEY) == Azure.AUTO_SCALE_CAPE_VALUE: - # Ignoring... unless! 
They have one of the required names of the VMSSs that we are going to create if vmss.name in self.required_vmsss.keys(): async_delete_vmss = Azure._azure_api_call( @@ -493,7 +490,7 @@ def start(self, label=None): # Something bad happened, we are starting a task on a machine that needs to be deleted with vms_currently_being_deleted_lock: if label in vms_currently_being_deleted: - raise CuckooMachineError(f"Attempting to start a task with machine {label} while it is scheduled for deletion.") + raise CuckooMachineError("Attempting to start a task with machine %s while it is scheduled for deletion.", label) def stop(self, label=None): """ @@ -501,7 +498,7 @@ def stop(self, label=None): @param label: virtual machine label @return: End method call """ - log.debug(f"Stopping machine '{label}'") + log.debug("Stopping machine '%s'", label) # Parse the tag and instance id out to confirm which VMSS to modify vmss_name, instance_id = label.split("_") # If we aren't scaling down, then reimage @@ -561,7 +558,7 @@ def _add_machines_to_db(self, vmss_name): @param vmss_name: the name of the VMSS to be queried """ try: - log.debug(f"Adding machines to database for {vmss_name}.") + log.debug("Adding machines to database for %s.", vmss_name) # We don't want to re-add machines! Therefore, let's see what we're working with machines_in_db = self.db.list_machines() db_machine_labels = [machine.label for machine in machines_in_db] @@ -595,13 +592,13 @@ def _add_machines_to_db(self, vmss_name): continue if vmss_vm.name in vms_to_avoid_adding: # Don't add it if it is currently being deleted! - log.debug(f"{vmss_vm.name} is currently being deleted!") + log.debug("%s is currently being deleted!", vmss_vm.name) continue # According to Microsoft, the OS type is... platform = vmss_vm.storage_profile.os_disk.os_type.lower() if not vmss_vm.network_profile: - log.error(f"{vmss_vm.name} does not have a network profile") + log.error("%s does not have a network profile", vmss_vm.name) continue vmss_vm_nic = next( @@ -614,7 +611,7 @@ def _add_machines_to_db(self, vmss_name): ) if not vmss_vm_nic: log.error( - f"{vmss_vm.network_profile.network_interfaces[0].id.lower()} does not match any NICs in {[vmss_vm_nic.id.lower() for vmss_vm_nic in vmss_vm_nics]}" + "%s does not match any NICs in %s", vmss_vm.network_profile.network_interfaces[0].id.lower(), str([vmss_vm_nic.id.lower() for vmss_vm_nic in vmss_vm_nics]) ) continue # Sets "new_machine" object in configuration object to @@ -623,7 +620,7 @@ def _add_machines_to_db(self, vmss_name): private_ip = vmss_vm_nic.ip_configurations[0].private_ip_address if private_ip in db_machine_ips: - log.error(f"The IP '{private_ip}' is already associated with a machine in the DB. Moving on...") + log.error("The IP '%s' is already associated with a machine in the DB. Moving on...", private_ip) continue # Add machine to DB. @@ -659,15 +656,15 @@ def _add_machines_to_db(self, vmss_name): try: thr.join() except CuckooGuestCriticalTimeout: - log.debug(f"Rough start for {vm}, deleting.") + log.debug("Rough start for %s, deleting.", vm) self.delete_machine(vm) raise except Exception as e: - log.error(repr(e), exc_info=True) + log.exception(repr(e)) # If no machines on any VMSSs are in the db when we leave this method, CAPE will crash. if not self.machines() and self.required_vmsss[vmss_name]["retries"] > 0: - log.warning(f"No available VMs after initializing {vmss_name}. Attempting to reinitialize VMSS.") + log.warning("No available VMs after initializing %s. 
Attempting to reinitialize VMSS.", vmss_name) self.required_vmsss[vmss_name]["retries"] -= 1 start_time = timeit.default_timer() @@ -678,14 +675,14 @@ def _add_machines_to_db(self, vmss_name): continue self._update_or_create_vmsss(vmsss_dict={vmss_name: self.required_vmsss[vmss_name]}) return - log.debug(f"{vmss_name} initialize retry failed. Timed out waiting for VMs to be deleted.") + log.debug("%s initialize retry failed. Timed out waiting for VMs to be deleted.", vmss_name) def _delete_machines_from_db_if_missing(self, vmss_name): """ Delete machine from database if it does not exist in the VMSS. @param vmss_name: the name of the VMSS to be queried """ - log.debug(f"Deleting machines from database if they do not exist in the VMSS {vmss_name}.") + log.debug("Deleting machines from database if they do not exist in the VMSS %s.", vmss_name) # Get all VMs in the VMSS paged_vmss_vms = Azure._azure_api_call( self.options.az.sandbox_resource_group, @@ -735,16 +732,16 @@ def _thr_wait_for_ready_machine(machine_name, machine_ip): # We did it! break except socket.timeout: - log.debug(f"{machine_name}: Initializing...") + log.debug("%s: Initializing...", machine_name) except socket.error: - log.debug(f"{machine_name}: Initializing...") + log.debug("%s: Initializing...", machine_name) if (timeit.default_timer() - start) >= timeout: # We didn't do it :( raise CuckooGuestCriticalTimeout( - f"Machine {machine_name}: the guest initialization hit the critical timeout, analysis aborted." + "Machine %s: the guest initialization hit the critical timeout, analysis aborted.", machine_name ) time.sleep(10) - log.debug(f"Machine {machine_name} was created and available in {round(timeit.default_timer() - start)}s") + log.debug("Machine %s was created and available in %d s", machine_name, round(timeit.default_timer() - start)) @staticmethod def _azure_api_call(*args, **kwargs): @@ -764,13 +761,13 @@ def _azure_api_call(*args, **kwargs): api_call = f"{operation}({args},{kwargs})" try: - log.debug(f"Trying {api_call}") + log.debug("Trying %s", api_call) results = operation(*args, **kwargs) except Exception as exc: # For ClientRequestErrors, they do not have the attribute 'error' error = exc.error.error if getattr(exc, "error", False) else exc log.warning( - f"Failed to {api_call} due to the Azure error '{error}': '{exc.message if hasattr(exc, 'message') else repr(exc)}'." 
+ "Failed to 5s due to the Azure error '%s': '%s'.", str(api_call), str(error), f"{exc.message if hasattr(exc, 'message') else repr(exc)}" ) if "NotFound" in repr(exc) or (hasattr(exc, "status_code") and exc.status_code == 404): # Note that this exception is used to represent if an Azure resource @@ -782,7 +779,7 @@ def _azure_api_call(*args, **kwargs): # Log the subscription limits headers = results._response.headers log.debug( - f"API Charge: {headers['x-ms-request-charge']}; Remaining Calls: {headers['x-ms-ratelimit-remaining-resource']}" + "API Charge: %s; Remaining Calls: %s", headers['x-ms-request-charge'], headers['x-ms-ratelimit-remaining-resource'] ) return results @@ -804,7 +801,7 @@ def _thr_create_vmss(self, vmss_name, vmss_image_ref, vmss_image_os): ).id # note the id attribute here except CuckooMachineError: raise CuckooCriticalError( - f"Subnet '{self.options.az.subnet}' does not exist in Virtual Network '{self.options.az.vnet}'" + "Subnet '%s' does not exist in Virtual Network '%s'", self.options.az.subnet, self.options.az.vnet ) vmss_managed_disk = models.VirtualMachineScaleSetManagedDiskParameters( @@ -915,7 +912,7 @@ def _thr_reimage_vmss(self, vmss_name): ) _ = self._handle_poller_result(async_restart_vmss) else: - log.error(repr(e), exc_info=True) + log.exception(repr(e)) raise with self.db.session.begin(): self._add_machines_to_db(vmss_name) @@ -985,7 +982,7 @@ def _scale_machine_pool(self, tag, per_platform=False): if number_of_relevant_machines_required > self.subnet_limit: number_of_relevant_machines_required = self.subnet_limit - log.debug("Scaling limited by the size of the subnet: %s" % self.subnet_limit) + log.debug("Scaling limited by the size of the subnet: %s", self.subnet_limit) number_of_machines = len(self.db.list_machines()) projected_total_machines = number_of_machines - number_of_relevant_machines + number_of_relevant_machines_required @@ -1024,7 +1021,7 @@ def _scale_machine_pool(self, tag, per_platform=False): number_of_relevant_machines + number_of_new_cpus_available / self.instance_type_cpus ) log.debug( - f"Quota could be exceeded with projected number of machines ({old_number_of_relevant_machines_required}). Setting new limit to {number_of_relevant_machines_required}" + "Quota could be exceeded with projected number of machines (%s). Setting new limit to %s", str(old_number_of_relevant_machines_required), str(number_of_relevant_machines_required) ) if machine_pools[vmss_name]["size"] == number_of_relevant_machines_required: @@ -1034,7 +1031,7 @@ def _scale_machine_pool(self, tag, per_platform=False): self._delete_machines_from_db_if_missing(vmss_name) # Update the VMSS size accordingly machine_pools[vmss_name]["size"] = len(self._get_relevant_machines(tag)) - log.debug(f"The size of the machine pool {vmss_name} is already the size that we want") + log.debug("The size of the machine pool %s is already the size that we want", vmss_name) machine_pools[vmss_name]["is_scaling"] = False if platform: is_platform_scaling[platform] = False @@ -1058,7 +1055,7 @@ def _scale_machine_pool(self, tag, per_platform=False): if relevant_task_queue == initial_number_of_locked_relevant_machines == 0: # The VMSS will scale in via the ScaleInPolicy. 
machine_pools[vmss_name]["wait"] = True - log.debug(f"System is at rest, scale down {vmss_name} capacity and delete machines.") + log.debug("System is at rest, scale down %s capacity and delete machines.", vmss_name) # System is not at rest, but task queue is 0, therefore set machines in use to delete elif relevant_task_queue == 0: machine_pools[vmss_name]["is_scaling_down"] = True @@ -1079,7 +1076,7 @@ def _scale_machine_pool(self, tag, per_platform=False): # We don't want to be stuck in this for longer than the timeout specified if (timeit.default_timer() - start_time) > AZURE_TIMEOUT: - log.debug(f"Breaking out of the while loop within the scale down section for {vmss_name}.") + log.debug("Breaking out of the while loop within the scale down section for %s.", vmss_name) break # Get the updated number of relevant machines required relevant_task_queue = self._get_number_of_relevant_tasks(tag) @@ -1094,7 +1091,7 @@ def _scale_machine_pool(self, tag, per_platform=False): # Relaxxxx time.sleep(self.options.az.scale_down_polling_period) log.debug( - f"Scaling {vmss_name} down until new task is received. {number_of_relevant_machines} -> {number_of_relevant_machines_required}" + "Scaling %s down until new task is received. %s -> %s", vmss_name, str(number_of_relevant_machines), str(number_of_relevant_machines_required) ) # Get an updated count of relevant machines @@ -1112,7 +1109,7 @@ def _scale_machine_pool(self, tag, per_platform=False): return # Update the capacity of the VMSS - log.debug(f"Scaling {vmss_name} size from {initial_capacity} -> {number_of_relevant_machines_required}") + log.debug("Scaling %s size from %s -> %s", vmss_name, initial_capacity, str(number_of_relevant_machines_required)) vmss = Azure._azure_api_call( self.options.az.sandbox_resource_group, vmss_name, @@ -1144,11 +1141,11 @@ def _scale_machine_pool(self, tag, per_platform=False): return timediff = timeit.default_timer() - start_time - log.debug(f"The scaling of {vmss_name} took {round(timediff)}s") + log.debug("The scaling of %s took %d s", vmss_name, round(timediff)) machine_pools[vmss_name]["size"] = number_of_relevant_machines_required # Alter the database based on if we scaled up or down - log.debug(f"Updated {vmss_name} capacity: {number_of_relevant_machines_required}; Initial capacity: {initial_capacity}") + log.debug("Updated %s capacity: %s; Initial capacity: %s", vmss_name, str(number_of_relevant_machines_required), str(initial_capacity)) if number_of_relevant_machines_required > initial_capacity: self._add_machines_to_db(vmss_name) else: @@ -1159,14 +1156,13 @@ def _scale_machine_pool(self, tag, per_platform=False): machine_pools[vmss_name]["is_scaling"] = False if platform: is_platform_scaling[platform] = False - log.debug(f"Scaling {vmss_name} has completed.") + log.debug("Scaling %s has completed.", vmss_name) except Exception as exc: machine_pools[vmss_name]["wait"] = False machine_pools[vmss_name]["is_scaling"] = False if platform: is_platform_scaling[platform] = False - log.error(repr(exc), exc_info=True) - log.debug(f"Scaling {vmss_name} has completed with errors {exc!r}.") + log.exception("Scaling %s has completed with errors %s.", vmss_name, str(exc)) @staticmethod def _handle_poller_result(lro_poller_object): @@ -1182,7 +1178,7 @@ def _handle_poller_result(lro_poller_object): raise CuckooMachineError(repr(e)) time_taken = timeit.default_timer() - start_time if time_taken >= AZURE_TIMEOUT: - raise CuckooMachineError(f"The task took {round(time_taken)}s to complete! 
Bad Azure!") + raise CuckooMachineError("The task took %ds to complete! Bad Azure!", round(time_taken)) else: return lro_poller_result @@ -1219,7 +1215,7 @@ def _get_relevant_machines(self, tag): """ # The number of relevant machines are those from the list of locked and unlocked machines # that have the correct tag in their name - return [machine for machine in self.db.list_machines([tag])] + return [machine for machine in self.db.list_machines(tags=[tag])] @staticmethod def _wait_for_concurrent_operations_to_complete(timeout=AZURE_TIMEOUT): @@ -1306,7 +1302,7 @@ def _thr_reimage_list_reader(self): operation=self.compute_client.virtual_machine_scale_sets.begin_reimage_all, ) except Exception as exc: - log.error(repr(exc), exc_info=True) + log.exception(repr(exc)) # If InvalidParameter: 'The provided instanceId x is not an active Virtual Machine Scale Set VM instanceId. # This means that the machine has been deleted # If BadRequest: The VM x creation in Virtual Machine Scale Set > with ephemeral disk is not complete. Please trigger a restart if required' @@ -1322,10 +1318,10 @@ def _thr_reimage_list_reader(self): for instance_id in instance_ids_that_should_not_be_reimaged_again: if "InvalidParameter" in repr(exc): - log.warning(f"Machine {vmss_to_reimage}_{instance_id} does not exist anymore. Deleting from database.") + log.warning("Machine %s does not exist anymore. Deleting from database.", f"{vmss_to_reimage}_{instance_id}") elif "BadRequest" in repr(exc): log.warning( - f"Machine {vmss_to_reimage}_{instance_id} cannot start due to ephemeral disk issues with Azure. Deleting from database and Azure." + "Machine %s cannot start due to ephemeral disk issues with Azure. Deleting from database and Azure.", f"{vmss_to_reimage}_{instance_id}" ) with vms_currently_being_deleted_lock: vms_currently_being_deleted.append(f"{vmss_to_reimage}_{instance_id}") @@ -1348,7 +1344,7 @@ def _thr_reimage_list_reader(self): reimaged = False log.warning( - f"Reimaging machines {instance_ids} in {vmss_to_reimage} took too long, deleting them from the DB and the VMSS." + "Reimaging machines %s in %s took too long, deleting them from the DB and the VMSS.", str(instance_ids), str(vmss_to_reimage) ) # That sucks, now we have mark each one for deletion for instance_id in instance_ids: @@ -1365,10 +1361,10 @@ def _thr_reimage_list_reader(self): current_vmss_operations -= 1 timediff = timeit.default_timer() - start_time log.debug( - f"{'S' if reimaged else 'Uns'}uccessfully reimaging instances {instance_ids} in {vmss_to_reimage} took {round(timediff)}s" + "%successfully reimaging instances %s in %s took %ds", {'S' if reimaged else 'Uns'}, str(instance_ids), str(vmss_to_reimage), round(timediff) ) except Exception as e: - log.error(f"Exception occurred in the reimage thread: {e}. Trying again...") + log.error("Exception occurred in the reimage thread: %s. 
Trying again...", str(e)) def _thr_delete_list_reader(self): global current_vmss_operations @@ -1411,7 +1407,7 @@ def _thr_delete_list_reader(self): operation=self.compute_client.virtual_machine_scale_sets.begin_delete_instances, ) except Exception as exc: - log.error(repr(exc), exc_info=True) + log.exception(repr(exc)) with current_operations_lock: current_vmss_operations -= 1 with vms_currently_being_deleted_lock: @@ -1423,7 +1419,7 @@ def _thr_delete_list_reader(self): while not async_delete_some_machines.done(): deleted = True if (timeit.default_timer() - start_time) > AZURE_TIMEOUT: - log.warning(f"Deleting machines {instance_ids} in {vmss_to_delete_from} took too long.") + log.warning("Deleting machines %s in %s took too long.", str(instance_ids), str(vmss_to_delete_from)) deleted = False break time.sleep(2) @@ -1431,7 +1427,7 @@ def _thr_delete_list_reader(self): if self.initializing and deleted: # All machines should have been removed from the db and the VMSS at this point. # To force the VMSS to scale to initial_pool_size, set the size to zero here. - log.debug(f"Setting size to 0 for VMSS {vmss_to_delete_from} after successful deletion") + log.debug("Setting size to 0 for VMSS %s after successful deletion", vmss_to_delete_from) machine_pools[vmss_to_delete_from]["size"] = 0 with vms_currently_being_deleted_lock: @@ -1441,7 +1437,7 @@ def _thr_delete_list_reader(self): with current_operations_lock: current_vmss_operations -= 1 log.debug( - f"{'S' if deleted else 'Uns'}uccessfully deleting instances {instance_ids} in {vmss_to_delete_from} took {round(timeit.default_timer() - start_time)}s" + "%successfully deleting instances %s in {vmss_to_delete_from} took %ss", 'S' if deleted else 'Uns', str(instance_ids), str(round(timeit.default_timer() - start_time)) ) except Exception as e: - log.error(f"Exception occurred in the delete thread: {e}. Trying again...") + log.error("Exception occurred in the delete thread: %s. 
Trying again...", str(e)) diff --git a/modules/machinery/vsphere.py b/modules/machinery/vsphere.py index 600d19499f4..692d2566aa6 100644 --- a/modules/machinery/vsphere.py +++ b/modules/machinery/vsphere.py @@ -88,7 +88,7 @@ def _initialize_check(self): sslContext = ssl._create_unverified_context() self.connect_opts["sslContext"] = sslContext - log.warn("Turning off SSL certificate verification!") + log.warning("Turning off SSL certificate verification!") # Check that a snapshot is configured for each machine # and that it was taken in a powered-on state diff --git a/modules/processing/CAPE.py b/modules/processing/CAPE.py index a9bc97cd57f..f3c552e1ec9 100644 --- a/modules/processing/CAPE.py +++ b/modules/processing/CAPE.py @@ -37,11 +37,12 @@ ) processing_conf = Config("processing") +integrations_conf = Config("integrations") externalservices_conf = Config("externalservices") HAVE_FLARE_CAPA = False # required to not load not enabled dependencies -if processing_conf.flare_capa.enabled and not processing_conf.flare_capa.on_demand: +if integrations_conf.flare_capa.enabled and not integrations_conf.flare_capa.on_demand: from lib.cuckoo.common.integrations.capa import HAVE_FLARE_CAPA, flare_capa_details MISP_HASH_LOOKUP = False @@ -163,6 +164,7 @@ def process_file(self, file_path, append_file, metadata: dict, *, category: str, """ if not path_exists(file_path): + log.debug("file doesn't exist: %s", file_path) return cape_names = set() @@ -206,7 +208,7 @@ def process_file(self, file_path, append_file, metadata: dict, *, category: str, type_string, append_file = self._metadata_processing(metadata, file_info, append_file) - if processing_conf.CAPE.targetinfo and category in ("static", "file"): + if category in ("static", "file"): if MISP_HASH_LOOKUP: misp_hash_lookup(file_info["sha256"], str(self.task["id"]), file_info) @@ -244,7 +246,8 @@ def process_file(self, file_path, append_file, metadata: dict, *, category: str, if file_info.get("pid"): _ = cape_name_from_yara(file_info, file_info["pid"], self.results) - if HAVE_FLARE_CAPA: + # ToDo https://github.com/mandiant/capa/issues/2620 + if HAVE_FLARE_CAPA and ("PE32" in file_info["type"] or "MS-DOS executable" in file_info["type"]): pretime = timeit.default_timer() capa_details = flare_capa_details(file_path, "procdump") if capa_details: @@ -256,21 +259,22 @@ def process_file(self, file_path, append_file, metadata: dict, *, category: str, # Process CAPE Yara hits # Prefilter extracted data + beauty is better than oneliner: all_files = [] - for extracted_file in file_info.get("extracted_files", []): - if not extracted_file["cape_yara"]: - continue - if extracted_file.get("data", b""): - extracted_file_data = make_bytes(extracted_file["data"]) - else: - extracted_file_data = Path(extracted_file["path"]).read_bytes() - for yara in extracted_file["cape_yara"]: - all_files.append( - ( - f"[{extracted_file.get('sha256', '')}]{file_info['path']}", - extracted_file_data, - yara, + for _, value in file_info.get("selfextract", {}).items(): + for file in value.get("extracted_files", []): + if not file.get("cape_yara", []): + continue + if file.get("data", b""): + extracted_file_data = make_bytes(file["data"]) + else: + extracted_file_data = Path(file["path"]).read_bytes() + for yara in file["cape_yara"]: + all_files.append( + ( + f"[{file.get('sha256', '')}]{file_info['path']}", + extracted_file_data, + yara, + ) ) - ) # Get the file data file_data = None @@ -320,7 +324,8 @@ def process_file(self, file_path, append_file, metadata: dict, *, category: str, 
append_file = is_duplicated_binary(file_info, cape_file, append_file) if append_file: - if HAVE_FLARE_CAPA and category == "CAPE": + # ToDo https://github.com/mandiant/capa/issues/2620 + if HAVE_FLARE_CAPA and category == "CAPE" and ("PE32" in file_info["type"] or "MS-DOS executable" in file_info["type"]): pretime = timeit.default_timer() capa_details = flare_capa_details(file_path, "cape") if capa_details: @@ -337,7 +342,7 @@ def run(self): """ self._set_dict_keys() meta = {} - # Required to control files extracted by selfextract.conf as we store them in dropped + # Required to control files extracted by integrations.conf as we store them in dropped duplicated: DuplicatesType = collections.defaultdict(set) if path_exists(self.files_metadata): for line in open(self.files_metadata, "rb"): diff --git a/modules/processing/amsi.py b/modules/processing/amsi.py index 0accb640888..ee33c5eb30f 100644 --- a/modules/processing/amsi.py +++ b/modules/processing/amsi.py @@ -23,10 +23,10 @@ def run(self): try: decoded = self.decode_event(json.loads(line)) except Exception: - log.exception(f"Failed to process line {idx} of {jsonl_file}.") + log.exception("Failed to process line %d of %s.", idx, jsonl_file) break result.append(decoded) - log.info(f"Processed {idx} AMSI event{'s' if idx != 1 else ''}.") + log.info("Processed %d AMSI event{'s' if idx != 1 else ''}.", idx) return result diff --git a/modules/processing/analysisinfo.py b/modules/processing/analysisinfo.py index 6b13436bfc2..0bb71ab0afd 100644 --- a/modules/processing/analysisinfo.py +++ b/modules/processing/analysisinfo.py @@ -7,19 +7,35 @@ import time from contextlib import suppress from datetime import datetime +from pathlib import Path from lib.cuckoo.common.abstracts import Processing -from lib.cuckoo.common.constants import CUCKOO_VERSION +from lib.cuckoo.common.constants import CUCKOO_ROOT, CUCKOO_VERSION from lib.cuckoo.common.exceptions import CuckooProcessingError from lib.cuckoo.common.path_utils import path_exists from lib.cuckoo.common.utils import get_options from lib.cuckoo.core.database import Database +# https://stackoverflow.com/questions/14989858/get-the-current-git-hash-in-a-python-script/68215738#68215738 + log = logging.getLogger(__name__) db = Database() +def get_running_commit() -> str: + try: + git_folder = Path(CUCKOO_ROOT, ".git") + head_name = Path(git_folder, "HEAD").read_text().split("\n")[0].split(" ")[-1] + return Path(git_folder, head_name).read_text().replace("\n", "") + except Exception as e: + log.error("Error getting running commit hash: %s", str(e)) + return "unknown" + + +CAPE_CURRENT_COMMIT_HASH = get_running_commit() + + class AnalysisInfo(Processing): """General information about analysis session.""" @@ -111,4 +127,5 @@ def run(self): "source_url": source_url, "route": self.task.get("route"), "user_id": self.task.get("user_id"), + "CAPE_current_commit": CAPE_CURRENT_COMMIT_HASH, } diff --git a/modules/processing/behavior.py b/modules/processing/behavior.py index 394a48f6842..3594eeb3fcc 100644 --- a/modules/processing/behavior.py +++ b/modules/processing/behavior.py @@ -3,6 +3,7 @@ # See the file 'docs/LICENSE' for copying permission. 
import datetime +import json import logging import os import struct @@ -25,6 +26,12 @@ log = logging.getLogger(__name__) cfg = Config() +integrations_conf = Config("integrations") + +HAVE_FLARE_CAPA = False +# required to not load not enabled dependencies +if integrations_conf.flare_capa.enabled and integrations_conf.flare_capa.behavior: + from lib.cuckoo.common.integrations.capa import HAVE_FLARE_CAPA, flare_capa_details class ParseProcessLog(list): @@ -309,7 +316,7 @@ def _parse(self, row): try: argument["value"] = convert_to_printable(arg_value, self.conversion_cache) except Exception: - log.error(arg_value, exc_info=True) + log.exception(arg_value) continue if not self.reporting_mode: if isinstance(arg_value_raw, bytes): @@ -352,6 +359,16 @@ def _parse(self, row): if call["thread_id"] not in self.threads: self.threads.append(call["thread_id"]) + if ( + api_name == "DllLoadNotification" + and len(arguments) == 3 + and arguments[-1].get("name", "") == "DllBase" + and arguments[0].get("value", "") == "load" + and "DllBase" not in self.environdict + and _clean_path(arguments[1]["value"], self.options.replace_patterns) in self.environdict.get("CommandLine", "") + ): + self.environdict.setdefault("DllBase", arguments[-1]["value"]) + return call @@ -399,8 +416,7 @@ def run(self): if current_log.process_id is None: continue - # If the current log actually contains any data, add its data to - # the results list. + # If the current log actually contains any data, add its data to the results list. results.append( { "process_id": current_log.process_id, @@ -1170,31 +1186,66 @@ def run(self): """Run analysis. @return: results dict. """ - behavior = {"processes": Processes(self.logs_path, self.task, self.options).run()} - - instances = [ - Anomaly(), - ProcessTree(), - Summary(self.options), - Enhanced(), - EncryptedBuffers(), - ] - enabled_instances = [instance for instance in instances if getattr(self.options, instance.key, True)] - - if enabled_instances: - # Iterate calls and tell interested signatures about them - for process in behavior["processes"]: - for call in process["calls"]: - for instance in enabled_instances: - try: - instance.event_apicall(call, process) - except Exception: - log.exception('Failure in partial behavior "%s"', instance.key) - - for instance in instances: + + behavior = {"processes": []} + if path_exists(self.logs_path) and len(os.listdir(self.logs_path)) != 0: + behavior = {"processes": Processes(self.logs_path, self.task, self.options).run()} + + instances = [ + Anomaly(), + ProcessTree(), + Summary(self.options), + Enhanced(), + EncryptedBuffers(), + ] + enabled_instances = [instance for instance in instances if getattr(self.options, instance.key, True)] + + if enabled_instances: + # Iterate calls and tell interested signatures about them + for process in behavior["processes"]: + for call in process["calls"]: + for instance in enabled_instances: + try: + instance.event_apicall(call, process) + except Exception: + log.exception('Failure in partial behavior "%s"', instance.key) + + for instance in instances: + try: + behavior[instance.key] = instance.run() + except Exception as e: + log.exception('Failed to run partial behavior class "%s" due to "%s"', instance.key, e) + else: + log.warning('Analysis results folder does not exist at path "%s"', self.logs_path) + # load behavior from json if exist or env CAPE_REPORT variable + json_path = False + if os.environ.get("CAPE_REPORT") and path_exists(os.environ["CAPE_REPORT"]): + json_path = os.environ["CAPE_REPORT"] + elif 
os.path.exists(os.path.join(self.reports_path, "report.json")): + json_path = os.path.join(self.reports_path, "report.json") + + if not json_path: + return behavior + + with open(json_path) as f: + try: + behavior = json.load(f).get("behavior", []) + except Exception as e: + log.error("Behavior. Can't load json: %s", str(e)) + + # https://github.com/mandiant/capa/issues/2620 + if ( + HAVE_FLARE_CAPA + and self.results.get("info", {}).get("category", "") == "file" + and "PE" in self.results.get("target", {}).get("file", "").get("type", "") + ): try: - behavior[instance.key] = instance.run() + self.results["capa_summary"] = flare_capa_details( + file_path=self.results["target"]["file"]["path"], + category="behavior", + backend="cape", + results={"behavior": behavior, **self.results}, + ) except Exception as e: - log.exception('Failed to run partial behavior class "%s" due to "%s"', instance.key, e) - + log.error("Can't generate CAPA summary: %s", str(e)) return behavior diff --git a/modules/processing/memory.py b/modules/processing/memory.py index 088ba8e5b9c..a872a74f9dc 100644 --- a/modules/processing/memory.py +++ b/modules/processing/memory.py @@ -25,6 +25,7 @@ JsonRenderer = "" + try: import volatility3.plugins import volatility3.symbols @@ -36,7 +37,7 @@ # from volatility3.plugins.windows import pslist HAVE_VOLATILITY = True except ImportError: - print("Missed dependency: pip3 install volatility3 -U") + print("Missed dependency: poetry run pip install volatility3 -U") HAVE_VOLATILITY = False log = logging.getLogger() @@ -56,31 +57,31 @@ def __init__(self): def __call__(self, progress: Union[int, float], description: str = None): pass - -class ReturnJsonRenderer(JsonRenderer): - def render(self, grid: interfaces.renderers.TreeGrid): - final_output = ({}, []) - - def visitor( - node: Optional[interfaces.renderers.TreeNode], - accumulator: Tuple[Dict[str, Dict[str, Any]], List[Dict[str, Any]]], - ) -> Tuple[Dict[str, Dict[str, Any]], List[Dict[str, Any]]]: - # Nodes always have a path value, giving them a path_depth of at least 1, we use max just in case - acc_map, final_tree = accumulator - node_dict = {} - for column_index, column in enumerate(grid.columns): - renderer = self._type_renderers.get(column.type, self._type_renderers["default"]) - data = renderer(list(node.values)[column_index]) - node_dict[column.name] = None if isinstance(data, interfaces.renderers.BaseAbsentValue) else data - if node.parent: - acc_map[node.parent.path]["__children"].append(node_dict) - else: - final_tree.append(node_dict) - acc_map[node.path] = node_dict - return (acc_map, final_tree) - - error = grid.populate(visitor, final_output, fail_on_errors=True) - return final_output[1], error +if HAVE_VOLATILITY: + class ReturnJsonRenderer(JsonRenderer): + def render(self, grid: interfaces.renderers.TreeGrid): + final_output = ({}, []) + + def visitor( + node: Optional[interfaces.renderers.TreeNode], + accumulator: Tuple[Dict[str, Dict[str, Any]], List[Dict[str, Any]]], + ) -> Tuple[Dict[str, Dict[str, Any]], List[Dict[str, Any]]]: + # Nodes always have a path value, giving them a path_depth of at least 1, we use max just in case + acc_map, final_tree = accumulator + node_dict = {} + for column_index, column in enumerate(grid.columns): + renderer = self._type_renderers.get(column.type, self._type_renderers["default"]) + data = renderer(list(node.values)[column_index]) + node_dict[column.name] = None if isinstance(data, interfaces.renderers.BaseAbsentValue) else data + if node.parent: + 
acc_map[node.parent.path]["__children"].append(node_dict) + else: + final_tree.append(node_dict) + acc_map[node.path] = node_dict + return (acc_map, final_tree) + + error = grid.populate(visitor, final_output, fail_on_errors=True) + return final_output[1], error class VolatilityAPI: diff --git a/modules/processing/network.py b/modules/processing/network.py index f1edfd3edd6..c87b4d287a4 100644 --- a/modules/processing/network.py +++ b/modules/processing/network.py @@ -168,6 +168,8 @@ def __init__(self, filepath, ja3_fprints, options): self.ja3_fprints = ja3_fprints self.options = options + self.ip_n_ports = {} + # List of all hosts. self.hosts = [] # List containing all non-private IP addresses. @@ -306,6 +308,7 @@ def _add_hosts(self, connection): # first packet they appear in. if not self._is_private_ip(ip): self.unique_hosts.append(ip) + self.ip_n_ports.setdefault(ip, []).append(connection["dport"]) def _enrich_hosts(self, unique_hosts): enriched_hosts = [] @@ -338,6 +341,7 @@ def _enrich_hosts(self, unique_hosts): "asn_name": asn_name, "hostname": hostname, "inaddrarpa": inaddrarpa, + "ports": self.ip_n_ports.get(ip, []), } ) return enriched_hosts @@ -531,6 +535,7 @@ def _add_domain(self, domain): """Add a domain to unique list. @param domain: domain name. """ + # ToDo global filter here right? filters = (".*\\.windows\\.com$", ".*\\.in\\-addr\\.arpa$", ".*\\.ip6\\.arpa$") regexps = [re.compile(filter) for filter in filters] @@ -1080,6 +1085,8 @@ def run(self): class NetworkAnalysis(Processing): """Network analysis.""" + key = "network" + # ToDo map this to suricata.tls.ja def _import_ja3_fprints(self): """ @@ -1101,7 +1108,6 @@ def _import_ja3_fprints(self): return ja3_fprints def run(self): - if not path_exists(self.pcap_path): log.debug('The PCAP file does not exist at path "%s"', self.pcap_path) return {} diff --git a/modules/processing/parsers/CAPE/AgentTesla.py b/modules/processing/parsers/CAPE/AgentTesla.py deleted file mode 100644 index b3c8146054c..00000000000 --- a/modules/processing/parsers/CAPE/AgentTesla.py +++ /dev/null @@ -1,74 +0,0 @@ -from contextlib import suppress - -from lib.cuckoo.common.integrations.strings import extract_strings - - -def extract_config(data): - config_dict = {} - with suppress(Exception): - if data[:2] == b"MZ": - lines = extract_strings(data=data, on_demand=True, minchars=3) - if not lines: - return - else: - lines = data.decode().split("\n") - base = next(i for i, line in enumerate(lines) if "Mozilla/5.0" in line) - if not base: - return - for x in range(1, 32): - # Data Exfiltration via Telegram - if "api.telegram.org" in lines[base + x]: - config_dict["Protocol"] = "Telegram" - config_dict["C2"] = lines[base + x] - config_dict["Password"] = lines[base + x + 1] - break - # Data Exfiltration via Discord - elif "discord" in lines[base + x]: - config_dict["Protocol"] = "Discord" - config_dict["C2"] = lines[base + x] - break - # Data Exfiltration via FTP - elif "ftp:" in lines[base + x]: - config_dict["Protocol"] = "FTP" - config_dict["C2"] = lines[base + x] - config_dict["Username"] = lines[base + x + 1] - config_dict["Password"] = lines[base + x + 2] - break - # Data Exfiltration via SMTP - elif "@" in lines[base + x]: - config_dict["Protocol"] = "SMTP" - if lines[base + x - 2].isdigit() and len(lines[base + x - 2]) <= 5: # check if length <= highest Port 65535 - # minchars 3 so Ports < 100 do not appear in strings / TBD: michars < 3 - config_dict["Port"] = lines[base + x - 2] - elif lines[base + x - 2] in {"true", "false"} and lines[base + x - 
3].isdigit() and len(lines[base + x - 3]) <= 5: - config_dict["Port"] = lines[base + x - 3] - config_dict["C2"] = lines[base + +x - 1] - config_dict["Username"] = lines[base + x] - config_dict["Password"] = lines[base + x + 1] - if "@" in lines[base + x + 2]: - config_dict["EmailTo"] = lines[base + x + 2] - break - # Get Persistence Payload Filename - for x in range(2, 22): - if ".exe" in lines[base + x]: - config_dict["Persistence_Filename"] = lines[base + x] - break - # Get External IP Check Services - externalipcheckservices = [] - for x in range(-4, 19): - if "ipify.org" in lines[base + x] or "ip-api.com" in lines[base + x]: - externalipcheckservices.append(lines[base + x]) - if externalipcheckservices: - config_dict["ExternalIPCheckServices"] = externalipcheckservices - - # Data Exfiltration via HTTP(S) - temp_match = ["http://", "https://"] # TBD: replace with a better url validator (Regex) - if "Protocol" not in config_dict.keys(): - for index, string in enumerate(lines[base:]): - if string == "Win32_BaseBoard": - for x in range(1, 8): - if any(s in lines[base + index + x] for s in temp_match): - config_dict["Protocol"] = "HTTP(S)" - config_dict["C2"] = lines[base + index + x] - break - return config_dict diff --git a/modules/processing/parsers/CAPE/AsyncRAT.py b/modules/processing/parsers/CAPE/AsyncRAT.py deleted file mode 100644 index 1220071ea7d..00000000000 --- a/modules/processing/parsers/CAPE/AsyncRAT.py +++ /dev/null @@ -1,5 +0,0 @@ -from rat_king_parser.rkp import RATConfigParser - - -def extract_config(data: bytes): - return RATConfigParser(data).report.get("config", {}) diff --git a/modules/processing/parsers/CAPE/AuroraStealer.py b/modules/processing/parsers/CAPE/AuroraStealer.py deleted file mode 100644 index ef72590eebf..00000000000 --- a/modules/processing/parsers/CAPE/AuroraStealer.py +++ /dev/null @@ -1,73 +0,0 @@ -# Derived from https://github.com/RussianPanda95/Configuration_extractors/blob/main/aurora_config_extractor.py -# A huge thank you to RussianPanda95 - -import base64 -import json -import logging -import re - -log = logging.getLogger(__name__) -log.setLevel(logging.INFO) - -patterns = [ - rb"[A-Za-z0-9+/]{4}(?:[A-Za-z0-9+/]{4})*(?=[0-9]+)", - rb"(?:[A-Za-z0-9+/]{4}){2,}(?:[A-Za-z0-9+/]{2}[AEIMQUYcgkosw048]=|[A-Za-z0-9+/][AQgw]==)", -] - - -def extract_config(data): - config_dict = {} - matches = [] - for pattern in patterns: - matches.extend(re.findall(pattern, data)) - - matches = [match for match in matches if len(match) > 90] - - # Search for the configuration module in the binary - config_match = re.search(rb"eyJCdWlsZElEI[^&]{0,400}", data) - if config_match: - matched_string = config_match.group(0).decode("utf-8") - decoded_str = base64.b64decode(matched_string).decode() - for item in decoded_str.split(","): - key = item.split(":")[0].strip("{").strip('"') - value = item.split(":")[1].strip('"') - if key == "IP": - key = "C2" - if value: - config_dict[key] = value - - grabber_found = False - - # Extracting the modules - for match in matches: - match_str = match.decode("utf-8") - decoded_str = base64.b64decode(match_str) - - if b"DW" in decoded_str: - data_dict = json.loads(decoded_str) - for elem in data_dict: - if elem["Method"] == "DW": - config_dict["Loader module"] = elem - - if b"PS" in decoded_str: - data_dict = json.loads(decoded_str) - for elem in data_dict: - if elem["Method"] == "PS": - config_dict["PowerShell module"] = elem - - if b"Path" in decoded_str: - grabber_found = True - break - else: - grabber_match = 
re.search(b"W3siUGF0aCI6.{116}", data) - if grabber_match: - encoded_string = grabber_match.group(0) - decoded_str = base64.b64decode(encoded_string) - grabber_str = decoded_str[:95].decode("utf-8", errors="ignore") - cleanup_str = grabber_str.split("[")[-1].split("]")[0] - - if not grabber_found: - grabber_found = True - config_dict["Grabber"] = cleanup_str - - return config_dict diff --git a/modules/processing/parsers/CAPE/Azorult.py b/modules/processing/parsers/CAPE/Azorult.py deleted file mode 100644 index e3c886b5a82..00000000000 --- a/modules/processing/parsers/CAPE/Azorult.py +++ /dev/null @@ -1,80 +0,0 @@ -# Copyright (C) 2019 Kevin O'Reilly (kevoreilly@gmail.com) -# This program is free software: you can redistribute it and/or modify -# it under the terms of the GNU General Public License as published by -# the Free Software Foundation, either version 3 of the License, or -# (at your option) any later version. -# -# This program is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -# GNU General Public License for more details. -# -# You should have received a copy of the GNU General Public License -# along with this program. If not, see . - -import struct - -import pefile -import yara - -DESCRIPTION = "Azorult configuration parser." -AUTHOR = "kevoreilly" - -rule_source = """ -rule Azorult -{ - meta: - author = "kevoreilly" - description = "Azorult Payload" - cape_type = "Azorult Payload" - strings: - $ref_c2 = {6A 00 6A 00 6A 00 6A 00 68 ?? ?? ?? ?? FF 55 F0 8B D8 C7 47 10 ?? ?? ?? ?? 90 C7 45 B0 C0 C6 2D 00 6A 04 8D 45 B0 50 6A 06 53 FF 55 D4} - condition: - uint16(0) == 0x5A4D and all of them -} -""" - -MAX_STRING_SIZE = 32 - - -def yara_scan(raw_data, rule_name): - yara_rules = yara.compile(source=rule_source) - matches = yara_rules.match(data=raw_data) - - for match in matches: - if match.rule != "Azorult": - continue - - for block in match.strings: - for instance in block.instances: - if block.identifier == rule_name: - return {block.identifier: instance.offset} - - -def string_from_offset(data, offset): - return data[offset : offset + MAX_STRING_SIZE].split(b"\0", 1)[0] - - -def extract_config(filebuf): - pe = pefile.PE(data=filebuf, fast_load=False) - image_base = pe.OPTIONAL_HEADER.ImageBase - - ref_c2 = yara_scan(filebuf, "$ref_c2") - if ref_c2 is None: - return - - ref_c2_offset = int(ref_c2["$ref_c2"]) - - c2_list_va = struct.unpack("i", filebuf[ref_c2_offset + 21 : ref_c2_offset + 25])[0] - c2_list_rva = c2_list_va - image_base - - try: - c2_list_offset = pe.get_offset_from_rva(c2_list_rva) - except pefile.PEFormatError as err: - print(err) - - c2_domain = string_from_offset(filebuf, c2_list_offset) - if c2_domain: - return {"address": c2_domain.decode()} - - return {} diff --git a/modules/processing/parsers/CAPE/BackOffLoader.py b/modules/processing/parsers/CAPE/BackOffLoader.py deleted file mode 100644 index f62fd29461d..00000000000 --- a/modules/processing/parsers/CAPE/BackOffLoader.py +++ /dev/null @@ -1,44 +0,0 @@ -from binascii import hexlify -from hashlib import md5 -from struct import unpack_from -from sys import argv - -import pefile -from Cryptodome.Cipher import ARC4 - -CFG_START = "1020304050607080" - - -def RC4(key, data): - cipher = ARC4.new(key) - return cipher.decrypt(data) - - -def extract_config(data): - config_data = {} - pe = pefile.PE(data=data) - for section in pe.sections: - if b".data" in section.Name: - data = 
section.get_data() - if CFG_START != hexlify(unpack_from(">8s", data, offset=8)[0]): - return None - rc4_seed = bytes(bytearray(unpack_from(">8B", data, offset=24))) - key = md5(rc4_seed).digest()[:5] - enc_data = bytes(bytearray(unpack_from(">8192B", data, offset=32))) - dec_data = RC4(key, enc_data) - config_data = { - "Version": unpack_from(">5s", data, offset=16)[0], - "RC4Seed": hexlify(rc4_seed), - "EncryptionKey": hexlify(key), - "OnDiskConfigKey": unpack_from("20s", data, offset=8224)[0], - "Build": dec_data[:16].strip("\x00"), - "URLs": [url.strip("\x00") for url in dec_data[16:].split("|")], - } - return config_data - - -if __name__ == "__main__": - filename = argv[1] - with open(filename, "r") as infile: - t = extract_config(infile.read()) - print(t) diff --git a/modules/processing/parsers/CAPE/BackOffPOS.py b/modules/processing/parsers/CAPE/BackOffPOS.py deleted file mode 100644 index c3bc9693af0..00000000000 --- a/modules/processing/parsers/CAPE/BackOffPOS.py +++ /dev/null @@ -1,45 +0,0 @@ -from binascii import hexlify -from hashlib import md5 -from struct import unpack_from -from sys import argv - -import pefile -from Cryptodome.Cipher import ARC4 - -header_ptrn = b"Content-Type: application/x-www-form-urlencoded" - - -def RC4(key, data): - cipher = ARC4.new(key) - return cipher.decrypt(data) - - -def extract_config(data): - config_data = {} - pe = pefile.PE(data=data) - for section in pe.sections: - if b".data" in section.Name: - data = section.get_data() - cfg_start = data.find(header_ptrn) - if not cfg_start or cfg_start == -1: - return None - start_offset = cfg_start + len(header_ptrn) + 1 - rc4_seed = bytes(bytearray(unpack_from(">8B", data, offset=start_offset))) - key = md5(rc4_seed).digest()[:5] - enc_data = bytes(bytearray(unpack_from(">8192B", data, offset=start_offset + 8))) - dec_data = RC4(key, enc_data) - config_data = { - "RC4Seed": hexlify(rc4_seed), - "EncryptionKey": hexlify(key), - "Build": dec_data[:16].strip("\x00"), - "URLs": [url.strip("\x00") for url in dec_data[16:].split("|")], - "Version": unpack_from(">5s", data, offset=start_offset + 16 + 8192)[0], - } - return config_data - - -if __name__ == "__main__": - filename = argv[1] - with open(filename, "rb") as infile: - t = extract_config(infile.read()) - print(t) diff --git a/modules/processing/parsers/CAPE/BitPaymer.py b/modules/processing/parsers/CAPE/BitPaymer.py deleted file mode 100644 index d8c5cfed09d..00000000000 --- a/modules/processing/parsers/CAPE/BitPaymer.py +++ /dev/null @@ -1,91 +0,0 @@ -# Copyright (C) 2019 Kevin O'Reilly (kevoreilly@gmail.com) -# This program is free software: you can redistribute it and/or modify -# it under the terms of the GNU General Public License as published by -# the Free Software Foundation, either version 3 of the License, or -# (at your option) any later version. -# -# This program is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -# GNU General Public License for more details. -# -# You should have received a copy of the GNU General Public License -# along with this program. If not, see . - -DESCRIPTION = "BitPaymer configuration parser." 
-AUTHOR = "kevoreilly" - -import string - -import pefile -import yara -from Cryptodome.Cipher import ARC4 - -rule_source = """ -rule BitPaymer -{ - meta: - author = "kevoreilly" - description = "BitPaymer Payload" - cape_type = "BitPaymer Payload" - - strings: - $decrypt32 = {6A 40 58 3B C8 0F 4D C1 39 46 04 7D 50 53 57 8B F8 81 E7 3F 00 00 80 79 05 4F 83 CF C0 47 F7 DF 99 1B FF 83 E2 3F 03 C2 F7 DF C1 F8 06 03 F8 C1 E7 06 57} - $antidefender = "TouchMeNot" wide - condition: - uint16(0) == 0x5A4D and all of them -} -""" - -LEN_BLOB_KEY = 40 - - -def convert_char(c): - if c in (string.letters + string.digits + string.punctuation + " \t\r\n"): - # ToDo gonna break as its int - return c - return f"\\x{ord(c):02x}" - - -def decrypt_rc4(key, data): - cipher = ARC4.new(key) - return cipher.decrypt(data) - - -def yara_scan(raw_data, rule_name): - yara_rules = yara.compile(source=rule_source) - matches = yara_rules.match(data=raw_data) - for match in matches: - if match.rule != "BitPaymer": - continue - - for block in match.strings: - for instance in block.instances: - if block.identifier == rule_name: - return {block.identifier: instance.offset} - - -def extract_rdata(pe): - for section in pe.sections: - if ".rdata" in section.Name: - return section.get_data(section.VirtualAddress, section.SizeOfRawData) - return None - - -def extract_config(file_data): - pe = pefile.PE(data=file_data, fast_load=False) - config = {} - blobs = filter(None, [x.strip(b"\x00\x00\x00\x00") for x in extract_rdata(pe).split(b"\x00\x00\x00\x00")]) - for blob in blobs: - if len(blob) < LEN_BLOB_KEY: - continue - raw = decrypt_rc4(blob[:LEN_BLOB_KEY][::-1], blob[LEN_BLOB_KEY:]) - if not raw: - continue - for item in raw.split(b"\x00"): - data = "".join(convert_char(c) for c in item) - if len(data) == 760: - config["RSA public key"] = data - elif len(data) > 1 and "\\x" not in data: - config["strings"] = data - return config diff --git a/modules/processing/parsers/CAPE/BlackDropper.py b/modules/processing/parsers/CAPE/BlackDropper.py deleted file mode 100644 index 31f57e7095e..00000000000 --- a/modules/processing/parsers/CAPE/BlackDropper.py +++ /dev/null @@ -1,96 +0,0 @@ -# Copyright (C) 2024 enzok -# This program is free software: you can redistribute it and/or modify -# it under the terms of the GNU General Public License as published by -# the Free Software Foundation, either version 3 of the License, or -# (at your option) any later version. -# -# This program is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -# GNU General Public License for more details. -# -# You should have received a copy of the GNU General Public License -# along with this program. If not, see . 
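The BitPaymer extractor above, and the DoppelPaymer extractor later in this diff, share one decryption idiom: `.rdata` is split on runs of four null bytes, and each resulting blob carries its own RC4 key as its first 40 bytes stored in reverse order, with the remainder being ciphertext. A hedged sketch of that step in isolation, using a synthetic blob rather than real sample data:

```python
from Cryptodome.Cipher import ARC4

LEN_BLOB_KEY = 40


def decrypt_rdata_blob(blob: bytes) -> bytes:
    # The first 40 bytes, reversed, form the RC4 key; everything after is ciphertext.
    key = blob[:LEN_BLOB_KEY][::-1]
    return ARC4.new(key).decrypt(blob[LEN_BLOB_KEY:])


# Synthetic round-trip to show the blob layout:
key = bytes(range(LEN_BLOB_KEY))
blob = key[::-1] + ARC4.new(key).encrypt(b"-----BEGIN PUBLIC KEY-----\x00campaign-string")
plain = decrypt_rdata_blob(blob)
assert plain.split(b"\x00")[1] == b"campaign-string"
```

The parsers then walk the null-separated plaintext items and classify them by length (for example, the long RSA public key blob versus the shorter campaign strings).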
- - -import datetime -import re -from contextlib import suppress - -import pefile - - -def get_current_year() -> str: - current_date = datetime.datetime.now() - return str(current_date.year) - - -def decrypt_string(encoded_string: str, key: str) -> str: - encoded_bytes = bytearray.fromhex(encoded_string) - key_bytes = bytearray(ord(char) for char in key) - encoded_length = len(encoded_bytes) - key_length = len(key_bytes) - decoded_bytes = bytearray(encoded_length) - - for i in range(encoded_length): - new_byte = (encoded_bytes[i] ^ key_bytes[i % key_length]) & 0xFF - decoded_bytes[i] = new_byte - - decoded_string = decoded_bytes.decode("ascii", errors="ignore") - - return decoded_string - - -def extract_config(data: bytes) -> dict: - pe = pefile.PE(data=data) - rdata_section = None - for section in pe.sections: - if b".rdata" in section.Name: - rdata_section = section - break - - if not rdata_section: - return {} - - rdata_data = rdata_section.get_data() - patterns = [b"Builder\.dll\x00", b"Builder\.exe\x00"] - matches = [] - for pattern in patterns: - matches.extend(re.finditer(pattern, rdata_data)) - - found_strings = set() - for match in matches: - start = max(0, match.start() - 1024) - end = min(len(rdata_data), match.end() + 1024) - found_strings.update(re.findall(b"[\x20-\x7E]{4,}?\x00", rdata_data[start:end])) - - result = {} - urls = [] - directories = [] - campaign = "" - - if found_strings: - for string in found_strings: - with suppress(UnicodeDecodeError): - decoded_string = string.decode("utf-8").rstrip("\x00") - - if re.match(r"^[0-9A-Fa-f]+$", decoded_string): - key = get_current_year() - url = decrypt_string(decoded_string, key) - if url: - urls.append(url) - elif decoded_string.count("\\") > 1: - directories.append(decoded_string) - elif re.match(r"^(?![A-Z]{6,}$)[a-zA-Z0-9\-=]{6,}$", decoded_string): - campaign = decoded_string - - result = {"urls": sorted(urls), "directories": directories, "campaign": campaign} - - return result - - -if __name__ == "__main__": - import sys - - with open(sys.argv[1], "rb") as f: - print(extract_config(f.read())) diff --git a/modules/processing/parsers/CAPE/BlackNix.py b/modules/processing/parsers/CAPE/BlackNix.py deleted file mode 100644 index 56dec06c9be..00000000000 --- a/modules/processing/parsers/CAPE/BlackNix.py +++ /dev/null @@ -1,57 +0,0 @@ -import pefile - - -def extract_raw_config(raw_data): - try: - pe = pefile.PE(data=raw_data) - rt_string_idx = [entry.id for entry in pe.DIRECTORY_ENTRY_RESOURCE.entries].index(pefile.RESOURCE_TYPE["RT_RCDATA"]) - rt_string_directory = pe.DIRECTORY_ENTRY_RESOURCE.entries[rt_string_idx] - for entry in rt_string_directory.directory.entries: - if str(entry.name) == "SETTINGS": - data_rva = entry.directory.entries[0].data.struct.OffsetToData - size = entry.directory.entries[0].data.struct.Size - data = pe.get_memory_mapped_image()[data_rva : data_rva + size] - return data.split("}") - except Exception: - return None - - -def decode(line): - return "".join(chr(ord(char) - 1) for char in line) - - -def domain_parse(config): - return [domain.split(":", 1)[0] for domain in config["Domains"].split(";")] - - -def extract_config(data): - try: - config_raw = extract_raw_config(data) - if config_raw: - return { - "Mutex": decode(config_raw[1])[::-1], - "Anti Sandboxie": decode(config_raw[2])[::-1], - "Max Folder Size": decode(config_raw[3])[::-1], - "Delay Time": decode(config_raw[4])[::-1], - "Password": decode(config_raw[5])[::-1], - "Kernel Mode Unhooking": decode(config_raw[6])[::-1], - "User More 
Unhooking": decode(config_raw[7])[::-1], - "Melt Server": decode(config_raw[8])[::-1], - "Offline Screen Capture": decode(config_raw[9])[::-1], - "Offline Keylogger": decode(config_raw[10])[::-1], - "Copy To ADS": decode(config_raw[11])[::-1], - "Domain": decode(config_raw[12])[::-1], - "Persistence Thread": decode(config_raw[13])[::-1], - "Active X Key": decode(config_raw[14])[::-1], - "Registry Key": decode(config_raw[15])[::-1], - "Active X Run": decode(config_raw[16])[::-1], - "Registry Run": decode(config_raw[17])[::-1], - "Safe Mode Startup": decode(config_raw[18])[::-1], - "Inject winlogon.exe": decode(config_raw[19])[::-1], - "Install Name": decode(config_raw[20])[::-1], - "Install Path": decode(config_raw[21])[::-1], - "Campaign Name": decode(config_raw[22])[::-1], - "Campaign Group": decode(config_raw[23])[::-1], - } - except Exception: - return None diff --git a/modules/processing/parsers/CAPE/Blister.py b/modules/processing/parsers/CAPE/Blister.py deleted file mode 100644 index 3207644ff9f..00000000000 --- a/modules/processing/parsers/CAPE/Blister.py +++ /dev/null @@ -1,557 +0,0 @@ -# BLISTER Configuration Extractor -# Python script to extract the configuration and payload from BLISTER samples. -# Author: soolidsnake (Elastic) -# https://elastic.github.io/security-research/tools/blister-config-extractor/ -# -# Modified for CAPE by kevoreilly - -import binascii -import json -import logging -import os -import sys -from optparse import OptionParser -from pathlib import Path -from struct import pack, unpack - -import pefile -import yara - -from lib.cuckoo.common.integrations.lznt1 import lznt1 - -log = logging.getLogger(__name__) - - -# https://github.com/Robin-Pwner/Rabbit-Cipher/ -def ROTL8(v, n): - return ((v << n) & 0xFF) | ((v >> (8 - n)) & 0xFF) - - -def ROTL16(v, n): - return ((v << n) & 0xFFFF) | ((v >> (16 - n)) & 0xFFFF) - - -def ROTL32(v, n): - return ((v << n) & 0xFFFFFFFF) | ((v >> (32 - n)) & 0xFFFFFFFF) - - -def ROTL64(v, n): - return ((v << n) & 0xFFFFFFFFFFFFFFFF) | ((v >> (64 - n)) & 0xFFFFFFFFFFFFFFFF) - - -def ROTR8(v, n): - return ROTL8(v, 8 - n) - - -def ROTR16(v, n): - return ROTL16(v, 16 - n) - - -def ROTR32(v, n): - return ROTL32(v, 32 - n) - - -def ROTR64(v, n): - return ROTL64(v, 64 - n) - - -def SWAP32(v): - return (ROTL32(v, 8) & 0x00FF00FF) | (ROTL32(v, 24) & 0xFF00FF00) - - -class Rabbit_state(object): - def __init__(self): - self.x = [0] * 8 - self.c = [0] * 8 - self.carry = 0 - - -class Rabbit_ctx(object): - def __init__(self): - self.m = Rabbit_state() - self.w = Rabbit_state() - - -class Rabbit(object): - def __init__(self, key, iv): - self.ctx = Rabbit_ctx() - self.set_iv(iv) - self.set_key(key) - if len(iv): - pass - - def g_func(self, x): - x = x & 0xFFFFFFFF - x = (x * x) & 0xFFFFFFFFFFFFFFFF - result = (x >> 32) ^ (x & 0xFFFFFFFF) - return result - - def set_key(self, key): - # generate four subkeys - key0 = unpack("> 16) & 0xFFFF) - s.x[3] = ((key0 << 16) & 0xFFFFFFFF) | ((key3 >> 16) & 0xFFFF) - s.x[5] = ((key1 << 16) & 0xFFFFFFFF) | ((key0 >> 16) & 0xFFFF) - s.x[7] = ((key2 << 16) & 0xFFFFFFFF) | ((key1 >> 16) & 0xFFFF) - # generate initial counter values - s.c[0] = ROTL32(key2, 16) - s.c[2] = ROTL32(key3, 16) - s.c[4] = ROTL32(key0, 16) - s.c[6] = ROTL32(key1, 16) - s.c[1] = (key0 & 0xFFFF0000) | (key1 & 0xFFFF) - s.c[3] = (key1 & 0xFFFF0000) | (key2 & 0xFFFF) - s.c[5] = (key2 & 0xFFFF0000) | (key3 & 0xFFFF) - s.c[7] = (key3 & 0xFFFF0000) | (key0 & 0xFFFF) - s.carry = 0 - - # Iterate system four times - for i in range(4): - 
self.next_state(self.ctx.m) - - for i in range(8): - # modify the counters - self.ctx.m.c[i] ^= self.ctx.m.x[(i + 4) & 7] - # Copy master instance to work instance - self.ctx.w = self.copy_state(self.ctx.m) - - def copy_state(self, state): - n = Rabbit_state() - n.carry = state.carry - - for i, j in enumerate(state.x): - n.x[i] = j - for i, j in enumerate(state.c): - n.c[i] = j - return n - - def set_iv(self, iv): - # generate four subvectors - v = [0] * 4 - v[0] = unpack("> 16) | (v[2] & 0xFFFF0000) - v[3] = ((v[2] << 16) | (v[0] & 0x0000FFFF)) & 0xFFFFFFFF - # Modify work's counter values - for i in range(8): - self.ctx.w.c[i] = self.ctx.m.c[i] ^ v[i & 3] - # Copy state variables but not carry flag - tmp = [] - - for cc in self.ctx.m.x: - tmp += [cc] - self.ctx.w.x = tmp - - # Iterate system four times - for i in range(4): - self.next_state(self.ctx.w) - - def next_state(self, state): - g = [0] * 8 - x = [0x4D34D34D, 0xD34D34D3, 0x34D34D34] - # calculate new counter values - for i in range(8): - tmp = state.c[i] - state.c[i] = (state.c[i] + x[i % 3] + state.carry) & 0xFFFFFFFF - state.carry = state.c[i] < tmp - # calculate the g-values - for i in range(8): - g[i] = self.g_func(state.x[i] + state.c[i]) - # calculate new state values - - j = 7 - i = 0 - while i < 8: - state.x[i] = (g[i] + ROTL32(g[j], 16) + ROTL32(g[j - 1], 16)) & 0xFFFFFFFF - i += 1 - j += 1 - state.x[i] = (g[i] + ROTL32(g[j & 7], 8) + g[j - 1]) & 0xFFFFFFFF - i += 1 - j += 1 - j &= 7 - - def crypt(self, msg): - plain = [] - msg_len = len(msg) - c = self.ctx - x = [0] * 4 - start = 0 - while True: - self.next_state(c.w) - for i in range(4): - x[i] = c.w.x[i << 1] - x[0] ^= (c.w.x[5] >> 16) ^ (c.w.x[3] << 16) - x[1] ^= (c.w.x[7] >> 16) ^ (c.w.x[5] << 16) - x[2] ^= (c.w.x[1] >> 16) ^ (c.w.x[7] << 16) - x[3] ^= (c.w.x[3] >> 16) ^ (c.w.x[1] << 16) - b = [0] * 16 - for i, j in enumerate(x): - for z in range(4): - b[z + 4 * i] = 0xFF & (j >> (8 * z)) - for i in range(16): - plain.append((msg[start] ^ b[i])) - start += 1 - if start == msg_len: - return bytes(plain) - - -def st(b): - a = "" - for x in b: - a += chr(x) - return a - - -def p32(a): - return pack(". - -from contextlib import suppress - -import pefile - -DESCRIPTION = "BuerLoader configuration parser." -AUTHOR = "kevoreilly" - - -def decrypt_string(string): - return "".join(chr(ord(char) - 6) for char in string) - - -def extract_config(filebuf): - cfg = {} - pe = pefile.PE(data=filebuf) - data_sections = [s for s in pe.sections if s.Name.find(b".data") != -1] - if not data_sections: - return None - data = data_sections[0].get_data() - for item in data.split(b"\x00\x00"): - with suppress(Exception): - dec = decrypt_string(item.lstrip(b"\x00").rstrip(b"\x00").decode()) - if "dll" not in dec and " " not in dec and ";" not in dec and "." 
in dec: - cfg.setdefault("address", []).append(dec) - return cfg diff --git a/modules/processing/parsers/CAPE/BumbleBee.py b/modules/processing/parsers/CAPE/BumbleBee.py deleted file mode 100644 index b53fdfc722f..00000000000 --- a/modules/processing/parsers/CAPE/BumbleBee.py +++ /dev/null @@ -1,242 +0,0 @@ -# Thanks to @MuziSec - https://github.com/MuziSec/malware_scripts/blob/main/bumblebee/extract_config.py -# 2024 updates by @enzok -# -import logging -import traceback -from contextlib import suppress - -import pefile -import regex as re -import yara -from Cryptodome.Cipher import ARC4 - -log = logging.getLogger(__name__) -log.setLevel(logging.INFO) - -rule_source = """ -rule BumbleBee -{ - meta: - author = "enzok" - description = "BumbleBee 2024" - strings: - $rc4key = {48 [6] 48 [6] E8 [4] 4C 89 AD [4] 4C 89 AD [4] 4C 89 B5 [4] 4C 89 AD [4] 44 88 AD [4] 48 8D 15 [4] 44 38 2D [4] 75} - $botidlgt = {4C 8B C1 B? 4F 00 00 00 48 8D 0D [4] E8 [4] 4C 8B C3 48 8D 0D [4] B? 4F 00 00 00 E8 [4] 4C 8B C3 48 8D 0D [4] B? FF 0F 00 00 E8} - $botid = {90 48 [6] E8 [4] 4C 89 AD [4] 4C 89 AD [4] 4C 89 B5 [4] 4C 89 AD [4] 44 88 AD [4] 48 8D 15 [4] 44 38 2D [4] 75} - $port = {4C 89 6D ?? 4C 89 6D ?? 4c 89 75 ?? 4C 89 6D ?? 44 88 6D ?? 48 8D 05 [4] 44 38 2D [4] 75} - $dga1 = {4C 89 75 ?? 4C 89 6D ?? 44 88 6D ?? 48 8B 1D [4] 48 8D 0D [4] E8 [4] 8B F8} - $dga2 = {48 8D 0D [4] E8 [4] 8B F0 4C 89 6D ?? 4C 89 6D ?? 4C 89 75 ?? 4C 89 6D ?? 44 88 6D ?? 48 8D 15 [4] 44 38 2D [4] 75} - condition: - $rc4key and all of ($botid*) and 2 of ($port, $port, $dga1, $dga2) -} -""" - -yara_rules = yara.compile(source=rule_source) - - -def extract_key_data(data, pe, key_match): - """ - Given key match, convert rva to file offset and return key data at that offset. - """ - try: - # Get relative rva. The LEA is using a relative address. This address is relative to the address of the next ins. - relative_rva = pe.get_rva_from_offset(key_match.start() + int(len(key_match.group()) / 2)) - # Now that we have the relative rva, we need to get the file offset - key_offset = pe.get_offset_from_rva(relative_rva + int.from_bytes(key_match.group("key"), byteorder="little")) - # Read arbitrary number of byes from key offset and split on null bytes to extract key - key = data[key_offset : key_offset + 0x40].split(b"\x00")[0] - except Exception as e: - log.debug(f"There was an exception extracting the key: {e}") - log.debug(traceback.format_exc()) - return False - return key - - -def extract_config_data(data, pe, config_match): - """ - Given config match, convert rva to file offset and return data at that offset. - The LEA ins are using relative addressing. Referenced data is relative to the address of the NEXT ins. - This is inefficient but I'm bad at Python, okay? 
- """ - try: - # Get campaign id ciphertext - campaign_id_rva = pe.get_rva_from_offset(config_match.start() + int(len(config_match.group("campaign_id_ins")))) - campaign_id_offset = pe.get_offset_from_rva( - campaign_id_rva + int.from_bytes(config_match.group("campaign_id"), byteorder="little") - ) - campaign_id_ct = data[campaign_id_offset : campaign_id_offset + 0x10] - except Exception as e: - log.debug(f"There was an exception extracting the campaign id: {e}") - log.debug(traceback.format_exc()) - return False, False, False - - try: - # Get botnet id ciphertext - botnet_id_rva = pe.get_rva_from_offset( - config_match.start() + int(len(config_match.group("campaign_id_ins"))) + int(len(config_match.group("botnet_id_ins"))) - ) - botnet_id_offset = pe.get_offset_from_rva( - botnet_id_rva + int.from_bytes(config_match.group("botnet_id"), byteorder="little") - ) - botnet_id_ct = data[botnet_id_offset : botnet_id_offset + 0x10] - except Exception as e: - log.debug(f"There was an exception extracting the botnet id: {e}") - log.debug(traceback.format_exc()) - return False, False, False - - # Get C2 ciphertext - try: - c2s_rva = pe.get_rva_from_offset( - config_match.start() - + int(len(config_match.group("campaign_id_ins"))) - + int(len(config_match.group("botnet_id_ins"))) - + int(len(config_match.group("c2s_ins"))) - ) - c2s_offset = pe.get_offset_from_rva(c2s_rva + int.from_bytes(config_match.group("c2s"), byteorder="little")) - c2s_ct = data[c2s_offset : c2s_offset + 0x400] - except Exception as e: - log.debug(f"There was an exception extracting the C2s: {e}") - log.debug(traceback.format_exc()) - return False, False, False - - return campaign_id_ct, botnet_id_ct, c2s_ct - - -def extract_2024(pe, filebuf): - cfg = {} - rc4key_init_offset = 0 - botid_init_offset = 0 - port_init_offset = 0 - dga1_init_offset = 0 - dga2_init_offset = 0 - botidlgt_init_offset = 0 - - matches = yara_rules.match(data=filebuf) - if not matches: - return - - for match in matches: - if match.rule != "BumbleBee": - continue - for item in match.strings: - for instance in item.instances: - if "$rc4key" in item.identifier: - rc4key_init_offset = int(instance.offset) - elif "$botidlgt" in item.identifier: - botidlgt_init_offset = int(instance.offset) - elif "$botid" in item.identifier: - botid_init_offset = int(instance.offset) - elif "$port" in item.identifier: - port_init_offset = int(instance.offset) - elif "$dga1" in item.identifier: - dga1_init_offset = int(instance.offset) - elif "$dga2" in item.identifier: - dga2_init_offset = int(instance.offset) - - if not rc4key_init_offset: - return - - key_offset = pe.get_dword_from_offset(rc4key_init_offset + 57) - key_rva = pe.get_rva_from_offset(rc4key_init_offset + 61) + key_offset - key = pe.get_string_at_rva(key_rva) - cfg["RC4 key"] = key.decode() - - botid_offset = pe.get_dword_from_offset(botid_init_offset + 51) - botid_rva = pe.get_rva_from_offset(botid_init_offset + 55) + botid_offset - botid_len_offset = pe.get_dword_from_offset(botidlgt_init_offset + 31) - botid_data = pe.get_data(botid_rva)[:botid_len_offset] - with suppress(Exception): - botid = ARC4.new(key).decrypt(botid_data).split(b"\x00")[0].decode() - cfg["Botid"] = botid - - port_offset = pe.get_dword_from_offset(port_init_offset + 23) - port_rva = pe.get_rva_from_offset(port_init_offset + 27) + port_offset - port_len_offset = pe.get_dword_from_offset(botidlgt_init_offset + 4) - port_data = pe.get_data(port_rva)[:port_len_offset] - with suppress(Exception): - port = 
ARC4.new(key).decrypt(port_data).split(b"\x00")[0].decode() - cfg["Port"] = port - - dgaseed_offset = pe.get_dword_from_offset(dga1_init_offset + 15) - dgaseed_rva = pe.get_rva_from_offset(dga1_init_offset + 19) + dgaseed_offset - dgaseed_data = pe.get_qword_at_rva(dgaseed_rva) - cfg["DGA seed"] = int(dgaseed_data) - - numdga_offset = pe.get_dword_from_offset(dga1_init_offset + 22) - numdga_rva = pe.get_rva_from_offset(dga1_init_offset + 26) + numdga_offset - numdga_data = pe.get_string_at_rva(numdga_rva) - cfg["Number DGA domains"] = numdga_data.decode() - - domainlen_offset = pe.get_dword_from_offset(dga2_init_offset + 3) - domainlen_rva = pe.get_rva_from_offset(dga2_init_offset + 7) + domainlen_offset - domainlen_data = pe.get_string_at_rva(domainlen_rva) - cfg["Domain length"] = domainlen_data.decode() - - tld_offset = pe.get_dword_from_offset(dga2_init_offset + 37) - tld_rva = pe.get_rva_from_offset(dga2_init_offset + 41) + tld_offset - tld_data = pe.get_string_at_rva(tld_rva).decode() - cfg["TLD"] = tld_data - - return cfg - - -def extract_config(data): - """ - Extract key and config and decrypt - """ - cfg = {} - pe = None - try: - with suppress(Exception): - pe = pefile.PE(data=data, fast_load=True) - - if not pe: - return cfg - - key_regex = re.compile(rb"(\x48\x8D.(?P....)\x80\x3D....\x00)", re.DOTALL) - regex = re.compile( - rb"(?\x48\x8D.(?P....))(?P\x48\x8D.(?P....))(?P\x48\x8D.(?P....))", - re.DOTALL, - ) - # Extract Key - key_match = list(key_regex.finditer(data)) - if len(key_match) > 1: - for index, match in enumerate(key_match): - key = extract_key_data(data, pe, match) - if not key: - continue - if index == 0: - cfg["Botnet ID"] = key.decode() - elif index == 1: - cfg["Campaign ID"] = key.decode() - elif index == 2: - cfg["Data"] = key.decode("latin-1") - elif index == 3: - cfg["C2s"] = list(key.decode().split(",")) - elif len(key_match) == 1: - key = extract_key_data(data, pe, key_match[0]) - if not key: - return cfg - cfg["RC4 Key"] = key.decode() - # Extract config ciphertext - config_match = regex.search(data) - campaign_id, botnet_id, c2s = extract_config_data(data, pe, config_match) - if campaign_id: - cfg["Campaign ID"] = ARC4.new(key).decrypt(campaign_id).split(b"\x00")[0].decode() - if botnet_id: - cfg["Botnet ID"] = ARC4.new(key).decrypt(botnet_id).split(b"\x00")[0].decode() - if c2s: - cfg["C2s"] = list(ARC4.new(key).decrypt(c2s).split(b"\x00")[0].decode().split(",")) - except Exception as e: - log.error("This is broken: %s", str(e), exc_info=True) - - if not cfg: - cfg = extract_2024(pe, data) - - return cfg - - -if __name__ == "__main__": - import sys - - print(extract_config(open(sys.argv[1], "rb").read())) diff --git a/modules/processing/parsers/CAPE/Carbanak.py b/modules/processing/parsers/CAPE/Carbanak.py deleted file mode 100644 index b3d2cb2e086..00000000000 --- a/modules/processing/parsers/CAPE/Carbanak.py +++ /dev/null @@ -1,186 +0,0 @@ -# Copyright (C) 2024 enzok -# This program is free software: you can redistribute it and/or modify -# it under the terms of the GNU General Public License as published by -# the Free Software Foundation, either version 3 of the License, or -# (at your option) any later version. -# -# This program is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -# GNU General Public License for more details. 
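A pattern that recurs in the BumbleBee extractor above and again in the Carbanak parser that follows is resolving a RIP-relative displacement located by a YARA match: read the 32-bit displacement out of the matched instruction, add it to the RVA of the next instruction, and then read the referenced data at that RVA. A small sketch of the arithmetic with `pefile`; the 7-byte `lea reg, [rip+disp32]` encoding and the helper name are assumptions for illustration, since each parser hard-codes its own instruction offsets:

```python
import pefile


def resolve_rip_relative(pe: pefile.PE, insn_file_offset: int, disp_offset: int = 3, insn_len: int = 7) -> int:
    """Return the RVA referenced by a RIP-relative instruction at a file offset.

    Defaults assume a 7-byte encoding (REX prefix + opcode + ModRM + disp32);
    adjust disp_offset/insn_len for other instruction forms.
    """
    disp = pe.get_dword_from_offset(insn_file_offset + disp_offset)
    next_insn_rva = pe.get_rva_from_offset(insn_file_offset + insn_len)
    # Mask to 32 bits so a negative displacement (stored as unsigned) wraps correctly.
    return (next_insn_rva + disp) & 0xFFFFFFFF


# Typical use, with the offset taken from a YARA string instance:
# pe = pefile.PE(data=filebuf, fast_load=True)
# rva = resolve_rip_relative(pe, instance.offset)
# value = pe.get_string_at_rva(rva)
```

The same `pefile` accessors (`get_dword_from_offset`, `get_rva_from_offset`, `get_string_at_rva`) are the ones the deleted parsers rely on; only the fixed offsets differ per family.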
-# -# You should have received a copy of the GNU General Public License -# along with this program. If not, see . - - -import logging -import re -from contextlib import suppress - -import pefile -import yara - -log = logging.getLogger(__name__) -log.setLevel(logging.INFO) - -rule_source = """ -rule Carbanak -{ - meta: - author = "enzok" - description = "Carbanak sbox constants" - cape_type = "Carbanak Payload" - strings: - $constants = {0F B7 05 [3] 00 0F B7 1D [3] 00 83 25 [3] 00 00 89 05 [3] 00 0F B7 05 [3] 00 89 1D [3] 00 89 05 [3] 00 33 C0 4? 8D 4D} - condition: - all of them -} -""" - -yara_rules = yara.compile(source=rule_source) - -const_a = 0 -const_b = 0 -const_c = 0 - - -def decode_string(src, sbox): - lenstr = len(src) - 4 - if lenstr < 0: - lenstr = 0 - newstr = bytearray() - lenblock = int(lenstr / 4) - nb = 0 - rb = 0 - delta = 0 - n = 0 - i = 0 - while n < lenstr: - if rb == 0: - nb += 1 - if nb <= 4: - delta = src[i] - 97 - i += 1 - rb = lenblock - else: - rb = lenstr - n - elif rb > 0: - rb -= 1 - c = src[i] - if c < 32: - min = 1 - max = 31 - elif c < 128: - min = 32 - max = 127 - else: - min = 128 - max = 255 - c = sbox[c] - c -= delta - if c < min: - c = max - min + c - n += 1 - newstr.append(c) - i += 1 - return newstr - - -def scramble(sbox, start, end, count): - global const_a - length = end - start + 1 - while count > 0: - s1 = (const_c + const_a * const_b) & 0xFFFF - const_a = (const_c + s1 * const_b) & 0xFFFF - i = start + s1 % length - s3 = sbox[i] - j = start + const_a % length - sbox[i] = sbox[j] - sbox[j] = s3 - count -= 1 - return sbox - - -def extract_config(filebuf): - global const_a, const_b, const_c - cfg = {} - constants_offset = None - pe = pefile.PE(data=filebuf) - matches = yara_rules.match(data=filebuf) - if not matches: - return - - for match in matches: - if match.rule != "Carbanak": - continue - for item in match.strings: - for instance in item.instances: - if "$constants" in item.identifier: - constants_offset = int(instance.offset) - - if not constants_offset: - return - - data_sections = [s for s in pe.sections if s.Name.find(b".data") != -1] - text_sections = [s for s in pe.sections if s.Name.find(b".text") != -1] - - if not data_sections or not text_sections: - return None - - text_start = text_sections[0].PointerToRawData - rva = constants_offset - text_start + text_sections[0].VirtualAddress - const_b_offset = pe.get_dword_from_offset(constants_offset + 3) - const_b_rva = rva + const_b_offset + 7 - const_b_offset = const_b_rva - data_sections[0].VirtualAddress + data_sections[0].PointerToRawData - const_b = pe.get_word_from_offset(const_b_offset) - const_a = pe.get_word_from_offset(const_b_offset - 2) - const_c = pe.get_word_from_offset(const_b_offset + 2) - - # init sbox - sbox_init = bytearray(range(256)) - count = const_a % 1000 + 128 - sbox_init = scramble(sbox_init, 1, 31, count) - sbox_init = scramble(sbox_init, 32, 127, count) - sbox_init = scramble(sbox_init, 128, 255, count) - sbox = bytearray(256) - for idx, dst in enumerate(sbox_init): - sbox[dst] = idx - - rdata_sections = [s for s in pe.sections if s.Name.find(b".rdata") != -1] - if rdata_sections: - rdata = rdata_sections[0].get_data() - items = rdata.split(b"\x00") - items = [item for item in items if item != b""] - for item in items: - with suppress(IndexError, UnicodeDecodeError, ValueError): - dec = decode_string(item, sbox).decode("utf8") - if dec: - ver = re.findall("^(\d+\.\d+)$", dec) - if ver: - cfg["Version"] = ver[0] - - data = data_sections[0].get_data() - items = 
data.split(b"\x00") - - with suppress(IndexError, UnicodeDecodeError, ValueError): - cfg["Unknown 1"] = decode_string(items[0], sbox).decode("utf8") - cfg["Unknown 2"] = decode_string(items[8], sbox).decode("utf8") - c2_dec = decode_string(items[10], sbox).decode("utf8") - if "|" in c2_dec: - c2_dec = c2_dec.split("|") - cfg["C2"] = c2_dec - if float(cfg["Version"]) < 1.7: - cfg["Campaign Id"] = decode_string(items[276], sbox).decode("utf8") - else: - cfg["Campaign Id"] = decode_string(items[25], sbox).decode("utf8") - - return cfg - - -if __name__ == "__main__": - import sys - from pathlib import Path - - log.setLevel(logging.DEBUG) - data = Path(sys.argv[1]).read_bytes() - print(extract_config(data)) diff --git a/modules/processing/parsers/CAPE/ChChes.py b/modules/processing/parsers/CAPE/ChChes.py deleted file mode 100644 index e001f599743..00000000000 --- a/modules/processing/parsers/CAPE/ChChes.py +++ /dev/null @@ -1,75 +0,0 @@ -# Copyright (C) 2015 Kevin O'Reilly kevin.oreilly@contextis.co.uk -# This program is free software: you can redistribute it and/or modify -# it under the terms of the GNU General Public License as published by -# the Free Software Foundation, either version 3 of the License, or -# (at your option) any later version. -# -# This program is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -# GNU General Public License for more details. -# -# You should have received a copy of the GNU General Public License -# along with this program. If not, see . -DESCRIPTION = "ChChes configuration parser." -AUTHOR = "kevoreilly" - -import yara - -rule_source = """ -rule ChChes -{ - meta: - author = "kev" - description = "ChChes Payload" - cape_type = "ChChes Payload" - strings: - $payload1 = {55 8B EC 53 E8 EB FC FF FF E8 DB FF FF FF 05 10 FE 2A 00 33 DB 39 58 44 75 58 56 57 50 E8 57 00 00 00 59 8B F0 E8 AB FF FF FF B9 01 1F 2A 00 BF D0 1C 2A 00 2B CF 03 C1 39 5E 30 76 0F} - $payload2 = {55 8B EC 53 E8 8F FB FF FF E8 DB FF FF FF 05 00 07 FF 00 33 DB 39 58 44 75 58 56 57 50 E8 57 00 00 00 59 8B F0 E8 AB FF FF FF B9 5D 20 FE 00 BF D0 1C FE 00 2B CF 03 C1 39 5E 30 76 0F } - $payload3 = {55 8B EC 53 E8 E6 FC FF FF E8 DA FF FF FF 05 80 FC FC 00 33 DB 39 58 44 75 58 56 57 50 E8 57 00 00 00 59 8B F0 E8 AA FF FF FF B9 05 1F FC 00 BF D0 1C FC 00 2B CF 03 C1 39 5E 30 76 0F} - $payload4 = {55 8B EC E8 ?? ?? FF FF E8 D? FF FF FF 05 ?? ?? ?? 00 83 78 44 00 75 40 56 57 50 E8 3E 00 00 00 59 8B F0 6A 00 FF 76 30 E8 A8 FF FF FF B9 ?? ?? ?? 
00 BF 00 1A E1 00 2B CF 03 C1 50 FF 56 70} - condition: - $payload1 or $payload2 or $payload3 or $payload4 -} -""" - -MAX_STRING_SIZE = 128 - - -def yara_scan(raw_data): - addresses = {} - yara_rules = yara.compile(source=rule_source) - matches = yara_rules.match(data=raw_data) - for match in matches: - if match.rule != "ChChes": - continue - - for block in match.strings: - for instance in block.instances: - addresses[block.identifier] = instance.offset - return addresses - - -def string_from_offset(data, offset): - return data[offset : offset + MAX_STRING_SIZE].split(b"\0", 1)[0] - - -def extract_config(filebuf): - tmp_config = {} - yara_matches = yara_scan(filebuf) - - c2_offsets = [] - if yara_matches.get("$payload1"): - c2_offsets.append(0xE455) - if yara_matches.get("$payload2"): - c2_offsets.append(0xED55) - if yara_matches.get("$payload3"): - c2_offsets.append(0xE2B9) - # no c2 for type4 - - for c2_offset in c2_offsets: - c2_url = string_from_offset(filebuf, c2_offset) - if c2_url: - tmp_config.setdefault("c2_url", []).append(c2_url) - - return tmp_config diff --git a/modules/processing/parsers/CAPE/CobaltStrikeBeacon.py b/modules/processing/parsers/CAPE/CobaltStrikeBeacon.py deleted file mode 100644 index 0201c0f4277..00000000000 --- a/modules/processing/parsers/CAPE/CobaltStrikeBeacon.py +++ /dev/null @@ -1,463 +0,0 @@ -""" -Parses CobaltStrike Beacon's configuration from PE file or memory dump. -By Gal Kristal from SentinelOne (gkristal.w@gmail.com) - -Inspired by https://github.com/JPCERTCC/aa-tools/blob/master/cobaltstrikescan.py - -TODO: - 1. Parse headers modifiers - 2. Dynamic size parsing -""" - -import argparse -import io -import json -import logging -import re -from base64 import b64encode -from collections import OrderedDict -from pathlib import Path -from socket import inet_ntoa -from struct import unpack - -import pefile - -try: - from netstruct import unpack as netunpack - - HAVE_NETSTRUCT = True -except ImportError: - HAVE_NETSTRUCT = False - -log = logging.getLogger(__name__) - -COLUMN_WIDTH = 35 -SUPPORTED_VERSIONS = (3, 4) - - -class Base64Encoder(json.JSONEncoder): - def default(self, o): - if isinstance(o, bytes): - return b64encode(o).decode() - return json.JSONEncoder.default(self, o) - - -class confConsts: - MAX_SETTINGS = 64 - TYPE_NONE = 0 - TYPE_SHORT = 1 - TYPE_INT = 2 - TYPE_STR = 3 - - START_PATTERNS = { - 3: b"\x69\x68\x69\x68\x69\x6b..\x69\x6b\x69\x68\x69\x6b..\x69\x6a", - 4: b"\x2e\x2f\x2e\x2f\x2e\x2c..\x2e\x2c\x2e\x2f\x2e\x2c..\x2e", - } - START_PATTERN_DECODED = b"\x00\x01\x00\x01\x00\x02..\x00\x02\x00\x01\x00\x02..\x00" - CONFIG_SIZE = 4096 - XORBYTES = {3: 0x69, 4: 0x2E} - - -def read_dword_be(fh): - data = fh.read(4) - if not data or len(data) != 4: - return None - return unpack(">I", data)[0] - - -class packedSetting: - def __init__( - self, - pos, - datatype, - length=0, - isBlob=False, - isHeaders=False, - isIpAddress=False, - isBool=False, - isDate=False, - boolFalseValue=0, - isProcInjectTransform=False, - isMalleableStream=False, - enum=None, - mask=None, - ): - self.pos = pos - self.datatype = datatype - self.is_blob = isBlob - self.is_headers = isHeaders - self.is_ipaddress = isIpAddress - self.is_bool = isBool - self.is_date = isDate - self.is_malleable_stream = isMalleableStream - self.bool_false_value = boolFalseValue - self.is_transform = isProcInjectTransform - self.enum = enum - self.mask = mask - if datatype == confConsts.TYPE_STR and length == 0: - raise (Exception("if datatype is TYPE_STR then length must not be 0")) - - if 
datatype == confConsts.TYPE_SHORT: - self.length = 2 - elif datatype == confConsts.TYPE_INT: - self.length = 4 - else: - self.length = length - - def binary_repr(self): - """ - Param number - Type - Length - Value - """ - self_repr = bytearray(6) - self_repr[1] = self.pos - self_repr[3] = self.datatype - self_repr[4:6] = self.length.to_bytes(2, "big") - return self_repr - - def pretty_repr(self, full_config_data): - data_offset = full_config_data.find(self.binary_repr()) - if data_offset < 0: - return "Not Found" - - repr_len = len(self.binary_repr()) - conf_data = full_config_data[data_offset + repr_len : data_offset + repr_len + self.length] - if self.datatype == confConsts.TYPE_SHORT: - conf_data = unpack(">H", conf_data)[0] - if conf_data is None: - return - if self.is_bool: - return str(conf_data != self.bool_false_value) - elif self.enum: - return self.enum[conf_data] - elif self.mask: - ret_arr = [] - for k, v in self.mask.items(): - if k == 0 == conf_data: - ret_arr.append(v) - if k & conf_data: - ret_arr.append(v) - return ret_arr - else: - return conf_data - - elif self.datatype == confConsts.TYPE_INT: - if self.is_ipaddress: - return inet_ntoa(conf_data) - - conf_data = unpack(">I", conf_data)[0] - if self.is_date and conf_data != 0: - fulldate = str(conf_data) - return f"{fulldate[:4]}-{fulldate[4:6]}-{fulldate[6:]}" - - return conf_data - - elif self.is_blob: - if self.enum is not None: - ret_arr = [] - i = 0 - while i < len(conf_data): - v = conf_data[i] - if v == 0: - return ret_arr - v = self.enum[v] - if v: - ret_arr.append(v) - i += 1 - - # Only EXECUTE_TYPE for now - else: - if HAVE_NETSTRUCT: - # Skipping unknown short value in the start - string1 = netunpack(b"I$", conf_data[i + 3 :])[0].decode() - string2 = netunpack(b"I$", conf_data[i + 3 + 4 + len(string1) :])[0].decode() - ret_arr.append("{}:{}".format(string1.strip("\x00"), string2.strip("\x00"))) - i += len(string1) + len(string2) + 11 - - elif self.is_transform: - if conf_data == bytes(len(conf_data)): - return "Empty" - - prepend_length = unpack(">I", conf_data[:4])[0] - prepend = conf_data[4 : 4 + prepend_length].hex() - append_length_offset = prepend_length + 4 - append_length = unpack(">I", conf_data[append_length_offset : append_length_offset + 4])[0] - append = conf_data[append_length_offset + 4 : append_length_offset + 4 + append_length].hex() - ret_arr = [ - prepend, - append if append_length < 256 and append != bytes(append_length) else "Empty", - ] - - return ret_arr - - elif self.is_malleable_stream: - prog = [] - with io.BytesIO(conf_data) as fh: - op = read_dword_be(fh) - while op: - if op == 1: - bytes_len = read_dword_be(fh) - prog.append(f"Remove {bytes_len} bytes from the end") - elif op == 2: - bytes_len = read_dword_be(fh) - prog.append(f"Remove {bytes_len} bytes from the beginning") - elif op == 3: - prog.append("Base64 decode") - elif op == 8: - prog.append("NetBIOS decode 'a'") - elif op == 11: - prog.append("NetBIOS decode 'A'") - elif op == 13: - prog.append("Base64 URL-safe decode") - elif op == 15: - prog.append("XOR mask w/ random key") - op = read_dword_be(fh) - conf_data = prog - else: - conf_data = conf_data.hex() - - return conf_data - - elif self.is_headers: - conf_data = conf_data.strip(b"\x00") - conf_data = [chunk[1:].decode() for chunk in conf_data.split(b"\x00") if len(chunk) > 1] - return conf_data - - conf_data = conf_data.strip(b"\x00").decode() - return conf_data - - -class BeaconSettings: - BEACON_TYPE = {0x0: "HTTP", 0x1: "Hybrid HTTP DNS", 0x2: "SMB", 0x4: "TCP", 0x8: 
"HTTPS", 0x10: "Bind TCP"} - ACCESS_TYPE = {0x1: "Use direct connection", 0x2: "Use IE settings", 0x4: "Use proxy server"} - EXECUTE_TYPE = { - 0x1: "CreateThread", - 0x2: "SetThreadContext", - 0x3: "CreateRemoteThread", - 0x4: "RtlCreateUserThread", - 0x5: "NtQueueApcThread", - 0x6: None, - 0x7: None, - 0x8: "NtQueueApcThread-s", - } - # TRANSFORMSTEP = {1: "append", 2: "prepend", 3: "base64", 4: "print", 5: "parameter", 6: "header", 7: "build", 8: "netbios", 9: "_parameter", 10: "_header", - # 11: "netbiosu", 12: "uri_append", 13: "base64_url", 14: "strrep", 15: "mask"} - ALLOCATION_FUNCTIONS = {0: "VirtualAllocEx", 1: "NtMapViewOfSection"} - - def __init__(self, version): - if version not in SUPPORTED_VERSIONS: - log.debug("Error: Only supports version 3 and 4, not %d", version) - self.version = version - self.settings = OrderedDict() - self.init() - - def init(self): - self.settings["BeaconType"] = packedSetting(1, confConsts.TYPE_SHORT, mask=self.BEACON_TYPE) - self.settings["Port"] = packedSetting(2, confConsts.TYPE_SHORT) - self.settings["SleepTime"] = packedSetting(3, confConsts.TYPE_INT) - self.settings["MaxGetSize"] = packedSetting(4, confConsts.TYPE_INT) - self.settings["Jitter"] = packedSetting(5, confConsts.TYPE_SHORT) - self.settings["MaxDNS"] = packedSetting(6, confConsts.TYPE_SHORT) - # Silencing for now - self.settings["PublicKey"] = packedSetting(7, confConsts.TYPE_STR, 256, isBlob=True) - self.settings["C2Server"] = packedSetting(8, confConsts.TYPE_STR, 256) - self.settings["UserAgent"] = packedSetting(9, confConsts.TYPE_STR, 128) - self.settings["HttpPostUri"] = packedSetting(10, confConsts.TYPE_STR, 64) - - # ref: https://www.cobaltstrike.com/help-malleable-c2 | https://usualsuspect.re/article/cobalt-strikes-malleable-c2-under-the-hood - self.settings["Malleable_C2_Instructions"] = packedSetting( - 11, confConsts.TYPE_STR, 256, isBlob=True, isMalleableStream=True - ) - self.settings["HttpGet_Metadata"] = packedSetting(12, confConsts.TYPE_STR, 256, isHeaders=True) - self.settings["HttpPost_Metadata"] = packedSetting(13, confConsts.TYPE_STR, 256, isHeaders=True) - self.settings["SpawnTo"] = packedSetting(14, confConsts.TYPE_STR, 16, isBlob=True) - self.settings["PipeName"] = packedSetting(15, confConsts.TYPE_STR, 128) - # Options 16-18 are deprecated in 3.4 - self.settings["DNS_Idle"] = packedSetting(19, confConsts.TYPE_INT, isIpAddress=True) - self.settings["DNS_Sleep"] = packedSetting(20, confConsts.TYPE_INT) - # Options 21-25 are for SSHAgent - self.settings["SSH_Host"] = packedSetting(21, confConsts.TYPE_STR, 256) - self.settings["SSH_Port"] = packedSetting(22, confConsts.TYPE_SHORT) - self.settings["SSH_Username"] = packedSetting(23, confConsts.TYPE_STR, 128) - self.settings["SSH_Password_Plaintext"] = packedSetting(24, confConsts.TYPE_STR, 128) - self.settings["SSH_Password_Pubkey"] = packedSetting(25, confConsts.TYPE_STR, 6144) - - self.settings["HttpGet_Verb"] = packedSetting(26, confConsts.TYPE_STR, 16) - self.settings["HttpPost_Verb"] = packedSetting(27, confConsts.TYPE_STR, 16) - self.settings["HttpPostChunk"] = packedSetting(28, confConsts.TYPE_INT) - self.settings["Spawnto_x86"] = packedSetting(29, confConsts.TYPE_STR, 64) - self.settings["Spawnto_x64"] = packedSetting(30, confConsts.TYPE_STR, 64) - self.settings["CryptoScheme"] = packedSetting(31, confConsts.TYPE_SHORT) - self.settings["Proxy_Config"] = packedSetting(32, confConsts.TYPE_STR, 128) - self.settings["Proxy_User"] = packedSetting(33, confConsts.TYPE_STR, 64) - self.settings["Proxy_Password"] = 
packedSetting(34, confConsts.TYPE_STR, 64) - self.settings["Proxy_Behavior"] = packedSetting(35, confConsts.TYPE_SHORT, enum=self.ACCESS_TYPE) - # Option 36 is deprecated - self.settings["Watermark"] = packedSetting(37, confConsts.TYPE_INT) - self.settings["bStageCleanup"] = packedSetting(38, confConsts.TYPE_SHORT, isBool=True) - self.settings["bCFGCaution"] = packedSetting(39, confConsts.TYPE_SHORT, isBool=True) - self.settings["KillDate"] = packedSetting(40, confConsts.TYPE_INT, isDate=True) - # Inner parameter, does not seem interesting so silencing - # self.settings["textSectionEnd (0 if !sleep_mask)"] = packedSetting(41, confConsts.TYPE_INT) - - # TODO: dynamic size parsing - # self.settings["ObfuscateSectionsInfo"] = packedSetting(42, confConsts.TYPE_STR, %d, isBlob=True) - self.settings["bProcInject_StartRWX"] = packedSetting(43, confConsts.TYPE_SHORT, isBool=True, boolFalseValue=4) - self.settings["bProcInject_UseRWX"] = packedSetting(44, confConsts.TYPE_SHORT, isBool=True, boolFalseValue=32) - self.settings["bProcInject_MinAllocSize"] = packedSetting(45, confConsts.TYPE_INT) - self.settings["ProcInject_PrependAppend_x86"] = packedSetting( - 46, confConsts.TYPE_STR, 256, isBlob=True, isProcInjectTransform=True - ) - self.settings["ProcInject_PrependAppend_x64"] = packedSetting( - 47, confConsts.TYPE_STR, 256, isBlob=True, isProcInjectTransform=True - ) - self.settings["ProcInject_Execute"] = packedSetting(51, confConsts.TYPE_STR, 128, isBlob=True, enum=self.EXECUTE_TYPE) - # If True then allocation is using NtMapViewOfSection - self.settings["ProcInject_AllocationMethod"] = packedSetting(52, confConsts.TYPE_SHORT, enum=self.ALLOCATION_FUNCTIONS) - - # Unknown data, silencing for now - # self.settings["ProcInject_Stub"] = packedSetting(53, confConsts.TYPE_STR, 16, isBlob=True) - self.settings["bUsesCookies"] = packedSetting(50, confConsts.TYPE_SHORT, isBool=True) - self.settings["HostHeader"] = packedSetting(54, confConsts.TYPE_STR, 128) - - -class cobaltstrikeConfig: - def __init__(self, data): - self.data = data - - """Parse the CobaltStrike configuration""" - - @staticmethod - def decode_config(cfg_blob, version): - return bytes(cfg_offset ^ confConsts.XORBYTES[version] for cfg_offset in cfg_blob) - - def _parse_config(self, version, quiet=False, as_json=False): - parsed_config = {} - comp_pattern = re.compile(confConsts.START_PATTERNS[version], re.DOTALL) - re_start_match = comp_pattern.search(self.data) - comp_pattern_decoded = re.compile(confConsts.START_PATTERN_DECODED, re.DOTALL) - re_start_decoded_match = comp_pattern_decoded.search(self.data) - - if not re_start_match and not re_start_decoded_match: - return False - encoded_config_offset = re_start_match.start() if re_start_match else -1 - decoded_config_offset = re_start_decoded_match.start() if re_start_decoded_match else -1 - - if encoded_config_offset >= 0: - full_config_data = cobaltstrikeConfig.decode_config( - self.data[encoded_config_offset : encoded_config_offset + confConsts.CONFIG_SIZE], version=version - ) - else: - full_config_data = self.data[decoded_config_offset : decoded_config_offset + confConsts.CONFIG_SIZE] - - settings = BeaconSettings(version).settings.items() - for conf_name, packed_conf in settings: - parsed_setting = packed_conf.pretty_repr(full_config_data) - - if as_json: - parsed_config[conf_name] = parsed_setting - continue - - if parsed_setting == "Not Found" and quiet: - continue - if not isinstance(parsed_setting, list): - log.debug("{: <{width}} - {val}".format(conf_name, width=COLUMN_WIDTH 
- 3, val=parsed_setting)) - elif parsed_setting == []: - log.debug("{: <{width}} - {val}".format(conf_name, width=COLUMN_WIDTH - 3, val="Empty")) - else: - log.debug("{: <{width}} - {val}".format(conf_name, width=COLUMN_WIDTH - 3, val=parsed_setting[0])) - for val in parsed_setting[1:]: - log.debug(" " * COLUMN_WIDTH, end="") - print(val) - - if as_json: - return parsed_config - - return True - - def parse_config(self, version=None, quiet=False, as_json=False): - """ - Parses beacon's configuration from stager dll or memory dump - :bool quiet: Whether to print missing settings - :bool as_json: Whether to dump as json - """ - - if not version: - for ver in SUPPORTED_VERSIONS: - conf = self._parse_config(version=ver, quiet=quiet, as_json=as_json) - if conf: - return conf - else: - conf = self._parse_config(version=version, quiet=quiet, as_json=as_json) - if conf: - return conf - - if __name__ == "__main__": - log.debug("Configuration not found. Are you sure this is a beacon?") - return None - - def parse_encrypted_config(self, version=None, quiet=False, as_json=False): - """ - Parses beacon's configuration from stager dll or memory dump - :bool quiet: Whether to print missing settings - :bool as_json: Whether to dump as json - """ - - THRESHOLD = 1100 - try: - pe = pefile.PE(data=self.data) - except pefile.PEFormatError: - return {} - data_sections = [s for s in pe.sections if s.Name.find(b".data") != -1] - if not data_sections: - return None - data = data_sections[0].get_data() - - offset = 0 - key_found = False - while offset < len(data): - key = data[offset : offset + 4] - if key != bytes(4) and data.count(key) >= THRESHOLD: - key_found = True - size = int.from_bytes(data[offset - 4 : offset], "little") - encrypted_data_offset = offset + 16 - (offset % 16) - break - - offset += 4 - - if not key_found: - log.debug("Failed to find encrypted data (try to lower the threshold constant)") - return None - - # decrypt and parse - enc_data = data[encrypted_data_offset : encrypted_data_offset + size] - dec_data = [c ^ key[i % 4] for i, c in enumerate(enc_data)] - dec_data = bytes(dec_data) - return cobaltstrikeConfig(dec_data).parse_config(version, quiet, as_json) - - -if __name__ == "__main__": - parser = argparse.ArgumentParser(description="Parses CobaltStrike Beacon's configuration from PE or memory dump.") - parser.add_argument("path", help="Stager's file path") - parser.add_argument("--json", help="Print as json", action="store_true", default=False) - parser.add_argument("--quiet", help="Do not print missing settings", action="store_true", default=False) - parser.add_argument( - "--version", - help="Try as specific cobalt version (3 or 4). If not specified, tries both. 
\n" - "For decoded configs, this must be set for accuracy.", - type=int, - ) - args = parser.parse_args() - data = Path(args.path).read_bytes() - parsed_config = cobaltstrikeConfig(data).parse_config(version=args.version, quiet=args.quiet, as_json=args.json) - if parsed_config is None: - parsed_config = cobaltstrikeConfig(data).parse_encrypted_config(quiet=args.quiet, as_json=args.json) - if args.json: - print(json.dumps(parsed_config, cls=Base64Encoder)) - - -# CAPE -def extract_config(data): - output = cobaltstrikeConfig(data).parse_config(as_json=True) - if output is None: - output = cobaltstrikeConfig(data).parse_encrypted_config(as_json=True) - return output diff --git a/modules/processing/parsers/CAPE/CobaltStrikeStager.py b/modules/processing/parsers/CAPE/CobaltStrikeStager.py deleted file mode 100644 index f49c0886b87..00000000000 --- a/modules/processing/parsers/CAPE/CobaltStrikeStager.py +++ /dev/null @@ -1,190 +0,0 @@ -#!/usr/bin/env python3 -""" -By Daniel Mayer (Daniel@Stairwell.com), @dan__mayer -""" - -import re -import struct - -DESCRIPTION = "Cobalt Strike Stager Configuration Extractor" -AUTHOR = "@dan__mayer " - -INET_CONSTANTS = { - "INTERNET_FLAG_IDN_DIRECT": 0x00000001, - "INTERNET_FLAG_IDN_PROXY": 0x00000002, - "INTERNET_FLAG_RELOAD": 0x80000000, - "INTERNET_FLAG_RAW_DATA": 0x40000000, - "INTERNET_FLAG_EXISTING_CONNECT": 0x20000000, - "INTERNET_FLAG_ASYNC": 0x10000000, - "INTERNET_FLAG_PASSIVE": 0x08000000, - "INTERNET_FLAG_NO_CACHE_WRITE": 0x04000000, - "INTERNET_FLAG_MAKE_PERSISTENT": 0x02000000, - "INTERNET_FLAG_FROM_CACHE": 0x01000000, - "INTERNET_FLAG_SECURE": 0x00800000, - "INTERNET_FLAG_KEEP_CONNECTION": 0x00400000, - "INTERNET_FLAG_NO_AUTO_REDIRECT": 0x00200000, - "INTERNET_FLAG_READ_PREFETCH": 0x00100000, - "INTERNET_FLAG_NO_COOKIES": 0x00080000, - "INTERNET_FLAG_NO_AUTH": 0x00040000, - "INTERNET_FLAG_RESTRICTED_ZONE": 0x00020000, - "INTERNET_FLAG_CACHE_IF_NET_FAIL": 0x00010000, - "INTERNET_FLAG_IGNORE_REDIRECT_TO_HTTP": 0x00008000, - "INTERNET_FLAG_IGNORE_REDIRECT_TO_HTTPS": 0x00004000, - "INTERNET_FLAG_IGNORE_CERT_DATE_INVALID": 0x00002000, - "INTERNET_FLAG_IGNORE_CERT_CN_INVALID": 0x00001000, - "INTERNET_FLAG_RESYNCHRONIZE": 0x00000800, - "INTERNET_FLAG_HYPERLINK": 0x00000400, - "INTERNET_FLAG_NO_UI": 0x00000200, - "INTERNET_FLAG_PRAGMA_NOCACHE": 0x00000100, - "INTERNET_FLAG_CACHE_ASYNC": 0x00000080, - "INTERNET_FLAG_FORMS_SUBMIT": 0x00000040, - "INTERNET_FLAG_FWD_BACK": 0x00000020, - "INTERNET_FLAG_NEED_FILE": 0x00000010, -} - -SMB_TEMPLATE = re.compile( - b""" - # Arguments to call API-Hashed CreateNamedPipeA - \x68\x00\xB0\x04\x00 # push 4B000h - \x68\x00\xB0\x04\x00 # push 4B000h - \x6A\x01 # push 1 - \x6A\x06 # push 6 - \x6A\x03 # push 3 - \x52 # push edx - \x68\x45\x70\xDF\xD4 # push 0D4DF7045h - .{110,180} - \xE8.\xFF\xFF\xFF # Call to listen on the named pipe that uses the - # return address to pass the pipe name as an argument - - (?P.{3,140}) # Name of pipe - \x00 # Null terminator at the end of the pipe string - (?P.{4})? # Watermark -""", - re.DOTALL | re.VERBOSE, -) - -DNS_TEMPLATE = re.compile( - b""" - \x69\x50\x68\x64\x6E # DNS api import - .{100,160} - \xE8.\xFF\xFF\xFF # Call to perform DNS stager requests that uses the - # return address to pass the stager domain as an argument - - \x00 # Null byte at the beginning of the domain string - (?P.{63}) # Domain string - ( # CS 4.0 stager-specific - .{90,130} - \x89\xD7\x81\xC7 - .{4} - \xFF\xE7 - (?P.{4})? # Watermark - )? 
-""", - re.DOTALL | re.VERBOSE, -) - -HTTP_TEMPLATE = re.compile( - b""" - (?:\xC1\x41\xB8|\x51\x51\x68) - (?P.{4}) # Specified port - .{10,50}\x68 - (?P....) # HttpOpenRequestA flags - \x52\x52.{40,140} - - \xE8.\xFF\xFF\xFF # Call to perform HttpOpenRequestA and HttpSendRequestA - # that uses the return address to pass the path - # and headers as an argument - - (?P.{79}) # URL path string - \x00 # Null terminator ending the path string - (?P.{303}) # Header strings, separated by CLRF - \x00 # Null terminator ending the header string - .{60,120} - - \xE8.\xFD\xFF\xFF # Call to perform InternetOpenA, which uses the return - # address to pass the netloc as an argument. - - (?P.+?) # Netloc string - \x00 # Null terminator ending the netloc string - (?P.{4})? # Watermark -""", - re.DOTALL | re.VERBOSE, -) - - -class StagerConfig: - def __init__(self, data): - """ - f: file path - """ - self.data = data - self.config = {} - self._parse_config() - - def _clean(self, s, data_type): - """ - s: bytestring to clean - data_type: string determining which cleaning method is appropriate - - Converts the bytes of the various stager fields into human-readable settings - """ - result = None - if data_type == "string": - result = s.split(b"\x00")[0].decode("utf-8") - elif data_type == "headers": - headers = self._clean(s, "string") - lines = headers.split("\r\n")[:-1] - result = {k: v for k, v in (line.split(": ") for line in lines)} - elif data_type == "port": - result = struct.unpack("I", s)[0] - elif data_type == "inet_flags": - n = struct.unpack(". - -DESCRIPTION = "DoppelPaymer configuration parser." -AUTHOR = "kevoreilly" - -import string - -import pefile -from Cryptodome.Cipher import ARC4 - -rule_source = """ -rule DoppelPaymer -{ - meta: - author = "kevoreilly" - description = "DoppelPaymer Payload" - cape_type = "DoppelPaymer Payload" - - strings: - $getproc32 = {81 FB ?? ?? ?? ?? 74 2D 8B CB E8 ?? ?? ?? ?? 85 C0 74 0C 8B C8 8B D7 E8 ?? ?? ?? ?? 
5B 5F C3} - $cmd_string = "Setup run\\n" wide - condition: - uint16(0) == 0x5A4D and all of them -} -""" - -LEN_BLOB_KEY = 40 - - -def convert_char(c) -> str: - if isinstance(c, int): - c = chr(c) - if c in string.printable: - return c - return f"\\x{ord(c):02x}" - - -def decrypt_rc4(key, data): - cipher = ARC4.new(key) - return cipher.decrypt(data) - - -def extract_rdata(pe): - for section in pe.sections: - if b".rdata" in section.Name: - return section.get_data(section.VirtualAddress, section.SizeOfRawData) - return None - - -def extract_config(filebuf): - pe = pefile.PE(data=filebuf, fast_load=False) - config = {} - blobs = filter(None, [x.strip(b"\x00\x00\x00\x00") for x in extract_rdata(pe).split(b"\x00\x00\x00\x00")]) - for blob in blobs: - if len(blob) < LEN_BLOB_KEY: - continue - raw = decrypt_rc4(blob[:LEN_BLOB_KEY][::-1], blob[LEN_BLOB_KEY:]) - if not raw: - continue - for item in raw.split(b"\x00"): - data = "".join(convert_char(c) for c in item) - if len(data) == 406: - config["RSA public key"] = data - elif len(data) > 1 and "\\x" not in data: - config["strings"] = data - return config diff --git a/modules/processing/parsers/CAPE/DridexLoader.py b/modules/processing/parsers/CAPE/DridexLoader.py deleted file mode 100644 index ddee402c654..00000000000 --- a/modules/processing/parsers/CAPE/DridexLoader.py +++ /dev/null @@ -1,175 +0,0 @@ -# Copyright (C) 2018 Kevin O'Reilly (kevin.oreilly@contextis.co.uk) -# This program is free software: you can redistribute it and/or modify -# it under the terms of the GNU General Public License as published by -# the Free Software Foundation, either version 3 of the License, or -# (at your option) any later version. -# -# This program is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -# GNU General Public License for more details. -# -# You should have received a copy of the GNU General Public License -# along with this program. If not, see . - -import socket -import struct -from contextlib import suppress - -import pefile -import yara -from Cryptodome.Cipher import ARC4 - -DESCRIPTION = "DridexDropper configuration parser." -AUTHOR = "kevoreilly" - -rule_source = """ -rule DridexLoader -{ - meta: - author = "kevoreilly" - description = "Dridex v4 dropper C2 parsing function" - cape_type = "DridexLoader Payload" - - strings: - $c2parse_1 = {57 0F 95 C0 89 35 ?? ?? ?? ?? 88 46 04 33 FF 80 3D ?? ?? ?? ?? 00 76 54 8B 04 FD ?? ?? ?? ?? 8D 4D EC 83 65 F4 00 89 45 EC 66 8B 04 FD ?? ?? ?? ?? 66 89 45 F0 8D 45 F8 50} - $c2parse_2 = {89 45 00 0F B7 53 04 89 10 0F B6 4B 0C 83 F9 0A 7F 03 8A 53 0C 0F B6 53 0C 85 D2 7E B7 8D 74 24 0C C7 44 24 08 00 00 00 00 8D 04 7F 8D 8C 00} - $c2parse_3 = {89 08 66 39 1D ?? ?? ?? ?? A1 ?? ?? ?? ?? 0F 95 C1 88 48 04 80 3D ?? ?? ?? ?? 0A 77 05 A0 ?? ?? ?? ?? 80 3D ?? ?? ?? ?? 00 56 8B F3 76 4E 66 8B 04 F5} - $c2parse_4 = {0F B7 C0 89 01 A0 ?? ?? ?? ?? 3C 0A 77 ?? A0 ?? ?? ?? ?? A0 ?? ?? ?? ?? 57 33 FF 84 C0 74 ?? 56 BE} - $c2parse_5 = {0F B7 05 [4] 89 02 89 15 [4] 0F B6 15 [4] 83 FA 0A 7F 07 0F B6 05 [4] 0F B6 05 [4] 85 C0} - $c2parse_6 = {0F B7 53 ?? 89 10 0F B6 4B ?? 83 F9 0A 7F 03 8A 53 ?? 0F B6 53 ?? 85 D2 7E B9} - $botnet_id = {C7 00 00 00 00 00 8D 00 6A 04 50 8D 4C ?? ?? E8 ?? ?? ?? ?? 0F B7 05} - $rc4_key_1 = {56 52 BA [4] 8B F1 E8 [4] 8B C? 5? C3} - $rc4_key_2 = {5? 8B ?9 52 [5-6] E8 [4] 8B C? 5? 
C3} - condition: - uint16(0) == 0x5A4D and any of them -} -""" - -MAX_IP_STRING_SIZE = 16 # aaa.bbb.ccc.ddd\0 -LEN_BLOB_KEY = 40 -LEN_BOT_KEY = 107 - -yara_rules = yara.compile(source=rule_source) - - -def decrypt_rc4(key, data): - if not key: - return b"" - cipher = ARC4.new(key) - return cipher.decrypt(data) - - -def extract_rdata(pe): - for section in pe.sections: - if b".rdata" in section.Name: - return section.get_data(section.VirtualAddress, section.SizeOfRawData) - return None - - -def extract_config(filebuf): - cfg = {} - pe = pefile.PE(data=filebuf, fast_load=False) - image_base = pe.OPTIONAL_HEADER.ImageBase - line, c2va_offset, delta = 0, 0, 0 - botnet_code, botnet_rva, rc4_decode = 0, 0, 0 - num_ips_rva = 0 - num_ips = 4 - - matches = yara_rules.match(data=filebuf) - if not matches: - return - - for match in matches: - if match.rule != "DridexLoader": - continue - for block in match.strings: - for item in block.instances: - if "$c2parse" in block.identifier: - c2va_offset = item.offset - line = block.identifier - elif "$botnet_id" in block.identifier: - botnet_code = item.offset - elif "$rc4_key" in block.identifier and not rc4_decode: - rc4_decode = item.offset - if line == "$c2parse_6": - c2_rva = struct.unpack("i", filebuf[c2va_offset + 44 : c2va_offset + 48])[0] - image_base - botnet_rva = struct.unpack("i", filebuf[c2va_offset - 7 : c2va_offset - 3])[0] - image_base - num_ips_rva = c2_rva - 1 - elif line == "$c2parse_5": - c2_rva = struct.unpack("i", filebuf[c2va_offset + 75 : c2va_offset + 79])[0] - image_base - botnet_rva = struct.unpack("i", filebuf[c2va_offset + 3 : c2va_offset + 7])[0] - image_base - num_ips_rva = struct.unpack("i", filebuf[c2va_offset + 18 : c2va_offset + 22])[0] - image_base - elif line == "$c2parse_4": - c2_rva = struct.unpack("i", filebuf[c2va_offset + 6 : c2va_offset + 10])[0] - image_base + 1 - elif line == "$c2parse_3": - c2_rva = struct.unpack("i", filebuf[c2va_offset + 60 : c2va_offset + 64])[0] - image_base - delta = 2 - elif line == "$c2parse_2": - c2_rva = struct.unpack("i", filebuf[c2va_offset + 47 : c2va_offset + 51])[0] - image_base - elif line == "$c2parse_1": - c2_rva = struct.unpack("i", filebuf[c2va_offset + 27 : c2va_offset + 31])[0] - image_base - delta = 2 - else: - return - - try: - c2_offset = pe.get_offset_from_rva(c2_rva) - except pefile.PEFormatError: - return - - num_ips = 0 - if num_ips_rva: - num_ips_offset = pe.get_offset_from_rva(num_ips_rva) - ip_data = filebuf[num_ips_offset : num_ips_offset + 1] - if ip_data: - num_ips = struct.unpack("B", filebuf[num_ips_offset : num_ips_offset + 1])[0] - - for _ in range(num_ips): - ip = struct.unpack(">I", filebuf[c2_offset : c2_offset + 4])[0] - c2_address = socket.inet_ntoa(struct.pack("!L", ip)) - port = str(struct.unpack("H", filebuf[c2_offset + 4 : c2_offset + 6])[0]) - - if c2_address and port: - cfg.setdefault("address", []).append(f"{c2_address}:{port}") - - c2_offset += 6 + delta - - if rc4_decode: - zb = struct.unpack("B", filebuf[rc4_decode + 8 : rc4_decode + 9])[0] - if not zb: - rc4_rva = struct.unpack("i", filebuf[rc4_decode + 5 : rc4_decode + 9])[0] - image_base - else: - rc4_rva = struct.unpack("i", filebuf[rc4_decode + 3 : rc4_decode + 7])[0] - image_base - if rc4_rva: - rc4_offset = pe.get_offset_from_rva(rc4_rva) - if not zb: - raw = decrypt_rc4( - filebuf[rc4_offset : rc4_offset + LEN_BLOB_KEY][::-1], - filebuf[rc4_offset + LEN_BLOB_KEY : rc4_offset + LEN_BOT_KEY], - ) - else: - raw = decrypt_rc4( - filebuf[rc4_offset : rc4_offset + LEN_BLOB_KEY], 
filebuf[rc4_offset + LEN_BLOB_KEY : rc4_offset + LEN_BOT_KEY] - ) - for item in raw.split(b"\x00"): - if len(item) == LEN_BLOB_KEY - 1: - cfg["RC4 key"] = item.split(b";", 1)[0].decode() - - if botnet_code: - botnet_rva = struct.unpack("i", filebuf[botnet_code + 23 : botnet_code + 27])[0] - image_base - if botnet_rva: - with suppress(struct.error): - botnet_offset = pe.get_offset_from_rva(botnet_rva) - botnet_id = struct.unpack("H", filebuf[botnet_offset : botnet_offset + 2])[0] - cfg["Botnet ID"] = str(botnet_id) - - return cfg - - -if __name__ == "__main__": - import sys - from pathlib import Path - - data = Path(sys.argv[1]).read_bytes() - print(extract_config(data)) diff --git a/modules/processing/parsers/CAPE/Emotet.py b/modules/processing/parsers/CAPE/Emotet.py deleted file mode 100644 index 3aeb42326f2..00000000000 --- a/modules/processing/parsers/CAPE/Emotet.py +++ /dev/null @@ -1,832 +0,0 @@ -# Copyright (C) 2017-2021 Kevin O'Reilly (kevin.oreilly@contextis.co.uk) -# This program is free software: you can redistribute it and/or modify -# it under the terms of the GNU General Public License as published by -# the Free Software Foundation, either version 3 of the License, or -# (at your option) any later version. -# -# This program is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -# GNU General Public License for more details. -# -# You should have received a copy of the GNU General Public License -# along with this program. If not, see . - -import logging -import socket -import struct -from contextlib import suppress -from itertools import cycle -from pathlib import Path - -import pefile -import yara -from Cryptodome.PublicKey import ECC, RSA -from Cryptodome.Util import asn1 - -log = logging.getLogger(__name__) -log.setLevel(logging.INFO) - -try: - from unicorn import UC_ARCH_X86, UC_HOOK_CODE, UC_MODE_64, Uc, UcError - from unicorn.x86_const import UC_X86_REG_R9, UC_X86_REG_RAX, UC_X86_REG_RCX, UC_X86_REG_RDX, UC_X86_REG_RIP, UC_X86_REG_RSP -except ImportError: - log.error("Unicorn not installed") - -AUTHOR = "kevoreilly" - -rule_source = """ -rule Emotet -{ - meta: - author = "kevoreilly" - description = "Emotet Payload" - cape_type = "Emotet Payload" - strings: - $snippet1 = {FF 15 [4] 83 C4 0C 68 40 00 00 F0 6A 18} - $snippet3 = {83 3D [4] 00 C7 05 [8] C7 05 [8] 74 0A 51 E8 [4] 83 C4 04 C3 33 C0 C3} - $snippet4 = {33 C0 C7 05 [8] C7 05 [8] A3 [4] A3 [19] 00 40 A3 [4] 83 3C C5 [4] 00 75 F0 51 E8 [4] 83 C4 04 C3} - $snippet5 = {8B E5 5D C3 B8 [4] A3 [4] A3 [4] 33 C0 21 05 [4] A3 [4] 39 05 [4] 74 18 40 A3 [4] 83 3C C5 [4] 00 75 F0 51 E8 [4] 59 C3} - $snippet6 = {33 C0 21 05 [4] A3 [4] 39 05 [4] 74 18 40 A3 [4] 83 3C C5 [4] 00 75 F0 51 E8 [4] 59 C3} - $snippet7 = {8B 48 ?? C7 [5-6] C7 40 [4] ?? C7 [2] 00 00 00 [0-1] 83 3C CD [4] 00 74 0E 41 89 48 ?? 83 3C CD [4] 00 75 F2} - $snippet8 = {85 C0 74 3? B9 [2] 40 00 33 D2 89 ?8 [0-1] 89 [1-2] 8B [1-2] 89 [1-2] EB 0? 41 89 [1-2] 39 14 CD [2] 40 00 75 F? 8B CE E8 [4] 85 C0 74 05 33 C0 40 5E C3} - $snippet9 = {85 C0 74 4? 8B ?8 [0-1] C7 40 [5] C7 [5-6] C7 40 ?? 00 00 00 00 83 3C CD [4] 00 74 0? 41 89 [2-3] 3C CD [4] 00 75 F? 8B CF E8 [4] 85 C0 74 07 B8 01 00 00 00 5F C3} - $snippetA = {85 C0 74 5? 8B ?8 04 89 78 28 89 38 89 70 2C EB 04 41 89 48 04 39 34 CD [4] 75 F3 FF 75 DC FF 75 F0 8B 55 F8 FF 75 10 8B 4D EC E8 [4] 83 C4 0C 85 C0 74 05} - $snippetB = {EB 04 4? 89 [2] 39 [6] 75 F3} - $snippetC = {EB 03 4? 
89 1? 39 [6] 75 F4} - $snippetD = {8D 44 [2] 50 68 [4] FF 74 [2] FF 74 [2] 8B 54 [2] 8B 4C [2] E8} - $snippetE = {FF 74 [2] 8D 54 [2] FF 74 [2] 68 [4] FF 74 [2] 8B 4C [2] E8 [4] 8B 54 [2] 83 C4 10 89 44 [2] 8B F8 03 44 [2] B9 [4] 89 44 [2] E9 [2] FF FF} - $snippetF = {FF 74 [2] 8D 44 [2] BA [4] FF 74 [2] 8B 4C [2] 50 E8 [4] 8B 54 [2] 8B D8 8B 84 [5] 83 C4 0C 03 C3 89 5C [2] 8B FB 89 44} - $snippetG = {FF 74 [2] 8B 54 [2] 8D 44 [2] 8B 4C [2] 50 E8 [4] 8B D0 83 C4 0C 8B 44 [2] 8B FA 03 C2 89 54 [2] 89 44} - $snippetH = {FF 74 [2] 8D 84 [5] 68 [4] 50 FF 74 [2] 8B 54 [2] 8B 4C [2] E8 [4] 8B 94 [5] 83 C4 10 89 84 [5] 8B F8 03 84} - $snippetI = {FF 74 [2] 8D 8C [5] FF 74 [2] 8B 54 [2] E8 [4] 8B 54 [2] 8B D8 8B 84 [5] 83 C4 0C 03 C3 89 5C [2] 8B FB 89 44 24 74} - $snippetJ = {FF 74 [2] 8B 4C [2] 8D 44 [2] 50 BA [4] E8 [4] 8B 54 [2] 8B F8 59 89 44 [2] 03 44 [2] 59 89 44 [2] B9 [4] E9} - $snippetK = {FF 74 [2] FF 74 [2] 8B 54 [2] E8 [4] 8B 54 [2] 83 C4 0C 89 44 [2] 8B F8 03 44 [2] B9 [4] 89 44 [2] E9} - $snippetL = {FF 74 [2] 8B 54 [2] 8D 4C [2] E8 [4] 59 89 44 [2] 8B F8 03 44 [2] 59 89 44 24 68 B9 [4] E9} - $snippetM = {FF 74 [2] 8D 84 [3] 00 00 B9 [4] 50 FF 74 [2] FF 74 [2] 8B 94 [3] 00 00 E8 [4] 83 C4 10 89 44 [2] 8B F8 B9 [4] 03 84 [3] 00 00 89 44 [2] E9} - $snippetN = {FF 74 [2] 8D 44 [2] B9 [4] FF 74 [2] 50 FF 74 [2] 8B 54 [2] E8 [4] 8B 8C [3] 00 00 83 C4 10 03 C8 89 44 [2] 89 4C [2] 8B F8 B9 45 89 77 05 E9} - $snippetO = {8D 44 [2] B9 [4] 50 FF 74 [2] 8B 54 [2] E8 [4] 8B D0 8B 44 [2] 59 59 03 C2 89 54 [2] 8B FA 89 44 [2] B9 [4] E9} - $snippetP = {FF 74 [2] 8B 54 [2] 8D 44 [2] 8B 4C [2] 68 [4] 50 E8 [4] 8B D0 83 C4 0C 8B 44 [2] 8B FA 03 C2 89 54 [2] 8B 54 [2] B9 [4] 89 44 [2] E9} - $snippetQ = {FF 74 [2] BA [4] 8D 4C [2] FF 74 [2] E8 [4] 59 89 84 [3] 00 00 8B F8 03 44 [2] 59 89 44 [2] B9 [4] 81 F9 [4] 74 28 8B 54 [2] E9} - $snippetR = {8D 44 [2] 50 FF 74 [2] 8B 54 [2] 8B 4C [2] 68 [4] E8 [4] 8B D0 83 C4 0C 8B 44 [2] 8B FA 03 C2 89 54 [2] 8B 54 [2] B9 [4] 89 44 [2] E9} - $snippetS = {FF 74 [2] 8D 54 [2] FF 74 [2] 8B 4C [2] E8 [4] 8B D0 83 C4 0C 8B 44 [2] 8B FA 03 C2 89 54 [2] 8B 54 [2] B9 [4] 89 44 [2] E9} - $snippetT = {8B 54 [2] 8D 44 [2] 8B 4C [2] 68 [4] 50 E8 [4] 8B 9C [3] 00 00 8B F8 59 59 03 D8 89 44 [2] 89 5C [2] B9 [4] EB} - $snippetU = {89 44 [2] 33 D2 8B 44 [2] F7 F1 B9 [4] 89 44 [2] 8D 44 [2] 81 74 [6] C7 44 [6] 81 44 [6] 81 74 [6] FF 74 [2] 50 FF 74 [2] FF 74 [2] 8B 54 [2] E8} - $snippetV = {81 74 [2] ED BC 9C 00 FF 74 [2] 50 68 [4] FF 74 [2] 8B 54 [2] 8B 4C [2] E8} - $snippetW = {4C 8D [2] 8B [2] 4C 8D 05 [4] F7 E1 2B CA D1 E9 03 CA C1 E9 06 89} - $snippetX = {4C 8D 0? [2] (00|01) 00 [0-80] 48 8D [0-9] 81 75 [5] C7 45 [5-14] 81} - $snippetY = {(3D [4] 0F 84 [4] 3D [4] 0F 85 [3] ??|B8 [4] E9 [3] ??) 48 8D 05 [4] 48 89 (81 [3] ??|41 ??) 48 8D 05 [4] 48 89 (81 [3] ??|41 ??) 48 8D 05 [4] 48 89} - $snippetZ = {(48 8B D8 48 85 C0 0F 84 [4-9] E9 [4-190] ?? | 55 53 48 8D AC 24 [2] FF FF 48 81 EC [2] 00 00 48 8B [3] 00 00 [0-80] ??) 48 8D 05 [4] 48 89 (85 [3] ??|4? ??) [0-220] 48 8D 05 [4] 48 89 (85 [3] ??|4? ??) [0-220] 48 8D 05 [4] 48 89 (85 [3] ??|4? ??)} - $comboA1 = {83 EC 28 56 FF 75 ?? BE} - $comboA2 = {83 EC 38 56 57 BE} - $comboA3 = {EB 04 40 89 4? ?? 83 3C C? 00 75 F6} - $ref_rsa = {6A 00 6A 01 FF [4-9] C0 [5-11] E8 ?? ?? FF FF 8D 4? [1-2] B9 ?? ?? ?? 00 8D 5? 
[4-6] E8} - $ref_ecc1 = {8D 84 [5] 50 68 [4] FF B4 24 [4] FF B4 24 [4] 8B 94 24 [4] 8B 8C 24 [4] E8 [4] 89 84 24 [4] 8D 84 24 [4] 50 68 [4] FF B4 24 [4] FF B4 24 [4] 8B 54 24 40 8B 8C 24 [4] E8} - $ref_ecc2 = {FF B4 [3] 00 00 8D 94 [3] 00 00 FF B4 [3] 00 00 68 [4] FF 74 [2] 8B 8C [3] 00 00 E8 [4] FF B4 [3] 00 00 8D 94 [3] 00 00 89 84 [3] 00 00 FF B4 [3] 00 00 68 [4] FF 74 [2] 8B 8C [3] 00 00 E8} - $ref_ecc3 = {8D 84 [5] BA [4] FF B4 [5] 8B 4C [2] 50 E8 [4] 83 C4 0C 89 84 [5] 8D 84 [5] BA [4] FF B4 [5] FF B4 [5] 8B 8C [5] 50 E8 05 05 01 00} - $ref_ecc4 = {FF 74 [2] 8B 94 [5] 8D 84 [5] 8B 8C [5] 50 E8 [4] 83 C4 0C 89 84 [5] 8D 84 [5] 68 [4] FF B4 [5] 8B 54 [2] 8B 8C [5] 50 E8} - $ref_ecc5 = {FF B4 [3] 00 00 8D 84 [3] 00 00 68 [4] 50 FF B4 [3] 00 00 8B 94 [3] 00 00 8B 4C [2] E8 [4] FF B4 [3] 00 00 89 84 [3] 00 00 8D 84} - $ref_ecc6 = {FF B4 [3] 00 00 8D 8C [3] 00 00 FF B4 [3] 00 00 8B 54 [2] E8 [4] 83 C4 0C 89 84 [5] 8D 8C [5] 68 [4] FF B4 [5] FF 74 [2] 8B 94 24 [4] E8} - $ref_ecc7 = {FF B4 [3] 00 00 8B 8C [3] 00 00 8D 84 [3] 00 00 50 BA [4] E8 [4] FF B4 [3] 00 00 8B 8C [3] 00 00 BA [4] 89 84 [3] 00 00 8D 84 [3] 00 00 50 E8} - $ref_ecc8 = {FF B4 [3] 00 00 FF B4 [3] 00 00 8B 94 [3] 00 00 E8 [4] 83 C4 0C 89 84 [3] 00 00 8D 84 [3] 00 00 B9 [4] 50 FF B4 [3]00 00 FF B4 [3]00 00 8B 94 [3]00 00 E8} - $ref_ecc9 = {FF B4 [3] 00 00 8B 54 [2] 8D 8C [3] 00 00 E8 [4] 68 [4] FF B4 [3] 00 00 8B 94 [3] 00 00 8D 8C [3] 00 00 89 84 [3] 00 00 E8} - $ref_eccA = {FF 74 [2] 8D 84 [3] 00 00 B9 [4] 50 FF 74 [2] FF B4 [3] 00 00 8B 94 [3] 00 00 E8 [4] FF B4 [3] 00 00 89 84 [3] 00 00 B9 [4] 8D 84 [3] 00 00 50} - $ref_eccB = {FF B4 [3] 00 00 8D 84 [3] 00 00 B9 [4] FF 74 [2] 50 FF B4 [3] 00 00 8B 94 [3] 00 00 E8 [4] FF B4 [3] 00 00 89 84 [3] 00 00 B9} - $ref_eccC = {8D 84 [3] 00 00 B9 [4] 50 FF 74 [2] 8B 94 [3] 00 00 E8 [4] 89 84 [3] 00 00 B9 [4] 8D 84 [3] 00 00 50 FF B4 [3] 00 00 8B 94 [3] 00 00 E8} - $ref_eccD = {FF B4 [3] 00 00 8B 54 [2] 8D 84 [3] 00 00 8B 8C [3] 00 00 68 [4] 50 E8 [4] 83 C4 0C 89 84 [3] 00 00 8D 84 [3] 00 00 FF B4 [3] 00 00 8B 94 [3] 00 00 8B 4C [2] 68 [4] 50 E8} - $ref_eccE = {FF B4 [3] 00 00 BA [4] 8D 8C [3] 00 00 FF B4 [3] 00 00 E8 [4] FF 74 [2] BA [4] 89 84 [3] 00 00 FF 74 [2] 8D 8C [3] 00 00 E8} - $ref_eccF = {FF B4 [3] 00 00 8D 94 [3] 00 00 FF B4 [3] 00 00 8B 4C [2] E8 [4] 83 C4 0C 89 84 [3] 00 00 8D 94 [3] 00 00 68 [4] FF 74 [2] FF B4 [3] 00 00 8B 4C [2] E8} - $ref_eccG = {8D 84 [3] 00 00 50 FF B4 [3] 00 00 8B 94 [3] 00 00 8B 8C [3] 00 00 68 [4] E8 [4] 83 C4 0C 89 84 [3] 00 00 8D 84 [3] 00 00 50 FF 74 [2] 8B 94 [3] 00 00 8B 8C [3] 00 00 68 [4] E8} - $ref_eccH = {8D 84 [5] 50 68 [4] FF 74 [2] FF B4 [3] 00 00 8B 94 [3] 00 00 8B 8C [3] 00 00 E8 [4] 89 84 [3] 00 00 8D 84 [3] 00 00 50 68} - $ref_eccI = {8B 94 [3] 00 00 8D 84 [3] 00 00 8B 8C [3] 00 00 68 [4] 50 E8 [4] 8B 54 [2] 8B 8C [3] 00 00 89 84 [3] 00 00 8D 84 [3] 00 00 68 [4] 50 E8} - $ref_eccJ = {8B 44 [2] 6A 6D 59 F7 F1 B9 [4] 89 44 [2] 8D 44 [2] 81 74 [6] C7 44 [6] C1 64 [3] C1 6C [3] 81 74 [6] C7 44 [6] 81 44 [6] 81 4C [6] 81 74 [6] FF 74 [2] 50 FF 74 [2] FF 74 [2] 8B 54 [2] E8} - $ref_eccK = {81 74 [2] 82 8D 0C 00 FF 74 [2] 50 68 [4] FF 74 [2] 8B 54 [2] 8B 4C [2] E8} - $ref_eccL = {4C 8D [3] 4C 8D [5] 81 85 ?? 00 00 00 [4] 81 B5 ?? 00 00 00 [4] C7 85 ?? 00 00 00} - $ref_eccM = {4C 8D 0D [4] 81 B5 ?? 00 00 00 [4] 81 B5 ?? 00 00 00 [4] C7 85 ?? 00 00 00 [4] 81 B5 ?? 00 00 00 [4] 6B 85} - $ref_eccN = {4C 8D 05 [4-28] F7 E1 2B CA D1 E9 03 CA C1 E9 05 89 8D ?? 00 00 00 C1 AD ?? 00 00 00 ?? 81 B5 ?? 
00 00 00} - $ref_eccO = {4C 8D 0D [4] 8B 45 ?? 8D 0C ?? B8 [4] 03 C9 89 4D ?? 8B 4D ?? F7 E1 B8 [4] 2B CA D1 E9 03 CA C1 E9 05} - $ref_eccP = {40 55 48 8D 6C 24 ?? 48 81 EC [12-36] C7 45 [4] 00 [0-60] C7 45 [4] 00 [0-60] C7 45 [4] 00 [0-60] C7 45} - condition: - uint16(0) == 0x5A4D and any of ($snippet*) or 2 of ($comboA*) or $ref_rsa or any of ($ref_ecc*) -} -""" - -MAX_IP_STRING_SIZE = 16 # aaa.bbb.ccc.ddd\0 - - -def first_match(matches, pattern): - if not matches: - return 0 - for item in matches[0].strings: - if pattern == item.identifier: - return item.instances[0].offset - return 0 - - -def addresses_from_matches(matches, pattern): - addresses = [] - for match in matches: - for item in match.strings: - if item.identifier == pattern: - addresses.append(item.instances[0].offset) - return addresses - - -def c2_funcs_from_match(matches, pattern, data): - addresses = [] - addr = first_match(matches, pattern) - hit = addr + data[addr:].find(b"\x48\x8D\x05") - next = 1 - while next > 0: - addresses.append(struct.unpack("i", data[hit + 3 : hit + 7])[0] + hit + 7) - next = data[hit + 7 : hit + 600].find(b"\x48\x8D\x05") - if next != -1: - hit += next + 7 - return addresses - - -def xor_data(data, key): - return bytes(c ^ k for c, k in zip(data, cycle(key))) - - -def emotet_decode(data, size, xor_key): - offset = 8 - res = b"" - for count in range(int(size / 4)): - off_from = offset + count * 4 - off_to = off_from + 4 - encoded_dw = int.from_bytes(data[off_from:off_to], byteorder="little") - decoded = xor_key ^ encoded_dw - res += decoded.to_bytes(4, byteorder="little") - return res - - -# Thanks to Jason Reaves (@sysopfb), @pollo290987, phate1. -def extract_emotet_rsakey(pe): - for section in pe.sections: - if section.Name.replace(b"\x00", b"") == b".data": - data_section = section.get_data() - data_size = len(data_section) - res_list = [] - if data_size: - delta = 0 - while delta < data_size: - xor_key = int.from_bytes(data_section[delta : delta + 4], byteorder="little") - encoded_size = int.from_bytes(data_section[delta + 4 : delta + 8], byteorder="little") - decoded_size = ((xor_key ^ encoded_size) & 0xFFFFFFFC) + 4 - if decoded_size == 0x6C: - res_list.append(emotet_decode(data_section[delta:], decoded_size, xor_key)) - break - delta += 4 - if res_list: - res_list = list(set(res_list)) - pub_key = res_list[0][:106] - seq = asn1.DerSequence() - try: - seq.decode(pub_key) - except Exception as e: - logging.exception(e) - return - return RSA.construct((seq[0], seq[1])) - for section in pe.sections: - if section.Name.replace(b"\x00", b"") == b".text": - code_section = section.get_data() - code_size = len(code_section) - if code_size: - delta = 0 - while delta < code_size: - xor_key = int.from_bytes(code_section[delta : delta + 4], byteorder="little") - encoded_size = int.from_bytes(code_section[delta + 4 : delta + 8], byteorder="little") - decoded_size = ((xor_key ^ encoded_size) & 0xFFFFFFFC) + 4 - if decoded_size == 0x6C: - res_list.append(emotet_decode(code_section[delta:], decoded_size, xor_key)) - break - delta += 4 - if res_list: - res_list = list(set(res_list)) - pub_key = res_list[0][:106] - seq = asn1.DerSequence() - try: - seq.decode(pub_key) - except ValueError: - # log.error(e) - return - return RSA.construct((seq[0], seq[1])) - - -stack = 0x80000 -code_base = 0x180001000 - - -def hook_instr(uc, address, size, mode): - global call_count - ins = uc.mem_read(address + size, 1) - if ins == (b"\xe8"): - call_count = call_count + 1 - if call_count == 4: - call_count = 0 - 
uc.reg_write(UC_X86_REG_RAX, stack + 0x400) - uc.reg_write(UC_X86_REG_RIP, uc.reg_read(UC_X86_REG_RIP) + 9) - return True - - -def emulate(code, ep): - global call_count - call_count = 0 - with suppress(UcError): - uc = Uc(UC_ARCH_X86, UC_MODE_64) - size = int(len(code) / 0x1000) * 0x1000 - if len(code) % 0x1000: - size = size + 0x1000 - uc.mem_map(code_base, size) - uc.mem_write(code_base, code) - uc.mem_map(stack, 0x1000) - uc.mem_map(0x0, 0x1000) - uc.reg_write(UC_X86_REG_RSP, stack + 0x200) - uc.reg_write(UC_X86_REG_RCX, stack + 0x104) - uc.reg_write(UC_X86_REG_RDX, stack + 0x108) - uc.reg_write(UC_X86_REG_R9, stack + 0x108) - uc.hook_add(UC_HOOK_CODE, hook_instr, user_data=UC_MODE_64) - uc.emu_start(code_base + ep, code_base + len(code)) - return uc - - -def have_enough_memory_for_unicorn(): - """ - Avoid unicorn calling exit(1) due to memory leak. - - https://github.com/unicorn-engine/unicorn/issues/1766 - - https://github.com/unicorn-engine/unicorn/pull/1629 - """ - try: - from mmap import MAP_ANON, MAP_PRIVATE, PROT_EXEC, PROT_READ, PROT_WRITE, mmap - - mm = mmap( - -1, - 1024 * 1024 * 1024, - MAP_PRIVATE | MAP_ANON, - PROT_WRITE | PROT_READ | PROT_EXEC, - ) - mm.close() - return True - except OSError: - return False - - -def extract_config(filebuf): - conf_dict = {} - pe = None - with suppress(Exception): - pe = pefile.PE(data=filebuf, fast_load=False) - code = filebuf[pe.sections[0].PointerToRawData : pe.sections[0].PointerToRawData + pe.sections[0].SizeOfRawData] - - if pe is None: - return - - image_base = pe.OPTIONAL_HEADER.ImageBase - c2found = False - c2list_va_offset = 0 - c2_list_offset = 0 - delta = 0 - c2_funcs = [] - ecc_funcs = [] - - yara_rules = yara.compile(source=rule_source) - yara_matches = yara_rules.match(data=filebuf) - - if first_match(yara_matches, "$snippet3"): - c2list_va_offset = first_match(yara_matches, "$snippet3") - c2_list_va = struct.unpack("I", filebuf[c2list_va_offset + 2 : c2list_va_offset + 6])[0] - c2_list_rva = c2_list_va & 0xFFFF if c2_list_va - image_base > 0x20000 else c2_list_va - image_base - try: - c2_list_offset = pe.get_offset_from_rva(c2_list_rva) - except pefile.PEFormatError: - pass - - while True: - try: - ip = struct.unpack(" 0x20000 else c2_list_va - image_base - try: - c2_list_offset = pe.get_offset_from_rva(c2_list_rva) - except pefile.PEFormatError: - pass - while True: - try: - ip = struct.unpack(" 0x40000 else c2_list_va - image_base - try: - c2_list_offset = pe.get_offset_from_rva(c2_list_rva) - except pefile.PEFormatError as err: - log.error(err) - return - while True: - preip = filebuf[c2_list_offset : c2_list_offset + 4] - if not preip: - return - try: - ip = struct.unpack(" 0x20000 else c2_list_va - image_base - try: - c2_list_offset = pe.get_offset_from_rva(c2_list_rva) - except pefile.PEFormatError: - pass - while True: - try: - ip = struct.unpack(" 0x20000 else c2_list_va - image_base - try: - c2_list_offset = pe.get_offset_from_rva(c2_list_rva) - except pefile.PEFormatError: - pass - while True: - try: - ip = struct.unpack(" 1000: - log.debug("Anomalous C2 list size 0x%x", size) - return - c2_list_offset += 8 - c2_list = xor_data(filebuf[c2_list_offset:], key) - offset = 0 - while offset < size: - try: - ip = struct.unpack(">I", c2_list[offset : offset + 4])[0] - except Exception: - break - if ip == struct.unpack(">I", key)[0]: - break - c2_address = socket.inet_ntoa(struct.pack("!L", ip)) - port = str(struct.unpack(">H", c2_list[offset + 4 : offset + 6])[0]) - if not c2_address or not port: - break - 
conf_dict.setdefault("address", []).append(f"{c2_address}:{port}") - c2found = True - offset += 8 - elif c2_funcs: - for address in c2_funcs: - if not have_enough_memory_for_unicorn(): - log.warning("not enough memory for unicorn") - continue - uc = emulate(code, address - pe.sections[0].PointerToRawData) - c2_address = socket.inet_ntoa(struct.pack("!L", int.from_bytes(uc.mem_read(stack + 0x104, 4), byteorder="big"))) - flag = str(int.from_bytes(uc.mem_read(stack + 0x108, 2), byteorder="little")) - port = str(int.from_bytes(uc.mem_read(stack + 0x10A, 2), byteorder="little")) - if flag == "1" and port != "0": - conf_dict.setdefault("address", []).append(f"{c2_address}:{port}") - c2found = True - - if not c2found: - return - pem_key = False - with suppress(ValueError): - pem_key = extract_emotet_rsakey(pe) - if pem_key: - conf_dict.setdefault("RSA public key", pem_key.exportKey().decode()) - else: - if first_match(yara_matches, "$ref_rsa"): - ref_rsa_offset = first_match(yara_matches, "$ref_rsa") - ref_rsa_va = 0 - zb = struct.unpack("b", filebuf[ref_rsa_offset + 31 : ref_rsa_offset + 32])[0] - if not zb: - ref_rsa_va = struct.unpack("I", filebuf[ref_rsa_offset + 28 : ref_rsa_offset + 32])[0] - else: - zb = struct.unpack("b", filebuf[ref_rsa_offset + 29 : ref_rsa_offset + 30])[0] - if not zb: - ref_rsa_va = struct.unpack("I", filebuf[ref_rsa_offset + 26 : ref_rsa_offset + 30])[0] - else: - zb = struct.unpack("b", filebuf[ref_rsa_offset + 28 : ref_rsa_offset + 29])[0] - if not zb: - ref_rsa_va = struct.unpack("I", filebuf[ref_rsa_offset + 25 : ref_rsa_offset + 29])[0] - else: - zb = struct.unpack("b", filebuf[ref_rsa_offset + 38 : ref_rsa_offset + 39])[0] - if not zb: - ref_rsa_va = struct.unpack("I", filebuf[ref_rsa_offset + 35 : ref_rsa_offset + 39])[0] - if not ref_rsa_va: - return - ref_rsa_rva = ref_rsa_va - image_base - try: - ref_rsa_offset = pe.get_offset_from_rva(ref_rsa_rva) - except Exception: - return - key = struct.unpack(". - -DESCRIPTION = "Enfal configuration parser." -AUTHOR = "kevoreilly" - -import yara - -rule_source = """ -rule Enfal -{ - meta: - author = "kev" - description = "Enfal configuration blob" - cape_type = "Enfal Config" - strings: - $config = {BF 49 ?? 75 22 12 ?? 
75 4B 65 72 6E 65 6C 33 32 2E 64 6C 6C} - - condition: - $config -} -""" - -MAX_STRING_SIZE = 128 - - -def yara_scan(raw_data, rule_name): - addresses = {} - yara_rules = yara.compile(source=rule_source) - matches = yara_rules.match(data=raw_data) - for match in matches: - if match.rule == "Enfal": - for item in match.strings: - if item.identifier == rule_name: - addresses[item.identifier] = item.instances[0].offset - return addresses - - -def string_from_offset(data, offset): - return data[offset : offset + MAX_STRING_SIZE].split(b"\0", 1)[0] - - -def list_from_offset(data, offset): - string = data[offset : offset + MAX_STRING_SIZE].split(b"\0", 1)[0] - return string.split(b",") - - -def extract_config(filebuf): - config = yara_scan(filebuf, "$config") - return_conf = {} - if config: - yara_offset = int(config["$config"]) - - c2_address = string_from_offset(filebuf, yara_offset + 0x2E8) - if c2_address: - return_conf["c2_address"] = c2_address - - c2_url = string_from_offset(filebuf, yara_offset + 0xE8) - if c2_url: - return_conf["c2_url"] = c2_url - - if filebuf[yara_offset + 0x13B0 : yara_offset + 0x13B1] == "S": - registrypath = string_from_offset(filebuf, yara_offset + 0x13B0) - elif filebuf[yara_offset + 0x13C0 : yara_offset + 0x13C1] == "S": - registrypath = string_from_offset(filebuf, yara_offset + 0x13C0) - elif filebuf[yara_offset + 0x13D0 : yara_offset + 0x13D1] == "S": - registrypath = string_from_offset(filebuf, yara_offset + 0x13D0) - else: - registrypath = "" - - if registrypath: - return_conf["registrypath"] = registrypath - - if filebuf[yara_offset + 0x14A2 : yara_offset + 0x14A3] == "C": - servicename = "" - filepaths = list_from_offset(filebuf, yara_offset + 0x14A2) - filepaths[0] = filepaths[0].split(b" ", 1)[0] - elif filebuf[yara_offset + 0x14B0 : yara_offset + 0x14B1] != "\0": - servicename = string_from_offset(filebuf, yara_offset + 0x14B0) - filepaths = list_from_offset(filebuf, yara_offset + 0x14C0) - elif filebuf[yara_offset + 0x14C0 : yara_offset + 0x14C1] != "\0": - servicename = string_from_offset(filebuf, yara_offset + 0x14C0) - filepaths = list_from_offset(filebuf, yara_offset + 0x14D0) - elif filebuf[yara_offset + 0x14D0 : yara_offset + 0x14D1] != "\0": - servicename = string_from_offset(filebuf, yara_offset + 0x14D0) - filepaths = list_from_offset(filebuf, yara_offset + 0x14E0) - else: - servicename = "" - filepaths = [] - - if servicename: - return_conf["servicename"] = servicename - if filepaths: - for path in filepaths: - return_conf.setdefault("filepath", []).append(path) diff --git a/modules/processing/parsers/CAPE/EvilGrab.py b/modules/processing/parsers/CAPE/EvilGrab.py deleted file mode 100644 index 3e59277fa3c..00000000000 --- a/modules/processing/parsers/CAPE/EvilGrab.py +++ /dev/null @@ -1,110 +0,0 @@ -# Copyright (C) 2015 Kevin O'Reilly kevin.oreilly@contextis.co.uk -# This program is free software: you can redistribute it and/or modify -# it under the terms of the GNU General Public License as published by -# the Free Software Foundation, either version 3 of the License, or -# (at your option) any later version. -# -# This program is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -# GNU General Public License for more details. -# -# You should have received a copy of the GNU General Public License -# along with this program. If not, see . - -DESCRIPTION = "EvilGrab configuration parser." 
-AUTHOR = "kevoreilly" - -import struct - -import pefile -import yara - -rule_source = """ -rule EvilGrab -{ - meta: - author = "kev" - description = "EvilGrab configuration function" - cape_type = "EvilGrab Payload" - strings: - $configure1 = {8D 44 24 ?? 50 6A 01 E8 ?? ?? ?? ?? 85 C0 74 07 33 C0 E9 9? 00 00 00 68 ?? ?? ?? ?? E8 ?? ?? ?? ?? 83 F8 07 59 73 ?? 68 ?? ?? ?? ?? 68 ?? ?? ?? ?? E8 ?? ?? ?? ?? 68} - $configure2 = {8D 44 24 ?? 50 6A 01 E8 ?? ?? ?? ?? 85 C0 74 07 33 C0 E9 9? 00 00 00 68 ?? ?? ?? ?? E8 ?? ?? ?? ?? 83 F8 07 59 73 ?? 68 ?? ?? ?? ?? 68 ?? ?? ?? ?? E8 ?? ?? ?? ?? 83} - $configure3 = {8D 95 60 ?? ?? ?? 52 6A 01 E8 ?? ?? ?? ?? 85 C0 74 13 33 C0 8B 4D F4 64 89 0D 00 00 00 00 5F 5E 5B 8B E5 5D C3 BF ?? ?? ?? ?? 83 C9 FF 33 C0 F2 AE} - - condition: - //check for MZ Signature at offset 0 - uint16(0) == 0x5A4D - - and - - $configure1 or $configure2 or $configure3 -} -""" - -MAX_STRING_SIZE = 65 - - -def yara_scan(raw_data): - addresses = {} - yara_rules = yara.compile(source=rule_source) - matches = yara_rules.match(data=raw_data) - for match in matches: - if match.rule == "EvilGrab": - for item in match.strings: - addresses[item.identifier] = item.instances[0].offset - return addresses - - -def pe_data(pe, va, size): - image_base = pe.OPTIONAL_HEADER.ImageBase - rva = va - image_base - return pe.get_data(rva, size) - - -def string_from_va(pe, offset): - image_base = pe.OPTIONAL_HEADER.ImageBase - string_rva = struct.unpack("i", pe.__data__[offset : offset + 4])[0] - image_base - string_offset = pe.get_offset_from_rva(string_rva) - return pe.__data__[string_offset : string_offset + MAX_STRING_SIZE].split(b"\0", 1)[0] - - -map_offset = { - "$configure1": [24, 71, 60, 90, 132, 186], - "$configure2": [27, 78, 67, 91, 133, 188], - "$configure3": [38, 99, 132, 167, 195], -} - - -def extract_config(filebuf): - pe = pefile.PE(data=filebuf, fast_load=False) - # image_base = pe.OPTIONAL_HEADER.ImageBase - yara_matches = yara_scan(filebuf) - end_config = {} - for key, values in map_offset.keys(): - if not yara_matches.get(key): - continue - - yara_offset = int(yara_matches[key]) - - c2_address = string_from_va(pe, yara_offset + values[0]) - if c2_address: - end_config["c2_address"] = c2_address - port = str(struct.unpack("h", filebuf[yara_offset + values[1] : yara_offset + values[1] + 2])[0]) - if port: - end_config["port"] = [port, "tcp"] - missionid = string_from_va(pe, yara_offset + values[3]) - if missionid: - end_config["missionid"] = missionid - version = string_from_va(pe, yara_offset + values[4]) - if version: - end_config["version"] = version - injectionprocess = string_from_va(pe, yara_offset + values[5]) - if injectionprocess: - end_config["injectionprocess"] = injectionprocess - if key != "$configure3": - mutex = string_from_va(pe, yara_offset - values[6]) - if mutex: - end_config["mutex"] = mutex - - return end_config diff --git a/modules/processing/parsers/CAPE/Fareit.py b/modules/processing/parsers/CAPE/Fareit.py deleted file mode 100644 index c72482cc304..00000000000 --- a/modules/processing/parsers/CAPE/Fareit.py +++ /dev/null @@ -1,69 +0,0 @@ -import re -import sys -from pathlib import Path - -""" -rule pony { - meta: - author = "adam" - description = "Detect pony" - - strings: - $s1 = "{%08X-%04X-%04X-%02X%02X-%02X%02X%02X%02X%02X%02X}" - $s2 = "YUIPWDFILE0YUIPKDFILE0YUICRYPTED0YUI1.0" - - condition: - $s1 and $s2 -} -""" - - -gate_url = re.compile(b".*\\.php$") -exe_url = re.compile(b".*\\.exe$") -dll_url = re.compile(b".*\\.dll$") - - -def 
extract_config(memdump_path, read=False): - if read: - F = Path(memdump_path).read_bytes() - else: - F = memdump_path - """ - # Get the aPLib header + data - buf = re.findall(r"aPLib .*PWDFILE", cData, re.DOTALL|re.MULTILINE) - # Strip out the header - if buf and len(buf[0]) > 200: - cData = buf[0][200:] - """ - artifacts_raw = { - "controllers": [], - "downloads": [], - } - - start = F.find(b"YUIPWDFILE0YUIPKDFILE0YUICRYPTED0YUI1.0") - if start: - F = F[start - 600 : start + 500] - - output = re.findall( - b"(https?://.[A-Za-z0-9-\\.\\_\\~\\:\\/\\?\\#\\[\\]\\@\\!\\$\\&'\\(\\)\\*\\+\\,\\;\\=]+(?:\\.php|\\.exe|\\.dll))", F - ) - for url in output: - try: - if b"\x00" not in url: - # url = self._check_valid_url(url) - if url is None: - continue - if gate_url.match(url): - artifacts_raw["controllers"].append(url.lower().decode()) - elif exe_url.match(url) or dll_url.match(url): - artifacts_raw["downloads"].append(url.lower().decode()) - except Exception as e: - print(e, sys.exc_info(), "PONY") - artifacts_raw["controllers"] = list(set(artifacts_raw["controllers"])) - artifacts_raw["downloads"] = list(set(artifacts_raw["downloads"])) - return artifacts_raw if len(artifacts_raw["controllers"]) != 0 or len(artifacts_raw["downloads"]) != 0 else False - - -if __name__ == "__main__": - res = extract_config(sys.argv[1], read=True) - print(res) diff --git a/modules/processing/parsers/CAPE/Formbook.py b/modules/processing/parsers/CAPE/Formbook.py deleted file mode 100644 index d24980fc78b..00000000000 --- a/modules/processing/parsers/CAPE/Formbook.py +++ /dev/null @@ -1,22 +0,0 @@ -def extract_config(data): - config_dict = {} - i = 0 - try: - lines = data.decode().split("\n") - except Exception: - return - if lines[0].startswith("POST"): - while lines[i] != "dat=": - i += 1 - if lines[i] == "dat=": - i += 1 - elif "www." 
not in lines[0]: - return - config_dict["C2"] = lines[i] - decoys = [] - i += 1 - while len(lines[i]) > 0: - decoys.append(lines[i]) - i += 1 - config_dict["Decoys"] = decoys - return config_dict diff --git a/modules/processing/parsers/CAPE/Greame.py b/modules/processing/parsers/CAPE/Greame.py deleted file mode 100644 index 0a7aeb1c128..00000000000 --- a/modules/processing/parsers/CAPE/Greame.py +++ /dev/null @@ -1,90 +0,0 @@ -import string - -import pefile - - -def get_config(data): - try: - pe = pefile.PE(data=data) - rt_string_idx = [entry.id for entry in pe.DIRECTORY_ENTRY_RESOURCE.entries].index(pefile.RESOURCE_TYPE["RT_RCDATA"]) - rt_string_directory = pe.DIRECTORY_ENTRY_RESOURCE.entries[rt_string_idx] - for entry in rt_string_directory.directory.entries: - if str(entry.name) == "GREAME": - data_rva = entry.directory.entries[0].data.struct.OffsetToData - size = entry.directory.entries[0].data.struct.Size - data = pe.get_memory_mapped_image()[data_rva : data_rva + size] - return data.split("####@####") - except Exception: - return None - - -def xor_decode(data): - key = 0xBC - encoded = bytearray(data) - for i in range(len(encoded)): - encoded[i] ^= key - return [x for x in str(encoded) if x in string.printable] - - -def parse_config(raw_config): - if len(raw_config) <= 20: - return None - domains = "" - ports = "" - # Config sections 0 - 19 contain a list of Domains and Ports - for x in range(19): - if len(raw_config[x]) > 1: - domains += xor_decode(raw_config[x]).split(":", 1)[0] - domains += "|" - ports += xor_decode(raw_config[x]).split(":", 2)[1] - ports += "|" - config_dict = { - "Domain": domains[:-1], - "Port": ports[:-1], - "ServerID": xor_decode(raw_config[20]), - "Password": xor_decode(raw_config[21]), - "Install Flag": xor_decode(raw_config[22]), - "Install Directory": xor_decode(raw_config[25]), - "Install File Name": xor_decode(raw_config[26]), - "Active X Startup": xor_decode(raw_config[27]), - "REG Key HKLM": xor_decode(raw_config[28]), - "REG Key HKCU": xor_decode(raw_config[29]), - "Enable Message Box": xor_decode(raw_config[30]), - "Message Box Icon": xor_decode(raw_config[31]), - "Message Box Button": xor_decode(raw_config[32]), - "Install Message Title": xor_decode(raw_config[33]), - "Install Message Box": xor_decode(raw_config[34]).replace("\r\n", " "), - "Activate Keylogger": xor_decode(raw_config[35]), - "Keylogger Backspace = Delete": xor_decode(raw_config[36]), - "Keylogger Enable FTP": xor_decode(raw_config[37]), - "FTP Address": xor_decode(raw_config[38]), - "FTP Directory": xor_decode(raw_config[39]), - "FTP UserName": xor_decode(raw_config[41]), - "FTP Password": xor_decode(raw_config[42]), - "FTP Port": xor_decode(raw_config[43]), - "FTP Interval": xor_decode(raw_config[44]), - "Persistance": xor_decode(raw_config[59]), - "Hide File": xor_decode(raw_config[60]), - "Change Creation Date": xor_decode(raw_config[61]), - "Mutex": xor_decode(raw_config[62]), - "Melt File": xor_decode(raw_config[63]), - "Startup Policies": xor_decode(raw_config[69]), - "USB Spread": xor_decode(raw_config[70]), - "P2P Spread": xor_decode(raw_config[71]), - "Google Chrome Passwords": xor_decode(raw_config[73]), - } - if xor_decode(raw_config[57]) == 0: - config_dict["Process Injection"] = "Disabled" - elif xor_decode(raw_config[57]) == 1: - config_dict["Process Injection"] = "Default Browser" - elif xor_decode(raw_config[57]) == 2: - config_dict["Process Injection"] = xor_decode(raw_config[58]) - else: - config_dict["Process Injection"] = "None" - return config_dict - - -def 
extract_config(data): - raw_config = get_config(data) - if raw_config: - return parse_config(raw_config) diff --git a/modules/processing/parsers/CAPE/GuLoader.py b/modules/processing/parsers/CAPE/GuLoader.py deleted file mode 100644 index 1d858ac7d6d..00000000000 --- a/modules/processing/parsers/CAPE/GuLoader.py +++ /dev/null @@ -1,17 +0,0 @@ -try: - import re2 as re -except ImportError: - import re - -url_regex = re.compile(rb"http[s]?://(?:[a-zA-Z]|[0-9]|[$-_@.&+]|[!*\(\),]|(?:%[0-9a-fA-F][0-9a-fA-F]))+") - - -def extract_config(data): - try: - urls = [url.lower().decode() for url in url_regex.findall(data)] - if urls: - return {"URLs": urls} - except Exception as e: - print(e) - - return None diff --git a/modules/processing/parsers/CAPE/Hancitor.py b/modules/processing/parsers/CAPE/Hancitor.py deleted file mode 100644 index 7cf6a679924..00000000000 --- a/modules/processing/parsers/CAPE/Hancitor.py +++ /dev/null @@ -1,78 +0,0 @@ -""" - Hancitor config extractor -""" - -import hashlib -import logging -import re -import struct - -import pefile -from Cryptodome.Cipher import ARC4 - -DESCRIPTION = "Hancitor config extractor." -AUTHOR = "threathive, cccs-j" - -log = logging.getLogger(__name__) - - -def getHashKey(key_data): - # source: https://github.com/OALabs/Lab-Notes/blob/main/Hancitor/hancitor.ipynb - m = hashlib.sha1() - m.update(key_data) - key = m.digest()[:5] - return key - - -def get_key_config_data(filebuf, pe): - # source: https://github.com/OALabs/Lab-Notes/blob/main/Hancitor/hancitor.ipynb - RE_KEY = rb"\x6a(.)\x68(....)\x68\x00\x20\x00\x00" - m = re.search(RE_KEY, filebuf) - if not m: - return - key_len = struct.unpack("b", m.group(1))[0] - key_address = struct.unpack(". - -DESCRIPTION = "HttpBrowser configuration parser." -AUTHOR = "kevoreilly" - - -import struct - -import pefile -import yara - -rule_source = """ -rule HttpBrowser -{ - meta: - author = "kev" - description = "HttpBrowser C2 connect function" - cape_type = "HttpBrowser Payload" - strings: - $connect_1 = {33 C0 68 06 02 00 00 66 89 ?? ?? ?? ?? ?? 8D ?? ?? ?? ?? ?? 5? 50 E8 ?? ?? 00 00 8B 35 ?? ?? ?? ?? 83 C4 0C 6A 01 BB ?? ?? ?? ?? 53 FF D6 59 50 BF} - $connect_2 = {33 C0 68 06 02 00 00 66 89 ?? ?? ?? 8D ?? ?? ?? 5? 50 E8 ?? ?? 00 00 8B 35 ?? ?? ?? ?? 83 C4 0C 6A 01 BB ?? ?? ?? ?? 53 FF D6 59 50 BF} - $connect_3 = {68 40 1F 00 00 FF 15 ?? ?? ?? ?? 8B 35 ?? ?? ?? ?? BB ?? ?? ?? ?? 53 FF D6 59 50 BF ?? ?? ?? ?? 57 E8 ?? ?? ?? ?? 59 59} - $connect_4 = {33 C0 57 66 89 85 ?? ?? ?? ?? 8D 85 ?? ?? ?? ?? 56 50 E8 ?? ?? ?? ?? 6A 01 FF 75 08 8D 85 ?? ?? ?? ?? 68 ?? ?? ?? ?? 
68} - condition: - //check for MZ Signature at offset 0 - uint16(0) == 0x5A4D - - and - - $connect_1 or $connect_2 or $connect_3 or $connect_4 -} -""" - -MAX_STRING_SIZE = 67 - - -def yara_scan(raw_data): - addresses = {} - yara_rules = yara.compile(source=rule_source) - matches = yara_rules.match(data=raw_data) - for match in matches: - if match.rule == "HttpBrowser": - for item in match.strings: - addresses[item.identifier] = item.instances[0].offset - return addresses - - -def pe_data(pe, va, size): - image_base = pe.OPTIONAL_HEADER.ImageBase - rva = va - image_base - return pe.get_data(rva, size) - - -def ascii_from_va(pe, offset): - image_base = pe.OPTIONAL_HEADER.ImageBase - string_rva = struct.unpack("i", pe.__data__[offset : offset + 4])[0] - image_base - string_offset = pe.get_offset_from_rva(string_rva) - return pe.__data__[string_offset : string_offset + MAX_STRING_SIZE].split(b"\0", 1)[0] - - -def unicode_from_va(pe, offset): - image_base = pe.OPTIONAL_HEADER.ImageBase - string_rva = struct.unpack("i", pe.__data__[offset : offset + 4])[0] - image_base - string_offset = pe.get_offset_from_rva(string_rva) - return pe.__data__[string_offset : string_offset + MAX_STRING_SIZE].split(b"\x00\x00", 1)[0] - - -match_map = { - "$connect_1": [39, 49], - "$connect_2": [35, 45], - "$connect_3": [18, 28, 66], - "$connect_4": [35, 90, 13], -} - - -def extract_config(filebuf): - pe = pefile.PE(data=filebuf, fast_load=True) - # image_base = pe.OPTIONAL_HEADER.ImageBase - - yara_matches = yara_scan(filebuf) - tmp_config = {} - for key, values in match_map.keys(): - if yara_matches.get(key): - yara_offset = int(yara_matches[key]) - - if key in ("$connect_1", "$connect_2", "$connect_3"): - port = ascii_from_va(pe, yara_offset + values[0]) - if port: - tmp_config["port"] = [port, "tcp"] - - c2_address = unicode_from_va(pe, yara_offset + values[1]) - if c2_address: - tmp_config.setdefault("c2_address", []).append(c2_address) - - if key == "$connect_3": - c2_address = unicode_from_va(pe, yara_offset + values[2]) - if c2_address: - tmp_config.setdefault("c2_address", []).append(c2_address) - else: - c2_address = unicode_from_va(pe, yara_offset + values[0]) - if c2_address: - tmp_config["c2_address"] = c2_address - - filepath = unicode_from_va(pe, yara_offset + values[1]) - if filepath: - tmp_config["filepath"] = filepath - - injectionprocess = unicode_from_va(pe, yara_offset - values[2]) - if injectionprocess: - tmp_config["injectionprocess"] = injectionprocess - - return tmp_config diff --git a/modules/processing/parsers/CAPE/IcedID.py b/modules/processing/parsers/CAPE/IcedID.py deleted file mode 100644 index 62de3cb5afc..00000000000 --- a/modules/processing/parsers/CAPE/IcedID.py +++ /dev/null @@ -1,81 +0,0 @@ -# Copyright (C) 2019 Kevin O'Reilly (kevoreilly@gmail.com) -# This program is free software: you can redistribute it and/or modify -# it under the terms of the GNU General Public License as published by -# the Free Software Foundation, either version 3 of the License, or -# (at your option) any later version. -# -# This program is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -# GNU General Public License for more details. -# -# You should have received a copy of the GNU General Public License -# along with this program. If not, see . 
-# -# Updates to handle stage 1 Based on initial work referenced here and modified to work with python3 -# https://sysopfb.github.io/malware,/icedid/2020/04/28/IcedIDs-updated-photoloader.html -# https://gist.github.com/sysopfb/93eb0090ef47c08e4e516cb045b48b96 -# https://www.group-ib.com/blog/icedid - -import logging -import os -import struct - -import pefile -import yara -from Cryptodome.Cipher import ARC4 - -from lib.cuckoo.common.constants import CUCKOO_ROOT - -yara_path = os.path.join(CUCKOO_ROOT, "data", "yara", "CAPE", "IcedID.yar") -with open(yara_path, "r") as yara_rule: - yara_rules = yara.compile(source=yara_rule.read()) - -log = logging.getLogger(__name__) - -DESCRIPTION = "IcedID Stage 2 configuration parser." -AUTHOR = "kevoreilly,threathive,sysopfb" - - -def yara_scan(raw_data): - try: - return yara_rules.match(data=raw_data) - except Exception as e: - print(e) - - -def extract_config(filebuf): - yara_hit = yara_scan(filebuf) - - for hit in yara_hit: - if hit.rule == "IcedID": # can be either a dll or a exe - enc_data = None - try: - pe = pefile.PE(data=filebuf, fast_load=True) - for section in pe.sections: - if section.Name == b".data\x00\x00\x00": - enc_data = section.get_data() - key = enc_data[:8] - enc_config = enc_data[8:592] - decrypted_data = ARC4.new(key).decrypt(enc_config) - config = list(filter(None, decrypted_data.split(b"\x00"))) - return { - "family": "IcedID", - "version": str(struct.unpack("I", decrypted_data[4:8])[0]), - "paths": [{"path": config[1].decode(), "usage": "other"}], - "http": [{"uri": controller[1:].decode()} for controller in config[2:]], - "other": { - "Bot ID": str(struct.unpack("I", decrypted_data[:4])[0]), - }, - } - except Exception as e: - log.error("Error: %s", e) - - return {} - - -if __name__ == "__main__": - import sys - - with open(sys.argv[1], "rb") as f: - print(extract_config(f.read())) diff --git a/modules/processing/parsers/CAPE/IcedIDLoader.py b/modules/processing/parsers/CAPE/IcedIDLoader.py deleted file mode 100644 index a9251d35df2..00000000000 --- a/modules/processing/parsers/CAPE/IcedIDLoader.py +++ /dev/null @@ -1,48 +0,0 @@ -# Copyright (C) 2021 kevoreilly, enzo -# This program is free software: you can redistribute it and/or modify -# it under the terms of the GNU General Public License as published by -# the Free Software Foundation, either version 3 of the License, or -# (at your option) any later version. -# -# This program is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -# GNU General Public License for more details. -# -# You should have received a copy of the GNU General Public License -# along with this program. If not, see . 
- -import struct -from contextlib import suppress - -import pefile - - -def extract_config(filebuf): - cfg = {} - pe = None - with suppress(Exception): - pe = pefile.PE(data=filebuf, fast_load=False) - if pe is None: - return - for section in pe.sections: - if section.Name == b".d\x00\x00\x00\x00\x00\x00": - config_section = bytearray(section.get_data()) - dec = [] - for n, x in enumerate(config_section): - k = x ^ config_section[n + 64] - dec.append(k) - if n > 32: - break - campaign, c2 = struct.unpack("I30s", bytes(dec)) - cfg["C2"] = c2.split(b"\00", 1)[0].decode() - cfg["Campaign"] = campaign - return cfg - - -if __name__ == "__main__": - import sys - from pathlib import Path - - data = Path(sys.argv[1]).read_bytes() - print(extract_config(data)) diff --git a/modules/processing/parsers/CAPE/KoiLoader.py b/modules/processing/parsers/CAPE/KoiLoader.py deleted file mode 100644 index 75563816613..00000000000 --- a/modules/processing/parsers/CAPE/KoiLoader.py +++ /dev/null @@ -1,130 +0,0 @@ -import re -import struct -from contextlib import suppress -from itertools import cycle - -import pefile -import yara - -# Hash = b462e3235c7578450b2b56a8aff875a3d99d22f6970a01db3ba98f7ecb6b01a0 - -RULE_SOURCE = """ -rule KoiLoaderResources -{ - meta: - author = "YungBinary" - description = "Find KoiLoader XOR key and payload resource ids" - strings: - $payload_resource = {8D [2] 50 68 [4] E8} - $xor_key_resource = {8D [2] 51 68 [4] E8} - condition: - uint16(0) == 0x5A4D and $payload_resource and $xor_key_resource -} -""" - - -def yara_scan(raw_data): - yara_rules = yara.compile(source=RULE_SOURCE) - matches = yara_rules.match(data=raw_data) - payload_resource_id = None - xor_key_resource_id = None - - for match in matches: - if match.rule != "KoiLoaderResources": - continue - for item in match.strings: - if "$payload_resource" in item.identifier: - payload_offset = item.instances[0].offset - payload_resource_id = struct.unpack("i", raw_data[payload_offset + 5 : payload_offset + 9])[0] - - elif "$xor_key_resource" in item.identifier: - xor_key_offset = item.instances[0].offset - xor_key_resource_id = struct.unpack("i", raw_data[xor_key_offset + 5 : xor_key_offset + 9])[0] - - return (payload_resource_id, xor_key_resource_id) - - -def remove_nulls(buffer, buffer_size): - """ - Modify a buffer removing null bytes - """ - num_nulls = count_nulls(buffer) - result = skip_nth(buffer, num_nulls + 1) - return bytearray(result) - - -def count_nulls(buffer): - """ - Count null separation in a buffer - """ - num_nulls = 0 - idx = 1 - while True: - cur_byte = buffer[idx] - if cur_byte == 0: - num_nulls += 1 - idx += 1 - continue - else: - break - - return num_nulls - - -def skip_nth(buffer, n): - iterable = list(buffer) - yield from (value for index, value in enumerate(iterable) if (index + 1) % n and (index - 1) % n) - - -def find_c2(decoded_buffer): - decoded_buffer = bytearray(skip_nth(decoded_buffer, 2)) - url_regex = re.compile(rb"http[s]?://(?:[a-zA-Z]|[0-9]|[$-_@.&+]|[!*\(\),]|(?:%[0-9a-fA-F][0-9a-fA-F]))+") - urls = [url.lower().decode() for url in url_regex.findall(decoded_buffer)] - return urls - - -def xor_data(data, key): - return bytes(c ^ k for c, k in zip(data, cycle(key))) - - -def extract_config(data): - config_dict = {"C2": []} - - xor_key = b"" - encoded_payload = b"" - - payload_resource_id, xor_key_resource_id = yara_scan(data) - - if payload_resource_id is None or xor_key_resource_id is None: - return - - with suppress(Exception): - pe = pefile.PE(data=data) - for entry in 
pe.DIRECTORY_ENTRY_RESOURCE.entries: - resource_type = pefile.RESOURCE_TYPE.get(entry.struct.Id) - for directory in entry.directory.entries: - for resource in directory.directory.entries: - if resource_type != "RT_RCDATA": - continue - if directory.struct.Id == xor_key_resource_id: - offset = resource.data.struct.OffsetToData - xor_phrase_size = resource.data.struct.Size - xor_key = pe.get_memory_mapped_image()[offset : offset + xor_phrase_size] - elif directory.struct.Id == payload_resource_id: - offset = resource.data.struct.OffsetToData - encoded_payload_size = resource.data.struct.Size - encoded_payload = pe.get_memory_mapped_image()[offset : offset + encoded_payload_size] - - encoded_payload = remove_nulls(encoded_payload, encoded_payload_size) - decoded_payload = xor_data(encoded_payload, xor_key) - - config_dict["C2"] = find_c2(decoded_payload) - - return config_dict - - -if __name__ == "__main__": - import sys - - with open(sys.argv[1], "rb") as f: - print(extract_config(f.read())) diff --git a/modules/processing/parsers/CAPE/Latrodectus.py b/modules/processing/parsers/CAPE/Latrodectus.py deleted file mode 100644 index 32b6315e339..00000000000 --- a/modules/processing/parsers/CAPE/Latrodectus.py +++ /dev/null @@ -1,199 +0,0 @@ -# Copyright (C) 2024 enzok -# This program is free software: you can redistribute it and/or modify -# it under the terms of the GNU General Public License as published by -# the Free Software Foundation, either version 3 of the License, or -# (at your option) any later version. -# -# This program is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -# GNU General Public License for more details. -# -# You should have received a copy of the GNU General Public License -# along with this program. If not, see . - - -import logging -import os -import re -from contextlib import suppress - -import pefile -import yara -from cryptography.hazmat.backends import default_backend -from cryptography.hazmat.primitives.ciphers import Cipher, algorithms, modes - -from lib.cuckoo.common.constants import CUCKOO_ROOT - -yara_path = os.path.join(CUCKOO_ROOT, "data", "yara", "CAPE", "Latrodectus.yar") -with open(yara_path, "r") as yara_rule: - yara_rules = yara.compile(source=yara_rule.read()) - -log = logging.getLogger(__name__) - -DESCRIPTION = "Latrodectus configuration parser." 
-AUTHOR = "enzok" - - -def yara_scan(raw_data): - try: - return yara_rules.match(data=raw_data) - except Exception as e: - print(e) - - -def initialize_key_schedule(key: bytes, iv: bytes) -> Cipher: - backend = default_backend() - cipher = Cipher(algorithms.AES(key), modes.CBC(iv), backend=backend) - return cipher - - -def decrypt_with_ctr(cbc_cipher: Cipher, iv: bytes, data: bytes) -> bytes: - key = cbc_cipher.algorithm.key - backend = default_backend() - cipher = Cipher(algorithms.AES(key), modes.CTR(iv), backend=backend) - decryptor = cipher.decryptor() - plaintext = decryptor.update(data) + decryptor.finalize() - return plaintext - - -def decrypt_string_aes(data: bytes, key: bytes) -> bytes: - len_data = int.from_bytes(data[:2], "little") - iv = data[2:18] - data = data[18 : 18 + len_data] - cbc_cipher = initialize_key_schedule(key, iv) - decrypted_data = decrypt_with_ctr(cbc_cipher, iv, data) - return decrypted_data - - -def prng_seed(seed): - sub_expr = (seed + 11865) << 31 | (seed + 11865) >> 1 - expr1 = (sub_expr << 31 | sub_expr >> 1) << 30 & (2**64 - 1) - sub_expr = (expr1 & 0xFFFFFFFF) | (expr1 >> 32) - expr2 = ((sub_expr ^ 0x151D) >> 30) | (4 * (sub_expr ^ 0x151D)) & (2**32 - 1) - return ((expr2 >> 31) | (2 * expr2)) & 0xFFFFFFFF - - -def decrypt_string(data, type): - seed = int.from_bytes(data[:4], "little") & 0xFFFFFFFF - length = (int.from_bytes(data[4:6], "little")) ^ (int.from_bytes(data[:2], "little")) & 0xFFFF - src = data[6:] - result = bytearray() - - for i in range(length): - if type == 1: - seed += 1 - elif type == 2: - seed = prng_seed(seed) - result.append((seed ^ src[i]) & 0xFF) - return result - - -def get_aes_string(data, key): - str_val = "" - with suppress(Exception): - str_val = decrypt_string_aes(data, key).decode("ascii").replace("\00", "") - return str_val - - -def get_string(match, data): - str_val = "" - i = match.start() // 2 - with suppress(Exception): - str_val = decrypt_string(data[i:], 1).decode("ascii").replace("\00", "") - - if not str_val: - with suppress(Exception): - str_val = decrypt_string(data[i:], 2).decode("ascii").replace("\00", "") - - return str_val - - -def fnv_hash(data): - decode = 0x811C9DC5 - for key in data: - decode = 0x1000193 * (decode ^ key) & 0xFFFFFFFF - return decode - - -def extract_config(filebuf): - yara_hit = yara_scan(filebuf) - cfg = {} - - for hit in yara_hit: - rule = hit.rule - if "Latrodectus" in rule: - version = "" - is_aes = False - key = "" - if "AES" in rule: - is_aes = True - - for item in hit.strings: - for instance in item.instances: - if "$version" in item.identifier and not version: - data = instance.matched_data[::-1] - major = int.from_bytes(data[4:5], byteorder="big") - minor = int.from_bytes(data[12:13], byteorder="big") - version = f"{major}.{minor}" - if "$key" in item.identifier: - key = instance.matched_data[4::5] - try: - pe = pefile.PE(data=filebuf, fast_load=True) - data_sections = [s for s in pe.sections if s.Name.find(b".data") != -1] - if not data_sections: - return - data = data_sections[0].get_data() - str_vals = [] - c2 = [] - campaign = "" - rc4_key = "" - - if is_aes and key: - for i in range(len(data)): - str_val = get_aes_string(data[i : i + 256], key) - if str_val and len(str_val) > 2: - str_vals.append(str_val) - else: - hex_pattern = "".join([rf"{byte:02X}" for byte in data[:4]]) - regex = re.compile(hex_pattern.lower()) - matches = regex.finditer(data.hex()) - - for match in matches: - str_val = get_string(match, data) - if str_val and len(str_val) > 2: - str_vals.append(str_val) 
- - for i in range(len(str_vals) - 1): - val = str_vals[i] - if "/files/" in val: - offset = 1 - if is_aes: - offset += 1 - campaign = str_vals[i + offset] - elif "ERROR" in val: - rc4_key = str_vals[i + 1] - elif "http" in val: - c2.append(val) - - for item in c2: - str_vals.remove(item) - - cfg = { - "C2": c2, - "Group name": campaign, - "Campaign ID": fnv_hash(campaign.encode()), - "Version": version, - "RC4 key": rc4_key, - "Strings": str_vals, - } - except Exception as e: - log.error("Error: %s", e) - return cfg - - -if __name__ == "__main__": - import sys - - with open(sys.argv[1], "rb") as f: - print(extract_config(f.read())) diff --git a/modules/processing/parsers/CAPE/LokiBot.py b/modules/processing/parsers/CAPE/LokiBot.py deleted file mode 100644 index a6ebf1cad96..00000000000 --- a/modules/processing/parsers/CAPE/LokiBot.py +++ /dev/null @@ -1,168 +0,0 @@ -# MIT License -# -# Copyright (c) Jason Reaves - @sysopfb -# -# Permission is hereby granted, free of charge, to any person obtaining a copy -# of this software and associated documentation files (the "Software"), to deal -# in the Software without restriction, including without limitation the rights -# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -# copies of the Software, and to permit persons to whom the Software is -# furnished to do so, subject to the following conditions: -# -# The above copyright notice and this permission notice shall be included in all -# copies or substantial portions of the Software. -# -# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -# SOFTWARE. - -import re -import struct -import sys - -import pefile -from Cryptodome.Cipher import DES3 -from Cryptodome.Util.Padding import unpad - -DESCRIPTION = "LokiBot configuration parser." -AUTHOR = "sysopfb" - - -def find_iv(img): - iv = b"" - temp = re.findall(rb"\x68...\x00.{1,10}\x68...\x00\x68...\x00\x68...\x00\x03\xc1", img) - if temp != []: - (addr,) = struct.unpack_from(".) # 6A 08 push 8 - \x59 # 59 pop ecx - \xBE(?P.{4}) # BE D0 88 41 00 mov esi, offset encrypted_data1 - \x8D\xBD.{4} # 8D BD 68 FE FF FF lea edi, [ebp+encrypted_data_list] - \xF3\xA5 # F3 A5 rep movsd - \x6A. # 6A 43 push 43h ; 'C' - \x5B # 5B pop ebx - \x53 # 53 push ebx - \x8D\x85.{4} # 8D 85 89 FE FF FF lea eax, [ebp+var_177] - \xA4 # A4 movsb - \x6A\x00 # 6A 00 push 0 - \x50 # 50 push eax - \xE8.{4} # E8 78 E9 FE FF call about_memset - """, - re.DOTALL | re.VERBOSE, - ) - num_addr_re2 = re.compile( - rb""" - \x6A(?P.) 
# 6A 08 push 8 - \x59 # 59 pop ecx - \xBE(?P.{4}) # BE F4 88 41 00 mov esi, offset encrypted_data2 - \x8D.{2,5} # 8D BD CC FE FF FF lea edi, [ebp+var_134] - \xF3\xA5 # F3 A5 rep movsd - \x53 # 53 push ebx - \x8D.{2,5} # 8D 85 ED FE FF FF lea eax, [ebp+var_113] - \x6A\x00 # 6A 00 push 0 - \x50 # 50 push eax - \xA4 # A4 movsb - \xE8.{4} # E8 58 E9 FE FF call about_memset - """, - re.DOTALL | re.VERBOSE, - ) - - num_addr_list = re.findall(num_addr_re1, img) - num_addr_list.extend(re.findall(num_addr_re2, img)) - - for num, addr in num_addr_list: - dlen = ord(num) * 4 - (addr,) = struct.unpack_from("= key_len: - break - decoded.append(data[i] ^ key[i]) - return decoded - - -def contains_non_printable(byte_array): - for byte in byte_array: - if not chr(byte).isprintable(): - return True - return False - - -def extract_config(data): - config_dict = {"C2": []} - - try: - lines = data.decode().split("\n") - for line in lines: - try: - if "." in line and len(line) > 2: - if not contains_non_printable(line): - config_dict["C2"].append(line) - except Exception: - continue - except Exception: - pass - - # If no C2s with the old method, - # try with newer version xor decoding - if not config_dict["C2"]: - try: - rdata = get_rdata(data) - strings = extract_strings(rdata, 44) - base64_strings = get_base64_strings(strings) - - for base64_str in base64_strings: - try: - decoded_bytes = base64.b64decode(base64_str, validate=True) - encoded_c2 = decoded_bytes[:32] - xor_key = decoded_bytes[32:] - decoded_c2 = xor_data(encoded_c2, xor_key) - - if not contains_non_printable(decoded_c2): - config_dict["C2"].append(decoded_c2.decode()) - except Exception: - continue - except Exception: - return - - return config_dict - - -if __name__ == "__main__": - import sys - - with open(sys.argv[1], "rb") as f: - print(extract_config(f.read())) diff --git a/modules/processing/parsers/CAPE/NanoCore.py b/modules/processing/parsers/CAPE/NanoCore.py deleted file mode 100644 index f8f3912eedd..00000000000 --- a/modules/processing/parsers/CAPE/NanoCore.py +++ /dev/null @@ -1,200 +0,0 @@ -# based on https://github.com/nict-csl/NanoCoreRAT-Analysis.git - -import datetime -import io -import logging -import uuid -import zlib -from contextlib import suppress -from enum import Enum - -import pefile - -HAVE_PYCYPTODOMEX = False -with suppress(ImportError): - from Cryptodome.Cipher import DES - from Cryptodome.Util.Padding import unpad - - HAVE_PYCYPTODOMEX = True - -log = logging.getLogger(__name__) - -DES_KEY = b"\x72\x20\x18\x78\x8c\x29\x48\x97" -DES_IV = DES_KEY - - -class DataType(Enum): - BOOL = 0 - BYTE = 1 - BYTEARRAY = 2 - CHAR = 3 - CHARARRAY = 4 - DECIMAL = 5 - DOUBLE = 6 - INT = 7 - LONG = 8 - SBYTE = 9 - SHORT = 10 - FLOAT = 11 - STRING = 12 - UINT = 13 - ULONG = 14 - USHORT = 15 - DATETIME = 16 - STRINGARRAY = 17 - GUID = 18 - SIZE = 19 - RECTANGLE = 20 - VERSION = 21 - UNKNOWN = 100 - - -def des_decrypt(data): - cipher = DES.new(key=DES_KEY, iv=DES_IV, mode=DES.MODE_CBC) - dec_data = cipher.decrypt(data) - if not dec_data: - return b"" - return unpad(dec_data, DES.block_size) - - -def bool_from_byte(byte): - return byte == b"\x01" - - -def deserialize_datetime(ticks): - base_ticks = 0x489F7FF5F7B58000 # 1970/01/01 00:00:00 - unixtime = (ticks - base_ticks) / 10000000 - try: - return datetime.datetime.fromtimestamp(unixtime) - except ValueError: - return ticks - - -def decode(payload): - payload_len = int.from_bytes(payload[:4], "little") - try: - payload_body = des_decrypt(payload[4 : payload_len + 4]) - except ValueError: 
- return None - - f = io.BytesIO(payload_body) - compressed_mode = bool_from_byte(f.read(1)) - if compressed_mode: - # data length after raw inflate. - data_len = int.from_bytes(f.read(4), "little") - deflate_data = f.read() - inflate_data = zlib.decompress(deflate_data, wbits=-15) - payload_len = len(inflate_data) - f.close() - f = io.BytesIO(inflate_data) - - flag1 = int.from_bytes(f.read(1), "little") # unknown data - flag2 = int.from_bytes(f.read(1), "little") # unknown data - guid = uuid.UUID(bytes=b"\x00" * 16) - params = [] - - check_guid = bool_from_byte(f.read(1)) - if check_guid: - guid_bytes = f.read(16) - guid = uuid.UUID(bytes_le=guid_bytes) - - position = f.tell() - while payload_len > position: - type_num = int.from_bytes(f.read(1), "little") - data_type = DataType(type_num) - if data_type == DataType.BOOL: - value = bool_from_byte(f.read(1)) - elif data_type == DataType.BYTE: - value = f.read(1) - elif data_type == DataType.BYTEARRAY: - data_len = int.from_bytes(f.read(4), "little") - value = f.read(data_len) - elif data_type in (DataType.INT, DataType.UINT): - value = int.from_bytes(f.read(4), "little") - elif data_type in (DataType.LONG, DataType.ULONG): - value = int.from_bytes(f.read(8), "little") - elif data_type in (DataType.SHORT, DataType.USHORT): - value = int.from_bytes(f.read(2), "little") - elif data_type == DataType.FLOAT: - value = float(int.from_bytes(f.read(4), "little")) - elif data_type in (DataType.STRING, DataType.VERSION): - data_len = int.from_bytes(f.read(1), "little") - value = f.read(data_len).decode() - elif data_type == DataType.DATETIME: - ticks = int.from_bytes(f.read(8), "little") - value = deserialize_datetime(ticks) - elif data_type == DataType.GUID: - value = uuid.UUID(bytes_le=f.read(16)) - else: # TODO: Other Types - data_type = DataType.UNKNOWN - value = f.read() - - if position == f.tell(): - break - position = f.tell() - params.append({"type": data_type, "value": value}) - f.close() - - result = {"uuid": guid, "compressed_mode": compressed_mode, "flags": [flag1, flag2], "params": params} - return result - - -def extract_config(filebuf): - if not HAVE_PYCYPTODOMEX: - log.error("Missed pycryptodomex. 
Run: poetry install") - return {} - pe = False - with suppress(pefile.PEFormatError, ValueError): - pe = pefile.PE(data=filebuf) - for section in pe.sections: - if b".rsrc" in section.Name: - break - - if not pe: - return - - config_dict = {} - try: - with io.BytesIO(filebuf) as f: - offset = 0x58 # resource section header - f.seek(section.PointerToRawData + offset) - data_len = int.from_bytes(f.read(4), "little") - _guid = f.read(data_len) - enc_data = f.read() - dec_data = decode(enc_data) - - # dec_data to config format - - params = iter(dec_data["params"]) - for param in params: - if DataType.STRING == param["type"]: - item_name = param["value"] - param = next(params) - if DataType.BYTEARRAY == param["type"]: - pass - elif DataType.DATETIME == param["type"]: - dt = param["value"] - config_dict[item_name] = dt.strftime("%Y-%m-%d %H:%M:%S.%f") - else: - config_dict[item_name] = str(param["value"]) - except Exception as e: - log.error("nanocore error: %s", e) - - cncs = [] - - if config_dict.get("PrimaryConnectionHost"): - cncs.append(config_dict["PrimaryConnectionHost"]) - if config_dict.get("PrimaryConnectionHost"): - cncs.append(config_dict["BackupConnectionHost"]) - if config_dict.get("ConnectionPort") and cncs: - port = config_dict["ConnectionPort"] - config_dict["cncs"] = [f"{cnc}:{port}" for cnc in cncs] - return config_dict - - -if __name__ == "__main__": - import sys - from pathlib import Path - - data = Path(sys.argv[1]).read_bytes() - print(extract_config(data)) diff --git a/modules/processing/parsers/CAPE/Nighthawk.py b/modules/processing/parsers/CAPE/Nighthawk.py deleted file mode 100644 index cc6d1d62b90..00000000000 --- a/modules/processing/parsers/CAPE/Nighthawk.py +++ /dev/null @@ -1,356 +0,0 @@ -import gzip -import itertools -import json -import struct - -import pefile -import regex as re -from Cryptodome.Cipher import AES - -DESCRIPTION = "NightHawk C2 DLL configuration parser." -AUTHOR = "Nikhil Ashok Hegde <@ka1do9>, Amjad Alsharafi" - - -def _decode_str(encoded_string, plaintext_alphabet, ciphertext_alphabet): - """ - This function implements the substitution cipher that Nighthawk uses. - Encoded strings are decoded. - Borrowed from https://www.proofpoint.com/us/blog/threat-insight/nighthawk-and-coming-pentest-tool-likely-gain-threat-actor-notice - which is no longer available, but here's an archive link: - https://web.archive.org/web/20221128090619/https://www.proofpoint.com/us/blog/threat-insight/nighthawk-and-coming-pentest-tool-likely-gain-threat-actor-notice - - :param encoded_string: String encoded with Nighthawk substitution cipher - :type encoded_string: - :param plaintext_alphabet: Plaintext alphabet used in the substitution cipher - :type plaintext_alphabet: - :param ciphertext_alphabet: Ciphertext alphabet used in the substitution cipher - :type ciphertext_alphabet: - :return: Decoded string - :rtype: str - """ - - decoded_string_list = [] - - for enc_str in bytes(encoded_string, "utf-8"): - if enc_str in ciphertext_alphabet: - decoded_string_list.append(chr(plaintext_alphabet[ciphertext_alphabet.find(enc_str)])) - else: - decoded_string_list.append(chr(enc_str)) - - return "".join(decoded_string_list) - - -def decode_config_part(item, plaintext_alphabet, ciphertext_alphabet): - """ - This function handles each element type of the NightHawk config. - Encoded strings are decoded. 
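As an aside on the substitution cipher that the deleted `_decode_str` helper above implements: it is a plain byte-for-byte table lookup, with unknown bytes passed through. A minimal standalone sketch of the same idea follows; the two alphabets here are hypothetical examples, since the real plaintext/ciphertext pair is recovered from the DLL's `.rdata` section later in this parser.

```python
# Sketch of the byte-for-byte substitution decode, assuming example alphabets.
def substitution_decode(encoded: str, plaintext_alphabet: bytes, ciphertext_alphabet: bytes) -> str:
    out = []
    for b in encoded.encode("utf-8"):
        idx = ciphertext_alphabet.find(b)
        # bytes not present in the ciphertext alphabet pass through unchanged
        out.append(chr(plaintext_alphabet[idx]) if idx != -1 else chr(b))
    return "".join(out)


if __name__ == "__main__":
    # hypothetical alphabets, for illustration only
    plain = b"abcdefghijklmnopqrstuvwxyz"
    cipher = b"zyxwvutsrqponmlkjihgfedcba"
    print(substitution_decode("svool", plain, cipher))  # -> "hello"
```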
- :param item: config inner item - :type item: any - :param plaintext_alphabet: Plaintext alphabet used in the substitution cipher - :type plaintext_alphabet: - :param ciphertext_alphabet: Ciphertext alphabet used in the substitution cipher - :type ciphertext_alphabet: - :return: same object type as the input, but with decoded strings - :rtype: any - """ - if isinstance(item, dict): - return decode_config_strings(item.copy(), plaintext_alphabet, ciphertext_alphabet) - elif isinstance(item, str): - return _decode_str(item, plaintext_alphabet, ciphertext_alphabet) - elif isinstance(item, list): - newlist = [] - for s in item: - newlist.append(decode_config_part(s, plaintext_alphabet, ciphertext_alphabet)) - return newlist - # pass as is - return item - - -def decode_config_strings(config, plaintext_alphabet, ciphertext_alphabet): - """ - This function implements the substitution cipher that Nighthawk uses. - Encoded strings are decoded. - :param decrypted_config: Decrypted Nighthawk config - :type decrypted_config: dict - :param plaintext_alphabet: Plaintext alphabet used in the substitution cipher - :type plaintext_alphabet: - :param ciphertext_alphabet: Ciphertext alphabet used in the substitution cipher - :type ciphertext_alphabet: - :return: JSON with decoded strings - :rtype: dict - """ - result = {} - for k in config.keys(): - decoded_string = _decode_str(k, plaintext_alphabet, ciphertext_alphabet) - result[decoded_string] = decode_config_part(config[k], plaintext_alphabet, ciphertext_alphabet) - return result - - -def _get_section_data(data, section_name, take_first=True): - """ - Function to return data belonging to `section_name` section in PE `data` - - :param data: Nighthawk DLL contents - :type data: - :param section_name: Name of section whose data is to be retrieved - :type section_name: str - :return: section data - :rtype: or None - """ - - try: - pe = pefile.PE(data=data, fast_load=False) - except Exception: - pe = None - - if not pe: - return None - - data = None - for section in pe.sections: - if section.Name.strip(b"\x00") == section_name: - data = section.get_data() - # if we care about first one, just break, otherwise keep looking for the next section with same name - if take_first: - break - - return data - - -def _alphabet_heuristics(alphabets): - """ - This function implements heuristics to determine if an identified alphabet - string is actually an alphabet. These heuristics are purely based on my - observations. - - :param alpha: Possible alphabet strings - :type alpha: list of - :return: set of possible alphabet bytestrings - :rtype: set of - """ - - candidates = {} - finalists = set() - - for alpha in alphabets: - num_whitespace = len(re.split(rb"\s+", alpha)) - if num_whitespace > 3: - # I've observed alphabets usually have num_whitespace == 2 - continue - - num_unique_chars = len(set(alpha)) - if num_unique_chars < 15: - # I've observed that alphabets have large number of unique characters - # Random low threshold, though - continue - - if num_unique_chars not in candidates: - candidates[num_unique_chars] = set() - candidates[num_unique_chars].add(alpha) - - # I've observed that the plaintext and ciphertext alphabets both have the - # same number of num_unique_chars - for _, alphabets_ in candidates.items(): - if len(alphabets_) > 1: - finalists.update(alphabets_) - - return finalists - - -def get_possible_alphabet(data): - """ - Nighthawk is known to encode strings using a simple substitution cipher. 
- Decoding requires knowing the plaintext and ciphertext alphabets used. - - :param data: Nighthawk DLL contents - :type data: - :return: Permutation of possible plaintext and ciphertext alphabets - :rtype: or None - """ - - alphabets_regex = rb"[\w\s!\\\"\#\$%\&'\(\)\*\+,\-\./:;<=>\?@\[\]\^_`\{\}\~\|]{86}\x00" - alphabets_regexc = re.compile(alphabets_regex) - - # Alphabets are known to exist in the .rdata section, so just search there - rdata_data = _get_section_data(data, b".rdata") - matches = alphabets_regexc.findall(rdata_data) - - if matches: - alphabets = _alphabet_heuristics(matches) - if alphabets: - # At this point, I have candidate alphabet strings but I don't know - # which is the plaintext alphabet and which is ciphertext alphabet - # To brute force, I'll calculate different permutations of length 2 - return itertools.permutations(alphabets, 2) - - return None - - -def decrypt_config(encrypted_config, decryption_key): - """ - Nighthawk config is gzip compressed and then encrypted with AES-128 CBC mode. - - :param encrypted_config: Encrypted config data - :type encrypted_config: - :param decryption_key: Config decryption key - :type decryption_key: - :return: decrypted config - :rtype: dict or None - """ - - cipher = AES.new(decryption_key, AES.MODE_CBC, IV=16 * b"\x00") - gzip_config = cipher.decrypt(encrypted_config) - - if gzip_config[:2] != b"\x1F\x8B": - # gzip magic signature is b'\x1F\x8B' at offset 0 - return None - - # I've noticed gzip_config containing additional data at the end. - # Below statements truncate gzip_config to the rightmost b'\x00\x00' - # which is gzip end-of-stream marker - i = gzip_config.rindex(b"\x00\x00") - gzip_config = gzip_config[: i + 2] - - config = gzip.decompress(gzip_config).decode("utf-8") - return json.loads(config) - - -def get_encoded_config(profile_section_contents): - """ - The contents of Nighthawk DLL .profile section contain 4 components: - 1. Keying method - 2. Config decryption key (optional) - 2. Size of configuration - 3. Encrypted configuration - - At this point, it is confirmed that the keying method == 0 and config - decryption key is available in the .profile section. - - :param data: Nighthawk DLL .profile section contents - :type data: - :return: Encrypted config data - :rtype: or None - """ - - config_size = struct.unpack(" (len(profile_section_contents) - 1 - 16 - 4): - # max config size == size of .profile section - keying method 1 byte - 16 - # bytes config decryption key - 4 bytes config size field. - # Actual config size cannot be greater than max possible config size - return None - - return profile_section_contents[21 : 21 + config_size] - - -def get_decryption_key(profile_section_contents): - """ - The contents of Nighthawk DLL .profile section contain 4 components: - 1. Keying method - 2. Config decryption key (optional) - 2. Size of configuration - 3. Encrypted configuration - - :param data: Nighthawk DLL .profile section contents - :type data: - :return: Config decryption key - :rtype: or None - """ - - keying_method = profile_section_contents[0] - if keying_method == 0: - # Config decryption key is embedded in .profile section contents - return profile_section_contents[1:17] - - return None - - -def get_profile_section_contents(data): - """ - Nighthawk DLLs are known to contain a .profile section which contains - configuration information. 
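For reference, the recovery chain that the `decrypt_config` function above describes (AES-128-CBC with an all-zero IV, then gzip, then JSON) condenses to a few lines. This is only a sketch: `key` and `encrypted_config` stand in for the values that the parser pulls out of the `.profile` section via `get_decryption_key()` and `get_encoded_config()`.

```python
# Sketch of the Nighthawk config recovery chain: AES-128-CBC (zero IV) -> gzip -> JSON.
import gzip
import json

from Cryptodome.Cipher import AES


def recover_config(encrypted_config: bytes, key: bytes) -> dict | None:
    cipher = AES.new(key, AES.MODE_CBC, iv=b"\x00" * 16)
    plaintext = cipher.decrypt(encrypted_config)
    if plaintext[:2] != b"\x1f\x8b":  # gzip magic check, as in decrypt_config()
        return None
    # trailing garbage after the gzip stream is common; cut at the last \x00\x00 marker
    plaintext = plaintext[: plaintext.rindex(b"\x00\x00") + 2]
    return json.loads(gzip.decompress(plaintext))
```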
- - :param data: Nighthawk DLL contents - :type data: - :return: .profile section contents - :rtype: or None - """ - - return _get_section_data(data, b".profile") - - -def get_last_text_section(data): - """ - Newer Nighthawk DLLs are known to contain a .text at the end which contains - configuration information. - - :param data: Nighthawk DLL contents - :type data: - :return: last .text section contents - :rtype: or None - """ - - return _get_section_data(data, b".text", take_first=False) - - -def get_config_section_content(data): - """ - Get the config section data either from .profile, or from the last .text section - which is available - - :param data: Nighthawk DLL contents - :type data: - :return: the config data contents - :rtype: or None - """ - - return get_profile_section_contents(data) or get_last_text_section(data) - - -def extract_config(data): - """ - Configuration extractor for Nighthawk DLL - - :param data: Nighthawk DLL contents - :type data: - :return: Decrypted and decoded config - :rtype: dict or None - """ - - # Will contain the final config that is passed to CAPEv2 - cfg = {} - - profile_section_contents = get_config_section_content(data) - if profile_section_contents is None: - return None - - decryption_key = get_decryption_key(profile_section_contents) - if decryption_key is None: - return None - - config = get_encoded_config(profile_section_contents) - - decrypted_config = decrypt_config(config, decryption_key) - - # decrypt_config is the decrypted configuration, but key and values strings - # are still encoded and need to be decoded. Nighthawk is known to encode - # strings using a simple substitution cipher. The real challenge is to extract - # the ciphertext and plaintext alphabet from the DLL - - possible_alphabets = get_possible_alphabet(data) - - for plaintext_alphabet, ciphertext_alphabet in possible_alphabets: - config_ = decode_config_strings( - decrypted_config, - plaintext_alphabet, - ciphertext_alphabet, - ) - - if "implant-config" in config_: - # This is a heuristic and may fail in future versions - cfg["Plaintext Alphabet"] = plaintext_alphabet - cfg["Ciphertext Alphabet"] = ciphertext_alphabet - cfg["Config AES-128 CBC Decryption Key"] = decryption_key - cfg["Implant Config"] = config_ - break - - return cfg diff --git a/modules/processing/parsers/CAPE/Njrat.py b/modules/processing/parsers/CAPE/Njrat.py deleted file mode 100644 index 27fa080e26e..00000000000 --- a/modules/processing/parsers/CAPE/Njrat.py +++ /dev/null @@ -1,193 +0,0 @@ -import base64 -import re -import sys -from contextlib import suppress - -import dnfile - - -class Parser: - def __init__(self, data: bytes): - self.dotnet_file = dnfile.dnPE(data=data) - - # ex: 72 9F 00 00 70 ldstr foo, the index is what comes after 0x72 opcode -> 0x9F - def get_user_string_from_index(self, index): - return self.dotnet_file.net.user_strings.get(index).value - - # in little-endian token is: 12 00 00 04 (0x40000012), where 0x04 is field table index, and 0x12 is the field index - def get_field_name_from_index(self, index): - return self.dotnet_file.net.mdtables.Field.get_with_row_index(index).Name - - def close(self): - self.dotnet_file.close() - - -CONFIG_MAPPING = { - "DR": "directory", - "EXE": "executable", - "H": "domain", - "P": "port", - "VN": "campaign_id", - "VR": "version", - "RG": "registry_value", - "x": "port", - "ss": "domain", -} - -REPLACES_MAPPING = { - "विनी": "M", - "蒂": "T", - "मे": "A", - "बीपी": "Z", - "粹": "M", - "ता": "T", - "의도": "A", - "에": "e", - "!": "=", - "FRANSESCO": "M", 
- "Strik": "=", -} - - -def get_patterns(): - # ldstr, stsfld - pattern_1 = re.compile( - Rb"""(?x) - \x72(...)\x70 - \x80(...)\x04 - """ - ) - - # ldstr, call Conversions.ToBoolean, stsfld - pattern_2 = re.compile( - Rb"""(?x) - \x72(...)\x70 - \x28\x04\x00\x00\x0A - \x80(...)\x04 - """ - ) - - return [pattern_1, pattern_2] - - -def get_matches(data, patterns): - matches = [] - - for pattern in patterns: - matches.extend(pattern.findall(data)) - - return matches - - -def get_config_dict(parser, data): - patterns = get_patterns() - matches = get_matches(data, patterns) - - if matches: - - config_dict = {} - - for match in matches: - string_index = int.from_bytes(match[0], "little") - field_index = int.from_bytes(match[1], "little") - - # get each string variable name and value - field_name = parser.get_field_name_from_index(field_index).__str__() - field_value = parser.get_user_string_from_index(string_index).__str__() - config_dict[field_name] = field_value - - return config_dict - - -def normalize_config(config_dict): - normalized_config_dict = {} - - # get only the interesting configs and normalize names - for key in config_dict: - if key in CONFIG_MAPPING: - normalized_key = CONFIG_MAPPING[key] - normalized_config_dict[normalized_key] = config_dict[key] - - return normalized_config_dict - - -def decode_b64_values(config): - if "campaign_id" in config: - config["campaign_id"] = base64.b64decode(config["campaign_id"]).decode() - - return config - - -def do_string_replaces(s): - for key in REPLACES_MAPPING: - if key in s: - s = s.replace(key, REPLACES_MAPPING[key]) - - return s - - -def replaces_and_b6d_decode(config): - clean_domain = do_string_replaces(config["domain"]) - clean_port = do_string_replaces(config["port"]) - - config["domain"] = base64.b64decode(clean_domain).decode() - config["port"] = base64.b64decode(clean_port).decode() - - return config - - -def clean_https_reversed_port_and_domain(config): - if "https" in config["port"]: - config["port"] = config["port"].replace("https://", "")[::-1] - config["domain"] = config["domain"].replace("https://", "")[::-1] - - return config - - -def decode_domain_and_port(config): - try: - if "port" in config and int(config["port"]): - pass - except ValueError: - config = replaces_and_b6d_decode(config) - - return config - - -def decode_reversed_ss_and_x(config): - return config - - -def get_clean_config(config_dict): - with suppress(Exception): - config = normalize_config(config_dict) - config = decode_b64_values(config) - config = clean_https_reversed_port_and_domain(config) - config = decode_domain_and_port(config) - config = decode_reversed_ss_and_x(config) - - return config - - -def extract_config(data): - conf = {} - dotnet_file_parser = Parser(data=data) - config_dict = get_config_dict(dotnet_file_parser, data) - config = get_clean_config(config_dict) - - if config.get("domain") and config.get("port"): - conf["cncs"] = [f"{config['domain']}:{config['port']}"] - - if config.get("campaign_id"): - conf["campaign id"] = config["campaign_id"] - - if config.get("version"): - conf["version"] = config["version"] - - dotnet_file_parser.close() - return conf - - -if "__main__" == __name__: - with open(sys.argv[1], "rb") as f: - print(extract_config(f.read())) diff --git a/modules/processing/parsers/CAPE/Oyster.py b/modules/processing/parsers/CAPE/Oyster.py deleted file mode 100644 index 4d328529852..00000000000 --- a/modules/processing/parsers/CAPE/Oyster.py +++ /dev/null @@ -1,126 +0,0 @@ -# Copyright (C) 2024 enzok -# This program is free 
software: you can redistribute it and/or modify -# it under the terms of the GNU General Public License as published by -# the Free Software Foundation, either version 3 of the License, or -# (at your option) any later version. -# -# This program is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -# GNU General Public License for more details. -# -# You should have received a copy of the GNU General Public License -# along with this program. If not, see . - - -import logging -import os -import re -import struct -from contextlib import suppress - -import pefile -import yara - -from lib.cuckoo.common.constants import CUCKOO_ROOT - -yara_path = os.path.join(CUCKOO_ROOT, "data", "yara", "CAPE", "Oyster.yar") -if not os.path.exists(yara_path): - yara_path = os.path.join(CUCKOO_ROOT, "custom", "yara", "CAPE", "Oyster.yar") - -with open(yara_path, "r") as yara_rule: - yara_rules = yara.compile(source=yara_rule.read()) - -log = logging.getLogger(__name__) - -DESCRIPTION = "Oyster configuration parser." -AUTHOR = "enzok" - - -def transform(src, lookup_table): - length = len(src) - i = 0 - num = length // 2 - if num > 0: - pVal = length - 1 - while i < num: - k = src[pVal] - n = src[i] - src[i] = lookup_table[k] - i += 1 - result = lookup_table[n] - src[pVal] = result - pVal -= 1 - return src - - -def yara_scan(raw_data): - try: - return yara_rules.match(data=raw_data) - except Exception as e: - print(e) - - -def extract_config(filebuf): - yara_hit = yara_scan(filebuf) - cfg = {} - - for hit in yara_hit: - if hit.rule == "Oyster": - start_offset = "" - lookup_va = "" - for item in hit.strings: - if "$start_exit" == item.identifier: - start_offset = item.instances[0].offset - if "$decode" == item.identifier: - decode_offset = item.instances[0].offset - lookup_va = filebuf[decode_offset + 12 : decode_offset + 16] - if not (start_offset and lookup_va): - return - try: - pe = pefile.PE(data=filebuf, fast_load=True) - lookup_offset = pe.get_offset_from_rva(struct.unpack("I", lookup_va)[0] - pe.OPTIONAL_HEADER.ImageBase) - lookup_table = filebuf[lookup_offset : lookup_offset + 256] - data = filebuf[start_offset + 4 : start_offset + 8092] - hex_strings = re.split(rb"\x00+", data) - hex_strings = [s for s in hex_strings if s] - str_vals = [] - c2 = [] - dll_version = "" - - c2_pattern = r"\b[a-zA-Z0-9-]+(?:\.[a-zA-Z0-9-]+)*\.(?!txt\b|dll\b|exe\b)[a-zA-Z]{2,}" - - for item in hex_strings: - with suppress(Exception): - decoded = transform(bytearray(item), bytearray(lookup_table)).decode("utf-8") - if not decoded: - continue - if "http" in decoded: - if "\r\n" in decoded: - c2.extend(list(filter(None, decoded.split("\r\n")))) - else: - c2.append(decoded) - elif "dll_version" in decoded: - dll_version = decoded.split('":"')[-1] - elif "api" in decoded or "Content-Type" in decoded: - str_vals.append(decoded) - else: - c2_matches = re.findall(c2_pattern, decoded) - if c2_matches: - c2.extend(c2_matches) - - cfg = { - "C2": c2, - "Dll Version": dll_version, - "Strings": str_vals, - } - except Exception as e: - log.error("Error: %s", e) - return cfg - - -if __name__ == "__main__": - import sys - - with open(sys.argv[1], "rb") as f: - print(extract_config(f.read())) diff --git a/modules/processing/parsers/CAPE/Pandora.py b/modules/processing/parsers/CAPE/Pandora.py deleted file mode 100644 index 637d844312d..00000000000 --- a/modules/processing/parsers/CAPE/Pandora.py +++ /dev/null @@ 
-1,77 +0,0 @@ -import pefile - - -def version_21(raw_config): - if raw_config is None: - return None - return { - "Version": "2.1", - "Domain": raw_config[0], - "Port": raw_config[1], - "Password": raw_config[2], - "Install Path": raw_config[3], - "Install Name": raw_config[4], - "HKCU Key": raw_config[5], - "ActiveX Key": raw_config[6], - "Install Flag": raw_config[7], - "StartupFlag": raw_config[8], - "ActiveXFlag": raw_config[9], - "HKCU Flag": raw_config[10], - "Mutex": raw_config[11], - "userMode Hooking": raw_config[12], - "Melt": raw_config[13], - "Keylogger": raw_config[14], - "Campaign ID": raw_config[15], - "UnknownFlag9": raw_config[16], - } - - -def version_22(raw_config): - if raw_config is None: - return None - return { - "Version": "2.2", - "Domain": raw_config[0], - "Port": raw_config[1], - "Password": raw_config[2], - "Install Path": raw_config[3], - "Install Name": raw_config[4], - "HKCU Key": raw_config[5], - "ActiveX Key": raw_config[6], - "Install Flag": raw_config[7], - "StartupFlag": raw_config[8], - "ActiveXFlag": raw_config[9], - "HKCU Flag": raw_config[10], - "Mutex": raw_config[11], - "userMode Hooking": raw_config[12], - "Melt": raw_config[13], - "Keylogger": raw_config[14], - "Campaign ID": raw_config[15], - "UnknownFlag9": raw_config[16], - } - - -def get_config(data): - try: - pe = pefile.PE(data=data) - rt_string_idx = [entry.id for entry in pe.DIRECTORY_ENTRY_RESOURCE.entries].index(pefile.RESOURCE_TYPE["RT_RCDATA"]) - rt_string_directory = pe.DIRECTORY_ENTRY_RESOURCE.entries[rt_string_idx] - for entry in rt_string_directory.directory.entries: - if str(entry.name) == "CFG": - data_rva = entry.directory.entries[0].data.struct.OffsetToData - size = entry.directory.entries[0].data.struct.Size - data = pe.get_memory_mapped_image()[data_rva : data_rva + size] - cleaned = data.replace("\x00", "") - return cleaned.split("##") - except Exception: - return - - -def extract_config(data): - raw_config = get_config(data) - if raw_config: - if len(raw_config) == 19: - clean_config = version_21(raw_config) - elif len(raw_config) == 20: - clean_config = version_22(raw_config) - return clean_config diff --git a/modules/processing/parsers/CAPE/PhemedroneStealer.py b/modules/processing/parsers/CAPE/PhemedroneStealer.py deleted file mode 100644 index 19018b83536..00000000000 --- a/modules/processing/parsers/CAPE/PhemedroneStealer.py +++ /dev/null @@ -1,191 +0,0 @@ -# Phemedrone Stealer config extractor by @tccontre18 - Br3akp0int -# https://github.com/tccontre/KnowledgeBase/blob/main/malware_re_tools/phemdrone_cfg_extractor/phemdrone_extractor_s.py - -import abc - -CilMethodBodyReaderBase = abc.ABC - -try: - import dnfile - from dnfile.enums import MetadataTables - - HAVE_DNFILE = True -except ImportError: - HAVE_DNFILE = False - -try: - from dncil.cil.body import CilMethodBody - from dncil.cil.body.reader import CilMethodBodyReaderBase - from dncil.cil.error import MethodBodyFormatError - from dncil.clr.token import InvalidToken, StringToken, Token - - HAVE_DNCIL = True -except ImportError: - print("Missed dependency: poetry run pip install dncil") - HAVE_DNCIL = False - - -class DnfileMethodBodyReader(CilMethodBodyReaderBase): - def __init__(self, pe, row): - """ """ - self.pe = pe - self.offset = self.pe.get_offset_from_rva(row.Rva) - - def read(self, n): - """ """ - data = self.pe.get_data(self.pe.get_rva_from_offset(self.offset), n) - self.offset += n - return data - - def tell(self): - """ """ - return self.offset - - def seek(self, offset): - """ """ - self.offset = 
offset - return self.offset - - -class DnfileParse: - DOTNET_META_TABLES_BY_INDEX = {table.value: table.name for table in MetadataTables} - - @staticmethod - def read_dotnet_user_string(pe, token): - """read user string from #US stream""" - try: - user_string = pe.net.user_strings.get(token.rid) - except UnicodeDecodeError: - return InvalidToken(token.value) - - if user_string is None: - return InvalidToken(token.value) - - return user_string.value - - @staticmethod - def resolve_token(pe, token): - """ """ - if isinstance(token, StringToken): - return DnfileParse.read_dotnet_user_string(pe, token) - - table_name = DnfileParse.DOTNET_META_TABLES_BY_INDEX.get(token.table, "") - if not table_name: - # table_index is not valid - return InvalidToken(token.value) - - table = getattr(pe.net.mdtables, table_name, None) - if table is None: - # table index is valid but table is not present - return InvalidToken(token.value) - - try: - return table.rows[token.rid - 1] - except IndexError: - # table index is valid but row index is not valid - return InvalidToken(token.value) - - @staticmethod - def read_method_body(pe, row): - """ """ - return CilMethodBody(DnfileMethodBodyReader(pe, row)) - - @staticmethod - def format_operand(pe, operand): - """ """ - if isinstance(operand, Token): - operand = DnfileParse.resolve_token(pe, operand) - - if isinstance(operand, str): - return f'"{operand}"' - elif isinstance(operand, int): - return hex(operand) - elif isinstance(operand, list): - return f"[{', '.join(['({:04X})'.format(x) for x in operand])}]" - elif isinstance(operand, dnfile.mdtable.MemberRefRow): - if isinstance(operand.Class.row, (dnfile.mdtable.TypeRefRow,)): - return f"{str(operand.Class.row.TypeNamespace)}.{operand.Class.row.TypeName}::{operand.Name}" - elif isinstance(operand, dnfile.mdtable.TypeRefRow): - return f"{str(operand.TypeNamespace)}.{operand.TypeName}" - elif isinstance(operand, (dnfile.mdtable.FieldRow, dnfile.mdtable.MethodDefRow)): - return f"{operand.Name}" - elif operand is None: - return "" - - return str(operand) - - @staticmethod - def get_instruction_text(pe, insn): - return ( - "{:04X}".format(insn.offset) - + " " - + f"{' '.join('{:02x}'.format(b) for b in insn.get_bytes()) : <20}" - + f"{str(insn.opcode) : <15}" - + DnfileParse.format_operand(pe, insn.operand) - ) - - -def check_next_inst(pe, body, DnfileParse, index): - - str_list = [] - for i in range(1, len(body.instructions) << 2): - if index + i >= len(body.instructions): - break - return None - else: - next_inst = body.instructions[index + i] - next_inst_ = DnfileParse.get_instruction_text(pe, next_inst) - if str(next_inst.opcode) == "ldstr": - str_list.append(DnfileParse.resolve_token(pe, next_inst.operand)) - elif str(next_inst.opcode) == "stsfld": - return (next_inst_.split(" ")[-1]), str_list - - -def extract_config(data): - config_dict = {} - if not HAVE_DNFILE or not HAVE_DNCIL: - return - try: - pe = dnfile.dnPE(data=data) - except dnfile.PEFormatError: - return - for row in pe.net.mdtables.MethodDef: - # skip methods that do not have a method body - if not row.ImplFlags.miIL or any((row.Flags.mdAbstract, row.Flags.mdPinvokeImpl)): - continue - try: - body = DnfileParse.read_method_body(pe, row) - except MethodBodyFormatError: - continue - if not body.instructions: - continue - if row.Name == ".cctor": - index = 0 - if len(body.instructions) >= 20 and str(body.instructions[0].opcode) == "ldstr": - for index in range(0, len(body.instructions)): - value_data = "" - config_field_name = "" - inst = 
body.instructions[index] - inst_ = DnfileParse.get_instruction_text(pe, inst) - if str(inst.opcode) == "ldstr": - value_data = DnfileParse.resolve_token(pe, inst.operand) - config_field_name, str_list = check_next_inst(pe, body, DnfileParse, index) - if config_field_name is not None and config_field_name not in config_dict: - str_list.insert(0, value_data) - config_dict[config_field_name] = ", ".join(str_list) - else: - pass - if "ldc.i4." in str(inst.opcode): - if inst_.split(".")[-1].strip() == "0": - value_data = "False" - config_field_name, str_list = check_next_inst(pe, body, DnfileParse, index) - config_dict[config_field_name] = value_data - elif inst_.split(".")[-1].strip() == "1": - value_data = "True" - config_field_name, str_list = check_next_inst(pe, body, DnfileParse, index) - config_dict[config_field_name] = value_data - else: - value_data = inst_.split(".")[-1].strip() - config_field_name, str_list = check_next_inst(pe, body, DnfileParse, index) - config_dict[config_field_name] = value_data - return config_dict diff --git a/modules/processing/parsers/CAPE/PikaBot.py b/modules/processing/parsers/CAPE/PikaBot.py deleted file mode 100644 index 0ab670fb3a8..00000000000 --- a/modules/processing/parsers/CAPE/PikaBot.py +++ /dev/null @@ -1,186 +0,0 @@ -import base64 -import logging -import re -import struct -from contextlib import suppress -from io import BytesIO - -import pefile -import yara - -rule_source = """ -rule PikaBot -{ - meta: - author = "enzo" - description = "Pikabot config extraction" - packed = "" - strings: - $config = {C7 44 24 [3] 00 00 C7 44 24 [4] 00 89 [1-4] ?? E8 [4] 31 C0 C7 44 24 [3] 00 00 89 44 24 ?? C7 04 24 [4] E8} - condition: - uint16(0) == 0x5A4D and all of them -} -""" - -yara_rules = yara.compile(source=rule_source) - -log = logging.getLogger(__name__) - - -class PikaException(Exception): - pass - - -def yara_scan(raw_data): - try: - return yara_rules.match(data=raw_data) - except Exception as e: - print(e) - - -def xor(data, key): - return bytes([c ^ key for c in data]) - - -def wide_finder(data): - str_end = len(data) - for i in range(0, len(data) - 1, 2): - if not chr(data[i]).isascii(): - str_end = i - break - if data[i + 1] != 0: - str_end = i - break - return data[:str_end] - - -def get_url(ps_string): - out = None - m = re.search(r"http[^ ]*", ps_string) - if m: - out = m.group() - return out - - -def get_wchar_string(data, length): - data = data.read(length) - return data.decode("utf-16-le") - - -def get_strings(data, count): - w_strings = [] - for _ in range(count): - length = struct.unpack("I", data.read(4))[0] - w_string = get_wchar_string(data, length) - w_strings.append(w_string) - return w_strings - - -def get_c2s(data, count): - c2_list = [] - for _ in range(count): - c2_size = struct.unpack("I", data.read(4))[0] - c2 = get_wchar_string(data, c2_size) - port, val1, val2 = struct.unpack("III", data.read(12)) - c2_list.append(f"{c2}:{port}") - return c2_list - - -def get_config(input_data): - data = BytesIO(input_data) - rounds, config_size, _, version_size = struct.unpack("=IIBI", data.read(13)) - version = get_wchar_string(data, version_size) - campaign_size = struct.unpack("I", data.read(4))[0] - campaign_name = get_wchar_string(data, campaign_size) - registry_key_size = struct.unpack("I", data.read(4))[0] - registry_key = get_wchar_string(data, registry_key_size) - user_agent_size = struct.unpack("I", data.read(4))[0] - user_agent = get_wchar_string(data, user_agent_size) - number_of_http_headers = struct.unpack("I", data.read(4))[0] 
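The PikaBot config block being walked here is a sequence of 4-byte little-endian length prefixes followed by UTF-16LE strings, which is what `get_wchar_string`/`get_strings`/`get_c2s` above read. A minimal sketch of that layout, using a hand-built blob rather than a real decrypted config:

```python
# Sketch of reading one length-prefixed UTF-16LE string, as the PikaBot helpers do.
import struct
from io import BytesIO


def read_wstring(buf: BytesIO) -> str:
    (size,) = struct.unpack("<I", buf.read(4))  # 4-byte little-endian byte length
    return buf.read(size).decode("utf-16-le")


if __name__ == "__main__":
    campaign = "TEST1".encode("utf-16-le")
    blob = BytesIO(struct.pack("<I", len(campaign)) + campaign)
    print(read_wstring(blob))  # -> TEST1
```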
- get_strings(data, number_of_http_headers) - number_of_api_cmds = struct.unpack("I", data.read(4))[0] - get_strings(data, number_of_api_cmds) - number_of_c2s = struct.unpack("I", data.read(4))[0] - c2s = get_c2s(data, number_of_c2s) - - return { - "Version": version, - "Campaign Name": campaign_name, - "Registry Key": registry_key, - "User Agent": user_agent, - # "request_headers": request_headers, - # "api_cmds": api_cmds, - "C2s": c2s, - } - - -def extract_config(filebuf): - pe = None - with suppress(Exception): - pe = pefile.PE(data=filebuf, fast_load=False) - - if not pe: - return - - r_data = None - data = None - - r_data_sections = [s for s in pe.sections if s.Name.find(b".rdata") != -1] - if r_data_sections: - r_data = r_data_sections[0].get_data() - - data_sections = [s for s in pe.sections if s.Name.find(b".data") != -1] - if data_sections: - data = data_sections[0].get_data() - - if r_data: - big_null = r_data.find(b"\x00" * 30) - r_data = r_data[:big_null] - out = None - - for i in range(1, 0xFF): - egg = bytes([i]) * 16 - if egg in r_data: - test_out = xor(r_data, i) - # This might break if the extra crud on the end of the blob is not b64 friendly - try: - test_out_ptxt = base64.b64decode(test_out) - except Exception: - continue - if "http".encode("utf-16le") in test_out_ptxt: - out = wide_finder(test_out_ptxt).decode("utf-16le") - if out: - url = get_url(out) - return {"C2": [url], "PowerShell": out} - - if data: - yara_hit = yara_scan(filebuf) - cfg_va = None - cfg_offset = None - cfg_length = 0 - - for hit in yara_hit: - if hit.rule == "PikaBot": - for item in hit.strings: - if "$config" == item.identifier: - offset = item.instances[0].offset - cfg_va = filebuf[offset + 12 : offset + 16] - with suppress(Exception): - pe = pefile.PE(data=filebuf, fast_load=True) - cfg_offset = pe.get_offset_from_rva(struct.unpack("I", cfg_va)[0] - pe.OPTIONAL_HEADER.ImageBase) - cfg_length = struct.unpack("H", filebuf[offset + 4 : offset + 6])[0] - break - - if cfg_offset: - data = filebuf[cfg_offset : cfg_offset + cfg_length] - if data[4:8] == b"\x00\x00\x00\x00": - return - with suppress(Exception): - config = get_config(data) - return config - - -if __name__ == "__main__": - import sys - - print(extract_config(sys.argv[1])) diff --git a/modules/processing/parsers/CAPE/PlugX.py b/modules/processing/parsers/CAPE/PlugX.py deleted file mode 100644 index 098c04097e3..00000000000 --- a/modules/processing/parsers/CAPE/PlugX.py +++ /dev/null @@ -1,325 +0,0 @@ -# PlugX config parser for CAPE -# -# Based on PlugX RAT detection and analysis for Volatility 2.0, version 1.2 -# -# Author: Fabien Perigaud -# -# Modified for CAPE by Kevin O'Reilly -# -# This plugin is based on poisonivy.py by Andreas Schuster. -# -# This program is free software; you can redistribute it and/or modify -# it under the terms of the GNU General Public License as published by -# the Free Software Foundation; either version 2 of the License, or (at -# your option) any later version. -# -# This program is distributed in the hope that it will be useful, but -# WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU -# General Public License for more details. 
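Before moving on from the PikaBot extractor that ends just above: its `.rdata` path guesses the single-byte XOR key by looking for an "egg" of sixteen identical bytes, on the assumption that such a run is an encrypted block of zeros. A hedged sketch of that trick, with a fabricated buffer rather than real section data:

```python
# Sketch of the single-byte-XOR key guess: a 16-byte run of one value is treated as
# XOR-encrypted zeros, so that value is taken as the key.
def guess_xor_key(blob: bytes) -> int | None:
    for key in range(1, 0xFF):
        if bytes([key]) * 16 in blob:
            return key
    return None


def xor_single(blob: bytes, key: int) -> bytes:
    return bytes(b ^ key for b in blob)


if __name__ == "__main__":
    secret = b"http://example.com" + b"\x00" * 16   # fabricated plaintext with a zero run
    blob = xor_single(secret, 0x5A)
    key = guess_xor_key(blob)
    if key is not None:
        print(hex(key), xor_single(blob, key))
```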
-# -# You should have received a copy of the GNU General Public License -# along with this program; if not, write to the Free Software -# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA - -import socket -from collections import OrderedDict, defaultdict -from socket import inet_ntoa -from struct import calcsize, unpack_from - -persistence = defaultdict(lambda: "Unknown", {0: "Service + Run Key", 1: "Service", 2: "Run key", 3: "None"}) -regs = defaultdict( - lambda: "Unknown", - { - 0x80000000: "HKEY_CLASSES_ROOT", - 0x80000001: "HKEY_CURRENT_USER", - 0x80000002: "HKEY_LOCAL_MACHINE", - 0x80000003: "HKEY_USERS", - 0x80000005: "HKEY_CURRENT_CONFIG", - }, -) - - -def get_str_utf16le(buff): - tstrend = buff.find(b"\x00\x00") - tstr = buff[: tstrend + (tstrend & 1)] - return tstr.decode() - - -def get_proto(proto): - ret = [] - if proto & 0x1: - ret.append("TCP") - if proto & 0x2: - ret.append("HTTP") - if proto & 0x4: - ret.append("UDP") - if proto & 0x8: - ret.append("ICMP") - if proto & 0x10: - ret.append("DNS") - if proto > 0x1F: - ret.append("OTHER_UNKNOWN") - return " / ".join(ret) - - -def get_proto2(proto): - protos = ("???", "???", "????", "TCP", "HTTP", "DNS", "UDP", "ICMP", "RAW", "???", "???") - try: - ret = protos[proto] + f"({proto})" - except Exception: - ret = f"UNKNOWN ({proto})" - return ret - - -def get_timer_string(timer: tuple) -> str: - timer_str = "" - if timer[0] != 0: - timer_str += f"{timer[0]} days, " - if timer[1] != 0: - timer_str += f"{timer[1]} hours, " - if timer[2] != 0: - timer_str += f"{timer[2]} mins, " - timer_str += f"{timer[3]} secs" - return timer_str - - -def extract_config(cfg_blob): - cfg_sz = len(cfg_blob) - config_output = OrderedDict() - if cfg_sz not in (0xBE4, 0x150C, 0x1510, 0x170C, 0x1B18, 0x1D18, 0x2540, 0x254C, 0x2D58, 0x36A4, 0x4EA4): - return None - if cfg_sz == 0x1510: - cfg_blob = cfg_blob[12:] - elif cfg_sz in (0x36A4, 0x4EA4): - cfg_blob = cfg_blob - else: - cfg_blob = cfg_blob[8:] - # Flags - if cfg_sz == 0xBE4: - desc = " 0 and str(url) != "HTTP://": - url_list.append(str(url)) - if url_list: - config_output.update({"URL": url_list}) - # Proxies - proxy_list = [] - proxy_creds = [] - for _ in range(4): - ptype, port, proxy, user, passwd = unpack_from("<2H64s64s64s", cfg_blob) - cfg_blob = cfg_blob[calcsize("<2H64s64s64s") :] - if proxy[0] != "\x00": - proxy_list.append("{}:{}".format(proxy.split(b"\x00", 1)[0].decode(), port)) - if user[0] != b"\x00": - proxy_creds.append(f"{user.decode()} / {passwd.decode()}\0") - if proxy_list: - config_output.update({"Proxy": proxy_list}) - if proxy_creds: - config_output.update({"Proxy credentials": proxy_creds}) - str_sz = 0x80 if cfg_sz == 0xBE4 else 0x200 - # Persistence - if cfg_sz in (0x1B18, 0x1D18, 0x2540, 0x254C, 0x2D58, 0x36A4, 0x4EA4): - persistence_type = unpack_from("I", data[c2_offset + 1 : c2_offset + 5])[0])) - port = str(struct.unpack(">H", data[c2_offset + 5 : c2_offset + 7])[0]) - controllers.append(f"{ip}:{port}") - return controllers - - -def parse_binary_c2_2(data): - """ - Parses the binary CNC block format introduced April'21 - """ - expected_sha1 = data[:0x14] - data = data[0x14:] - actual_sha1 = hashlib.sha1(data).digest() - - if actual_sha1 != expected_sha1: - log.error("Expected sha1: %s actual: %s", expected_sha1, actual_sha1) - return - - length = len(data) - - controllers = [] - alignment = 0 - if len(data) % 7 == 0: - alignment = 7 - elif len(data) % 8 == 0: - alignment = 8 - - if not alignment: - return controllers - - for c2_offset in range(0, 
length, alignment): - ip = socket.inet_ntoa(struct.pack("!L", struct.unpack(">I", data[c2_offset + 1 : c2_offset + 5])[0])) - port = str(struct.unpack(">H", data[c2_offset + 5 : c2_offset + 7])[0]) - controllers.append(f"{ip}:{port}") - return controllers - - -def decompress(data): - """ - Decompress data with blzpack decompression - """ - if not HAVE_BLZPACK: - return - return blzpack.decompress_data(BRIEFLZ_HEADER.join(data.split(QAKBOT_HEADER))) - - -def decrypt_data(data): - """ - Decrypts the data using the last 20 bytes as a rc4 key. - Validates the decryption with the sha1 sum contained within the first 20 bytes of the decrypted data. - """ - if not data: - return - - key = data[:0x14] - decrypted_data = ARC4.new(key).decrypt(data[0x14:]) - - if not decrypted_data: - return - - if hashlib.sha1(decrypted_data[0x14:]).digest() != decrypted_data[:0x14]: - return - - return decrypted_data[0x14:] - - -def decrypt_data2(data): - if not data: - return - - hash_obj = hashlib.sha1(b"\\System32\\WindowsPowerShell\\v1.0\\powershell.exe") - rc4_key = hash_obj.digest() - decrypted_data = ARC4.new(rc4_key).decrypt(data) - - if not decrypted_data: - return - - return decrypted_data - - -def decrypt_data3(data): - if not data: - return - - hash_obj = hashlib.sha1(b"\\System32\\WindowsPowerShel1\\v1.0\\powershel1.exe") - rc4_key = hash_obj.digest() - decrypted_data = ARC4.new(rc4_key).decrypt(data) - - if not decrypted_data: - return - - if hashlib.sha1(decrypted_data[0x14:]).digest() == decrypted_data[:0x14]: - return decrypted_data - - # From around 403.902 onwards (30-09-2022) - hash_obj = hashlib.sha1(b"Muhcu#YgcdXubYBu2@2ub4fbUhuiNhyVtcd") - rc4_key = hash_obj.digest() - decrypted_data = ARC4.new(rc4_key).decrypt(data) - - if not decrypted_data: - return - - if rc4_key == decrypted_data[:0x14]: - return decrypted_data - - decrypted_data = ARC4.new(decrypted_data[0x14:0x28]).decrypt(decrypted_data[0x28:]) - if not decrypted_data: - return - - if hashlib.sha1(decrypted_data[0x14:]).digest() != decrypted_data[:0x14]: - return - - return decrypted_data - - -def decrypt_data4(data): - if not data: - return - - hash_obj = hashlib.sha1(b"bUdiuy81gYguty@4frdRdpfko(eKmudeuMncueaN") - rc4_key = hash_obj.digest() - decrypted_data = ARC4.new(rc4_key).decrypt(data) - - if not decrypted_data: - return - - decrypted_data = ARC4.new(decrypted_data[0x14:0x28]).decrypt(decrypted_data[0x28:]) - if not decrypted_data: - return - - if hashlib.sha1(decrypted_data[0x14:]).digest() != decrypted_data[:0x14]: - return - - return decrypted_data - - -def get_sha256_hash(data): - sha256 = SHA256.new() - sha256.update(data) - return sha256.digest() - - -def decrypt_aes_cbc(encrypted_data, key, iv): - decoded = "" - with suppress(Exception): - cipher = AES.new(key, AES.MODE_CBC, iv) - decrypted_data = cipher.decrypt(encrypted_data) - decoded = unpad(decrypted_data, AES.block_size) - - return decoded - - -def get_ips(data): - ip_addresses = [] - segments = data.split(b"\x00") - - for segment in segments: - with suppress(Exception): - (_, ip_int, port) = struct.unpack("!BIH", segment) - ip_addr = str(ipaddress.ip_address(ip_int)) - ip_addresses.append(f"{ip_addr}:{port}") - - return ip_addresses - - -def decrypt_strings(data, xor_key): - decoded_strings = [] - current_string = bytearray() - key_index = 0 - num = 0 - key_length = len(xor_key) - - for byte in data: - decoded_byte = byte ^ xor_key[key_index] & 0xFF - if decoded_byte != 0: - current_string.append(decoded_byte) - else: - with suppress(Exception): - dec_str = 
current_string.decode("utf-8") - dec_str = f"{num - len(dec_str)}|{dec_str}" - decoded_strings.append(dec_str) - current_string.clear() - key_index = (key_index + 1) % key_length - num += 1 - - return decoded_strings - - -def extract_config(filebuf): - end_config = {} - if filebuf[:2] == b"MZ": - try: - pe = pefile.PE(data=filebuf, fast_load=False) - matches = yara_rules.match(data=filebuf) - if matches: - decrypt_offset = "" - c2decrypt = "" - confdecrypt = "" - - for match in matches: - if match.rule != "QakBot5": - continue - for item in match.strings: - if "$c2list" == item.identifier: - c2decrypt = item.instances[0].offset - elif "$campaign" == item.identifier: - confdecrypt = item.instances[0].offset - elif "$decrypt_str" == item.identifier: - decrypt_offset = item.instances[0].offset - - if not (decrypt_offset and c2decrypt and confdecrypt): - return - - aes_pwd_disp = pe.get_dword_from_offset(decrypt_offset + 7) - aes_pwd_rva = pe.get_rva_from_offset(decrypt_offset + 11) + aes_pwd_disp - aes_pwd_size = pe.get_dword_from_offset(decrypt_offset + 15) - aes_pwd = pe.get_data(aes_pwd_rva, aes_pwd_size) - key = get_sha256_hash(aes_pwd) - enc_xor_disp = pe.get_dword_from_offset(decrypt_offset + 40) - enc_xor_rva = pe.get_rva_from_offset(decrypt_offset + 44) + enc_xor_disp - enc_xor_size = pe.get_dword_from_offset(decrypt_offset + 28) - enc_xor = pe.get_data(enc_xor_rva, enc_xor_size) - enc_strs_disp = pe.get_dword_from_offset(decrypt_offset + 22) - enc_strs_rva = pe.get_rva_from_offset(decrypt_offset + 26) + enc_strs_disp - enc_strs_size = pe.get_dword_from_offset(decrypt_offset + 45) - enc_strs = pe.get_data(enc_strs_rva, enc_strs_size) - - iv = enc_xor[:16] - encrypted_buffer = enc_xor[16:] - xor_key = decrypt_aes_cbc(encrypted_buffer, key, iv) - decoded = decrypt_strings(enc_strs, xor_key) - - if not decoded: - return - - c2blob_disp = pe.get_dword_from_offset(c2decrypt + 29) - c2blob_rva = pe.get_rva_from_offset(c2decrypt + 33) + c2blob_disp - c2blob_size_disp = pe.get_dword_from_offset(c2decrypt + 3) - c2blob_size_rva = pe.get_rva_from_offset(c2decrypt + 7) + c2blob_size_disp - c2blob_size = pe.get_word_at_rva(c2blob_size_rva) - c2blob = pe.get_data(c2blob_rva, c2blob_size) - c2blob_pwd_index = str(pe.get_dword_from_offset(c2decrypt + 8)) - - confblob_disp = pe.get_dword_from_offset(confdecrypt + 30) - confblob_rva = pe.get_rva_from_offset(confdecrypt + 34) + confblob_disp - confblob_size_disp = pe.get_dword_from_offset(confdecrypt + 3) - confblob_size_rva = pe.get_rva_from_offset(confdecrypt + 7) + confblob_size_disp - confblob_size = pe.get_word_at_rva(confblob_size_rva) - confblob = pe.get_data(confblob_rva, confblob_size) - - ip_list = [] - config = "" - for val in decoded: - index, aes_pwd = val.split("|") - if index == c2blob_pwd_index: - key = get_sha256_hash(aes_pwd.encode("utf-8")) - iv = c2blob[1:17] - encrypted_buffer = c2blob[17:] - decoded = decrypt_aes_cbc(encrypted_buffer, key, iv) - if decoded: - cncs = decoded[32:] - ip_list = get_ips(cncs) - - if ip_list: - end_config.setdefault("C2s", ip_list) - - iv = confblob[1:17] - encrypted_buffer = confblob[17:] - decoded = decrypt_aes_cbc(encrypted_buffer, key, iv) - - if decoded: - conf = decoded[32:] - config = parse_config(conf) - - if config: - end_config.update(config) - - break - - else: - if not hasattr(pe, "DIRECTORY_ENTRY_RESOURCE"): - return end_config - for rsrc in pe.DIRECTORY_ENTRY_RESOURCE.entries: - for entry in rsrc.directory.entries: - if entry.name is None: - continue - # log.info("id: %s", entry.name) - 
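The binary C2 blobs decoded earlier in this QakBot parser (`parse_binary_c2`, `parse_binary_c2_2`, `get_ips`) are arrays of 7-byte records: one flag byte, a big-endian IPv4 address, and a big-endian port. A small sketch of that record layout, with a hand-made record in place of a decrypted resource:

```python
# Sketch of one 7-byte QakBot C2 record: flag byte, big-endian IPv4, big-endian port.
import ipaddress
import struct


def parse_c2_record(record: bytes) -> str:
    flag, ip_int, port = struct.unpack("!BIH", record)  # network byte order, 7 bytes total
    return f"{ipaddress.ip_address(ip_int)}:{port}"


if __name__ == "__main__":
    record = struct.pack("!BIH", 1, int(ipaddress.ip_address("10.0.0.1")), 443)
    print(parse_c2_record(record))  # -> 10.0.0.1:443
```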
controllers = [] - config = {} - offset = entry.directory.entries[0].data.struct.OffsetToData - size = entry.directory.entries[0].data.struct.Size - res_data = pe.get_memory_mapped_image()[offset : offset + size] - if str(entry.name) == "307": - # we found the parent process and still need to decrypt/(blzpack) decompress the main DLL - dec_bytes = decrypt_data(res_data) - decompressed = decompress(dec_bytes) - end_config["Loader Build"] = parse_build(pe).decode() - pe2 = pefile.PE(data=decompressed) - if not hasattr(pe2, "DIRECTORY_ENTRY_RESOURCE"): - continue - for rsrc in pe2.DIRECTORY_ENTRY_RESOURCE.entries: - for entry in rsrc.directory.entries: - if entry.name is None: - continue - offset = entry.directory.entries[0].data.struct.OffsetToData - size = entry.directory.entries[0].data.struct.Size - res_data = pe2.get_memory_mapped_image()[offset : offset + size] - if str(entry.name) == "308": - dec_bytes = decrypt_data(res_data) - config = parse_config(dec_bytes) - # log.info("qbot_config: %s", config) - end_config["Core DLL Build"] = parse_build(pe2).decode() - elif str(entry.name) == "311": - dec_bytes = decrypt_data(res_data) - controllers = parse_controllers(dec_bytes) - elif str(entry.name) == "308": - dec_bytes = decrypt_data(res_data) - config = parse_config(dec_bytes) - elif str(entry.name) == "311": - dec_bytes = decrypt_data(res_data) - controllers = parse_binary_c2(dec_bytes) - elif str(entry.name) in ("118", "3719"): - dec_bytes = decrypt_data2(res_data) - controllers = parse_binary_c2_2(dec_bytes) - elif str(entry.name) in ("524", "5812"): - dec_bytes = decrypt_data2(res_data) - config = parse_config(dec_bytes) - elif str(entry.name) in ("18270D2E", "BABA", "103", "89210AF9"): - dec_bytes = decrypt_data3(res_data) - config = parse_config(dec_bytes) - elif str(entry.name) in ("26F517AB", "EBBA", "102", "3C91E639"): - dec_bytes = decrypt_data3(res_data) - controllers = parse_binary_c2_2(dec_bytes) - elif str(entry.name) in ("89290AF9", "COMPONENT_07"): - dec_bytes = decrypt_data4(res_data) - config = parse_config(dec_bytes) - elif str(entry.name) in ("3C91E539", "COMPONENT_08"): - dec_bytes = decrypt_data4(res_data) - controllers = parse_binary_c2_2(dec_bytes) - end_config["Loader Build"] = parse_build(pe).decode() - for k, v in config.items(): - # log.info({ k: v }) - end_config.setdefault(k, v) - # log.info("controllers: %s", controllers) - for controller in controllers: - end_config.setdefault("address", []).append(controller) - except Exception as e: - log.warning(e) - elif filebuf[:1] == b"\x01": - controllers = parse_binary_c2(filebuf[: len(filebuf) - 20]) - for controller in controllers: - end_config.setdefault("address", []).append(controller) - elif b"=" in filebuf: - config = parse_config(filebuf[: len(filebuf) - 20]) - for k, v in config.items(): - end_config.setdefault(k, v) - return end_config - - -if __name__ == "__main__": - import sys - from pathlib import Path - - log.setLevel(logging.DEBUG) - data = Path(sys.argv[1]).read_bytes() - print(extract_config(data)) diff --git a/modules/processing/parsers/CAPE/QuasarRAT.py b/modules/processing/parsers/CAPE/QuasarRAT.py deleted file mode 100644 index 1220071ea7d..00000000000 --- a/modules/processing/parsers/CAPE/QuasarRAT.py +++ /dev/null @@ -1,5 +0,0 @@ -from rat_king_parser.rkp import RATConfigParser - - -def extract_config(data: bytes): - return RATConfigParser(data).report.get("config", {}) diff --git a/modules/processing/parsers/CAPE/Quickbind.py b/modules/processing/parsers/CAPE/Quickbind.py deleted file mode 
100644 index ec316d6eb53..00000000000 --- a/modules/processing/parsers/CAPE/Quickbind.py +++ /dev/null @@ -1,87 +0,0 @@ -import logging -import re -import struct -from contextlib import suppress - -import pefile -from Cryptodome.Cipher import ARC4 - -log = logging.getLogger(__name__) -log.setLevel(logging.INFO) - - -def is_hex(hex_string): - if len(hex_string) % 2 != 0: - return False - - if not re.fullmatch(r"[0-9a-fA-F]+", hex_string): - return False - - return True - - -def extract_config(filebuf): - cfg = {} - pe = pefile.PE(data=filebuf) - - data_sections = [s for s in pe.sections if s.Name.find(b".data") != -1] - - if not data_sections: - return None - - data = data_sections[0].get_data() - - offset = 0 - entries = [] - while offset < len(data): - if offset + 8 > len(data): - break - size, key = struct.unpack_from("I4s", data, offset) - if b"\x00\x00\x00" in key or size > 256: - offset += 4 - continue - offset += 8 - data_format = f"{size}s" - encrypted_string = struct.unpack_from(data_format, data, offset)[0] - offset += size - padding = (8 - (offset % 8)) % 8 - offset += padding - - with suppress(IndexError, UnicodeDecodeError, ValueError): - decrypted_result = ARC4.new(key).decrypt(encrypted_string).replace(b"\x00", b"").decode("utf-8") - if decrypted_result and len(decrypted_result) > 1: - entries.append(decrypted_result) - - if entries: - c2s = [] - mutexes = [] - - for item in entries: - if item.count(".") == 3 and re.fullmatch(r"\d+", item.replace(".", "")): - c2s.append(item) - - elif "http" in item: - c2s.append(item) - - elif item.count("-") == 4: - mutexes.append(item) - - elif len(item) in [16] and is_hex(item): - cfg["Encryption Key"] = item - - if c2s: - cfg["C2"] = c2s - - if mutexes: - cfg["Mutex"] = list(set(mutexes)) - - return cfg - - -if __name__ == "__main__": - import sys - from pathlib import Path - - log.setLevel(logging.DEBUG) - data = Path(sys.argv[1]).read_bytes() - print(extract_config(data)) diff --git a/modules/processing/parsers/CAPE/RCSession.py b/modules/processing/parsers/CAPE/RCSession.py deleted file mode 100644 index 7b9e56d89b4..00000000000 --- a/modules/processing/parsers/CAPE/RCSession.py +++ /dev/null @@ -1,121 +0,0 @@ -# Copyright (C) 2015 Kevin O'Reilly kevin.oreilly@contextis.co.uk -# This program is free software: you can redistribute it and/or modify -# it under the terms of the GNU General Public License as published by -# the Free Software Foundation, either version 3 of the License, or -# (at your option) any later version. -# -# This program is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -# GNU General Public License for more details. -# -# You should have received a copy of the GNU General Public License -# along with this program. If not, see . - -DESCRIPTION = "RCSession configuration parser." -AUTHOR = "kevoreilly" - -import struct - -import pefile -import yara - -rule_source = """ -rule RCSession -{ - meta: - author = "kevoreilly" - description = "RCSession Payload" - cape_type = "RCSession Payload" - strings: - $a1 = {56 33 F6 39 74 24 08 7E 4C 53 57 8B F8 2B FA 8B C6 25 03 00 00 80 79 05 48 83 C8 FC 40 83 E8 00 74 19 48 74 0F 48 74 05 6B C9 09 EB 15 8B C1 C1 E8 02 EB 03 8D 04 09 2B C8} - $a2 = {83 C4 10 85 C0 74 ?? BE ?? ?? ?? ?? 89 74 24 10 E8 ?? ?? ?? ?? 6A 03 68 48 0B 00 00 56 53 57 68 02 00 00 80 E8 ?? ?? ?? ?? 83 C4 18 85 C0 74 18 E8 ?? ?? ?? ?? 
6A 03 68 48} - condition: - (any of ($a*)) -} -""" - -MAX_IP_STRING_SIZE = 16 # aaa.bbb.ccc.ddd\0 -UINT_MAX = 0xFFFFFFFF - - -def yara_scan(raw_data, rule_name): - addresses = {} - yara_rules = yara.compile(source=rule_source) - matches = yara_rules.match(data=raw_data) - for match in matches: - if match.rule == "RCSession": - for item in match.strings: - if item.identifier == rule_name: - addresses[item.identifier] = item.instances[0].offset - return addresses - - -def unicode_string_from_offset(buffer, offset, max): - return buffer[offset : offset + max].decode("utf-16") - - -def decode(ciphertext, size, key): - - if size == 0: - return - - v4 = 0 - decoded_chars = bytearray(size) - - while v4 < size: - if v4 % 4 == 0: - key = (key + (key >> 4)) & UINT_MAX - elif v4 % 4 == 1: - v6 = (2 * key) & UINT_MAX - key = (key - v6) & UINT_MAX - elif v4 % 4 == 2: - v6 = (key >> 2) & UINT_MAX - key = (key - v6) & UINT_MAX - else: - key = (key * 9) & UINT_MAX - decoded_chars[v4] = struct.unpack("B", ciphertext[v4 : v4 + 1])[0] ^ (key & 0xFF) - v4 += 1 - - return decoded_chars - - -def extract_config(filebuf): - pe = pefile.PE(data=filebuf, fast_load=False) - image_base = pe.OPTIONAL_HEADER.ImageBase - decrypt_config = yara_scan(filebuf, "$a2") - if decrypt_config: - yara_offset = int(decrypt_config["$a2"]) - else: - return - - config_rva = struct.unpack("i", filebuf[yara_offset + 8 : yara_offset + 12])[0] - image_base - config_offset = pe.get_offset_from_rva(config_rva) - size = struct.unpack("i", filebuf[yara_offset + 88 : yara_offset + 92])[0] - key = struct.unpack("i", filebuf[config_offset + 128 : config_offset + 132])[0] - end_config = {} - tmp_config = decode(filebuf[config_offset : config_offset + size], size, key) - - c2_address = str(tmp_config[156 : 156 + MAX_IP_STRING_SIZE]) - if c2_address: - end_config.setdefault("c2_address", []).append(c2_address) - c2_address = str(tmp_config[224 : 224 + MAX_IP_STRING_SIZE]) - if c2_address: - end_config.setdefault("c2_address", []).append(c2_address) - installdir = unicode_string_from_offset(bytes(tmp_config), 0x2A8, 128) - if installdir: - end_config["directory"] = installdir - executable = unicode_string_from_offset(tmp_config, 0x4B0, 128) - if executable: - end_config["filename"] = executable - servicename = unicode_string_from_offset(tmp_config, 0x530, 128) - if servicename: - end_config["servicename"] = servicename - displayname = unicode_string_from_offset(tmp_config, 0x738, 128) - if displayname: - end_config["servicedisplayname"] = displayname - description = unicode_string_from_offset(tmp_config, 0x940, 512) - if description: - end_config["servicedescription"] = description - - return end_config diff --git a/modules/processing/parsers/CAPE/REvil.py b/modules/processing/parsers/CAPE/REvil.py deleted file mode 100644 index c5c20c48439..00000000000 --- a/modules/processing/parsers/CAPE/REvil.py +++ /dev/null @@ -1,85 +0,0 @@ -# Copyright (C) 2019 R3MRUM (https://twitter.com/R3MRUM) -# This program is free software: you can redistribute it and/or modify -# it under the terms of the GNU General Public License as published by -# the Free Software Foundation, either version 3 of the License, or -# (at your option) any later version. -# -# This program is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -# GNU General Public License for more details. 
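One note on the RCSession `decode` routine that ends just above: it is a plain XOR stream cipher with a deterministic key schedule, so applying it twice with the same initial key round-trips. A compact sketch of that keystream, using the update constants from the deleted parser and a hypothetical initial key (the real key is read out of the embedded config blob):

```python
# Sketch of the RCSession rolling-XOR keystream; 0x1337 is a placeholder initial key.
def rcsession_keystream(length: int, key: int):
    for i in range(length):
        if i % 4 == 0:
            key = (key + (key >> 4)) & 0xFFFFFFFF
        elif i % 4 == 1:
            key = (key - 2 * key) & 0xFFFFFFFF
        elif i % 4 == 2:
            key = (key - (key >> 2)) & 0xFFFFFFFF
        else:
            key = (key * 9) & 0xFFFFFFFF
        yield key & 0xFF


def rcsession_xor(data: bytes, key: int) -> bytes:
    return bytes(b ^ k for b, k in zip(data, rcsession_keystream(len(data), key)))


if __name__ == "__main__":
    blob = rcsession_xor(b"config bytes", 0x1337)  # "encrypt"
    print(rcsession_xor(blob, 0x1337))             # round-trips -> b'config bytes'
```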
-# -# You should have received a copy of the GNU General Public License -# along with this program. If not, see . - -#!/usr/bin/python - -import json -import struct - -import pefile - - -def getSectionNames(sections): - return [section.Name.partition(b"\0")[0] for section in sections] - - -def getREvilKeyAndConfig(pesections, section_name): - for section in pesections: - if section.Name.partition(b"\0")[0] == section_name: - data = section.get_data() - if len(data) > 32: - key = data[:32] - encoded_config = data[32:] - return key, encoded_config - - -def decodeREvilConfig(config_key, config_data): - init255 = list(range(256)) - - key = config_key - config_len = struct.unpack(". - -DESCRIPTION = "RedLeaf configuration parser." -AUTHOR = "kevoreilly" - -import struct - -import pefile -import yara - -rule_source = """ -rule RedLeaf -{ - meta: - author = "kev" - description = "RedLeaf configuration parser." - cape_type = "RedLeaf Payload" - strings: - $crypto = {6A 10 B8 ?? ?? ?? 10 E8 ?? ?? 01 00 8B F1 89 75 E4 8B 7D 08 83 CF 07 81 FF FE FF FF 7F 76 05 8B 7D 08 EB 29 8B 4E 14 89 4D EC D1 6D EC 8B C7 33 D2 6A 03 5B F7 F3 8B 55 EC 3B D0 76 10 BF FE FF FF} - $decrypt_config = {55 8B EC 83 EC 20 A1 98 9F 03 10 33 C5 89 45 FC 56 33 F6 33 C0 80 B0 ?? ?? ?? ?? ?? 40 3D ?? ?? ?? ?? 72 F1 68 70 99 03 10 56 56 FF 15 2C 11 03 10 FF 15 B8 11 03 10 3D B7 00 00 00 75 06 56 E8 5F 9E} - condition: - //check for MZ Signature at offset 0 - uint16(0) == 0x5A4D - - and - - $crypto and $decrypt_config -} -""" - -MAX_STRING_SIZE = 64 -MAX_IP_STRING_SIZE = 16 # aaa.bbb.ccc.ddd\0 - - -def yara_scan(raw_data, rule_name): - addresses = {} - yara_rules = yara.compile(source=rule_source) - matches = yara_rules.match(data=raw_data) - for match in matches: - if match.rule == "RedLeaf": - for item in match.strings: - if item.identifier == rule_name: - addresses[item.identifier] = item.instances[0].offset - return addresses - - -def pe_data(pe, va, size): - image_base = pe.OPTIONAL_HEADER.ImageBase - rva = va - image_base - return pe.get_data(rva, size) - - -def string_from_offset(buffer, offset): - return buffer[offset : offset + MAX_STRING_SIZE].split(b"\0", 1)[0] - - -def unicode_string_from_offset(buffer, offset): - return buffer[offset : offset + MAX_STRING_SIZE].split(b"\x00\x00", 1)[0] - - -def extract_config(filebuf): - pe = pefile.PE(data=filebuf, fast_load=False) - image_base = pe.OPTIONAL_HEADER.ImageBase - - decrypt_config = yara_scan(filebuf, "$decrypt_config") - - if decrypt_config: - yara_offset = int(decrypt_config["$decrypt_config"]) - else: - return - - config_rva = struct.unpack("i", filebuf[yara_offset + 23 : yara_offset + 27])[0] - image_base - config_offset = pe.get_offset_from_rva(config_rva) - xor_key = struct.unpack("b", filebuf[yara_offset + 27 : yara_offset + 28])[0] - config_size = struct.unpack("i", filebuf[yara_offset + 30 : yara_offset + 34])[0] - tmp_config = "".join([chr(xor_key ^ ord(x)) for x in filebuf[config_offset : config_offset + config_size]]) - end_config = {} - c2_address = tmp_config[8 : 8 + MAX_IP_STRING_SIZE] - if c2_address: - end_config.setdefault("c2_address", []).append(c2_address) - c2_address = tmp_config[0x48 : 0x48 + MAX_IP_STRING_SIZE] - if c2_address: - end_config.setdefault("c2_address", []).append(c2_address) - c2_address = tmp_config[0x88 : 0x88 + MAX_IP_STRING_SIZE] - if c2_address: - end_config.setdefault("c2_address", []).append(c2_address) - missionid = string_from_offset(tmp_config, 0x1EC) - if missionid: - end_config["missionid"] = missionid - mutex = 
unicode_string_from_offset(tmp_config, 0x508) - if mutex: - end_config["mutex"] = mutex - key = string_from_offset(tmp_config, 0x832) - if key: - end_config["key"] = key - - return end_config diff --git a/modules/processing/parsers/CAPE/RedLine.py b/modules/processing/parsers/CAPE/RedLine.py deleted file mode 100644 index 2110cbbdeec..00000000000 --- a/modules/processing/parsers/CAPE/RedLine.py +++ /dev/null @@ -1,182 +0,0 @@ -# Thanks to Gi7w0rm -# https://github.com/kevthehermit/RATDecoders/blob/master/malwareconfig/decoders/RedLine.py - -import base64 -import logging -import re -from contextlib import suppress - -from lib.cuckoo.common.dotnet_utils import dotnet_user_strings -from lib.cuckoo.common.integrations.strings import extract_strings - -try: - import dnfile - - HAVE_DNFILE = True -except ImportError: - HAVE_DNFILE = False - -log = logging.getLogger(__name__) -log.setLevel(logging.INFO) - - -def decrypt(str_to_dec, Key): - dec_xor = "" - first_dec = base64.b64decode(str_to_dec) - len_first_dec = len(first_dec) - for i in range(len_first_dec): - Key = Key + str(Key[i % len(Key)]) - a_list = [chr(ord(chr(a)) ^ ord(b)) for a, b in zip(first_dec, Key)] - dec_xor = "".join(a_list) - third_dec = base64.b64decode(dec_xor) - tocut = str(third_dec) - cut = tocut[2:-1] - return cut - - -def extract_config(data): - config_dict = {} - - pattern = re.compile( - Rb"""(?x) - \x02\x72(...)\x70\x7D...\x04 - \x02\x72(...)\x70\x7D...\x04 - \x02\x72(...)\x70\x7D...\x04 - \x02\x72(...)\x70\x7D...\x04 - """ - ) - - pattern2 = re.compile( - Rb"""(?x) - \x72(...)\x70\x0A - \x72(...)\x70\x0B - \x72(...)\x70\x0C - \x72(...)\x70\x0D - """ - ) - - pattern3 = re.compile( - Rb"""(?x) - \x02\x72(...)\x70\x7D...\x04 - \x02\x72(...)\x70\x7D...\x04 - """ - ) - - pattern4 = re.compile( - Rb"""(?x) - \x02\x28...\x0A - \x02\x72(...)\x70\x7D...\x04 - \x02\x72(...)\x70\x7D...\x04 - """ - ) - - pattern5 = re.compile( - Rb"""(?x) - \x72(...)\x70\x80...\x04 - \x72(...)\x70\x80...\x04 - \x72(...)\x70\x80...\x04 - \x72(...)\x70\x80...\x04 - """ - ) - - # If the config file is stored in plaintext format - pattern6 = re.compile( - Rb"""(?x) - \x72(...)\x70\x80...\x04 - \x72(...)\x70\x80...\x04 - """ - ) - patterns = [pattern, pattern2, pattern3, pattern4, pattern5, pattern6] - key = c2 = botnet = base_location = None - - user_strings = extract_strings(data=data, on_demand=True) - if not user_strings: - user_strings = dotnet_user_strings(data=data) - if not user_strings: - return - - with suppress(Exception): - base_location = user_strings.index("Yandex\\YaAddon") - if base_location: - # newer samples - with suppress(Exception): - key = user_strings[base_location - 1] - c2 = decrypt(user_strings[base_location - 3], key) - if not c2 or "." not in c2: - c2 = decrypt(user_strings[base_location - 4], key) - botnet = decrypt(user_strings[base_location - 3], key) - else: - botnet = decrypt(user_strings[base_location - 2], key) - - # older samples - if not c2 or "." not in c2: - with suppress(Exception): - key = user_strings[base_location + 3] - c2 = decrypt(user_strings[base_location + 1], key) - botnet = decrypt(user_strings[base_location + 2], key) - - base_location = None - with suppress(Exception): - if "Authorization" in user_strings: - base_location = user_strings.index("Authorization") - if base_location: - if not c2 or "." 
not in c2: - delta = base_location - while True: - delta += 1 - if "==" in user_strings[delta]: - c2 = user_strings[delta] - if "=" in user_strings[delta + 1]: - botnet = user_strings[delta + 1] - key = user_strings[delta + 2] - if "=" in key: - key = user_strings[delta + 3] - else: - botnet = None - key = user_strings[delta + 1] - c2 = decrypt(c2, key) - if botnet: - botnet = decrypt(botnet, key) - break - - if not c2 or "." not in c2 and HAVE_DNFILE: - with suppress(Exception): - dn = dnfile.dnPE(data=data) - for p in patterns: - extracted = [] - for match in p.findall(data): - for item in match: - user_string = dn.net.user_strings.get(int.from_bytes(item, "little")).value - if user_string: - extracted.append(user_string) - if extracted: - # Case-1: If the config file is stored in encrypted format - if len(extracted) == 3: - key = extracted[2] - c2 = decrypt(extracted[0], key) - botnet = decrypt(extracted[1], key) - if "." in c2: - break - - # Case-2: If the config file is stored in plaintext format - else: - c2 = extracted[0] - botnet = extracted[1] - dn.close() - - if not c2 or "." not in c2: - return - - config_dict = {"C2": c2, "Botnet": botnet, "Key": key} - if "Authorization" in user_strings: - base_location = user_strings.index("Authorization") - if base_location: - config_dict["Authorization"] = user_strings[base_location - 1] - return config_dict - - -if __name__ == "__main__": - import sys - - with open(sys.argv[1], "rb") as f: - print(extract_config(f.read())) diff --git a/modules/processing/parsers/CAPE/Remcos.py b/modules/processing/parsers/CAPE/Remcos.py deleted file mode 100644 index a5ebbf9c331..00000000000 --- a/modules/processing/parsers/CAPE/Remcos.py +++ /dev/null @@ -1,207 +0,0 @@ -# This file is part of CAPE Sandbox - https://github.com/ctxis/CAPE -# See the file 'docs/LICENSE' for copying permission. -# -# This decoder is based on: -# Decryptor POC for Remcos RAT version 2.7.1 and earlier -# By Talos July 2018 - https://github.com/Cisco-Talos/remcos-decoder -# Updates based on work presented here https://gist.github.com/sysopfb/11e6fb8c1377f13ebab09ab717026c87 - -DESCRIPTION = "Remcos config extractor." 
-AUTHOR = "threathive,sysopfb,kevoreilly" - -import base64 -import logging -import re -import string -from collections import OrderedDict - -import pefile -from Cryptodome.Cipher import ARC4 - -# From JPCERT -FLAG = {b"\x00": "Disable", b"\x01": "Enable"} - -# From JPCERT -idx_list = { - 0: "Host:Port:Password", - 1: "Assigned name", - 2: "Connect interval", - 3: "Install flag", - 4: "Setup HKCU\\Run", - 5: "Setup HKLM\\Run", - 6: "Setup HKLM\\Explorer\\Run", - 7: "Setup HKLM\\Winlogon\\Shell", - 8: "Setup HKLM\\Winlogon\\Userinit", - 9: "Install path", - 10: "Copy file", - 11: "Startup value", - 12: "Hide file", - 13: "Unknown13", - 14: "Mutex", - 15: "Keylog flag", - 16: "Keylog path", - 17: "Keylog file", - 18: "Keylog crypt", - 19: "Hide keylog file", - 20: "Screenshot flag", - 21: "Screenshot time", - 22: "Take Screenshot option", - 23: "Take screenshot title", - 24: "Take screenshot time", - 25: "Screenshot path", - 26: "Screenshot file", - 27: "Screenshot crypt", - 28: "Mouse option", - 29: "Unknown29", - 30: "Delete file", - 31: "Unknown31", - 32: "Unknown32", - 33: "Unknown33", - 34: "Unknown34", - 35: "Unknown35", - 36: "Audio record time", - 37: "Audio path", - 38: "Audio folder", - 39: "Unknown39", - 40: "Unknown40", - 41: "Connect delay", - 42: "Unknown42", - 43: "Unknown43", - 44: "Unknown44", - 45: "Unknown45", - 46: "Unknown46", - 47: "Unknown47", - 48: "Copy folder", - 49: "Keylog folder", - 50: "Unknown50", - 51: "Unknown51", - 52: "Unknown52", - 53: "Unknown53", - 54: "Keylog file max size (base64)", - 55: "Unknown55 (base64)", - 56: "TLS client certificate (base64)", - 57: "TLS client private key (base64)", - 58: "TLS server certificate (base64)", - 59: "Unknown59", - 60: "Unknown60", - 61: "Unknown61", - 62: "Unknown62", - 63: "Unknown63", - 64: "Unknown64", - 65: "Unknown65", - 66: "Unknown66", -} - -# From JPCERT -setup_list = { - 0: "Temp", - 2: "Root", - 3: "Windows", - 4: "System32", - 5: "Program Files", - 6: "AppData", - 7: "User Profile", - 8: "Application path", -} - -utf_16_string_list = ["Copy file", "Startup value", "Keylog file", "Take screenshot title", "Copy folder", "Keylog folder"] -logger = logging.getLogger(__name__) - - -def get_rsrc(pe): - ret = [] - if not hasattr(pe, "DIRECTORY_ENTRY_RESOURCE"): - return ret - - for resource_type in pe.DIRECTORY_ENTRY_RESOURCE.entries: - name = str(resource_type.name if resource_type.name is not None else pefile.RESOURCE_TYPE.get(resource_type.struct.Id)) - if hasattr(resource_type, "directory"): - for resource_id in resource_type.directory.entries: - if hasattr(resource_id, "directory"): - for resource_lang in resource_id.directory.entries: - data = pe.get_data(resource_lang.data.struct.OffsetToData, resource_lang.data.struct.Size) - ret.append((name, data, resource_lang.data.struct.Size, resource_type)) - - return ret - - -def get_strings(data, min=4): - result = "" - for c in data: - if chr(c) in string.printable: - result += chr(c) - continue - if len(result) >= min: - yield result - result = "" - if len(result) >= min: - yield result - - -def check_version(filedata): - s = "" - # find strings in binary file - slist = get_strings(filedata) - - # find and extract version string e.g. 
"2.0.5 Pro", "1.7 Free" or "1.7 Light" - for s in slist: - if bool(re.search(r"^\d+\.\d+(\.\d+)?\s+\w+$", s)): - return s - return "" - - -def extract_config(filebuf): - config = {} - - try: - pe = pefile.PE(data=filebuf) - blob = False - ResourceData = get_rsrc(pe) - for rsrc in ResourceData: - if rsrc[0] in ("RT_RCDATA", "SETTINGS"): - blob = rsrc[1] - break - - if blob: - keylen = blob[0] - key = blob[1 : keylen + 1] - decrypted_data = ARC4.new(key).decrypt(blob[keylen + 1 :]) - p_data = OrderedDict() - p_data["Version"] = check_version(filebuf) - - configs = re.split(rb"\|\x1e\x1e\x1f\|", decrypted_data) - - for i, cont in enumerate(configs): - if cont in (b"\x00", b"\x01"): - p_data[idx_list[i]] = FLAG[cont] - elif i in (9, 16, 25, 37): - # observed config values in bytes instead of ascii - if cont[0] > 8: - p_data[idx_list[i]] = setup_list[int(chr(cont[0]))] - else: - p_data[idx_list[i]] = setup_list[cont[0]] - elif i in (54, 55, 56, 57, 58): - p_data[idx_list[i]] = base64.b64encode(cont) - elif i == 0: - # various separators have been observed - separator = next((x for x in (b"|", b"\x1e", b"\xff\xff\xff\xff") if x in cont)) - host, port, password = cont.split(separator, 1)[0].split(b":") - p_data["Control"] = f"tcp://{host.decode()}:{port.decode()}:{password.decode()}" - else: - p_data[idx_list[i]] = cont - - for k, v in p_data.items(): - if k in utf_16_string_list: - v = v.decode("utf16").strip("\00") if isinstance(v, bytes) else v - config[k] = v - - except Exception as e: - logger.error(f"Caught an exception: {e}") - - return config - - -if __name__ == "__main__": - import sys - - print(extract_config(open(sys.argv[1], "rb").read())) diff --git a/modules/processing/parsers/CAPE/Retefe.py b/modules/processing/parsers/CAPE/Retefe.py deleted file mode 100644 index 0daf08b9340..00000000000 --- a/modules/processing/parsers/CAPE/Retefe.py +++ /dev/null @@ -1,146 +0,0 @@ -# This is adapted for CAPE from Tomasuh's retefe-unpacker script: -# https://github.com/Tomasuh/retefe-unpacker -# http://tomasuh.github.io/2018/12/28/retefe-unpack.html -# Many thanks to Tomasuh - -DESCRIPTION = "Retefe configuration parser." -AUTHOR = "Tomasuh" - -import struct - -import pefile -import yara - -rule_source = """ -rule Retefe -{ - meta: - author = "Tomasuh" - description = "Retefe Payload" - cape_type = "Retefe Payload" - strings: - $retefe_encoded_buffer = {48 8b 44 24 20 8b 40 08 48 8b 4c 24 20 48 8d 15} - $retefe_xor_seed = {24 20 48 8b 44 24 20 C7 40 08} - $retefe_xor_seed_2ndarg = {89 54 24 10 48 89 4c 24 08 48 83 ec 58 ba} - $retefe_shift_and_sub_match = {c1 e0 ?? 
b9} - condition: - uint16(0) == 0x5A4D and (all of them) -} -""" - - -def yara_scan(raw_data): - addresses = {} - yara_rules = yara.compile(source=rule_source) - matches = yara_rules.match(data=raw_data) - for match in matches: - if match.rule == "Emotet": - for item in match.strings: - addresses[item.identifier] = item.instances[0].offset - return addresses - - -def number_gen_rec(buffer_size, number): - if number == 1: - return buffer_size - return 0xFFFFFFFF & buffer_size * (0xFFFFFFFF & number_gen_rec(buffer_size, number - 1)) - - -def number_gen(buffer_size, number, shifts, subtract_val): - calculated_number = number_gen_rec(buffer_size, number) - - number = calculated_number << shifts # * 8 - number = subtract_val - number - - return number & 0xFFFFFFFF - - -def pwd_calc(buffer_size, number, shifts, subtract_val): - xor_arr = [] - seed = number_gen(buffer_size, number, shifts, subtract_val) - - while seed: - xor_arr.append(seed & 0xFF) - seed >>= 8 - - return xor_arr - - -def extract_config(filebuf): - pe = pefile.PE(data=filebuf, fast_load=False) - - yara_matches = yara_scan(filebuf) - - if not all( - [ - yara_matches.get(key) - for key in ("$retefe_xor_seed", "$retefe_xor_seed_2ndarg", "$retefe_shift_and_sub_match", "$retefe_encoded_buffer") - ] - ): - return - - # Offset to seed for xor - offset = int(yara_matches["$retefe_xor_seed"]) - # Offset to value that will be used to take xor^value - offset2 = int(yara_matches["$retefe_xor_seed_2ndarg"]) - # Offset to values that will be used in part of subtraction and shifts of xor^value - offset3 = int(yara_matches["$retefe_shift_and_sub_match"]) - offset4 = int(yara_matches["$retefe_encoded_buffer"]) - - # Offset starts at match, we want end of match - seed_val = struct.unpack(" bytes: - return bytes.fromhex(hashlib.md5(string).hexdigest()) - - -def handle_plain(dotnet_file, c2_type, user_strings): - user_strings_list = list(user_strings.values()) - if c2_type == "Telegram": - token = dotnet_file.net.user_strings.get(user_strings_list[15]).value.__str__() - chat_id = dotnet_file.net.user_strings.get(user_strings_list[16]).value.__str__() - return {"Type": "Telegram", "C2": f"https://api.telegram.org/bot{token}/sendMessage?chat_id={chat_id}"} - elif c2_type == "SMTP": - smtp_from = dotnet_file.net.user_strings.get(user_strings_list[7]).value.__str__() - smtp_password = dotnet_file.net.user_strings.get(user_strings_list[8]).value.__str__() - smtp_host = dotnet_file.net.user_strings.get(user_strings_list[9]).value.__str__() - smtp_to = dotnet_file.net.user_strings.get(user_strings_list[10]).value.__str__() - smtp_port = dotnet_file.net.user_strings.get(user_strings_list[11]).value.__str__() - return { - "Type": "SMTP", - "Host": smtp_host, - "Port": smtp_port, - "From Address": smtp_from, - "To Address": smtp_to, - "Password": smtp_password, - } - elif c2_type == "FTP": - ftp_username = dotnet_file.net.user_strings.get(user_strings_list[12]).value.__str__() - ftp_password = dotnet_file.net.user_strings.get(user_strings_list[13]).value.__str__() - ftp_host = dotnet_file.net.user_strings.get(user_strings_list[14]).value.__str__() - return {"Type": "FTP", "Host": ftp_host, "Username": ftp_username, "Password": ftp_password} - - -def handle_encrypted(dotnet_file, data, c2_type, user_strings): - # Match decrypt string pattern - decrypt_string_pattern = re.compile( - Rb"""(?x) - \x72(...)\x70 - \x7E(...)\x04 - \x28...\x06 - \x80...\x04 - """ - ) - - config_dict = None - decrypted_strings = [] - - matches2 = decrypt_string_pattern.findall(data) - 
for match in matches2: - string_index = int.from_bytes(match[0], "little") - user_string = dotnet_file.net.user_strings.get(string_index).value - # Skip user strings that are empty/not base64 - if user_string == "Yx74dJ0TP3M=" or not is_base64(user_string): - continue - field_row_index = int.from_bytes(match[1], "little") - field_name = dotnet_file.net.mdtables.Field.get_with_row_index(field_row_index).Name.__str__() - key_index = user_strings[field_name] - key_str = dotnet_file.net.user_strings.get(key_index).value.__str__() - key = md5(key_str.encode())[:8] - des = DES.new(key, DES.MODE_ECB) - - decoded_str = base64.b64decode(user_string) - padded_str = pad(decoded_str) - decrypted_text = des.decrypt(padded_str) - plaintext_bytes = unpad(decrypted_text, DES.block_size) - plaintext = plaintext_bytes.decode() - decrypted_strings.append(plaintext) - - if decrypted_strings: - if c2_type == "Telegram": - token, chat_id = decrypted_strings - config_dict = {"Type": "Telegram", "C2": f"https://api.telegram.org/bot{token}/sendMessage?chat_id={chat_id}"} - elif c2_type == "SMTP": - smtp_from, smtp_password, smtp_host, smtp_to, smtp_port = decrypted_strings - config_dict = { - "Type": "SMTP", - "Host": smtp_host, - "Port": smtp_port, - "From Address": smtp_from, - "To Address": smtp_to, - "Password": smtp_password, - } - elif c2_type == "FTP": - ftp_username, ftp_password, ftp_host = decrypted_strings - config_dict = {"Type": "FTP", "Host": ftp_host, "Username": ftp_username, "Password": ftp_password} - return config_dict - - -def extract_config(data): - - try: - dotnet_file = dnfile.dnPE(data=data) - except Exception as e: - log.debug(f"Exception when attempting to parse .NET file: {e}") - log.debug(traceback.format_exc()) - - # ldstr, stsfld - static_strings = re.compile( - Rb"""(?x) - \x72(...)\x70 - \x80(...)\x04 - """ - ) - - # Get user strings and C2 type - user_strings = {} - c2_type = None - matches = static_strings.findall(data) - for match in matches: - try: - string_index = int.from_bytes(match[0], "little") - string_value = dotnet_file.net.user_strings.get(string_index).value.__str__() - field_index = int.from_bytes(match[1], "little") - field_name = dotnet_file.net.mdtables.Field.get_with_row_index(field_index).Name.__str__() - if string_value == "$%TelegramDv$": - c2_type = "Telegram" - - elif string_value == "$%SMTPDV$": - c2_type = "SMTP" - - elif string_value == "%FTPDV$": - c2_type = "FTP" - else: - user_strings[field_name] = string_index - except Exception as e: - log.debug(f"There was an exception parsing user strings: {e}") - log.debug(traceback.format_exc()) - - if c2_type is None: - raise ValueError("Could not identify C2 type.") - - # Handle encrypted strings - config_dict = handle_encrypted(dotnet_file, data, c2_type, user_strings) - if config_dict is None: - # Handle plain strings - config_dict = handle_plain(dotnet_file, c2_type, user_strings) - - return config_dict - - -if __name__ == "__main__": - import sys - - with open(sys.argv[1], "rb") as f: - print(extract_config(f.read())) diff --git a/modules/processing/parsers/CAPE/Socks5Systemz.py b/modules/processing/parsers/CAPE/Socks5Systemz.py deleted file mode 100644 index 0a7549d25c0..00000000000 --- a/modules/processing/parsers/CAPE/Socks5Systemz.py +++ /dev/null @@ -1,25 +0,0 @@ -import socket -from contextlib import suppress - - -def _is_ip(ip): - try: - socket.inet_aton(ip) - return True - except Exception: - return False - - -def extract_config(data): - config_dict = {"C2s": []} - with suppress(Exception): - if 
data[:2] == b"MZ": - return - for line in data.decode().split("\n"): - if _is_ip(line) and line not in config_dict["C2s"]: - config_dict["C2s"].append(line) - elif line and "\\" in line: - config_dict.setdefault("Timestamp path", []).append(line) - elif "." in line: - config_dict.setdefault("Dummy domain", []).append(line) - return config_dict diff --git a/modules/processing/parsers/CAPE/SparkRAT.py b/modules/processing/parsers/CAPE/SparkRAT.py deleted file mode 100644 index e16add20870..00000000000 --- a/modules/processing/parsers/CAPE/SparkRAT.py +++ /dev/null @@ -1,73 +0,0 @@ -import io -import json -import logging -from contextlib import suppress - -HAVE_PYCYPTODOMEX = False -with suppress(ImportError): - from Cryptodome.Cipher import AES - from Cryptodome.Util import Counter - - HAVE_PYCYPTODOMEX = True - -log = logging.getLogger(__name__) - - -DESCRIPTION = "SparkRAT configuration parser." -AUTHOR = "t-mtsmt" - - -def extract_data_before_string(data, search_string, offset): - search_bytes = search_string.encode("utf-8") - - position = data.find(search_bytes) - if position == -1: - return b"" - - start_position = max(position - offset, 0) - return data[start_position:position] - - -def decrypt_config(enc_data, key, iv): - counter = Counter.new(128, initial_value=int.from_bytes(iv, "big")) - cipher = AES.new(key, mode=AES.MODE_CTR, counter=counter) - dec_data = cipher.decrypt(enc_data) - config = dec_data.decode("utf-8") - return json.loads(config) - - -def extract_config(data): - if not HAVE_PYCYPTODOMEX: - log.error("Missed pycryptodomex. Run: poetry install") - return {} - - search_string = "DXGI_ERROR_DRIVER_INTERNAL" - config_buf_size = 0x180 - config_buf = extract_data_before_string(data, search_string, offset=config_buf_size) - - if len(config_buf) == 0: - log.error("Configuration is not found.") - return {} - - if config_buf == b"\x19" * config_buf_size: - log.debug("Configuration does not exist because the template data in the ConfigBuffer was not replaced.") - return {} - - try: - with io.BytesIO(config_buf) as f: - data_len = int.from_bytes(f.read(2), "big") - key = f.read(16) - iv = f.read(16) - enc_data = f.read(data_len - 32) - return decrypt_config(enc_data, key, iv) - except Exception as e: - log.error("Configuration decryption failed: %s", e) - return {} - - -if __name__ == "__main__": - import sys - from pathlib import Path - - data = Path(sys.argv[1]).read_bytes() - print(extract_config(data)) diff --git a/modules/processing/parsers/CAPE/SquirrelWaffle.py b/modules/processing/parsers/CAPE/SquirrelWaffle.py deleted file mode 100644 index 1c8960ae4b8..00000000000 --- a/modules/processing/parsers/CAPE/SquirrelWaffle.py +++ /dev/null @@ -1,87 +0,0 @@ -# Copyright (C) 2021 Kevin O'Reilly (kevoreilly@gmail.com) -# This program is free software: you can redistribute it and/or modify -# it under the terms of the GNU General Public License as published by -# the Free Software Foundation, either version 3 of the License, or -# (at your option) any later version. -# -# This program is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -# GNU General Public License for more details. -# -# You should have received a copy of the GNU General Public License -# along with this program. If not, see <http://www.gnu.org/licenses/>.
-import struct -from itertools import cycle - -import pefile -import yara - -rule_source = """ -rule SquirrelWaffle -{ - strings: - $config = {83 C2 04 83 C1 04 83 EE 04 73 EF 83 FE FC 74 34 8A 02 3A 01 75 27 83 FE FD 74 29 8A 42 01 3A 41 01 75 1A 83 FE FE 74 1C 8A 42 02 3A 41 02 75 0D} - $decode = {F7 75 ?? 83 7D ?? 10 8D 4D ?? 8D 45 ?? C6 45 ?? 00 0F 43 4D ?? 83 7D ?? 10 0F 43 45 ?? 8A 04 10 32 04 39} - $c2key = {83 EC 18 8B CC 89 A5 [4] 6A 05 C7 41 ?? 00 00 00 00 C7 41 ?? 0F 00 00 00 68} - condition: - uint16(0) == 0x5A4D and any of them -} -""" - -yara_rules = yara.compile(source=rule_source) - -MAX_STRING_SIZE = 32 - - -def string_from_offset(data, offset): - return data[offset : offset + MAX_STRING_SIZE].split(b"\0", 1)[0] - - -def extract_rdata(pe): - for section in pe.sections: - if b".rdata" in section.Name: - return section.get_data(section.VirtualAddress, section.SizeOfRawData) - return None - - -def xor_data(data, key): - return bytes(c ^ k for c, k in zip(data, cycle(key))) - - -def extract_config(data): - config = {} - pe = None - try: - pe = pefile.PE(data=data) - except Exception: - return config - - if pe is not None: - rdata = extract_rdata(pe) - if len(rdata) == 0: - return config - chunks = [x for x in rdata.split(b"\x00") if x != b""] - for i, line in enumerate(chunks): - if len(line) > 100: - try: - decrypted = xor_data(line, chunks[i + 1]).decode() - if "\r\n" in decrypted and "|" not in decrypted: - config["IP Blocklist"] = list(filter(None, decrypted.split("\r\n"))) - elif "|" in decrypted and "." in decrypted and "\r\n" not in decrypted: - config["URLs"] = list(filter(None, decrypted.split("|"))) - except Exception: - continue - matches = yara_rules.match(data=data) - if not matches: - return config - for match in matches: - if match.rule != "SquirrelWaffle": - continue - for item in match.strings: - if "$c2key" in item.identifier: - c2key_offset = item.instances[0].offset - key_rva = struct.unpack("i", data[c2key_offset + 28 : c2key_offset + 32])[0] - pe.OPTIONAL_HEADER.ImageBase - key_offset = pe.get_offset_from_rva(key_rva) - config["C2 key"] = string_from_offset(data, key_offset).decode() - return config diff --git a/modules/processing/parsers/CAPE/Stealc.py b/modules/processing/parsers/CAPE/Stealc.py deleted file mode 100644 index 0d2f5a35025..00000000000 --- a/modules/processing/parsers/CAPE/Stealc.py +++ /dev/null @@ -1,107 +0,0 @@ -import struct -from contextlib import suppress - -import pefile -import yara - -MAX_STRING_SIZE = 100 - -# Hash = 619751f5ed0a9716318092998f2e4561f27f7f429fe6103406ecf16e33837470 - -RULE_SOURCE = """rule StealC -{ - meta: - author = "Yung Binary" - hash = "619751f5ed0a9716318092998f2e4561f27f7f429fe6103406ecf16e33837470" - strings: - $decode_1 = { - 6A ?? - 68 ?? ?? ?? ?? - 68 ?? ?? ?? ?? - E8 ?? ?? ?? ?? - 83 C4 0C - A3 ?? ?? ?? ?? 
- } - - condition: - $decode_1 -}""" - - -def yara_scan(raw_data): - yara_rules = yara.compile(source=RULE_SOURCE) - matches = yara_rules.match(data=raw_data) - - for match in matches: - for block in match.strings: - for instance in block.instances: - yield instance.offset - - -def xor_data(data, key): - decoded = bytearray() - for i in range(len(data)): - decoded.append(data[i] ^ key[i]) - return decoded - - -def string_from_offset(data, offset): - return data[offset : offset + MAX_STRING_SIZE].split(b"\0", 1)[0] - - -def extract_config(data): - config_dict = {} - - # Attempt to extract via old method - try: - domain = "" - uri = "" - lines = data.decode().split("\n") - for line in lines: - if line.startswith("http") and "://" in line: - domain = line - if line.startswith("/") and line[-4] == ".": - uri = line - if domain and uri: - config_dict.setdefault("C2", []).append(f"{domain}{uri}") - except Exception: - pass - - # Try with new method - if not config_dict.get("C2"): - with suppress(Exception): - # config_dict["Strings"] = [] - pe = pefile.PE(data=data, fast_load=False) - image_base = pe.OPTIONAL_HEADER.ImageBase - - for str_decode_offset in yara_scan(data): - str_size = int(data[str_decode_offset + 1]) - # Ignore size 0 strings - if not str_size: - continue - - key_rva = data[str_decode_offset + 3 : str_decode_offset + 7] - encoded_str_rva = data[str_decode_offset + 8 : str_decode_offset + 12] - # dword_rva = data[str_decode_offset + 21 : str_decode_offset + 25] - - key_offset = pe.get_offset_from_rva(struct.unpack("i", key_rva)[0] - image_base) - encoded_str_offset = pe.get_offset_from_rva(struct.unpack("i", encoded_str_rva)[0] - image_base) - # dword_offset = hex(struct.unpack("i", dword_rva)[0])[2:] - - key = string_from_offset(data, key_offset) - encoded_str = string_from_offset(data, encoded_str_offset) - - decoded_str = xor_data(encoded_str, key).decode() - if ("http://" in decoded_str or "https://" in decoded_str) and len(decoded_str) > 11: - config_dict.setdefault("C2", []).append(decoded_str) - # else: - # config_dict["Strings"].append({f"dword_{dword_offset}" : decoded_str}) - - return config_dict - - -if __name__ == "__main__": - import sys - - with open(sys.argv[1], "rb") as f: - print(extract_config(f.read())) diff --git a/modules/processing/parsers/CAPE/Strrat.py b/modules/processing/parsers/CAPE/Strrat.py deleted file mode 100644 index c6d5390185c..00000000000 --- a/modules/processing/parsers/CAPE/Strrat.py +++ /dev/null @@ -1,80 +0,0 @@ -# MIT License -# -# Copyright (c) 2021 enzok -# -# Permission is hereby granted, free of charge, to any person obtaining a copy -# of this software and associated documentation files (the "Software"), to deal -# in the Software without restriction, including without limitation the rights -# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -# copies of the Software, and to permit persons to whom the Software is -# furnished to do so, subject to the following conditions: -# -# The above copyright notice and this permission notice shall be included in all -# copies or substantial portions of the Software. -# -# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE -# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -# SOFTWARE. - -import base64 -import zipfile -from hashlib import pbkdf2_hmac - -from Cryptodome.Cipher import AES - -from lib.cuckoo.common.utils import store_temp_file - - -def unpad(s): - return s[: -s[-1]] - - -def unzip_config(filepath): - data = "" - try: - with zipfile.ZipFile(filepath.decode()) as z: - for name in z.namelist(): - if "config.txt" in name: - data = z.read(name) - break - except Exception: - return - return data - - -def aesdecrypt(data, passkey): - iv = data[4:20] - key = pbkdf2_hmac("sha1", passkey, iv, 65536, 16) - aes = AES.new(key, AES.MODE_CBC, iv) - return unpad(aes.decrypt(data[20:])) - - -def decode(data): - decoded = "" - try: - data = base64.b64decode(data) - except Exception as exc: - return exc - if data: - passkey = b"strigoi" - try: - decoded = aesdecrypt(data, passkey) - except Exception: - return - return decoded.decode() - - -def extract_config(data): - raw_config = {} - configdata = "" - tmpzip = store_temp_file(data, "badjar.zip", b"strrat_tmp") - configdata = unzip_config(tmpzip) - - if configdata: - raw_config["config"] = decode(configdata) - - return raw_config diff --git a/modules/processing/parsers/CAPE/TSCookie.py b/modules/processing/parsers/CAPE/TSCookie.py deleted file mode 100644 index 4575ea30d9d..00000000000 --- a/modules/processing/parsers/CAPE/TSCookie.py +++ /dev/null @@ -1,170 +0,0 @@ -#!/usr/bin/env python -# -# LICENSE -# the GNU General Public License version 2 -# -# Credit to JPCERT - this is derived from https://github.com/JPCERTCC/aa-tools/blob/master/tscookie_decode.py - -import collections -import re -import sys -from struct import unpack, unpack_from - -import pefile - -# Resource pattern -RESOURCE_PATTERNS = [ - re.compile("\x50\x68(....)\x68(.)\x00\x00\x00(.)\xE8", re.DOTALL), - re.compile("(.)\x68(...)\x00\x68(.)\x00\x00\x00\x6A\x00\xE8(....)\x83(..)\xC3", re.DOTALL), - re.compile("\x04(.....)\x68(.)\x00\x00\x00\x6A\x00\xE8", re.DOTALL), - re.compile("\x56\xBE(....)\x56\x68(.)\x00\x00\x00\x6A\x00\xE8", re.DOTALL), - re.compile("\x53\x68(....)\x6A(.)\x56\xFF", re.DOTALL), -] - -# RC4 key pattern -RC4_KEY_PATTERNS = [ - re.compile("\x80\x68\x80\x00\x00\x00\x50\xC7\x40", re.DOTALL), - re.compile("\x80\x68\x80\x00\x00\x00(...)\x50\x52\x53\xC7\x40", re.DOTALL), -] -RC4_KEY_LENGTH = 0x80 - -# Config pattern -CONFIG_PATTERNS = [ - re.compile("\xC3\x90\x68(....)\xE8(....)\x59\x6A\x01\x58\xC3", re.DOTALL), - re.compile("\x6A\x04\x68(....)\x8D(.....)\x56\x50\xE8", re.DOTALL), -] -CONFIG_SIZE = 0x8D4 - - -# RC4 -def rc4(data, key): - x = 0 - box = list(range(256)) - for i in range(256): - x = (x + box[i] + ord(key[i % len(key)])) % 256 - box[i], box[x] = box[x], box[i] - x = 0 - y = 0 - out = [] - for char in data: - x = (x + 1) % 256 - y = (y + box[x]) % 256 - box[x], box[y] = box[y], box[x] - out.append(chr(ord(char) ^ box[(box[x] + box[y]) % 256])) - - return "".join(out) - - -# helper function for formatting string -def __format_string(data): - return data.split("\x00", 1)[0] - - -# Parse config -def parse_config(config): - config_dict = collections.OrderedDict() - for i in range(4): - if config[0x10 + 0x100 * i] != "\x00": - config_dict[f"Server name #{i + 1}"] = __format_string( - unpack_from("<240s", config, 0x10 + 0x100 * i)[0].decode("utf-16") - ) - config_dict[f"Main 
port #{i + 1}"] = unpack_from("I', config, 0x604)[0]:X}" - config_dict["Sleep time"] = unpack_from(" 200: - resource_id = ord(unpack("c", data[mr.start() + 8])[0]) - if resource_id == 104: - resource_id = ord(unpack("c", data[mr.start() + 21])[0]) - break - except Exception: - return - if not mr: - sys.exit("[!] Resource id not found") - - for idx in pe.DIRECTORY_ENTRY_RESOURCE.entries: - if str(idx.name) in str(resource_name): - for entry in idx.directory.entries: - if entry.id == resource_id: - try: - data_rva = entry.directory.entries[0].data.struct.OffsetToData - size = entry.directory.entries[0].data.struct.Size - rc_data = pe.get_memory_mapped_image()[data_rva : data_rva + size] - except Exception: - return - - return rc_data - - -def extract_config(data): - try: - dll = pefile.PE(data=data) - except Exception: - return None - - for pattern in CONFIG_PATTERNS: - mc = re.search(pattern, data) - if mc: - try: - (config_rva,) = unpack("=I", data[mc.start() + 3 : mc.start() + 7]) - config_addr = dll.get_physical_by_rva(config_rva - dll.NT_HEADERS.OPTIONAL_HEADER.ImageBase) - enc_config_data = data[config_addr : config_addr + CONFIG_SIZE] - except Exception: - return - - for pattern in RESOURCE_PATTERNS: - mr2 = re.search(pattern, data) - - if mr2: - rc2_data = load_resource(dll, data) - key_end = load_rc4key(data) - decode_resource(rc2_data, key_end, "TSCookie.2nd.decode") - - try: - enc_config = enc_config_data[4:] - rc4key = enc_config_data[:4] - config = rc4(enc_config, rc4key) - except Exception: - return - - return parse_config(config) diff --git a/modules/processing/parsers/CAPE/TrickBot.py b/modules/processing/parsers/CAPE/TrickBot.py deleted file mode 100644 index e0824344b62..00000000000 --- a/modules/processing/parsers/CAPE/TrickBot.py +++ /dev/null @@ -1,198 +0,0 @@ -# MIT License -# -# Copyright (c) 2017 Jason Reaves -# Copyright (c) 2019 Graham Austin -# -# Permission is hereby granted, free of charge, to any person obtaining a copy -# of this software and associated documentation files (the "Software"), to deal -# in the Software without restriction, including without limitation the rights -# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -# copies of the Software, and to permit persons to whom the Software is -# furnished to do so, subject to the following conditions: -# -# The above copyright notice and this permission notice shall be included in all -# copies or substantial portions of the Software. -# -# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -# SOFTWARE. - -import hashlib -import struct -import xml.etree.ElementTree as ET - -import pefile -import yara -from Cryptodome.Cipher import AES - -rule_source = """ -rule TrickBot -{ - meta: - author = "grahamaustin" - description = "TrickBot Payload" - cape_type = "TrickBot Payload" - strings: - $snippet1 = {B8 ?? ?? 00 00 85 C9 74 32 BE ?? ?? ?? ?? BA ?? ?? ?? ?? BF ?? ?? ?? ?? BB ?? ?? ?? ?? 
03 F2 8B 2B 83 C3 04 33 2F 83 C7 04 89 29 83 C1 04 3B DE 0F 43 DA} - condition: - uint16(0) == 0x5A4D and ($snippet1) -} -""" - - -def yara_scan(raw_data, rule_name): - addresses = {} - yara_rules = yara.compile(source=rule_source) - matches = yara_rules.match(data=raw_data) - for match in matches: - if match.rule == "TrickBot": - for item in match.strings: - if item.identifier == rule_name: - addresses[item.identifier] = item.instances[0].offset - return addresses - - -def xor_data(data, key, key_len): - decrypted_blob = b"" - for i, x in enumerate(range(0, len(data), 4)): - xor = struct.unpack("= section.VirtualAddress and rva < section.VirtualAddress + section.Misc_VirtualSize: - return rva - section.VirtualAddress + section.PointerToRawData - - -# Thanks Robert Giczewski - https://malware.love/malware_analysis/reverse_engineering/2020/11/17/trickbots-latest-trick.html -def convert_to_real_ip(ip_str): - octets = ip_str.split(".") - o1 = int(octets[0]) - o2 = int(octets[2]) - o3 = int(octets[3]) - o4 = int(octets[1]) - x = ((~o1 & 0xFF) & 0xB8 | (o1 & 0x47)) ^ ((~o2 & 0xFF) & 0xB8 | (o2 & 0x47)) - o = (o3 & (~o2 & 0xFF)) | ((~o3 & 0xFF) & o2) - result_octets = [ - str(x), - str(((~o & 0xFF) & o4) | (o & (~o4 & 0xFF))), - str(o), - str(((~o2 & 0xFF) & o4) | ((~o4 & 0xFF) & o2)), - ] - return f"{'.'.join(result_octets)}:443" - - -def get_ip(ip_str, tag): - if tag == "srva": - return convert_to_real_ip(ip_str.split(":", 1)[0]) - return ip_str - - -def decode_onboard_config(data): - try: - pe = pefile.PE(data=data) - rsrcs = get_rsrc(pe) - except Exception: - return - if rsrcs != []: - a = rsrcs[0][1] - data = trick_decrypt(a[4:]) - length = struct.unpack_from(". - -import binascii -import struct - -from Cryptodome.PublicKey import RSA - -MAX_STRING_SIZE = 256 - -# JOINER_SECTIONS = { -# 0xE1285E64: "CRC_PUBLIC_KEY", -# 0x8FB1DDE1: "CRC_CLIENT_INI", -# 0xD722AFCB: "CRC_CLIENT_INI", -# 0x4F75CEA7: "CRC_LOADER_DLL", -# 0x90F8AAB5: "CRC_LOADER_DLL", -# 0x7A042A8A: "CRC_INSTALL_INI", -# 0x90F8AAB4: "CRC_CLIENT64", -# 0xDA57D71A: "CRC_WORDLIST", -# 0xC535D8BF: "CRC_LOADER_DLL", -# } - -# INI_PARAMS = { -# 0x4FA8693E: "CRC_SERVERKEY", -# 0xD0665BF6: "CRC_HOSTS", -# 0x656B798A: "CRC_GROUP", -# 0x556AED8F: "CRC_SERVER", -# 0x11271C7F: "CONF_TIMEOUT", -# 0x48295783: "CONFIG_FAIL_TIMEOUT", -# 0xEA9EA760: "CRC_BOOTSTRAP", -# 0x31277BD5: "CRC_TASKTIMEOUT", -# 0x955879A6: "CRC_SENDTIMEOUT", -# 0x9FD13931: "CRC_BCSERVER", -# 0x6DE85128: "CRC_BCTIMEOUT", -# 0xACC79A02: "CRC_KNOCKERTIMEOUT", -# 0x602C2C26: "CRC_KEYLOGLIST", -# 0xD7A003C9: "CRC_CONFIGTIMEOUT", -# 0x18A632BB: "CRC_CONFIGFAILTIMEOUT", -# 0x73177345: "CRC_DGA_SEED_URL", -# 0x510F22D2: "CRC_TORSERVER", -# 0xEC99DF2E: "CRC_EXTERNALIP", -# 0xC61EFA7A: "CRC_DGATLDS", -# 0xDF351E24: "CRC_32BITDOWNLOAD", -# 0x4B214F54: "CRC_64BITDOWNLOAD", -# 0xCD850E68: "DGA_CRC", -# 0xDF2E7488: "DGA_COUNT", -# 0x584E5925: "TIMER", -# } - -SECTION_KEYS = { - 0xD0665BF6: "Domains", - 0x73177345: "DGA Base URL", - 0xCD850E68: "DGA CRC", - 0xC61EFA7A: "DGA TLDs", - 0x510F22D2: "TOR Domains", - 0xDF351E24: "32-bit DLL URLs", - 0x4B214F54: "64-bit DLL URLs", - 0xEC99DF2E: "IP Service", - 0x11271C7F: "Timer", - 0xDF2E7488: "DGA count", - 0x556AED8F: "Server", - 0x4FA8693E: "Encryption key", - 0xD7A003C9: "Config Fail Timeout", - 0x18A632BB: "Config Timeout", - 0x31277BD5: "Task Timeout", - 0x955879A6: "Send Timeout", - 0xACC79A02: "Knocker Timeout", - 0x6DE85128: "BC Timeout", - 0x656B798A: "Botnet ID", - 0xEFC574AE: "Value 11", - # 0x584E5925: 'EndPointer', - 0xD3AA96D0: 
"New unknown", -} - - -def string_from_offset(buffer, offset): - return buffer[offset : offset + MAX_STRING_SIZE].split(b"\0", 1)[0].decode() - - -def get_config_item(config, offset): - config_string = string_from_offset(config, offset) - return config_string.split(" ") if " " in config_string else config_string - - -def convert_pubkey(pub): - # bit = struct.unpack_from('. - -import struct -from contextlib import suppress - -import pefile - -DESCRIPTION = "WarzoneRAT configuration extractor." -AUTHOR = "enzo" - - -def ksa(key: bytearray) -> bytearray: - sbox = bytearray(256) - for i in range(256): - sbox[i] = i - - j = 0 - for i in range(256): - j = (j + key[i % 250] + sbox[i]) & 0xFF - sbox[i] ^= sbox[j] & 0xFF - sbox[j] ^= sbox[i] & 0xFF - sbox[i] ^= sbox[j] & 0xFF - return sbox - - -def decrypt(sbox: bytearray, src_buf: bytearray) -> bytes: - i, j, k = 0, 0, 0 - dst_buf = bytearray(len(src_buf)) - - while k < len(src_buf): - i += 1 - uc = sbox[i % 256] & 0xFF - c = uc - 256 if uc > 127 else uc - j = j + c - 256 if j + c > 256 else j + c - d = sbox[j % 256] - sbox[i % 256] = d - sbox[j % 256] = uc - e1 = (i >> 3) ^ (32 * j) - e = sbox[e1 % 256] - g1 = ((int.from_bytes(struct.pack(">i", j), "big") >> 3) ^ (32 * i)) & 0xFF - g2 = sbox[g1 % 256] - g = (e + g2) & 0xFF - e = sbox[(j + d) % 256] - h = sbox[(g ^ 0xAA) % 256] - xor_key = (e ^ (h + sbox[(d + uc) % 256])) & 0xFF - dst_buf[k] = src_buf[k] ^ xor_key - i += 1 - k += 1 - - return bytes(dst_buf) - - -def extract_bss_data(pe): - for section in pe.sections: - if b".bss" in section.Name: - return section.get_data(section.VirtualAddress, section.SizeOfRawData) - return None - - -def extract_config(data): - cfg = {} - pe = None - with suppress(Exception): - pe = pefile.PE(data=data, fast_load=False) - if not pe: - return - try: - key = bytearray(250) - bss_data = extract_bss_data(pe) - if not bss_data: - return cfg - key_size = struct.unpack("i", bss_data[:4])[0] - key_bytes = bss_data[4 : 4 + key_size] - for k in range(len(key_bytes)): - key[k] = key_bytes[k] - etxt = bss_data[4 + key_size : 260 + key_size] - dtxt = decrypt(ksa(key), bytearray(etxt)) - - offset = 4 - c2_size = struct.unpack("i", dtxt[:offset])[0] - c2_host = dtxt[offset : offset + c2_size].decode("utf-16") - offset += c2_size - c2_port = struct.unpack("H", dtxt[offset : offset + 2])[0] - cfg["C2"] = f"{c2_host}:{c2_port}" - offset += 2 - # unk1 = dtxt[offset : offset + 7] - offset += 7 - unk2_size = struct.unpack("i", dtxt[offset : offset + 4])[0] - offset += 4 - # unk2 = dtxt[offset : offset + unk2_size] - offset += unk2_size - # unk3 = dtxt[offset : offset + 2] - offset += 2 - runkey_size = struct.unpack("i", dtxt[offset : offset + 4])[0] - offset += 4 - cfg["Run Key Name"] = dtxt[offset : offset + runkey_size].decode("utf-16") - except struct.error: - # there is a lot of failed data validation muting it - return - except Exception as e: - print("warzone", e) - - return cfg - - -if __name__ == "__main__": - import sys - from pathlib import Path - - data = Path(sys.argv[1]).read_bytes() - print(extract_config(data)) diff --git a/modules/processing/parsers/CAPE/XWorm.py b/modules/processing/parsers/CAPE/XWorm.py deleted file mode 100644 index 1220071ea7d..00000000000 --- a/modules/processing/parsers/CAPE/XWorm.py +++ /dev/null @@ -1,5 +0,0 @@ -from rat_king_parser.rkp import RATConfigParser - - -def extract_config(data: bytes): - return RATConfigParser(data).report.get("config", {}) diff --git a/modules/processing/parsers/CAPE/XenoRAT.py 
b/modules/processing/parsers/CAPE/XenoRAT.py deleted file mode 100644 index 1220071ea7d..00000000000 --- a/modules/processing/parsers/CAPE/XenoRAT.py +++ /dev/null @@ -1,5 +0,0 @@ -from rat_king_parser.rkp import RATConfigParser - - -def extract_config(data: bytes): - return RATConfigParser(data).report.get("config", {}) diff --git a/modules/processing/parsers/CAPE/Zloader.py b/modules/processing/parsers/CAPE/Zloader.py deleted file mode 100644 index eb0027cb191..00000000000 --- a/modules/processing/parsers/CAPE/Zloader.py +++ /dev/null @@ -1,139 +0,0 @@ -# Copyright (C) 2020 Kevin O'Reilly (kevoreilly@gmail.com) -# This program is free software: you can redistribute it and/or modify -# it under the terms of the GNU General Public License as published by -# the Free Software Foundation, either version 3 of the License, or -# (at your option) any later version. -# -# This program is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -# GNU General Public License for more details. -# -# You should have received a copy of the GNU General Public License -# along with this program. If not, see <http://www.gnu.org/licenses/>. - -DESCRIPTION = "Zloader configuration parser" -AUTHOR = "kevoreilly" - -import logging -import struct - -import pefile -import yara -from Cryptodome.Cipher import ARC4 - -log = logging.getLogger(__name__) - -rule_source = """ -rule Zloader -{ - meta: - author = "kevoreilly, enzok" - description = "Zloader Payload" - cape_type = "Zloader Payload" - strings: - $rc4_init = {31 [1-3] 66 C7 8? 00 01 00 00 00 00 90 90 [0-5] 8? [5-90] 00 01 00 00 [0-15] (74|75)} - $decrypt_conf = {e8 ?? ?? ?? ?? e8 ?? ?? ?? ?? e8 ?? ?? ?? ?? e8 ?? ?? ?? ?? 68 ?? ?? ?? ?? 68 ?? ?? ?? ?? e8 ?? ?? ?? ?? 83 c4 08 e8 ?? ?? ?? ??} - $decrypt_conf_1 = {48 8d [5] [0-6] e8 [4] 48 [3-4] 48 [3-4] 48 [6] E8} - $decrypt_conf_2 = {48 8d [5] 4? [5] e8 [4] 48 [3-4] 48 8d [5] E8 [4] 48} - $decrypt_key_1 = {66 89 C2 4? 8D 0D [3] 00 4? B? FC 03 00 00 E8 [4] 4? 83 C4} - $decrypt_key_2 = {48 8d 0d [3] 00 66 89 ?? 4? 89 F0 4? [2-5] E8 [4-5] 4? 83 C4} - $decrypt_key_3 = {48 8d 0d [3] 00 e8 [4] 66 89 [3] b?
[4] e8 [4] 66 8b} - condition: - uint16(0) == 0x5A4D and 1 of ($decrypt_conf*) and (1 of ($decrypt_key*) or $rc4_init) -} -""" -MAX_STRING_SIZE = 32 - -yara_rules = yara.compile(source=rule_source) - - -def decrypt_rc4(key, data): - cipher = ARC4.new(key) - return cipher.decrypt(data) - - -def string_from_offset(data, offset): - return data[offset : offset + MAX_STRING_SIZE].split(b"\0", 1)[0] - - -def extract_config(filebuf): - end_config = {} - pe = pefile.PE(data=filebuf, fast_load=False) - image_base = pe.OPTIONAL_HEADER.ImageBase - matches = yara_rules.match(data=filebuf) - if not matches: - return - conf_type = "" - decrypt_key = "" - conf_size = 1020 - for match in matches: - if match.rule != "Zloader": - continue - for item in match.strings: - if "$decrypt_conf" == item.identifier: - decrypt_conf = item.instances[0].offset + 21 - conf_type = "1" - elif "$decrypt_conf_1" == item.identifier: - decrypt_conf = item.instances[0].offset - cva = 3 - conf_type = "2" - elif "$decrypt_conf_2" == item.identifier: - decrypt_conf = item.instances[0].offset - cva = 3 - conf_type = "2" - elif "$decrypt_key_1" == item.identifier: - decrypt_key = item.instances[0].offset - kva_s = 6 - elif "$decrypt_key_2" == item.identifier: - decrypt_key = item.instances[0].offset - kva_s = 3 - elif "$decrypt_key_3" == item.identifier: - decrypt_key = item.instances[0].offset - kva_s = 3 - - if conf_type == "1": - va = struct.unpack("I", filebuf[decrypt_conf : decrypt_conf + 4])[0] - key = string_from_offset(filebuf, pe.get_offset_from_rva(va - image_base)) - data_offset = pe.get_offset_from_rva(struct.unpack("I", filebuf[decrypt_conf + 5 : decrypt_conf + 9])[0] - image_base) - enc_data = filebuf[data_offset:].split(b"\0\0", 1)[0] - raw = decrypt_rc4(key, enc_data) - items = list(filter(None, raw.split(b"\x00\x00"))) - end_config["Botnet name"] = items[1].lstrip(b"\x00") - end_config["Campaign ID"] = items[2] - for item in items: - item = item.lstrip(b"\x00") - if item.startswith(b"http"): - end_config.setdefault("address", []).append(item) - elif len(item) == 16: - end_config["RC4 key"] = item - elif conf_type == "2" and decrypt_key: - conf_va = struct.unpack("I", filebuf[decrypt_conf + cva : decrypt_conf + cva + 4])[0] - conf_offset = pe.get_offset_from_rva(conf_va + pe.get_rva_from_offset(decrypt_conf) + cva + 4) - # if not conf_size: - # conf_size = struct.unpack("I", filebuf[decrypt_key + size_s : decrypt_key + size_s + 4])[0] - key_va = struct.unpack("I", filebuf[decrypt_key + kva_s : decrypt_key + kva_s + 4])[0] - key_offset = pe.get_offset_from_rva(key_va + pe.get_rva_from_offset(decrypt_key) + kva_s + 4) - key = string_from_offset(filebuf, key_offset) - conf_data = filebuf[conf_offset : conf_offset + conf_size] - raw = decrypt_rc4(key, conf_data) - items = list(filter(None, raw.split(b"\x00\x00"))) - end_config["Botnet name"] = items[0].decode("utf-8") - end_config["Campaign ID"] = items[1].decode("utf-8") - for item in items: - item = item.lstrip(b"\x00") - if item.startswith(b"http"): - end_config.setdefault("address", []).append(item.decode("utf-8")) - elif b"PUBLIC KEY" in item: - end_config["Public key"] = item.decode("utf-8").replace("\n", "") - - return end_config - - -if __name__ == "__main__": - import sys - from pathlib import Path - - log.setLevel(logging.DEBUG) - data = Path(sys.argv[1]).read_bytes() - print(extract_config(data)) diff --git a/modules/processing/parsers/CAPE/__init__.py b/modules/processing/parsers/CAPE/__init__.py deleted file mode 100644 index f39e5e8d683..00000000000 --- 
a/modules/processing/parsers/CAPE/__init__.py +++ /dev/null @@ -1 +0,0 @@ -# Init diff --git a/modules/processing/parsers/CAPE/deprecated/JavaDropper.py b/modules/processing/parsers/CAPE/deprecated/JavaDropper.py deleted file mode 100644 index af210be7b1f..00000000000 --- a/modules/processing/parsers/CAPE/deprecated/JavaDropper.py +++ /dev/null @@ -1,105 +0,0 @@ -#!/usr/bin/env python -import hashlib -import string -import zlib -from base64 import b64decode -from io import StringIO -from zipfile import ZipFile - -# Non Standard Imports -from Cryptodome.Cipher import AES, ARC4, XOR - -# Helper Functions Go Here - - -def string_print(line): - return [x for x in line if x in string.printable] - - -#### Ciphers #### -def decrypt_RC4(enckey, data): - cipher = ARC4.new(enckey) - return cipher.decrypt(data) - - -def decrypt_AES(enckey, data): - cipher = AES.new(enckey) - return cipher.decrypt(data) - - -def decrypt_XOR(enckey, data): - cipher = XOR.new(enckey) - return cipher.decrypt(data) - - -def parse_ek(key, drop): - enc_key = key[:16] - coded = drop - drop_details = key[16:] - decoded = decrypt_AES(enc_key, coded) - for section in drop_details.split(","): - print(b64decode(section).decode("hex")) - return decoded - - -def parse_load(key, drop): - raw_key = f"{key}ALSKEOPQLFKJDUSIKSJAUIE" - enc_key = hashlib.sha256(raw_key).hexdigest() - return decrypt_RC4(enc_key, drop) - - -def parse_stub(drop): - keys = ("0kwi38djuie8oq89", "0B4wCrd5N2OxG93h") - - for key in keys: - decoded = decrypt_AES(key, drop) - if "META-INF" in decoded: - print("Found Embedded Jar") - return decoded - if "Program" in decoded: - print("Found Embedded EXE") - return decoded - - -def parse_xor(key, drop): - key2 = 'FYj&w3bd"m/kSZjD' - decoded = decrypt_XOR(key2, drop) - return zlib.decompress(decoded, 16 + zlib.MAX_WBITS) - - -# Jar Parser -def extract_config(raw_data): - decoded = False - jar_data = StringIO(raw_data) - with ZipFile(jar_data, "r") as jar: - files = jar.namelist() - if "e" in files and "k" in files: - print("Found EK Dropper") - key = jar.read("k") - drop = jar.read("e") - decoded = parse_ek(key, drop) - - if "config.ini" in files and "password.ini" in files: - print("Found LoadStub Dropper") - key = jar.read("password.ini") - drop = jar.read("config.ini") - decoded = parse_load(key, drop) - - if "stub/stub.dll" in files: - print("Found Stub Dropper") - drop = jar.read("stub/stub.dll") - decoded = parse_stub(drop) - - if "c.dat" in files: - print("Found XOR Dropper") - key_file = b64decode(jar.read("c.dat")) - key_text = decrypt_XOR("\xdd", key_file) - drop_file = key_text.split("\n", 2)[1] - key = key_text.split("\n", 6)[5] - print(key) - decoded = parse_xor(key, jar.read(drop_file)) - - if decoded: - return decoded - else: - print("Unable to decode") diff --git a/modules/processing/parsers/CAPE/deprecated/Nymaim.py b/modules/processing/parsers/CAPE/deprecated/Nymaim.py deleted file mode 100644 index 9be09243a0b..00000000000 --- a/modules/processing/parsers/CAPE/deprecated/Nymaim.py +++ /dev/null @@ -1,264 +0,0 @@ -import re -import string -import struct -from io import StringIO - - -class Stream(StringIO): - def string(self): - r = [] - c = "" - while c != "\x00": - c = self.read(1) - r.append(c) - return "".join(r).strip("\x00") - - def dword(self): - return struct.unpack("I", self.read(4))[0] - - def word(self): - return struct.unpack("H", self.read(2))[0] - - def byte(self): - return struct.unpack("=B", self.read(1))[0] - - -def get_strings(d): - return re.findall("[ -~]{3,}", d) - - -indent_level 
= 0 -indent_text = " " -silent = False - - -def pprint(*text): - global indent_level, silent - if not silent: - text = " ".join(str(s) for s in text) - print(indent_level * indent_text + text) - - -def rol(x, n, b=32): - n = (b - 1) & n - return x << n | 2**n - 1 & x >> b - n - - -def chunks(data, n): - return [data[i * n : (i + 1) * n] for i in range(len(data) / n)] - - -def ror(n, bits, b=32): - return ((n & b) >> bits) | ((n << (b - bits)) & b) - - -def uint32(i): - return struct.unpack("= self.base else a - return self._data[a : (a + n) if n else None] - - -class NymaimExtractor: - - CFG_DNS = 0x1B69E661 - CFG_TTL = 0xE6F7E88D - CFG_URL = 0x6B02C248 - CFG_DGA_HASH = 0x2AA0AED9 - CFG_ENC_KEY = 0x22E60B51 - CFG_DOMAINS = 0x1D4B5D09 - CFG_RSA_KEY = 0x2F127FFB - CTG_32BIT_TMPL_1 = 0x4C1AD0BB - CTG_32BIT_TMPL_2 = 0x1AE78782 - CTG_64BIT_TMPL = 0xF34A67FF - CFT_NOTEPAD_TMPL = 0xB2EA894D - CFG_FAKE_ERROR_MSG = 0xFCCE74B6 - CFG_PEER_DOMAINS = 0xF212B5AF - - CFG_BINARY_TYPES = { - 1: "botnet_peer", - 20: "dropper", - 30: "payload", - } - - def __init__(self): - pass - - def nymaim_decrypt_data_2(self, raw, key0, key1): - """ - decrypt final config (only raw data, keys passed as parameters) - """ - prev_chr = 0 - result = "" - for c in raw: - bl = ((key0 & 0x000000FF) + prev_chr) & 0xFF - key0 = (key0 & 0xFFFFFF00) + bl - prev_chr = ord(c) ^ bl - result += chr(prev_chr) - key0 = (key0 + key1) & 0xFFFFFFFF - key0 = ((key0 & 0x00FFFFFF) << 8) + ((key0 & 0xFF000000) >> 24) - return result - - def nymaim_extract_blob(self, mem, ndx): - """ - decrypt final config (read keys and length and decrypt raw data) - """ - key0 = mem.dword(ndx) - key1 = mem.dword(ndx + 4) - len = mem.dword(ndx + 8) - return self.nymaim_decrypt_data_2(mem.read(ndx + 12, len), key0, key1) - - def nymaim_parse_blob(self, blob): - """ - decrypt and interpret config (uses hardcoded hashes) - """ - parsed = {"domains": [], "urls": [], "dns": []} - for hash, raw in NymCfgStream(blob): - try: - pprint(f"<{hash:08x}>: {raw.encode().hex() if len(raw) == 4 else raw}") - if hash == self.CFG_URL: # '48c2026b': - parsed["urls"] += [{"url": append_http(raw[20:].rstrip(";"))}] - elif hash == self.CFG_DGA_HASH: # 'd9aea02a': - parsed["dga_hash"] = [uint32(h) for h in chunks(raw, 4)] - elif hash == self.CFG_DOMAINS: # '095d4b1d': - parsed["domains"] += [{"cnc": append_http(raw[4:].rstrip(";"))}] - elif hash == self.CFG_ENC_KEY: # '510be622': - parsed["encryption_key"] = raw - elif hash == self.CFG_RSA_KEY: # 'fb7f122f': - bits = uint32(raw[:4]) - bytes = bits / 8 - d = raw[4 : 4 + bytes].encode().hex() - e = raw[4 + bytes : 4 + bytes + bytes].encode().hex() - parsed["public_key"] = { - "n": str(int(d, 16)), - "e": int(e, 16), - } - elif hash == self.CFG_TTL: # '8de8f7e6': - if len(raw) == 12: - year, month, day = uint32(raw[-4:]), uint32(raw[4:-4]), uint32(raw[:4]) - parsed["time_restriction"] = f"{year}-{month:02}-{day:02}" - else: - parsed["time_restriction"] = [raw.encode().hex()] - elif hash == self.CFG_DNS: - parsed["dns"] += raw.split(";") - elif hash == self.CTG_32BIT_TMPL_1: - parsed["template_32bit_1"] = raw - elif hash == self.CTG_32BIT_TMPL_2: - parsed["template_32bit_2"] = raw - elif hash == self.CTG_64BIT_TMPL: - parsed["template_64bit_2"] = raw - elif hash == self.CFT_NOTEPAD_TMPL: # notepad template - parsed["notepad_template"] = raw - elif hash == self.CFG_FAKE_ERROR_MSG: # fake error message, shown to user on startup - parsed["fake_error_message"] = raw - elif hash == self.CFG_PEER_DOMAINS: - parsed["domains"] += [{"cnc": x} 
for x in raw.split(";") if x] - elif (all(c in string.printable for c in raw) and len(raw) > 3) or len( - [c for c in raw if c in string.printable] - ) > 10: - if "other_strings" not in parsed: - parsed["other_strings"] = {} - parsed["other_strings"][hex(hash)] = raw.encode().hex() - except RuntimeError: - # error during parsing... - if "errored_on" not in parsed: - parsed["errored_on"] = [] - parsed["errored_on"] += [{"hash": hash, "raw": raw.encode().hex()}] - return parsed - - def nymaim_brute_blob(self, mem): - """ - bruteforce start index of config in decrypted data (decrypted data contains more than config block). - Lame, but should be stable and fast enough. - """ - for i in reversed(list(range(mem.base, mem.base + mem.dsize - 12))): - blob_len = mem.dword(i + 8) - if 100 < blob_len < 8000: - blob = self.nymaim_extract_blob(mem, i) - if "8.8.8.8" in blob or "rundll" in blob or ("~[" in blob and "]/" in blob and ":53" in blob): - return self.nymaim_parse_blob(blob) - - -def set_prog_version(m, hit, *args): - mem = m.read(hit, 100) - type_id_offset = mem.find("C745D0".decode("hex")) - binary_id_offset = mem.find("C745D4".decode("hex")) - - type_id = uint32(mem[type_id_offset + 3 : type_id_offset + 7]) - binary_id = uint32(mem[binary_id_offset + 3 : binary_id_offset + 7]) - - if type_id in NymaimExtractor.CFG_BINARY_TYPES: - type_name = NymaimExtractor.CFG_BINARY_TYPES[type_id] - else: - type_name = str(type_id) - - return { - "exe_type": type_name, - "exe_version": binary_id, - } - - -def extract_config(raw): - m = Mem(raw, 0) - ext = NymaimExtractor() - return ext.nymaim_brute_blob(m) diff --git a/modules/processing/parsers/CAPE/deprecated/PredatorPain.py b/modules/processing/parsers/CAPE/deprecated/PredatorPain.py deleted file mode 100644 index c4a1652b73b..00000000000 --- a/modules/processing/parsers/CAPE/deprecated/PredatorPain.py +++ /dev/null @@ -1,178 +0,0 @@ -from base64 import b64decode -from binascii import unhexlify - -# import pefile -import pype32 -from Cryptodome.Cipher import AES -from pbkdf2 import PBKDF2 - - -def extract_config(raw_data): - try: - pe = pype32.PE(data=raw_data) - # pe = pefile.PE(data=raw_data) - string_list = get_strings(pe, 2) - vers = get_version(string_list) - if vers == "v12": - config_dict = config_12(string_list) - elif vers == "v13": - key, salt = "PredatorLogger", unhexlify("3000390039007500370038003700390037003800370038003600") - config_dict = config_13(key, salt, string_list) - elif vers == "v14": - key, salt = "EncryptedCredentials", unhexlify("3000390039007500370038003700390037003800370038003600") - config_dict = config_14(key, salt, string_list) - else: - return False - # C2 Line is not a straight domain on this one. 
- - return config_dict or False - except Exception as e: - print("PREDATORPAIN EXTRACTOR", e) - return False - - -# Helper Functions Go Here - - -def string_clean(line): - return "".join((char for char in line if 32 < ord(char) < 127)) - - -# Cryptodome.Stuffs -def decrypt_string(key, salt, coded): - # try: - # Derive key - generator = PBKDF2(key, salt) - aes_iv = generator.read(16) - aes_key = generator.read(32) - # Crypto - mode = AES.MODE_CBC - cipher = AES.new(aes_key, mode, IV=aes_iv) - return cipher.decrypt(b64decode(coded)).replace("\x00", "") - - -# except Exception: -# return False - - -# Get a list of strings from a section -def get_strings(pe, dir_type): - string_list = [] - m = pe.ntHeaders.optionalHeader.DATA_DIRECTORY[14].info - # m = pe.NT_HEADERS.OPTIONAL_HEADER.DATA_DIRECTORY[14].dump_dict().get("VirtualAddress", {}) - for s in m.netMetaDataStreams[dir_type].info: - string_list.extend(s.values()) - return string_list - - -# Find Version -def get_version(string_list): - # Pred v12 - if "Predator Pain v12 - Server Ran - [" in string_list: - print(" [-] Found Predator Pain v12") - return "v12" - # Pred v13 - elif "Predator Pain v13 - Server Ran - [" in string_list: - print(" [-] Found Predator Pain v13") - return "v13" - # Pred v14 - elif "EncryptedCredentials" in string_list: - print(" [-] Found Predator Pain v14") - return "v14" - - -def config_12(string_list): - config_dict = { - "Version": "Predator Pain v12", - "Email Address": string_list[4], - "Email Password": string_list[5], - "SMTP Server": string_list[6], - "SMTP Port": string_list[7], - "Interval Timer": string_list[8], - "BindFile1": "False" if string_list[9].startswith("ReplaceBind") else "True", - } - - config_dict["BindFile2"] = "False" if string_list[10].startswith("ReplaceBind") else "True" - return config_dict - - -# Turn the strings in to a python config_dict -def config_13(key, salt, string_list): - """ - Identical Strings are not stored multiple times. - We need to check for duplicate passwords which mess up the positionl arguemnts. 
- """ - - if "email" in string_list[13]: - dup = True - elif "email" in string_list[14]: - dup = False - - config_dict = { - "Version": "Predator Pain v13", - "Email Address": decrypt_string(key, salt, string_list[4]), - "Email Password": decrypt_string(key, salt, string_list[5]), - "SMTP Server": decrypt_string(key, salt, string_list[6]), - "SMTP Port": string_list[7], - "Interval Timer": string_list[8], - "FTP Host": decrypt_string(key, salt, string_list[10]), - "FTP User": decrypt_string(key, salt, string_list[11]), - } - if dup: - config_dict["FTP Pass"] = decrypt_string(key, salt, string_list[5]) - config_dict["PHP Link"] = decrypt_string(key, salt, string_list[12]) - config_dict["Use Email"] = string_list[13] - config_dict["Use FTP"] = string_list[14] - config_dict["Use PHP"] = string_list[15] - config_dict["Download & Exec"] = string_list[20] - config_dict["Bound Files"] = "False" if string_list[19] == "bindfiles" else "True" - else: - config_dict["FTP Pass"] = decrypt_string(key, salt, string_list[12]) - config_dict["PHP Link"] = decrypt_string(key, salt, string_list[13]) - config_dict["Use Email"] = string_list[14] - config_dict["Use FTP"] = string_list[15] - config_dict["Use PHP"] = string_list[16] - config_dict["Download & Exec"] = string_list[21] - config_dict["Bound Files"] = "False" if string_list[20] == "bindfiles" else True - return config_dict - - -# Turn the strings in to a python config_dict -def config_14(key, salt, string_list): - """ - Identical Strings are not stored multiple times. - possible pass and date dupes make it harder to test - """ - - # date Duplicate - if "email" in string_list[18]: - dup = True - elif "email" in string_list[19]: - dup = False - - config_dict = { - "Version": "Predator Pain v14", - "Email Address": decrypt_string(key, salt, string_list[4]), - "Email Password": decrypt_string(key, salt, string_list[5]), - "SMTP Server": decrypt_string(key, salt, string_list[6]), - "SMTP Port": string_list[7], - "Interval Timer": string_list[8], - "FTP Host": decrypt_string(key, salt, string_list[12]), - "FTP User": decrypt_string(key, salt, string_list[13]), - "FTP Pass": decrypt_string(key, salt, string_list[14]), - "PHP Link": decrypt_string(key, salt, string_list[15]), - } - if dup: - config_dict["PHP Link"] = decrypt_string(key, salt, string_list[15]) - config_dict["Use Email"] = string_list[18] - config_dict["Use FTP"] = string_list[19] - config_dict["Use PHP"] = string_list[20] - config_dict["Download & Exec"] = string_list[25] - config_dict["Bound Files"] = "False" if string_list[24] == "bindfiles" else "True" - else: - config_dict["Use Email"] = string_list[19] - config_dict["Use FTP"] = string_list[20] - config_dict["Use PHP"] = string_list[21] - config_dict["Download & Exec"] = string_list[26] - config_dict["Bound Files"] = "False" if string_list[25] == "bindfiles" else "True" - return config_dict diff --git a/modules/processing/parsers/CAPE/deprecated/_ShadowTech.py b/modules/processing/parsers/CAPE/deprecated/_ShadowTech.py deleted file mode 100644 index 262291ef7de..00000000000 --- a/modules/processing/parsers/CAPE/deprecated/_ShadowTech.py +++ /dev/null @@ -1,117 +0,0 @@ -#!/usr/bin/env python -""" -ShadowTech Config Extractor -""" - -import re -import string - -import createIOC -import database - -new_line = "#-@NewLine@-#" -split_string = "ESILlzCwXBSrQ1Vb72t6bIXtKRzHJkolNNL94gD8hIi9FwLiiVlrznTz68mkaaJQQSxJfdLyE4jCnl5QJJWuPD4NeO4WFYURvmkth8" -enc_key = "pSILlzCwXBSrQ1Vb72t6bIXtKRzAHJklNNL94gD8hIi9FwLiiVlr" # Actual key is "KeY11PWD24" - - 
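The deprecated `_ShadowTech` module whose deletion begins here recovers its configuration by splitting the payload on a long marker string and XOR-decoding each hex-encoded field against a hard-coded key offset by one character (see `get_config` below, and the comment noting the actual key is "KeY11PWD24"). A minimal Python 3 sketch of that decode, using a made-up key and plaintext purely for illustration, could look like this:

```python
def xor_hex_field(field_hex: str, key: str) -> str:
    # Decode a hex-pair encoded field by XOR'ing each byte against the key,
    # starting at key index 1 to mirror the off-by-one indexing in get_config.
    out = []
    for i in range(len(field_hex) // 2):
        byte = int(field_hex[i * 2 : i * 2 + 2], 16)
        out.append(chr(byte ^ ord(key[i + 1])))
    return "".join(out)


# Hypothetical round trip with a made-up key/plaintext (not the real embedded key):
key = "xKeY11PWD24"
plain = "10.0.0.1"
encoded = "".join(f"{ord(c) ^ ord(key[i + 1]):02x}" for i, c in enumerate(plain))
assert xor_hex_field(encoded, key) == plain
```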
-# Helper Functions Go Here - - -def string_print(line): - return [x for x in line if x in string.printable] - - -def get_config(data): - config_list = [] - config_string = data.split(split_string) - for x in range(1, len(config_string)): - try: - output = "" - hex_pairs = [config_string[x][i : i + 2] for i in range(0, len(config_string[x]), 2)] - for i in range(len(config_string[x]) // 2): - data_slice = int(hex_pairs[i], 16) # get next hex value - key_slice = ord(enc_key[i + 1]) # get next Char For Key - output += chr(data_slice ^ key_slice) # xor Hex and Key Char - print(output) - except Exception: - output = "DecodeError" - config_list.append(output) - return config_list - - -# returns pretty config -def parse_config(config_list): - return { - "Domain": config_list[0], - "Port": config_list[1], - "CampaignID": config_list[2], - "Password": config_list[3], - "InstallFlag": config_list[4], - "RegistryKey": config_list[5], - "Melt": config_list[6], - "Persistance": config_list[7], - "Mutex": config_list[8], - "ShowMsgBox": config_list[9], - # "Flag5": config_list[10] # MsgBox Icon, - # "Flag6": config_list[11] # MsgBox Buttons, - "MsgBoxTitle": config_list[12], - "MsgBoxText": config_list[13], - } - - -""" -def decrypt_XOR(enckey, data): - # ToDo fix it yourself, XOR not defined - cipher = XOR.new(enckey) # set the cipher - return cipher.decrypt(data) # decrpyt the data -""" - - -def snortRule(md5, config_dict): - rules = [] - domain = config_dict["Domain"] - ipPattern = re.compile(r"\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3}") - ipTest = ipPattern.search(domain) - if len(domain) > 1: - if ipTest: - rules.append( - f"""alert tcp any any -> {domain}""" - f""" any (msg: "ShadowTech Beacon Domain: {domain}""" - """"; classtype:trojan-activity; sid:5000000; rev:1; priority:1; reference:url,http://malwareconfig.com;)""" - ) - else: - rules.append( - f"""alert udp any any -> any 53 (msg: "ShadowTech Beacon Domain: {domain}""" - f""""; content:"|0e|{domain}""" - """|00|"; nocase; classtype:trojan-activity; sid:5000000; rev:1; priority:1; reference:url,http://malwareconfig.com;)""" - ) - rules.append( - f"""alert tcp any any -> any 53 (msg: "ShadowTech Beacon Domain: {domain}""" - f""""; content:"|0e|{domain}""" - """|00|"; nocase; classtype:trojan-activity; sid:5000000; rev:1; priority:1; reference:url,http://malwareconfig.com;)""" - ) - database.insertSnort(md5, rules) - - -# IOC Creator Two elements Domain or install -def generateIOC(md5, config_dict): - items = [ - [ - ("is", "PortItem", "PortItem/remotePort", "string", config_dict["Port"]), - ("contains", "Network", "Network/DNS", "string", config_dict["Domain"]), - ] - ] - IOC = createIOC.main(items, "ShadowTech", md5) - database.insertIOC(md5, IOC) - - -def run(md5, data): - raw_config = get_config(data) - - # lets Process this and format the config - config_dict = parse_config(raw_config) - if len(config_dict["Domain"]) > 0: - snortRule(md5, config_dict) - generateIOC(md5, config_dict) - database.insertDomain(md5, [config_dict["Domain"]]) - return config_dict diff --git a/modules/processing/parsers/CAPE/deprecated/_VirusRat.py b/modules/processing/parsers/CAPE/deprecated/_VirusRat.py deleted file mode 100644 index 108bac4e239..00000000000 --- a/modules/processing/parsers/CAPE/deprecated/_VirusRat.py +++ /dev/null @@ -1,69 +0,0 @@ -import re - -import database -import ioc - - -def run(md5, data): - config_dict = {} - config = data.split("abccba") - if len(config) > 5: - config_dict = { - "Domain": config[1], - "Port": config[2], - "Campaign Name": 
config[3], - "Copy StartUp": config[4], - "StartUp Name": config[5], - "Add To Registry": config[6], - "Registry Key": config[7], - "Melt + Inject SVCHost": config[8], - "Anti Kill Process": config[9], - "USB Spread": config[10], - "Kill AVG 2012-2013": config[11], - "Kill Process Hacker": config[12], - "Kill Process Explorer": config[13], - "Kill NO-IP": config[14], - "Block Virus Total": config[15], - "Block Virus Scan": config[16], - "HideProcess": config[17], - } - snortRule(md5, config_dict) - createIOC(md5, config_dict) - database.insertDomain(md5, [config_dict["Domain"]]) - return config_dict - - -def snortRule(md5, config_dict): - rules = [] - domain = config_dict["Domain"] - ipPattern = re.compile(r"\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3}") - ipTest = ipPattern.search(domain) - if len(domain) > 1: - if ipTest: - rules.append( - f"""alert tcp any any -> {domain}""" - f""" any (msg: "VirusRat Beacon Domain: {domain}""" - """"; classtype:trojan-activity; sid:5000000; rev:1; priority:1; reference:url,http://malwareconfig.com;)""" - ) - else: - rules.append( - f"""alert udp any any -> any 53 (msg: "VirusRat Beacon Domain: {domain}""" - f""""; content:"|0e|{domain}""" - """|00|"; nocase; classtype:trojan-activity; sid:5000000; rev:1; priority:1; reference:url,http://malwareconfig.com;)""", - f"""alert tcp any any -> any 53 (msg: "VirusRat Beacon Domain: {domain}""" - f""""; content:"|0e|{domain}""" - """|00|"; nocase; classtype:trojan-activity; sid:5000000; rev:1; priority:1; reference:url,http://malwareconfig.com;)""", - ) - database.insertSnort(md5, rules) - - -# IOC Creator Two elements Domain or install -def createIOC(md5, config_dict): - items = [ - ("contains", "Network", "Network/DNS", "string", config_dict["Domain"]), - ("is", "PortItem", "PortItem/remotePort", "string", config_dict["Port"]), - ("is", "ProcessItem", "ProcessItem/name", "string", config_dict["StartUp Name"]), - ("is", "RegistryItem", "RegistryItem/Value", "string", config_dict["Registry Key"]), - ] - IOC = ioc.main(items) - database.insertIOC(md5, IOC) diff --git a/modules/processing/parsers/CAPE/deprecated/_jRat.py b/modules/processing/parsers/CAPE/deprecated/_jRat.py deleted file mode 100644 index 3d143a7310e..00000000000 --- a/modules/processing/parsers/CAPE/deprecated/_jRat.py +++ /dev/null @@ -1,196 +0,0 @@ -import re -from base64 import b64decode -from io import StringIO -from zipfile import ZipFile - -import database -from Cryptodome.Cipher import AES, DES3 - - -def run(md5, data): - print("[+] Extracting Data from Jar") - enckey, conf = get_parts(data) - if enckey is None: - return - print(f"[+] Decoding Config with Key: {enckey.encode().hex()}") - if len(enckey) == 16: - # Newer versions use a base64 encoded config.dat - # this is not a great test but should work 99% of the time - decrypt_func = new_aes if "==" in conf else old_aes - raw_config = decrypt_func(conf, enckey) - elif len(enckey) == 32: - raw_config = old_des(conf, enckey) - config_dict = parse_config(raw_config, enckey) - snortRule(md5, config_dict) - database.insertDomain(md5, [config_dict["Domain"]]) - return config_dict - - -# Helper Functions Go Here - - -# This extracts the Encryption Key and Config File from the Jar and or Dropper -def get_parts(data): - new_zip = StringIO(data) - enckey = None - dropper = None - conf = None - try: - with ZipFile(new_zip, "r") as zip: - for name in zip.namelist(): # get all the file names - if name == "key.dat": # this file contains the encrytpion key - enckey = zip.read(name) - elif name == "enc.dat": # if 
this file exists, jrat has an installer / dropper - dropper = zip.read(name) - elif name == "config.dat": # this is the encrypted config file - conf = zip.read(name) - except Exception: - print(f"[+] Dropped File is not Jar File starts with Hex Chars: {data[:5].encode().hex()}") - return None, None - if enckey and conf: - return enckey, conf - elif enckey and dropper: - newkey, conf = get_dropper(enckey, dropper) - return newkey, conf - return None, None - - -# This extracts the Encryption Key and New conf from a 'Dropper' jar -def get_dropper(enckey, dropper): - split = enckey.split("\x2c") - key = split[0][:16] - print("[+] Dropper Detected") - for x in split: # grab each line of the config and decode it. - try: - drop = b64decode(x).decode("hex") - print(f" [-] {drop}".replace("\x0d\x0a", "")) - except Exception: - drop = b64decode(x[16:]).decode("hex") - print(f" [-] {drop}") - new_zipdata = decrypt_aes(key, dropper) - new_key, conf = get_parts(new_zipdata) - return new_key, conf - - -# Returns only printable chars -def string_print(line): - return "".join((char for char in line if 32 < ord(char) < 127)) - - -# Messy Messy Messy -def messy_split(long_line): - # this is a messy way to split the data but it works for now. - """ - Split on = gives me the right sections but deletes the b64 padding - use modulo math to restore padding. - return new list. - """ - new_list = [] - old_list = long_line.split("=") - for line in old_list: - if len(line) != 0: - line += "=" * ((4 - len(line) % 4) % 4) - new_list.append(line) - return new_list - - -# AES Decrypt -def decrypt_aes(enckey, data): - cipher = AES.new(enckey) # set the cipher - return cipher.decrypt(data) # decrpyt the data - - -# DES Decrypt -def decrypt_des(enckey, data): - cipher = DES3.new(enckey) # set the ciper - return cipher.decrypt(data) # decrpyt the data - - -# Process Versions 3.2.2 > 4.2. -def old_aes(conf, enckey): - decoded_config = decrypt_aes(enckey, conf) - clean_config = string_print(decoded_config) - return clean_config.split("SPLIT") - - -# Process versions 4.2. 
> -def new_aes(conf, enckey): - sections = messy_split(conf) - decoded_config = "".join(decrypt_aes(enckey, b64decode(x)) for x in sections) - return string_print(decoded_config).split("SPLIT") - - -# process versions < 3.2.2 -def old_des(conf, enckey): - decoded_config = decrypt_des(conf, enckey) - clean_config = string_print(decoded_config) - return clean_config.split("SPLIT") - - -def parse_config(raw_config, enckey): - config_dict = {} - for kv in raw_config: - if kv == "": - continue - kv = string_print(kv) - key, value = kv.split("=") - if key == "ip": - config_dict["Domain"] = value - elif key == "port": - config_dict["Port"] = value - elif key == "os": - config_dict["OS"] = value - elif key == "mport": - config_dict["MPort"] = value - elif key == "perms": - config_dict["Perms"] = value - elif key == "error": - config_dict["Error"] = value - elif key == "reconsec": - config_dict["RetryInterval"] = value - elif key == "ti": - config_dict["TI"] = value - elif key == "pass": - config_dict["Password"] = value - elif key == "id": - config_dict["CampaignID"] = value - elif key == "mutex": - config_dict["Mutex"] = value - elif key == "toms": - config_dict["TimeOut"] = value - elif key == "per": - config_dict["Persistance"] = value - elif key == "name": - config_dict["Name"] = value - elif key == "tiemout": - config_dict["TimeOutFlag"] = value - elif key == "debugmsg": - config_dict["DebugMsg"] = value - config_dict["EncryptionKey"] = enckey - return config_dict - - -def snortRule(md5, conf): - rules = [] - domain = conf["Domain"] - ipPattern = re.compile(r"\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3}") - ipTest = ipPattern.search(domain) - if len(domain) > 1: - if ipTest: - rules.append( - f"""alert tcp any any -> {domain}""" - f""" any (msg: "jRat Beacon Domain: {domain}""" - """"; classtype:trojan-activity; sid:5000000; rev:1; priority:1; reference:url,http://malwareconfig.com;)""" - ) - else: - rules.append( - f"""alert udp any any -> any 53 (msg: "jRat Beacon Domain: {domain}""" - f""""; content:"|0e|{domain}""" - """|00|"; nocase; classtype:trojan-activity; sid:5000000; rev:1; priority:1; reference:url,http://malwareconfig.com;)""" - ) - rules.append( - f"""alert tcp any any -> any 53 (msg: "jRat Beacon Domain: {domain}""" - f""""; content:"|0e|{domain}""" - """|00|"; nocase; classtype:trojan-activity; sid:5000000; rev:1; priority:1; reference:url,http://malwareconfig.com;)""" - ) - database.insertSnort(md5, rules) diff --git a/modules/processing/parsers/CAPE/deprecated/unrecom.py b/modules/processing/parsers/CAPE/deprecated/unrecom.py deleted file mode 100644 index bfc02371e8a..00000000000 --- a/modules/processing/parsers/CAPE/deprecated/unrecom.py +++ /dev/null @@ -1,64 +0,0 @@ -import string -import xml.etree.ElementTree as ET -from io import StringIO -from zipfile import ZipFile - -from Cryptodome.Cipher import ARC4 - - -def extract_embedded(zip_data): - raw_embedded = None - archive = StringIO(zip_data) - with ZipFile(archive) as zip: - for name in zip.namelist(): # get all the file names - if name == "load/ID": # contains first part of key - partial_key = zip.read(name) - enckey = f"{partial_key}DESW7OWKEJRU4P2K" # complete key - if name == "load/MANIFEST.MF": # this is the embedded jar - raw_embedded = zip.read(name) - if raw_embedded is None: - return None - # Decrypt the raw file - return ARC4.new(enckey).decrypt(raw_embedded) - - -def parse_embedded(data): - newzipdata = data - # Write new zip file to memory instead of to disk - with StringIO(newzipdata) as newZip: - with 
ZipFile(newZip) as zip: - for name in zip.namelist(): - if name == "config.xml": # this is the config in clear - config = zip.read(name) - return config - - -def parse_config(config): - xml = [x for x in config if x in string.printable] - root = ET.fromstring(xml) - raw_config = {} - for child in root: - if child.text.startswith("Unrecom"): - raw_config["Version"] = child.text - else: - raw_config[child.attrib["key"]] = child.text - return { - "Version": raw_config["Version"], - "Delay": raw_config["delay"], - "Domain": raw_config["dns"], - "Extension": raw_config["extensionname"], - "Install": raw_config["install"], - "Port1": raw_config["p1"], - "Port2": raw_config["p2"], - "Password": raw_config["password"], - "PluginFolder": raw_config["pluginfoldername"], - "Prefix": raw_config["prefix"], - } - - -def extract_config(data): - embedded = extract_embedded(data) - if embedded is None: - return None - config = parse_embedded(embedded) - return parse_config(config) if config is not None else None diff --git a/modules/processing/parsers/CAPE/deprecated/xRAT.py b/modules/processing/parsers/CAPE/deprecated/xRAT.py deleted file mode 100644 index 34bc46ddca7..00000000000 --- a/modules/processing/parsers/CAPE/deprecated/xRAT.py +++ /dev/null @@ -1,114 +0,0 @@ -import hashlib -import re -from base64 import b64decode - -import pefile -from Cryptodome.Cipher import AES, XOR - - -def string_print(line): - return "".join((char for char in line if 32 < ord(char) < 127)) - - -def parse_config(config_list, ver): - config_dict = {} - if ver == "V1": - config_dict["Version"] = "1.0.x" - config_dict["Domain"] = config_list[1] - config_dict["Port"] = config_list[2] - config_dict["Password"] = config_list[3] - config_dict["CampaignID"] = config_list[4] - config_dict["InstallName"] = config_list[5] - config_dict["HKCUKey"] = config_list[6] - config_dict["InstallDir"] = config_list[7] - config_dict["Flag1"] = config_list[8] - config_dict["Flag2"] = config_list[9] - config_dict["Mutex"] = config_list[10] - if ver == "V2": - config_dict["Version"] = config_list[0] - config_dict["Domain"] = config_list[1] - config_dict["Password"] = config_list[2] - config_dict["InstallSub"] = config_list[3] - config_dict["InstallName"] = config_list[4] - config_dict["Mutex"] = config_list[5] - config_dict["RegistryKey"] = config_list[6] - return config_dict - - -def get_long_line(data): - try: - raw_config = None - pe = pefile.PE(data=data) - for entry in pe.DIRECTORY_ENTRY_RESOURCE.entries: - if str(entry.name) == "RT_RCDATA": - new_dirs = entry.directory - for entry in new_dirs.entries: - if str(entry.name) == "0": - data_rva = entry.directory.entries[0].data.struct.OffsetToData - size = entry.directory.entries[0].data.struct.Size - data = pe.get_memory_mapped_image()[data_rva : data_rva + size] - raw_config = data - except Exception: - raw_config = None - if raw_config is not None: - return raw_config, "V1" - try: - m = re.search("\x69\x00\x6F\x00\x6E\x00\x00\x59(.*)\x6F\x43\x00\x61\x00\x6E", data) - raw_config = m.group(0)[4:-12] - return raw_config, "V2" - except Exception: - return None, None - - -def decrypt_XOR(enckey, data): - cipher = XOR.new(enckey) # set the cipher - return cipher.decrypt(data) # decrpyt the data - - -# decrypt function -def decrypt_aes(enckey, data): - iv = data[:16] - cipher = AES.new(enckey, AES.MODE_CBC, iv) # set the cipher - return cipher.decrypt(data[16:]) # decrpyt the data - - -# converts the enc key to an md5 key -def aes_key(enc_key): - return hashlib.md5(enc_key).hexdigest().decode("hex") 
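The deprecated xRAT parser above derives its AES-CBC key by MD5-hashing the trailing base64 blob, relying on Python 2's `hexdigest().decode("hex")` idiom. A Python 3 sketch of the same derivation (the input bytes below are made up) might be:

```python
import hashlib


def aes_key(enc_key: bytes) -> bytes:
    # Python 2 original: hashlib.md5(enc_key).hexdigest().decode("hex")
    # In Python 3, digest() yields the same 16 raw bytes directly.
    return hashlib.md5(enc_key).digest()


# Hypothetical usage with made-up key material:
key = aes_key(b"example-key-material")
assert len(key) == 16  # AES-128 key, as fed to AES.new(key, AES.MODE_CBC, iv)
```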
- - -# This will split all the b64 encoded strings and the encryption key -def get_parts(long_line): - coded_config = [] - raw_line = long_line - small_lines = raw_line.split("\x00\x00") - for line in small_lines: - new_line = line[1:] if len(line) % 2 == 0 else line[2:] - coded_config.append(new_line.replace("\x00", "")) - return coded_config - - -def extract_config(data): - long_line, ver = get_long_line(data) - if ver is None: - return - config_list = [] - if ver == "V1": - # The way the XOR Cypher was implemented the keys are off by 1. - key1 = "RAT11x" # Used for First level of encryption actual key is 'xRAT11' - key2 = "eY11K" # used for individual sections, actual key is 'KeY11' - key3 = "eY11PWD24K" # used for password section only. Actual key is 'KeY11PWD24' - config = long_line.decode("hex") - first_decode = decrypt_XOR(key1, config) - sections = first_decode.split("|//\\\\|") # Split is |//\\| the extra \\ are for escaping. - for i, section in enumerate(sections): - enc_key = key3 if i == 3 else key2 - config_list.append(decrypt_XOR(enc_key, section.decode("hex"))) - elif ver == "V2": - coded_lines = get_parts(long_line) - enc_key = aes_key(coded_lines[-1]) - for i in range(1, (len(coded_lines) - 1)): - decoded_line = b64decode(coded_lines[i]) - decrypt_line = decrypt_aes(enc_key, decoded_line) - config_list.append(string_print(decrypt_line)) - return parse_config(config_list, ver) diff --git a/modules/processing/parsers/CAPE/test_cape.py b/modules/processing/parsers/CAPE/test_cape.py deleted file mode 100644 index f9190c3c306..00000000000 --- a/modules/processing/parsers/CAPE/test_cape.py +++ /dev/null @@ -1,2 +0,0 @@ -def extract_config(): - pass diff --git a/modules/processing/parsers/MACO/AgentTesla.py b/modules/processing/parsers/MACO/AgentTesla.py deleted file mode 100644 index 04615864ac3..00000000000 --- a/modules/processing/parsers/MACO/AgentTesla.py +++ /dev/null @@ -1,64 +0,0 @@ -import os - -from maco.extractor import Extractor -from maco.model import ExtractorModel as MACOModel - -from modules.processing.parsers.CAPE.AgentTesla import extract_config - - -def convert_to_MACO(raw_config: dict) -> MACOModel: - if not raw_config: - return - - protocol = raw_config.get("Protocol") - if not protocol: - return - - parsed_result = MACOModel(family="AgentTesla", other=raw_config) - if protocol == "Telegram": - parsed_result.http.append(MACOModel.Http(uri=raw_config["C2"], password=raw_config["Password"], usage="c2")) - - elif protocol in ["HTTP(S)", "Discord"]: - parsed_result.http.append(MACOModel.Http(uri=raw_config["C2"], usage="c2")) - - elif protocol == "FTP": - parsed_result.ftp.append( - MACOModel.FTP( - username=raw_config["Username"], - password=raw_config["Password"], - hostname=raw_config["C2"].replace("ftp://", ""), - usage="c2", - ) - ) - - elif protocol == "SMTP": - smtp = dict( - username=raw_config["Username"], - password=raw_config["Password"], - hostname=raw_config["C2"], - mail_to=[raw_config["EmailTo"]], - usage="c2", - ) - if "Port" in raw_config: - smtp["port"] = raw_config["Port"] - parsed_result.smtp.append(MACOModel.SMTP(**smtp)) - - if "Persistence_Filename" in raw_config: - parsed_result.paths.append(MACOModel.Path(path=raw_config["Persistence_Filename"], usage="storage")) - - if "ExternalIPCheckServices" in raw_config: - for service in raw_config["ExternalIPCheckServices"]: - parsed_result.http.append(MACOModel.Http(uri=service, usage="other")) - - return parsed_result - - -class AgentTesla(Extractor): - author = "kevoreilly" - family = 
"AgentTesla" - last_modified = "2024-10-20" - sharing = "TLP:CLEAR" - yara_rule = open(os.path.join(os.path.dirname(__file__).split("/modules", 1)[0], f"data/yara/CAPE/{family}.yar")).read() - - def run(self, stream, matches): - return convert_to_MACO(extract_config(stream.read())) diff --git a/modules/processing/parsers/MACO/AsyncRAT.py b/modules/processing/parsers/MACO/AsyncRAT.py deleted file mode 100644 index 6f64368cda7..00000000000 --- a/modules/processing/parsers/MACO/AsyncRAT.py +++ /dev/null @@ -1,51 +0,0 @@ -import os - -from maco.extractor import Extractor -from maco.model import ExtractorModel as MACOModel - -from modules.processing.parsers.CAPE.AsyncRAT import extract_config - - -def convert_to_MACO(raw_config: dict) -> MACOModel: - if not raw_config: - return - - parsed_result = MACOModel(family="AsyncRAT", other=raw_config) - - # Mutex - parsed_result.mutex.append(raw_config["Mutex"]) - - # Version - parsed_result.version = raw_config["Version"] - - # Was persistence enabled? - if raw_config["Install"] == "true": - parsed_result.capability_enabled.append("persistence") - else: - parsed_result.capability_disabled.append("persistence") - - # Installation Path - if raw_config.get("Folder"): - parsed_result.paths.append(MACOModel.Path(path=os.path.join(raw_config["Folder"], raw_config["Filename"]), usage="install")) - - # C2s - for i in range(len(raw_config.get("C2s", []))): - parsed_result.http.append(MACOModel.Http(hostname=raw_config["C2s"][i], port=int(raw_config["Ports"][i]), usage="c2")) - # Pastebin - if raw_config.get("Pastebin") not in ["null", None]: - # TODO: Is it used to download the C2 information if not embedded? - # Ref: https://www.netskope.com/blog/asyncrat-using-fully-undetected-downloader - parsed_result.http.append(MACOModel.Http(uri=raw_config["Pastebin"], usage="download")) - - return parsed_result - - -class AsyncRAT(Extractor): - author = "kevoreilly" - family = "AsyncRAT" - last_modified = "2024-10-26" - sharing = "TLP:CLEAR" - yara_rule = open(os.path.join(os.path.dirname(__file__).split("/modules", 1)[0], f"data/yara/CAPE/{family}.yar")).read() - - def run(self, stream, matches): - return convert_to_MACO(extract_config(stream.read())) diff --git a/modules/processing/parsers/MACO/AuroraStealer.py b/modules/processing/parsers/MACO/AuroraStealer.py deleted file mode 100644 index 1a63055f07e..00000000000 --- a/modules/processing/parsers/MACO/AuroraStealer.py +++ /dev/null @@ -1,29 +0,0 @@ -import os - -from maco.extractor import Extractor -from maco.model import ExtractorModel as MACOModel - -from modules.processing.parsers.CAPE.AuroraStealer import extract_config - - -def convert_to_MACO(raw_config: dict): - if not raw_config: - return None - - parsed_result = MACOModel(family="AuroraStealer", other=raw_config) - if raw_config.get("C2"): - # IP related to C2 - parsed_result.http.append(MACOModel.Http(hostname=raw_config["C2"], usage="c2")) - - return parsed_result - - -class AuroraStealer(Extractor): - author = "kevoreilly" - family = "AuroraStealer" - last_modified = "2024-10-26" - sharing = "TLP:CLEAR" - yara_rule = open(os.path.join(os.path.dirname(__file__).split("/modules", 1)[0], f"data/yara/CAPE/{family}.yar")).read() - - def run(self, stream, matches): - return convert_to_MACO(extract_config(stream.read())) diff --git a/modules/processing/parsers/MACO/Azorult.py b/modules/processing/parsers/MACO/Azorult.py deleted file mode 100644 index 4b462eacd74..00000000000 --- a/modules/processing/parsers/MACO/Azorult.py +++ /dev/null @@ -1,22 +0,0 @@ -from 
maco.extractor import Extractor -from maco.model import ExtractorModel as MACOModel - -from modules.processing.parsers.CAPE.Azorult import extract_config, rule_source - - -def convert_to_MACO(raw_config: dict): - if not raw_config: - return None - - return MACOModel(family="Azorult", http=[MACOModel.Http(hostname=raw_config["address"])], other=raw_config) - - -class Azorult(Extractor): - author = "kevoreilly" - family = "Azorult" - last_modified = "2024-10-26" - sharing = "TLP:CLEAR" - yara_rule = rule_source - - def run(self, stream, matches): - return convert_to_MACO(extract_config(stream.read())) diff --git a/modules/processing/parsers/MACO/BackOffLoader.py b/modules/processing/parsers/MACO/BackOffLoader.py deleted file mode 100644 index 155fe0d8b9a..00000000000 --- a/modules/processing/parsers/MACO/BackOffLoader.py +++ /dev/null @@ -1,33 +0,0 @@ -from maco.extractor import Extractor -from maco.model import ExtractorModel as MACOModel - -from modules.processing.parsers.CAPE.BackOffLoader import extract_config - - -def convert_to_MACO(raw_config: dict): - if not raw_config: - return None - - parsed_result = MACOModel(family="BackOffLoader", other=raw_config) - - # Version - parsed_result.version = raw_config["Version"] - - # Encryption details - parsed_result.encryption.append( - MACOModel.Encryption(algorithm="rc4", key=raw_config["EncryptionKey"], seed=raw_config["RC4Seed"]) - ) - for url in raw_config["URLs"]: - parsed_result.http.append(MACOModel.Http(url=url)) - - return parsed_result - - -class BackOffLoader(Extractor): - author = "kevoreilly" - family = "BackOffLoader" - last_modified = "2024-10-26" - sharing = "TLP:CLEAR" - - def run(self, stream, matches): - return convert_to_MACO(extract_config(stream.read())) diff --git a/modules/processing/parsers/MACO/BackOffPOS.py b/modules/processing/parsers/MACO/BackOffPOS.py deleted file mode 100644 index 2dfd7b89bbd..00000000000 --- a/modules/processing/parsers/MACO/BackOffPOS.py +++ /dev/null @@ -1,33 +0,0 @@ -from maco.extractor import Extractor -from maco.model import ExtractorModel as MACOModel - -from modules.processing.parsers.CAPE.BackOffPOS import extract_config - - -def convert_to_MACO(raw_config: dict): - if not raw_config: - return None - - parsed_result = MACOModel(family="BackOffPOS", other=raw_config) - - # Version - parsed_result.version = raw_config["Version"] - - # Encryption details - parsed_result.encryption.append( - MACOModel.Encryption(algorithm="rc4", key=raw_config["EncryptionKey"], seed=raw_config["RC4Seed"]) - ) - for url in raw_config["URLs"]: - parsed_result.http.append(MACOModel.Http(url=url)) - - return parsed_result - - -class BackOffPOS(Extractor): - author = "kevoreilly" - family = "BackOffPOS" - last_modified = "2024-10-26" - sharing = "TLP:CLEAR" - - def run(self, stream, matches): - return convert_to_MACO(extract_config(stream.read())) diff --git a/modules/processing/parsers/MACO/BitPaymer.py b/modules/processing/parsers/MACO/BitPaymer.py deleted file mode 100644 index 34d0590fb08..00000000000 --- a/modules/processing/parsers/MACO/BitPaymer.py +++ /dev/null @@ -1,29 +0,0 @@ -from maco.extractor import Extractor -from maco.model import ExtractorModel as MACOModel - -from modules.processing.parsers.CAPE.BitPaymer import extract_config, rule_source - - -def convert_to_MACO(raw_config: dict): - if not raw_config: - return None - - parsed_result = MACOModel(family="BitPaymer", other=raw_config) - - # Extracted strings - parsed_result.decoded_strings = raw_config["strings"] - - # Encryption details - 
parsed_result.encryption.append(MACOModel.Encryption(algorithm="rsa", public_key=raw_config["RSA public key"])) - return parsed_result - - -class BitPaymer(Extractor): - author = "kevoreilly" - family = "BitPaymer" - last_modified = "2024-10-26" - sharing = "TLP:CLEAR" - yara_rule = rule_source - - def run(self, stream, matches): - return convert_to_MACO(extract_config(stream.read())) diff --git a/modules/processing/parsers/MACO/BlackDropper.py b/modules/processing/parsers/MACO/BlackDropper.py deleted file mode 100644 index da619a6cbc4..00000000000 --- a/modules/processing/parsers/MACO/BlackDropper.py +++ /dev/null @@ -1,32 +0,0 @@ -import os - -from maco.extractor import Extractor -from maco.model import ExtractorModel as MACOModel - -from modules.processing.parsers.CAPE.BlackDropper import extract_config - - -def convert_to_MACO(raw_config: dict): - if not raw_config: - return None - - parsed_result = MACOModel(family="BlackDropper", campaign_id=[raw_config["campaign"]], other=raw_config) - - for dir in raw_config.get("directories", []): - parsed_result.paths.append(MACOModel.Path(path=dir)) - - for url in raw_config.get("urls", []): - parsed_result.http.append(MACOModel.Http(uri=url)) - - return parsed_result - - -class BlackDropper(Extractor): - author = "kevoreilly" - family = "BlackDropper" - last_modified = "2024-10-26" - sharing = "TLP:CLEAR" - yara_rule = open(os.path.join(os.path.dirname(__file__).split("/modules", 1)[0], f"data/yara/CAPE/{family}.yar")).read() - - def run(self, stream, matches): - return convert_to_MACO(extract_config(stream.read())) diff --git a/modules/processing/parsers/MACO/BlackNix.py b/modules/processing/parsers/MACO/BlackNix.py deleted file mode 100644 index 70408d7828e..00000000000 --- a/modules/processing/parsers/MACO/BlackNix.py +++ /dev/null @@ -1,66 +0,0 @@ -import os - -from maco.extractor import Extractor -from maco.model import ExtractorModel as MACOModel - -from modules.processing.parsers.CAPE.BlackNix import extract_config - - -def convert_to_MACO(raw_config: dict): - if not raw_config: - return None - - parsed_result = MACOModel(family="BlackNix", other=raw_config) - - # Mutex - parsed_result.mutex.append(raw_config["Mutex"]) - - # Capabilities that are enabled/disabled - # TODO: Review if these are all capabilities set by a boolean flag - for capa in [ - "Anti Sandboxie", - "Kernel Mode Unhooking", - "User Mode Unhooking", - "Melt Server", - "Offline Screen Capture", - "Offline Keylogger", - "Copy to ADS", - "Safe Mode Startup", - "Inject winlogon.exe", - "Active X Run", - "Registry Run", - ]: - if raw_config[capa].lower() == "true": - parsed_result.capability_enabled.append(capa) - else: - parsed_result.capability_disabled.append(capa) - - # Delay Time - parsed_result.sleep_delay = raw_config["Delay Time"] - - # Password - parsed_result.password.append(raw_config["Password"]) - - # C2 Domain - parsed_result.http.append(MACOModel.Http(hostname=raw_config["Domain"], usage="c2")) - # Registry - parsed_result.registry.append(MACOModel.Registry(key=raw_config["Registry Key"])) - - # Install Path - parsed_result.paths.append( - MACOModel.Path(path=os.path.join(raw_config["Install Path"], raw_config["Install Name"]), usage="install") - ) - - # Campaign Group/Name - parsed_result.campaign_id = [raw_config["Campaign Name"], raw_config["Campaign Group"]] - return parsed_result - - -class BlackNix(Extractor): - author = "kevoreilly" - family = "BlackNix" - last_modified = "2024-10-26" - sharing = "TLP:CLEAR" - - def run(self, stream, matches): - return 
convert_to_MACO(extract_config(stream.read())) diff --git a/modules/processing/parsers/MACO/Blister.py b/modules/processing/parsers/MACO/Blister.py deleted file mode 100644 index 1045539c2bc..00000000000 --- a/modules/processing/parsers/MACO/Blister.py +++ /dev/null @@ -1,36 +0,0 @@ -import os - -from maco.extractor import Extractor -from maco.model import ExtractorModel as MACOModel - -from modules.processing.parsers.CAPE.Blister import extract_config - - -def convert_to_MACO(raw_config: dict): - if not raw_config: - return None - - parsed_result = MACOModel(family="Blister", other=raw_config) - - for capa in ["Persistence", "Sleep after injection"]: - if raw_config[capa]: - parsed_result.capability_enabled.append(capa) - else: - parsed_result.capability_disabled.append(capa) - - # Rabbit encryption - parsed_result.encryption.append( - MACOModel.Encryption(algorithm="rabbit", key=raw_config["Rabbit key"], iv=raw_config["Rabbit IV"]) - ) - return parsed_result - - -class Blister(Extractor): - author = "kevoreilly" - family = "Blister" - last_modified = "2024-10-26" - sharing = "TLP:CLEAR" - yara_rule = open(os.path.join(os.path.dirname(__file__).split("/modules", 1)[0], f"data/yara/CAPE/{family}.yar")).read() - - def run(self, stream, matches): - return convert_to_MACO(extract_config(stream.read())) diff --git a/modules/processing/parsers/MACO/BruteRatel.py b/modules/processing/parsers/MACO/BruteRatel.py deleted file mode 100644 index bfd7e32fda4..00000000000 --- a/modules/processing/parsers/MACO/BruteRatel.py +++ /dev/null @@ -1,32 +0,0 @@ -import os - -from maco.extractor import Extractor -from maco.model import ExtractorModel as MACOModel - -from modules.processing.parsers.CAPE.BruteRatel import extract_config - - -def convert_to_MACO(raw_config: dict): - if not raw_config: - return None - - parsed_result = MACOModel(family="BruteRatel", other=raw_config) - - for url in raw_config["C2"]: - for path in raw_config["URI"]: - parsed_result.http.append( - MACOModel.Http(uri=url, user_agent=raw_config["User Agent"], port=raw_config["Port"], path=path, usage="c2") - ) - - return parsed_result - - -class BruteRatel(Extractor): - author = "kevoreilly" - family = "BruteRatel" - last_modified = "2024-10-26" - sharing = "TLP:CLEAR" - yara_rule = open(os.path.join(os.path.dirname(__file__).split("/modules", 1)[0], f"data/yara/CAPE/{family}.yar")).read() - - def run(self, stream, matches): - return convert_to_MACO(extract_config(stream.read())) diff --git a/modules/processing/parsers/MACO/BuerLoader.py b/modules/processing/parsers/MACO/BuerLoader.py deleted file mode 100644 index fdda64590ae..00000000000 --- a/modules/processing/parsers/MACO/BuerLoader.py +++ /dev/null @@ -1,28 +0,0 @@ -import os - -from maco.extractor import Extractor -from maco.model import ExtractorModel as MACOModel - -from modules.processing.parsers.CAPE.BuerLoader import extract_config - - -def convert_to_MACO(raw_config: dict): - if not raw_config: - return None - - parsed_result = MACOModel(family="BuerLoader", other=raw_config) - - for c2 in raw_config["address"]: - parsed_result.http.append(MACOModel.Http(hostname=c2, usage="c2")) - return parsed_result - - -class BuerLoader(Extractor): - author = "kevoreilly" - family = "BuerLoader" - last_modified = "2024-10-26" - sharing = "TLP:CLEAR" - yara_rule = open(os.path.join(os.path.dirname(__file__).split("/modules", 1)[0], f"data/yara/CAPE/{family}.yar")).read() - - def run(self, stream, matches): - return convert_to_MACO(extract_config(stream.read())) diff --git 
a/modules/processing/parsers/MACO/BumbleBee.py b/modules/processing/parsers/MACO/BumbleBee.py deleted file mode 100644 index 27fa023e9e6..00000000000 --- a/modules/processing/parsers/MACO/BumbleBee.py +++ /dev/null @@ -1,46 +0,0 @@ -import os - -from maco.extractor import Extractor -from maco.model import ExtractorModel as MACOModel - -from modules.processing.parsers.CAPE.BumbleBee import extract_config - - -def convert_to_MACO(raw_config: dict): - if not raw_config: - return None - - parsed_result = MACOModel(family="BumbleBee", other=raw_config) - - # Campaign ID - if raw_config.get("Campaign ID"): - parsed_result.campaign_id.append(raw_config["Campaign ID"]) - - # Botnet ID - if raw_config.get("Botnet ID"): - parsed_result.identifier.append(raw_config["Botnet ID"]) - - # C2s - for c2 in raw_config.get("C2s", []): - parsed_result.http.append(MACOModel.Http(hostname=c2, usage="c2")) - - # Data - if raw_config.get("Data"): - parsed_result.binaries.append(MACOModel.Binary(data=raw_config["Data"])) - - # RC4 Key - if raw_config.get("RC4 Key"): - parsed_result.encryption.append(MACOModel.Encryption(algorithm="rc4", key=raw_config["RC4 Key"])) - - return parsed_result - - -class BumbleBee(Extractor): - author = "kevoreilly" - family = "BumbleBee" - last_modified = "2024-10-26" - sharing = "TLP:CLEAR" - yara_rule = open(os.path.join(os.path.dirname(__file__).split("/modules", 1)[0], f"data/yara/CAPE/{family}.yar")).read() - - def run(self, stream, matches): - return convert_to_MACO(extract_config(stream.read())) diff --git a/modules/processing/parsers/MACO/Carbanak.py b/modules/processing/parsers/MACO/Carbanak.py deleted file mode 100644 index 8df0573348b..00000000000 --- a/modules/processing/parsers/MACO/Carbanak.py +++ /dev/null @@ -1,45 +0,0 @@ -from maco.extractor import Extractor -from maco.model import ExtractorModel as MACOModel - -from modules.processing.parsers.CAPE.Carbanak import extract_config, rule_source - - -def convert_to_MACO(raw_config: dict): - if not raw_config: - return None - - parsed_result = MACOModel(family="Carbanak", other=raw_config) - - # Version - if raw_config.get("Version"): - parsed_result.version = raw_config["Version"] - - # Unknown strings - for i in [1, 2]: - if raw_config.get(f"Unknown {i}"): - parsed_result.decoded_strings.append(raw_config[f"Unknown {i}"]) - - # C2 - if raw_config.get("C2"): - if isinstance(raw_config["C2"], str): - parsed_result.http.append(MACOModel.Http(hostname=raw_config["C2"], usage="c2")) - else: - for c2 in raw_config["C2"]: - parsed_result.http.append(MACOModel.Http(hostname=c2, usage="c2")) - - # Campaign Id - if raw_config.get("Campaign Id"): - parsed_result.campaign_id.append(raw_config["Campaign Id"]) - - return parsed_result - - -class Carbanak(Extractor): - author = "kevoreilly" - family = "Carbanak" - last_modified = "2024-10-26" - sharing = "TLP:CLEAR" - yara_rule = rule_source - - def run(self, stream, matches): - return convert_to_MACO(extract_config(stream.read())) diff --git a/modules/processing/parsers/MACO/ChChes.py b/modules/processing/parsers/MACO/ChChes.py deleted file mode 100644 index 02977e00e33..00000000000 --- a/modules/processing/parsers/MACO/ChChes.py +++ /dev/null @@ -1,28 +0,0 @@ -from maco.extractor import Extractor -from maco.model import ExtractorModel as MACOModel - -from modules.processing.parsers.CAPE.ChChes import extract_config, rule_source - - -def convert_to_MACO(raw_config: dict): - if not raw_config: - return None - - parsed_result = MACOModel(family="ChChes", other=raw_config) - - # C2 URLs 
- for c2_url in raw_config.get("c2_url", []): - parsed_result.http.append(MACOModel.Http(uri=c2_url, usage="c2")) - - return parsed_result - - -class ChChes(Extractor): - author = "kevoreilly" - family = "ChChes" - last_modified = "2024-10-26" - sharing = "TLP:CLEAR" - yara_rule = rule_source - - def run(self, stream, matches): - return convert_to_MACO(extract_config(stream.read())) diff --git a/modules/processing/parsers/MACO/CobaltStrikeBeacon.py b/modules/processing/parsers/MACO/CobaltStrikeBeacon.py deleted file mode 100644 index f639cbb9cc8..00000000000 --- a/modules/processing/parsers/MACO/CobaltStrikeBeacon.py +++ /dev/null @@ -1,50 +0,0 @@ -import os - -from maco.extractor import Extractor -from maco.model import ExtractorModel as MACOModel - -from modules.processing.parsers.CAPE.CobaltStrikeBeacon import extract_config - - -def convert_to_MACO(raw_config: dict): - if not raw_config: - return None - - parsed_result = MACOModel(family="CobaltStrikeBeacon", other=raw_config) - - clean_config = {k: v for k, v in raw_config.items() if v != "Not Found"} - capabilities = {k[1:]: clean_config.pop(k) for k in list(clean_config.keys()) if clean_config[k] in ["True", "False"]} - - for capability, enabled in capabilities.items(): - if enabled.lower() == "true": - parsed_result.capability_enabled.append(capability) - else: - parsed_result.capability_disabled.append(capability) - - if "C2Server" in clean_config: - host, get_path = clean_config.pop("C2Server").split(",") - port = clean_config.pop("Port") - parsed_result.http.append(MACOModel.Http(hostname=host, port=port, method="GET", path=get_path, usage="c2")) - parsed_result.http.append( - MACOModel.Http(hostname=host, port=port, method="POST", path=clean_config.pop("HttpPostUri"), usage="c2") - ) - - parsed_result.sleep_delay = clean_config.pop("SleepTime") - parsed_result.sleep_delay_jitter = clean_config.pop("Jitter") - - for path_key in ["Spawnto_x86", "Spawnto_x64"]: - if path_key in clean_config: - parsed_result.paths.append(MACOModel.Path(path=clean_config.pop(path_key))) - - return parsed_result - - -class CobaltStrikeBeacon(Extractor): - author = "kevoreilly" - family = "CobaltStrikeBeacon" - last_modified = "2024-10-26" - sharing = "TLP:CLEAR" - yara_rule = open(os.path.join(os.path.dirname(__file__).split("/modules", 1)[0], f"data/yara/CAPE/{family}.yar")).read() - - def run(self, stream, matches): - return convert_to_MACO(extract_config(stream.read())) diff --git a/modules/processing/parsers/MACO/CobaltStrikeStager.py b/modules/processing/parsers/MACO/CobaltStrikeStager.py deleted file mode 100644 index 3d3759a0503..00000000000 --- a/modules/processing/parsers/MACO/CobaltStrikeStager.py +++ /dev/null @@ -1,26 +0,0 @@ -import os - -from maco.extractor import Extractor -from maco.model import ExtractorModel as MACOModel - -from modules.processing.parsers.CAPE.CobaltStrikeStager import extract_config - - -def convert_to_MACO(raw_config: dict): - if not raw_config: - return None - - parsed_result = MACOModel(family="CobaltStrikeStager", other=raw_config) - - return parsed_result - - -class CobaltStrikeStager(Extractor): - author = "kevoreilly" - family = "CobaltStrikeStager" - last_modified = "2024-10-26" - sharing = "TLP:CLEAR" - yara_rule = open(os.path.join(os.path.dirname(__file__).split("/modules", 1)[0], f"data/yara/CAPE/{family}.yar")).read() - - def run(self, stream, matches): - return convert_to_MACO(extract_config(stream.read())) diff --git a/modules/processing/parsers/MACO/DCRat.py 
b/modules/processing/parsers/MACO/DCRat.py deleted file mode 100644 index fba00548801..00000000000 --- a/modules/processing/parsers/MACO/DCRat.py +++ /dev/null @@ -1,27 +0,0 @@ -import os - -from maco.extractor import Extractor -from maco.model import ExtractorModel as MACOModel - -from modules.processing.parsers.CAPE.DCRat import extract_config - - -def convert_to_MACO(raw_config: dict): - if not raw_config: - return None - - # TODO: Assign fields to MACO model - parsed_result = MACOModel(family="DCRat", other=raw_config) - - return parsed_result - - -class DCRat(Extractor): - author = "kevoreilly" - family = "DCRat" - last_modified = "2024-10-26" - sharing = "TLP:CLEAR" - yara_rule = open(os.path.join(os.path.dirname(__file__).split("/modules", 1)[0], f"data/yara/CAPE/{family}.yar")).read() - - def run(self, stream, matches): - return convert_to_MACO(extract_config(stream.read())) diff --git a/modules/processing/parsers/MACO/DarkGate.py b/modules/processing/parsers/MACO/DarkGate.py deleted file mode 100644 index 6d382f80cd0..00000000000 --- a/modules/processing/parsers/MACO/DarkGate.py +++ /dev/null @@ -1,52 +0,0 @@ -import os -from copy import deepcopy - -from maco.extractor import Extractor -from maco.model import ExtractorModel as MACOModel - -from modules.processing.parsers.CAPE.DarkGate import extract_config - - -def convert_to_MACO(raw_config: dict): - if not raw_config: - return None - - parsed_result = MACOModel(family="DarkGate", other=raw_config) - - # Create a copy of the raw configuration for parsing - config = deepcopy(raw_config) - - # Go through capabilities/settings that are boolean in nature - for k, v in list(config.items()): - if v not in ["Yes", "No"]: - continue - - if v == "Yes": - parsed_result.capability_enabled.append(k) - else: - parsed_result.capability_disabled.append(k) - - # Remove key from raw config - config.pop(k) - - # C2 - c2_port = config.pop("c2_port", None) - for c2_url in config.pop("C2", []): - parsed_result.http.append(MACOModel.Http(uri=c2_url, port=c2_port, usage="c2")) - - # Mutex - if config.get("internal_mutex"): - parsed_result.mutex.append(config.pop("internal_mutex")) - - return parsed_result - - -class DarkGate(Extractor): - author = "kevoreilly" - family = "DarkGate" - last_modified = "2024-10-26" - sharing = "TLP:CLEAR" - yara_rule = open(os.path.join(os.path.dirname(__file__).split("/modules", 1)[0], f"data/yara/CAPE/{family}.yar")).read() - - def run(self, stream, matches): - return convert_to_MACO(extract_config(stream.read())) diff --git a/modules/processing/parsers/MACO/DoppelPaymer.py b/modules/processing/parsers/MACO/DoppelPaymer.py deleted file mode 100644 index 1e1d97a8b43..00000000000 --- a/modules/processing/parsers/MACO/DoppelPaymer.py +++ /dev/null @@ -1,30 +0,0 @@ -from maco.extractor import Extractor -from maco.model import ExtractorModel as MACOModel - -from modules.processing.parsers.CAPE.DoppelPaymer import extract_config, rule_source - - -def convert_to_MACO(raw_config: dict): - if not raw_config: - return None - - parsed_result = MACOModel(family="DoppelPaymer") - - if "strings" in raw_config: - parsed_result.decoded_strings = raw_config["strings"] - - if "RSA public key" in raw_config: - parsed_result.encryption.append(MACOModel.Encryption(algorithm="RSA", public_key=raw_config["RSA public key"])) - - return parsed_result - - -class DoppelPaymer(Extractor): - author = "kevoreilly" - family = "DoppelPaymer" - last_modified = "2024-10-26" - sharing = "TLP:CLEAR" - yara_rule = rule_source - - def run(self, stream, 
matches): - return convert_to_MACO(extract_config(stream.read())) diff --git a/modules/processing/parsers/MACO/DridexLoader.py b/modules/processing/parsers/MACO/DridexLoader.py deleted file mode 100644 index 7a1097ab71a..00000000000 --- a/modules/processing/parsers/MACO/DridexLoader.py +++ /dev/null @@ -1,33 +0,0 @@ -from maco.extractor import Extractor -from maco.model import ExtractorModel as MACOModel - -from modules.processing.parsers.CAPE.DridexLoader import extract_config, rule_source - - -def convert_to_MACO(raw_config: dict): - if not raw_config: - return None - - parsed_result = MACOModel(family="DridexLoader", other=raw_config) - - for c2_address in raw_config.get("address", []): - parsed_result.http.append(MACOModel.Http(uri=c2_address, usage="c2")) - - if "RC4 key" in raw_config: - parsed_result.encryption.append(MACOModel.Encryption(algorithm="RC4", key=raw_config["RC4 key"])) - - if "Botnet ID" in raw_config: - parsed_result.identifier.append(raw_config["Botnet ID"]) - - return parsed_result - - -class DridexLoader(Extractor): - author = "kevoreilly" - family = "DridexLoader" - last_modified = "2024-10-26" - sharing = "TLP:CLEAR" - yara_rule = rule_source - - def run(self, stream, matches): - return convert_to_MACO(extract_config(stream.read())) diff --git a/modules/processing/parsers/MACO/Emotet.py b/modules/processing/parsers/MACO/Emotet.py deleted file mode 100644 index 6cc29da2b0b..00000000000 --- a/modules/processing/parsers/MACO/Emotet.py +++ /dev/null @@ -1,32 +0,0 @@ -from maco.extractor import Extractor -from maco.model import ExtractorModel as MACOModel - -from modules.processing.parsers.CAPE.Emotet import extract_config, rule_source - - -def convert_to_MACO(raw_config: dict): - if not raw_config: - return None - - parsed_result = MACOModel(family="Emotet", other=raw_config) - - for c2_address in raw_config.get("address", []): - parsed_result.http.append(MACOModel.Http(uri=c2_address, usage="c2")) - - if "RC4 public key" in raw_config: - parsed_result.encryption.append(MACOModel.Encryption(algorithm="RC4", public_key=raw_config["RSA public key"])) - - parsed_result.other = {k: raw_config[k] for k in raw_config.keys() if k not in ["address", "RSA public key"]} - - return parsed_result - - -class Emotet(Extractor): - author = "kevoreilly" - family = "Emotet" - last_modified = "2024-10-26" - sharing = "TLP:CLEAR" - yara_rule = rule_source - - def run(self, stream, matches): - return convert_to_MACO(extract_config(stream.read())) diff --git a/modules/processing/parsers/MACO/Enfal.py b/modules/processing/parsers/MACO/Enfal.py deleted file mode 100644 index 8fe4d6f2ff6..00000000000 --- a/modules/processing/parsers/MACO/Enfal.py +++ /dev/null @@ -1,25 +0,0 @@ -from maco.extractor import Extractor -from maco.model import ExtractorModel as MACOModel - -from modules.processing.parsers.CAPE.Enfal import extract_config, rule_source - - -def convert_to_MACO(raw_config: dict): - if not raw_config: - return None - - # TODO: Assign fields to MACO model - parsed_result = MACOModel(family="Enfal", other=raw_config) - - return parsed_result - - -class Enfal(Extractor): - author = "kevoreilly" - family = "Enfal" - last_modified = "2024-10-26" - sharing = "TLP:CLEAR" - yara_rule = rule_source - - def run(self, stream, matches): - return convert_to_MACO(extract_config(stream.read())) diff --git a/modules/processing/parsers/MACO/EvilGrab.py b/modules/processing/parsers/MACO/EvilGrab.py deleted file mode 100644 index e32975f06bc..00000000000 --- 
a/modules/processing/parsers/MACO/EvilGrab.py +++ /dev/null @@ -1,38 +0,0 @@ -from maco.extractor import Extractor -from maco.model import ExtractorModel as MACOModel - -from modules.processing.parsers.CAPE.EvilGrab import extract_config, rule_source - - -def convert_to_MACO(raw_config: dict): - if not raw_config: - return None - - parsed_result = MACOModel(family="EvilGrab", other=raw_config) - - if "mutex" in raw_config: - parsed_result.mutex.append(raw_config["mutex"]) - - if "missionid" in raw_config: - parsed_result.campaign_id.append(raw_config["missionid"]) - - if "version" in raw_config: - parsed_result.version = raw_config["version"] - - if "c2_address" in raw_config: - parsed_result.http.append( - parsed_result.Http(uri=raw_config["c2_address"], port=raw_config["port"][0] if "port" in raw_config else None) - ) - - return parsed_result - - -class EvilGrab(Extractor): - author = "kevoreilly" - family = "EvilGrab" - last_modified = "2024-10-26" - sharing = "TLP:CLEAR" - yara_rule = rule_source - - def run(self, stream, matches): - return convert_to_MACO(extract_config(stream.read())) diff --git a/modules/processing/parsers/MACO/Fareit.py b/modules/processing/parsers/MACO/Fareit.py deleted file mode 100644 index d09c1492600..00000000000 --- a/modules/processing/parsers/MACO/Fareit.py +++ /dev/null @@ -1,27 +0,0 @@ -import os - -from maco.extractor import Extractor -from maco.model import ExtractorModel as MACOModel - -from modules.processing.parsers.CAPE.Fareit import extract_config - - -def convert_to_MACO(raw_config: dict): - if not raw_config: - return None - - # TODO: Assign fields to MACO model - parsed_result = MACOModel(family="Fareit", other=raw_config) - - return parsed_result - - -class Fareit(Extractor): - author = "kevoreilly" - family = "Fareit" - last_modified = "2024-10-26" - sharing = "TLP:CLEAR" - yara_rule = open(os.path.join(os.path.dirname(__file__).split("/modules", 1)[0], f"data/yara/CAPE/{family}.yar")).read() - - def run(self, stream, matches): - return convert_to_MACO(extract_config(stream.read())) diff --git a/modules/processing/parsers/MACO/Formbook.py b/modules/processing/parsers/MACO/Formbook.py deleted file mode 100644 index 73a2d4dae8c..00000000000 --- a/modules/processing/parsers/MACO/Formbook.py +++ /dev/null @@ -1,32 +0,0 @@ -import os - -from maco.extractor import Extractor -from maco.model import ExtractorModel as MACOModel - -from modules.processing.parsers.CAPE.Formbook import extract_config - - -def convert_to_MACO(raw_config: dict): - if not raw_config: - return None - - parsed_result = MACOModel(family="Formbook", other=raw_config) - - if "C2" in raw_config: - parsed_result.http.append(MACOModel.Http(uri=raw_config["C2"], usage="c2")) - - for decoy in raw_config.get("Decoys", []): - parsed_result.http.append(MACOModel.Http(uri=decoy, usage="decoy")) - - return parsed_result - - -class Formbook(Extractor): - author = "kevoreilly" - family = "Formbook" - last_modified = "2024-10-26" - sharing = "TLP:CLEAR" - yara_rule = open(os.path.join(os.path.dirname(__file__).split("/modules", 1)[0], f"data/yara/CAPE/{family}.yar")).read() - - def run(self, stream, matches): - return convert_to_MACO(extract_config(stream.read())) diff --git a/modules/processing/parsers/MACO/Greame.py b/modules/processing/parsers/MACO/Greame.py deleted file mode 100644 index bb06c40646c..00000000000 --- a/modules/processing/parsers/MACO/Greame.py +++ /dev/null @@ -1,23 +0,0 @@ -from maco.extractor import Extractor -from maco.model import ExtractorModel as MACOModel - -from 
modules.processing.parsers.CAPE.Greame import extract_config - - -def convert_to_MACO(raw_config: dict): - if not raw_config: - return None - - parsed_result = MACOModel(family="Greame", other=raw_config) - - return parsed_result - - -class Greame(Extractor): - author = "kevoreilly" - family = "Greame" - last_modified = "2024-10-26" - sharing = "TLP:CLEAR" - - def run(self, stream, matches): - return convert_to_MACO(extract_config(stream.read())) diff --git a/modules/processing/parsers/MACO/GuLoader.py b/modules/processing/parsers/MACO/GuLoader.py deleted file mode 100644 index e0a0ceae0e2..00000000000 --- a/modules/processing/parsers/MACO/GuLoader.py +++ /dev/null @@ -1,29 +0,0 @@ -import os - -from maco.extractor import Extractor -from maco.model import ExtractorModel as MACOModel - -from modules.processing.parsers.CAPE.GuLoader import extract_config - - -def convert_to_MACO(raw_config: dict): - if not raw_config: - return None - - parsed_result = MACOModel(family="GuLoader", other=raw_config) - - for url in raw_config.get("URLs", []): - parsed_result.http.append(MACOModel.Http(uri=url, usage="download")) - - return parsed_result - - -class GuLoader(Extractor): - author = "kevoreilly" - family = "GuLoader" - last_modified = "2024-10-26" - sharing = "TLP:CLEAR" - yara_rule = open(os.path.join(os.path.dirname(__file__).split("/modules", 1)[0], "data/yara/CAPE/Guloader.yar")).read() - - def run(self, stream, matches): - return convert_to_MACO(extract_config(stream.read())) diff --git a/modules/processing/parsers/MACO/Hancitor.py b/modules/processing/parsers/MACO/Hancitor.py deleted file mode 100644 index 1a9add97f8b..00000000000 --- a/modules/processing/parsers/MACO/Hancitor.py +++ /dev/null @@ -1,32 +0,0 @@ -import os - -from maco.extractor import Extractor -from maco.model import ExtractorModel as MACOModel - -from modules.processing.parsers.CAPE.Hancitor import extract_config - - -def convert_to_MACO(raw_config: dict): - if not raw_config: - return None - - parsed_result = MACOModel(family="Hancitor", other=raw_config) - - for url in raw_config.get("address", []): - parsed_result.http.append(MACOModel.Http(uri=url, usage="c2")) - - if "Build ID" in raw_config: - parsed_result.identifier.append(raw_config["Build ID"]) - - return parsed_result - - -class Hancitor(Extractor): - author = "kevoreilly" - family = "Hancitor" - last_modified = "2024-10-26" - sharing = "TLP:CLEAR" - yara_rule = open(os.path.join(os.path.dirname(__file__).split("/modules", 1)[0], f"data/yara/CAPE/{family}.yar")).read() - - def run(self, stream, matches): - return convert_to_MACO(extract_config(stream.read())) diff --git a/modules/processing/parsers/MACO/HttpBrowser.py b/modules/processing/parsers/MACO/HttpBrowser.py deleted file mode 100644 index 6b851fd0178..00000000000 --- a/modules/processing/parsers/MACO/HttpBrowser.py +++ /dev/null @@ -1,35 +0,0 @@ -from maco.extractor import Extractor -from maco.model import ExtractorModel as MACOModel - -from modules.processing.parsers.CAPE.HttpBrowser import extract_config, rule_source - - -def convert_to_MACO(raw_config: dict): - if not raw_config: - return None - - parsed_result = MACOModel(family="HttpBrowser", other=raw_config) - - port = raw_config["port"][0] if "port" in raw_config else None - - if "c2_address" in raw_config: - parsed_result.http.append(MACOModel.Http(uri=raw_config["c2_address"], port=port, usage="c2")) - - if "filepath" in raw_config: - parsed_result.paths.append(MACOModel.Path(path=raw_config["filepath"])) - - if "injectionprocess" in raw_config: 
-        parsed_result.other["injectionprocess"] = raw_config["injectionprocess"]
-
-    return parsed_result
-
-
-class HttpBrowser(Extractor):
-    author = "kevoreilly"
-    family = "HttpBrowser"
-    last_modified = "2024-10-26"
-    sharing = "TLP:CLEAR"
-    yara_rule = rule_source
-
-    def run(self, stream, matches):
-        return convert_to_MACO(extract_config(stream.read()))
diff --git a/modules/processing/parsers/MACO/IcedID.py b/modules/processing/parsers/MACO/IcedID.py
deleted file mode 100644
index 5ef0778118a..00000000000
--- a/modules/processing/parsers/MACO/IcedID.py
+++ /dev/null
@@ -1,24 +0,0 @@
-import os
-
-from maco.extractor import Extractor
-from maco.model import ExtractorModel as MACOModel
-
-from modules.processing.parsers.CAPE.IcedID import extract_config
-
-
-def convert_to_MACO(raw_config: dict):
-    if not raw_config:
-        return None
-
-    return MACOModel(**raw_config)
-
-
-class IcedID(Extractor):
-    author = "kevoreilly"
-    family = "IcedID"
-    last_modified = "2024-10-26"
-    sharing = "TLP:CLEAR"
-    yara_rule = open(os.path.join(os.path.dirname(__file__).split("/modules", 1)[0], f"data/yara/CAPE/{family}.yar")).read()
-
-    def run(self, stream, matches):
-        return convert_to_MACO(extract_config(stream.read()))
diff --git a/modules/processing/parsers/MACO/IcedIDLoader.py b/modules/processing/parsers/MACO/IcedIDLoader.py
deleted file mode 100644
index 46c6ea4cad4..00000000000
--- a/modules/processing/parsers/MACO/IcedIDLoader.py
+++ /dev/null
@@ -1,32 +0,0 @@
-import os
-
-from maco.extractor import Extractor
-from maco.model import ExtractorModel as MACOModel
-
-from modules.processing.parsers.CAPE.IcedIDLoader import extract_config
-
-
-def convert_to_MACO(raw_config: dict):
-    if not raw_config:
-        return None
-
-    parsed_result = MACOModel(family="IcedIDLoader", other=raw_config)
-
-    if "C2" in raw_config:
-        parsed_result.http.append(MACOModel.Http(hostname=raw_config["C2"], usage="c2"))
-
-    if "Campaign" in raw_config:
-        parsed_result.campaign_id.append(str(raw_config["Campaign"]))
-
-    return parsed_result
-
-
-class IcedIDLoader(Extractor):
-    author = "kevoreilly"
-    family = "IcedIDLoader"
-    last_modified = "2024-10-26"
-    sharing = "TLP:CLEAR"
-    yara_rule = open(os.path.join(os.path.dirname(__file__).split("/modules", 1)[0], f"data/yara/CAPE/{family}.yar")).read()
-
-    def run(self, stream, matches):
-        return convert_to_MACO(extract_config(stream.read()))
diff --git a/modules/processing/parsers/MACO/KoiLoader.py b/modules/processing/parsers/MACO/KoiLoader.py
deleted file mode 100644
index 63c0c75134d..00000000000
--- a/modules/processing/parsers/MACO/KoiLoader.py
+++ /dev/null
@@ -1,27 +0,0 @@
-from maco.extractor import Extractor
-from maco.model import ExtractorModel as MACOModel
-
-from modules.processing.parsers.CAPE.KoiLoader import RULE_SOURCE, extract_config
-
-
-def convert_to_MACO(raw_config: dict):
-    if not raw_config:
-        return None
-
-    parsed_result = MACOModel(family="KoiLoader", other=raw_config)
-
-    for c2_url in raw_config.get("C2", []):
-        parsed_result.http.append(MACOModel.Http(uri=c2_url, usage="c2"))
-
-    return parsed_result
-
-
-class KoiLoader(Extractor):
-    author = "kevoreilly"
-    family = "KoiLoader"
-    last_modified = "2024-10-26"
-    sharing = "TLP:CLEAR"
-    yara_rule = RULE_SOURCE
-
-    def run(self, stream, matches):
-        return convert_to_MACO(extract_config(stream.read()))
diff --git a/modules/processing/parsers/MACO/Latrodectus.py b/modules/processing/parsers/MACO/Latrodectus.py
deleted file mode 100644
index 4ad7cbd1515..00000000000
---
a/modules/processing/parsers/MACO/Latrodectus.py +++ /dev/null @@ -1,44 +0,0 @@ -import os - -from maco.extractor import Extractor -from maco.model import ExtractorModel as MACOModel - -from modules.processing.parsers.CAPE.Latrodectus import extract_config - - -def convert_to_MACO(raw_config: dict): - if not raw_config: - return None - - parsed_result = MACOModel(family="Latrodectus", other=raw_config) - - for c2_url in raw_config.get("C2", []): - parsed_result.http.append(MACOModel.Http(uri=c2_url, usage="c2")) - - if "Group name" in raw_config: - parsed_result.identifier.append(raw_config["Group name"]) - - if "Campaign ID" in raw_config: - parsed_result.campaign_id.append(str(raw_config["Campaign ID"])) - - if "Version" in raw_config: - parsed_result.version = raw_config["Version"] - - if "RC4 key" in raw_config: - parsed_result.encryption.append(MACOModel.Encryption(algorithm="RC4", key=raw_config["RC4 key"])) - - if "Strings" in raw_config: - parsed_result.decoded_strings = raw_config["Strings"] - - return parsed_result - - -class Latrodectus(Extractor): - author = "kevoreilly" - family = "Latrodectus" - last_modified = "2024-10-26" - sharing = "TLP:CLEAR" - yara_rule = open(os.path.join(os.path.dirname(__file__).split("/modules", 1)[0], f"data/yara/CAPE/{family}.yar")).read() - - def run(self, stream, matches): - return convert_to_MACO(extract_config(stream.read())) diff --git a/modules/processing/parsers/MACO/LokiBot.py b/modules/processing/parsers/MACO/LokiBot.py deleted file mode 100644 index 01d36594953..00000000000 --- a/modules/processing/parsers/MACO/LokiBot.py +++ /dev/null @@ -1,29 +0,0 @@ -import os - -from maco.extractor import Extractor -from maco.model import ExtractorModel as MACOModel - -from modules.processing.parsers.CAPE.LokiBot import extract_config - - -def convert_to_MACO(raw_config: dict): - if not raw_config: - return None - - parsed_result = MACOModel(family="LokiBot", other=raw_config) - - for address in raw_config.get("address", []): - parsed_result.http.append(MACOModel.Http(uri=address)) - - return parsed_result - - -class LokiBot(Extractor): - author = "kevoreilly" - family = "LokiBot" - last_modified = "2024-10-26" - sharing = "TLP:CLEAR" - yara_rule = open(os.path.join(os.path.dirname(__file__).split("/modules", 1)[0], f"data/yara/CAPE/{family}.yar")).read() - - def run(self, stream, matches): - return convert_to_MACO(extract_config(stream.read())) diff --git a/modules/processing/parsers/MACO/Lumma.py b/modules/processing/parsers/MACO/Lumma.py deleted file mode 100644 index 5f5153c0b5c..00000000000 --- a/modules/processing/parsers/MACO/Lumma.py +++ /dev/null @@ -1,29 +0,0 @@ -import os - -from maco.extractor import Extractor -from maco.model import ExtractorModel as MACOModel - -from modules.processing.parsers.CAPE.Lumma import extract_config - - -def convert_to_MACO(raw_config: dict): - if not raw_config: - return None - - parsed_result = MACOModel(family="Lumma", other=raw_config) - - for address in raw_config.get("C2", []): - parsed_result.http.append(MACOModel.Http(hostname=address, usage="c2")) - - return parsed_result - - -class Lumma(Extractor): - author = "kevoreilly" - family = "Lumma" - last_modified = "2024-10-26" - sharing = "TLP:CLEAR" - yara_rule = open(os.path.join(os.path.dirname(__file__).split("/modules", 1)[0], f"data/yara/CAPE/{family}.yar")).read() - - def run(self, stream, matches): - return convert_to_MACO(extract_config(stream.read())) diff --git a/modules/processing/parsers/MACO/NanoCore.py 
b/modules/processing/parsers/MACO/NanoCore.py deleted file mode 100644 index 309f798de01..00000000000 --- a/modules/processing/parsers/MACO/NanoCore.py +++ /dev/null @@ -1,44 +0,0 @@ -from copy import deepcopy - -from maco.extractor import Extractor -from maco.model import ExtractorModel as MACOModel - -from modules.processing.parsers.CAPE.NanoCore import extract_config - - -def convert_to_MACO(raw_config: dict): - if not raw_config: - return None - - parsed_result = MACOModel(family="NanoCore", other=raw_config) - - config_copy = deepcopy(raw_config) - capabilities = {k: config_copy.pop(k) for k in list(config_copy.keys()) if config_copy[k] in ["True", "False"]} - - if "Version" in config_copy: - parsed_result.version = config_copy.pop("Version") - - if "Mutex" in config_copy: - parsed_result.mutex.append(config_copy.pop("Mutex")) - - for capability, enabled in capabilities.items(): - if enabled.lower() == "true": - parsed_result.capability_enabled.append(capability) - else: - parsed_result.capability_disabled.append(capability) - - for address in config_copy.pop("cncs", []): - host, port = address.split(":") - parsed_result.http.append(MACOModel.Http(hostname=host, port=port, usage="c2")) - - return parsed_result - - -class NanoCore(Extractor): - author = "kevoreilly" - family = "NanoCore" - last_modified = "2024-10-26" - sharing = "TLP:CLEAR" - - def run(self, stream, matches): - return convert_to_MACO(extract_config(stream.read())) diff --git a/modules/processing/parsers/MACO/Nighthawk.py b/modules/processing/parsers/MACO/Nighthawk.py deleted file mode 100644 index b1872886bed..00000000000 --- a/modules/processing/parsers/MACO/Nighthawk.py +++ /dev/null @@ -1,26 +0,0 @@ -import os - -from maco.extractor import Extractor -from maco.model import ExtractorModel as MACOModel - -from modules.processing.parsers.CAPE.Nighthawk import extract_config - - -def convert_to_MACO(raw_config: dict): - if not raw_config: - return None - - parsed_result = MACOModel(family="Nighthawk", other=raw_config) - - return parsed_result - - -class Nighthawk(Extractor): - author = "kevoreilly" - family = "Nighthawk" - last_modified = "2024-10-26" - sharing = "TLP:CLEAR" - yara_rule = open(os.path.join(os.path.dirname(__file__).split("/modules", 1)[0], f"data/yara/CAPE/{family}.yar")).read() - - def run(self, stream, matches): - return convert_to_MACO(extract_config(stream.read())) diff --git a/modules/processing/parsers/MACO/Njrat.py b/modules/processing/parsers/MACO/Njrat.py deleted file mode 100644 index f3f9b27de27..00000000000 --- a/modules/processing/parsers/MACO/Njrat.py +++ /dev/null @@ -1,33 +0,0 @@ -from maco.extractor import Extractor -from maco.model import ExtractorModel as MACOModel - -from modules.processing.parsers.CAPE.Njrat import extract_config - - -def convert_to_MACO(raw_config: dict): - if not raw_config: - return None - - parsed_result = MACOModel(family="Njrat", other=raw_config) - - if "version" in raw_config: - parsed_result.version = raw_config["version"] - - if "campaign_id" in raw_config: - parsed_result.campaign_id.append(raw_config["campaign_id"]) - - for c2 in raw_config.get("cncs", []): - host, port = c2.split(":") - parsed_result.http.append(MACOModel.Http(hostname=host, port=port, usage="c2")) - - return parsed_result - - -class Njrat(Extractor): - author = "kevoreilly" - family = "Njrat" - last_modified = "2024-10-26" - sharing = "TLP:CLEAR" - - def run(self, stream, matches): - return convert_to_MACO(extract_config(stream.read())) diff --git 
a/modules/processing/parsers/MACO/Oyster.py b/modules/processing/parsers/MACO/Oyster.py deleted file mode 100644 index 4a80f038cbf..00000000000 --- a/modules/processing/parsers/MACO/Oyster.py +++ /dev/null @@ -1,35 +0,0 @@ -import os - -from maco.extractor import Extractor -from maco.model import ExtractorModel as MACOModel - -from modules.processing.parsers.CAPE.Oyster import extract_config - - -def convert_to_MACO(raw_config: dict): - if not raw_config: - return None - - parsed_result = MACOModel(family="Oyster", other=raw_config) - - for address in raw_config.get("C2", []): - parsed_result.http.append(MACOModel.Http(uri=address, usage="c2")) - - if "Dll Version" in raw_config: - parsed_result.version = raw_config["Dll Version"] - - if "Strings" in raw_config: - parsed_result.decoded_strings = raw_config["Strings"] - - return parsed_result - - -class Oyster(Extractor): - author = "kevoreilly" - family = "Oyster" - last_modified = "2024-10-26" - sharing = "TLP:CLEAR" - yara_rule = open(os.path.join(os.path.dirname(__file__).split("/modules", 1)[0], f"data/yara/CAPE/{family}.yar")).read() - - def run(self, stream, matches): - return convert_to_MACO(extract_config(stream.read())) diff --git a/modules/processing/parsers/MACO/Pandora.py b/modules/processing/parsers/MACO/Pandora.py deleted file mode 100644 index b82bad0c02c..00000000000 --- a/modules/processing/parsers/MACO/Pandora.py +++ /dev/null @@ -1,50 +0,0 @@ -import os -from copy import deepcopy - -from maco.extractor import Extractor -from maco.model import ExtractorModel as MACOModel - -from modules.processing.parsers.CAPE.Pandora import extract_config - - -def convert_to_MACO(raw_config: dict): - if not raw_config: - return None - - config_copy = deepcopy(raw_config) - parsed_result = MACOModel( - family="Pandora", - mutex=[config_copy.pop("Mutex")], - campaign_id=[config_copy.pop("Campaign ID")], - version=config_copy.pop("Version"), - http=[dict(hostname=config_copy.pop("Domain"), port=config_copy.pop("Port"), password=config_copy.pop("Password"))], - other=raw_config, - ) - - parsed_result.paths.append( - MACOModel.Path(path=os.path.join(config_copy.pop("Install Path"), config_copy.pop("Install Name")), usage="install") - ) - - parsed_result.registry.append(MACOModel.Registry(key=config_copy.pop("HKCU Key"))) - parsed_result.registry.append(MACOModel.Registry(key=config_copy.pop("ActiveX Key"))) - - for field in list(config_copy.keys()): - # TODO: Unsure what's the value of the remaining fields - if config_copy[field].lower() in ["true", "false"]: - enabled = config_copy.pop(field).lower() == "true" - if enabled: - parsed_result.capability_enabled.append(field) - else: - parsed_result.capability_disabled.append(field) - - return parsed_result - - -class Pandora(Extractor): - author = "kevoreilly" - family = "Pandora" - last_modified = "2024-10-26" - sharing = "TLP:CLEAR" - - def run(self, stream, matches): - return convert_to_MACO(extract_config(stream.read())) diff --git a/modules/processing/parsers/MACO/PhemedroneStealer.py b/modules/processing/parsers/MACO/PhemedroneStealer.py deleted file mode 100644 index ef30b9032bf..00000000000 --- a/modules/processing/parsers/MACO/PhemedroneStealer.py +++ /dev/null @@ -1,23 +0,0 @@ -from maco.extractor import Extractor -from maco.model import ExtractorModel as MACOModel - -from modules.processing.parsers.CAPE.PhemedroneStealer import extract_config - - -def convert_to_MACO(raw_config: dict): - if not raw_config: - return None - - parsed_result = MACOModel(family="PhemedroneStealer", 
other=raw_config) - - return parsed_result - - -class PhemedroneStealer(Extractor): - author = "kevoreilly" - family = "PhemedroneStealer" - last_modified = "2024-10-26" - sharing = "TLP:CLEAR" - - def run(self, stream, matches): - return convert_to_MACO(extract_config(stream.read())) diff --git a/modules/processing/parsers/MACO/PikaBot.py b/modules/processing/parsers/MACO/PikaBot.py deleted file mode 100644 index 4409b7f6cab..00000000000 --- a/modules/processing/parsers/MACO/PikaBot.py +++ /dev/null @@ -1,35 +0,0 @@ -from maco.extractor import Extractor -from maco.model import ExtractorModel as MACOModel - -from modules.processing.parsers.CAPE.PikaBot import extract_config, rule_source - - -def convert_to_MACO(raw_config: dict): - if not raw_config: - return None - - parsed_result = MACOModel(family="PikaBot", other=raw_config) - - if "C2" in raw_config: - [parsed_result.http.append(MACOModel.Http(uri=c2, usage="c2")) for c2 in raw_config["C2"]] - parsed_result.binaries.append(MACOModel.Binary(datatype="payload", data=raw_config["Powershell"])) - elif "C2s" in raw_config: - parsed_result.version = raw_config["Version"] - parsed_result.campaign_id.append(raw_config["Campaign Name"]) - parsed_result.registry.append(MACOModel.Registry(key=raw_config["Registry Key"])) - for c2 in raw_config["C2s"]: - host, port = c2.split(":") - parsed_result.http.append(MACOModel.Http(hostname=host, port=port, user_agent=raw_config["User Agent"])) - - return parsed_result - - -class PikaBot(Extractor): - author = "kevoreilly" - family = "PikaBot" - last_modified = "2024-10-26" - sharing = "TLP:CLEAR" - yara_rule = rule_source - - def run(self, stream, matches): - return convert_to_MACO(extract_config(stream.read())) diff --git a/modules/processing/parsers/MACO/PlugX.py b/modules/processing/parsers/MACO/PlugX.py deleted file mode 100644 index c2ae83ea952..00000000000 --- a/modules/processing/parsers/MACO/PlugX.py +++ /dev/null @@ -1,23 +0,0 @@ -from maco.extractor import Extractor -from maco.model import ExtractorModel as MACOModel - -from modules.processing.parsers.CAPE.PlugX import extract_config - - -def convert_to_MACO(raw_config: dict): - if not raw_config: - return None - - parsed_result = MACOModel(family="PlugX", other=raw_config) - - return parsed_result - - -class PlugX(Extractor): - author = "kevoreilly" - family = "PlugX" - last_modified = "2024-10-26" - sharing = "TLP:CLEAR" - - def run(self, stream, matches): - return convert_to_MACO(extract_config(stream.read())) diff --git a/modules/processing/parsers/MACO/PoisonIvy.py b/modules/processing/parsers/MACO/PoisonIvy.py deleted file mode 100644 index e18175fa42d..00000000000 --- a/modules/processing/parsers/MACO/PoisonIvy.py +++ /dev/null @@ -1,45 +0,0 @@ -from maco.extractor import Extractor -from maco.model import ExtractorModel as MACOModel - -from modules.processing.parsers.CAPE.PoisonIvy import extract_config - - -def convert_to_MACO(raw_config: dict): - if not raw_config: - return None - - parsed_result = MACOModel(family="PoisonIvy", other=raw_config) - - if "Campaign ID" in raw_config: - parsed_result.campaign_id.append(raw_config["Campaign ID"]) - if "Group ID" in raw_config: - parsed_result.identifier.append(raw_config["Group ID"]) - if "Domains" in raw_config: - for domain_port in raw_config["Domains"].split("|"): - host, port = domain_port.split(":") - parsed_result.http.append(MACOModel.Http(hostname=host, port=port)) - if "Password" in raw_config: - parsed_result.password.append(raw_config["Password"]) - if "Mutex" in raw_config: - 
parsed_result.mutex.append(raw_config["Mutex"]) - - for field in list(raw_config.keys()): - value = raw_config[field] - if value.lower() == "true": - parsed_result.capability_enabled.append(field) - elif value.lower() == "false": - parsed_result.capability_disabled.append(field) - - return parsed_result - - -class PoisonIvy(Extractor): - author = "kevoreilly" - family = "PoisonIvy" - last_modified = "2024-10-26" - sharing = "TLP:CLEAR" - - def run(self, stream, matches): - output = extract_config(stream.read()) - if output: - return convert_to_MACO(output[0]) diff --git a/modules/processing/parsers/MACO/Punisher.py b/modules/processing/parsers/MACO/Punisher.py deleted file mode 100644 index 6bdfbb3c1be..00000000000 --- a/modules/processing/parsers/MACO/Punisher.py +++ /dev/null @@ -1,46 +0,0 @@ -import os -from copy import deepcopy - -from maco.extractor import Extractor -from maco.model import ExtractorModel as MACOModel - -from modules.processing.parsers.CAPE.Punisher import extract_config - - -def convert_to_MACO(raw_config: dict): - if not raw_config: - return None - - config_copy = deepcopy(raw_config) - parsed_result = MACOModel( - family="Punisher", - campaign_id=config_copy["Campaign Name"], - password=[config_copy["Password"]], - registry=[MACOModel.Registry(key=config_copy["Registry Key"])], - paths=[MACOModel.Path(path=os.path.join(config_copy["Install Path"], config_copy["Install Name"]))], - http=[MACOModel.Http(hostname=config_copy["Domain"], port=config_copy["Port"])], - other=raw_config, - ) - - for field in raw_config.keys(): - value = raw_config[field] - if value.lower() == "true": - parsed_result.capability_enabled.append(field) - elif value.lower() == "false": - parsed_result.capability_disabled.append(field) - else: - parsed_result.other[field] = value - - return parsed_result - - -class Punisher(Extractor): - author = "kevoreilly" - family = "Punisher" - last_modified = "2024-10-26" - sharing = "TLP:CLEAR" - - def run(self, stream, matches): - output = extract_config(stream.read()) - if output: - return convert_to_MACO(output[0]) diff --git a/modules/processing/parsers/MACO/QakBot.py b/modules/processing/parsers/MACO/QakBot.py deleted file mode 100644 index d8ee5c8c023..00000000000 --- a/modules/processing/parsers/MACO/QakBot.py +++ /dev/null @@ -1,28 +0,0 @@ -from maco.extractor import Extractor -from maco.model import ExtractorModel as MACOModel - -from modules.processing.parsers.CAPE.QakBot import extract_config, rule_source - - -def convert_to_MACO(raw_config: dict): - if not raw_config: - return None - - parsed_result = MACOModel(family="QakBot", other=raw_config) - - for address in raw_config.get("address", []) + raw_config.get("C2s", []): - host, port = address.split(":") - parsed_result.http.append(MACOModel.Http(hostname=host, port=port, usage="c2")) - - return parsed_result - - -class QakBot(Extractor): - author = "kevoreilly" - family = "QakBot" - last_modified = "2024-10-26" - sharing = "TLP:CLEAR" - yara_rule = rule_source - - def run(self, stream, matches): - return convert_to_MACO(extract_config(stream.read())) diff --git a/modules/processing/parsers/MACO/QuasarRAT.py b/modules/processing/parsers/MACO/QuasarRAT.py deleted file mode 100644 index e7a0aadf5e9..00000000000 --- a/modules/processing/parsers/MACO/QuasarRAT.py +++ /dev/null @@ -1,26 +0,0 @@ -import os - -from maco.extractor import Extractor -from maco.model import ExtractorModel as MACOModel - -from modules.processing.parsers.CAPE.QuasarRAT import extract_config - - -def 
convert_to_MACO(raw_config: dict): - if not raw_config: - return None - - parsed_result = MACOModel(family="QuasarRAT", other=raw_config) - - return parsed_result - - -class QuasarRAT(Extractor): - author = "kevoreilly" - family = "QuasarRAT" - last_modified = "2024-10-26" - sharing = "TLP:CLEAR" - yara_rule = open(os.path.join(os.path.dirname(__file__).split("/modules", 1)[0], f"data/yara/CAPE/{family}.yar")).read() - - def run(self, stream, matches): - return convert_to_MACO(extract_config(stream.read())) diff --git a/modules/processing/parsers/MACO/Quickbind.py b/modules/processing/parsers/MACO/Quickbind.py deleted file mode 100644 index 2a0b9101766..00000000000 --- a/modules/processing/parsers/MACO/Quickbind.py +++ /dev/null @@ -1,35 +0,0 @@ -import os - -from maco.extractor import Extractor -from maco.model import ExtractorModel as MACOModel - -from modules.processing.parsers.CAPE.Quickbind import extract_config - - -def convert_to_MACO(raw_config: dict): - if not raw_config: - return None - - parsed_result = MACOModel(family="Quickbind", other=raw_config) - - if "Mutex" in raw_config: - parsed_result.mutex = raw_config["Mutex"] - - for c2 in raw_config.get("C2", []): - parsed_result.http.append(MACOModel.Http(hostname=c2, usage="c2")) - - if "Encryption Key" in raw_config: - parsed_result.encryption.append(MACOModel.Encryption(key=raw_config["Encryption Key"])) - - return parsed_result - - -class Quickbind(Extractor): - author = "kevoreilly" - family = "Quickbind" - last_modified = "2024-10-26" - sharing = "TLP:CLEAR" - yara_rule = open(os.path.join(os.path.dirname(__file__).split("/modules", 1)[0], f"data/yara/CAPE/{family}.yar")).read() - - def run(self, stream, matches): - return convert_to_MACO(extract_config(stream.read())) diff --git a/modules/processing/parsers/MACO/RCSession.py b/modules/processing/parsers/MACO/RCSession.py deleted file mode 100644 index 3c79bc89e32..00000000000 --- a/modules/processing/parsers/MACO/RCSession.py +++ /dev/null @@ -1,44 +0,0 @@ -from maco.extractor import Extractor -from maco.model import ExtractorModel as MACOModel - -from modules.processing.parsers.CAPE.RCSession import extract_config, rule_source - - -def convert_to_MACO(raw_config: dict): - if not raw_config: - return None - - parsed_result = MACOModel(family="RCSession", other=raw_config) - - for address in raw_config.get("c2_address", []): - parsed_result.http.append(MACOModel.Http(hostname=address, usage="c2")) - - if "directory" in raw_config: - parsed_result.paths.append(MACOModel.Path(path=raw_config["directory"], usage="install")) - - service = {} - - if "servicename" in raw_config: - service["name"] = raw_config["servicename"] - if "servicedisplayname" in raw_config: - service["display_name"] = raw_config["servicedisplayname"] - if "servicedescription" in raw_config: - service["description"] = raw_config["servicedescription"] - if "filename" in raw_config: - service["dll"] = raw_config["filename"] - - if service: - parsed_result.service.append(MACOModel.Service(**service)) - - return parsed_result - - -class RCSession(Extractor): - author = "kevoreilly" - family = "RCSession" - last_modified = "2024-10-26" - sharing = "TLP:CLEAR" - yara_rule = rule_source - - def run(self, stream, matches): - return convert_to_MACO(extract_config(stream.read())) diff --git a/modules/processing/parsers/MACO/REvil.py b/modules/processing/parsers/MACO/REvil.py deleted file mode 100644 index f05f9196733..00000000000 --- a/modules/processing/parsers/MACO/REvil.py +++ /dev/null @@ -1,23 +0,0 @@ -from 
maco.extractor import Extractor -from maco.model import ExtractorModel as MACOModel - -from modules.processing.parsers.CAPE.REvil import extract_config - - -def convert_to_MACO(raw_config: dict): - if not raw_config: - return None - - parsed_result = MACOModel(family="REvil", other=raw_config) - - return parsed_result - - -class REvil(Extractor): - author = "kevoreilly" - family = "REvil" - last_modified = "2024-10-26" - sharing = "TLP:CLEAR" - - def run(self, stream, matches): - return convert_to_MACO(extract_config(stream.read())) diff --git a/modules/processing/parsers/MACO/RedLeaf.py b/modules/processing/parsers/MACO/RedLeaf.py deleted file mode 100644 index 22038c489ab..00000000000 --- a/modules/processing/parsers/MACO/RedLeaf.py +++ /dev/null @@ -1,36 +0,0 @@ -from maco.extractor import Extractor -from maco.model import ExtractorModel as MACOModel - -from modules.processing.parsers.CAPE.RedLeaf import extract_config, rule_source - - -def convert_to_MACO(raw_config: dict): - if not raw_config: - return None - - parsed_result = MACOModel(family="RedLeaf", other=raw_config) - - for address in raw_config.get("c2_address", []): - parsed_result.http.append(MACOModel.Http(hostname=address, usage="c2")) - - if "missionid" in raw_config: - parsed_result.campaign_id.append(raw_config["missionid"]) - - if "mutex" in raw_config: - parsed_result.mutex.append(raw_config["mutex"]) - - if "key" in raw_config: - parsed_result.other["key"] = raw_config["key"] - - return parsed_result - - -class RedLeaf(Extractor): - author = "kevoreilly" - family = "RedLeaf" - last_modified = "2024-10-26" - sharing = "TLP:CLEAR" - yara_rule = rule_source - - def run(self, stream, matches): - return convert_to_MACO(extract_config(stream.read())) diff --git a/modules/processing/parsers/MACO/RedLine.py b/modules/processing/parsers/MACO/RedLine.py deleted file mode 100644 index 3db57707287..00000000000 --- a/modules/processing/parsers/MACO/RedLine.py +++ /dev/null @@ -1,27 +0,0 @@ -from maco.extractor import Extractor -from maco.model import ExtractorModel as MACOModel - -from modules.processing.parsers.CAPE.RedLine import extract_config - - -def convert_to_MACO(raw_config: dict): - if not raw_config: - return None - - parsed_result = MACOModel(family="RedLine", other=raw_config) - - if "C2" in raw_config: - host, port = raw_config["C2"].split(":") - parsed_result.http.append(MACOModel.Http(hostname=host, port=port, usage="c2")) - - return parsed_result - - -class RedLine(Extractor): - author = "kevoreilly" - family = "RedLine" - last_modified = "2024-10-26" - sharing = "TLP:CLEAR" - - def run(self, stream, matches): - return convert_to_MACO(extract_config(stream.read())) diff --git a/modules/processing/parsers/MACO/Remcos.py b/modules/processing/parsers/MACO/Remcos.py deleted file mode 100644 index 739dd52b54f..00000000000 --- a/modules/processing/parsers/MACO/Remcos.py +++ /dev/null @@ -1,26 +0,0 @@ -import os - -from maco.extractor import Extractor -from maco.model import ExtractorModel as MACOModel - -from modules.processing.parsers.CAPE.Remcos import extract_config - - -def convert_to_MACO(raw_config: dict): - if not raw_config: - return None - - parsed_result = MACOModel(family="Remcos", other=raw_config) - - return parsed_result - - -class Remcos(Extractor): - author = "kevoreilly" - family = "Remcos" - last_modified = "2024-10-26" - sharing = "TLP:CLEAR" - yara_rule = open(os.path.join(os.path.dirname(__file__).split("/modules", 1)[0], f"data/yara/CAPE/{family}.yar")).read() - - def run(self, stream, matches): - 
return convert_to_MACO(extract_config(stream.read())) diff --git a/modules/processing/parsers/MACO/Retefe.py b/modules/processing/parsers/MACO/Retefe.py deleted file mode 100644 index 119d0af7c4c..00000000000 --- a/modules/processing/parsers/MACO/Retefe.py +++ /dev/null @@ -1,24 +0,0 @@ -from maco.extractor import Extractor -from maco.model import ExtractorModel as MACOModel - -from modules.processing.parsers.CAPE.Retefe import extract_config, rule_source - - -def convert_to_MACO(raw_config: dict): - if not raw_config: - return None - - parsed_result = MACOModel(family="Retefe", other=raw_config) - - return parsed_result - - -class Retefe(Extractor): - author = "kevoreilly" - family = "Retefe" - last_modified = "2024-10-26" - sharing = "TLP:CLEAR" - yara_rule = rule_source - - def run(self, stream, matches): - return convert_to_MACO(extract_config(stream.read())) diff --git a/modules/processing/parsers/MACO/Rhadamanthys.py b/modules/processing/parsers/MACO/Rhadamanthys.py deleted file mode 100644 index d98b140a08e..00000000000 --- a/modules/processing/parsers/MACO/Rhadamanthys.py +++ /dev/null @@ -1,27 +0,0 @@ -import os - -from maco.extractor import Extractor -from maco.model import ExtractorModel as MACOModel - -from modules.processing.parsers.CAPE.Rhadamanthys import extract_config - - -def convert_to_MACO(raw_config: dict): - if not raw_config: - return None - - parsed_result = MACOModel(family="Rhadamanthys", other=raw_config) - parsed_result.http = [MACOModel.Http(hostname=raw_config["C2"], usage="c2")] - - return parsed_result - - -class Rhadamanthys(Extractor): - author = "kevoreilly" - family = "Rhadamanthys" - last_modified = "2024-10-26" - sharing = "TLP:CLEAR" - yara_rule = open(os.path.join(os.path.dirname(__file__).split("/modules", 1)[0], f"data/yara/CAPE/{family}.yar")).read() - - def run(self, stream, matches): - return convert_to_MACO(extract_config(stream.read())) diff --git a/modules/processing/parsers/MACO/Rozena.py b/modules/processing/parsers/MACO/Rozena.py deleted file mode 100644 index ba019d79cd9..00000000000 --- a/modules/processing/parsers/MACO/Rozena.py +++ /dev/null @@ -1,27 +0,0 @@ -import os - -from maco.extractor import Extractor -from maco.model import ExtractorModel as MACOModel - -from modules.processing.parsers.CAPE.Rozena import extract_config - - -def convert_to_MACO(raw_config: dict): - if not raw_config: - return None - - parsed_result = MACOModel(family="Rozena", other=raw_config) - parsed_result.http = [MACOModel.Http(hostname=raw_config["C2"], port=raw_config["Port"], usage="c2")] - - return parsed_result - - -class Rozena(Extractor): - author = "kevoreilly" - family = "Rozena" - last_modified = "2024-10-26" - sharing = "TLP:CLEAR" - yara_rule = open(os.path.join(os.path.dirname(__file__).split("/modules", 1)[0], f"data/yara/CAPE/{family}.yar")).read() - - def run(self, stream, matches): - return convert_to_MACO(extract_config(stream.read())) diff --git a/modules/processing/parsers/MACO/SmallNet.py b/modules/processing/parsers/MACO/SmallNet.py deleted file mode 100644 index 5b81de3f3af..00000000000 --- a/modules/processing/parsers/MACO/SmallNet.py +++ /dev/null @@ -1,26 +0,0 @@ -from maco.extractor import Extractor -from maco.model import ExtractorModel as MACOModel - -from modules.processing.parsers.CAPE.SmallNet import extract_config - - -def convert_to_MACO(raw_config: dict): - if not raw_config: - return None - - parsed_result = MACOModel(family="SmallNet", other=raw_config) - - return parsed_result - - -class SmallNet(Extractor): - author = 
"kevoreilly" - family = "SmallNet" - last_modified = "2024-10-26" - sharing = "TLP:CLEAR" - - def run(self, stream, matches): - output = extract_config(stream.read()) - if output: - config = output if isinstance(output, dict) else output[0] - return convert_to_MACO(config) diff --git a/modules/processing/parsers/MACO/SmokeLoader.py b/modules/processing/parsers/MACO/SmokeLoader.py deleted file mode 100644 index ba61c9c08de..00000000000 --- a/modules/processing/parsers/MACO/SmokeLoader.py +++ /dev/null @@ -1,26 +0,0 @@ -from maco.extractor import Extractor -from maco.model import ExtractorModel as MACOModel - -from modules.processing.parsers.CAPE.SmokeLoader import extract_config, rule_source - - -def convert_to_MACO(raw_config: dict): - if not raw_config: - return None - - parsed_result = MACOModel( - family="SmokeLoader", other=raw_config, http=[MACOModel.Http(uri=c2, usage="c2") for c2 in raw_config["C2s"]] - ) - - return parsed_result - - -class SmokeLoader(Extractor): - author = "kevoreilly" - family = "SmokeLoader" - last_modified = "2024-10-26" - sharing = "TLP:CLEAR" - yara_rule = rule_source - - def run(self, stream, matches): - return convert_to_MACO(extract_config(stream.read())) diff --git a/modules/processing/parsers/MACO/Socks5Systemz.py b/modules/processing/parsers/MACO/Socks5Systemz.py deleted file mode 100644 index 9e6e2ab93a9..00000000000 --- a/modules/processing/parsers/MACO/Socks5Systemz.py +++ /dev/null @@ -1,31 +0,0 @@ -import os - -from maco.extractor import Extractor -from maco.model import ExtractorModel as MACOModel - -from modules.processing.parsers.CAPE.Socks5Systemz import extract_config - - -def convert_to_MACO(raw_config: dict): - if not raw_config: - return None - - parsed_result = MACOModel( - family="Socks5Systemz", - other=raw_config, - http=[MACOModel.Http(hostname=c2, usage="c2") for c2 in raw_config.get("C2s", [])] - + [MACOModel.Http(hostname=decoy, usage="decoy") for decoy in raw_config.get("Dummy domain", [])], - ) - - return parsed_result - - -class Socks5Systemz(Extractor): - author = "kevoreilly" - family = "Socks5Systemz" - last_modified = "2024-10-26" - sharing = "TLP:CLEAR" - yara_rule = open(os.path.join(os.path.dirname(__file__).split("/modules", 1)[0], f"data/yara/CAPE/{family}.yar")).read() - - def run(self, stream, matches): - return convert_to_MACO(extract_config(stream.read())) diff --git a/modules/processing/parsers/MACO/SparkRAT.py b/modules/processing/parsers/MACO/SparkRAT.py deleted file mode 100644 index deae637bd99..00000000000 --- a/modules/processing/parsers/MACO/SparkRAT.py +++ /dev/null @@ -1,34 +0,0 @@ -import os - -from maco.extractor import Extractor -from maco.model import ExtractorModel as MACOModel - -from modules.processing.parsers.CAPE.SparkRAT import extract_config - - -def convert_to_MACO(raw_config: dict): - if not raw_config: - return None - - parsed_result = MACOModel(family="SparkRAT", other=raw_config) - - url = f"http{'s' if raw_config['secure'] else ''}://{raw_config['host']}:{raw_config['port']}{raw_config['path']}" - - parsed_result.http.append( - MACOModel.Http(uri=url, hostname=raw_config["host"], port=raw_config["port"], path=raw_config["path"]) - ) - - parsed_result.identifier.append(raw_config["uuid"]) - - return parsed_result - - -class SparkRAT(Extractor): - author = "kevoreilly" - family = "SparkRAT" - last_modified = "2024-10-26" - sharing = "TLP:CLEAR" - yara_rule = open(os.path.join(os.path.dirname(__file__).split("/modules", 1)[0], f"data/yara/CAPE/{family}.yar")).read() - - def run(self, stream, 
matches): - return convert_to_MACO(extract_config(stream.read())) diff --git a/modules/processing/parsers/MACO/SquirrelWaffle.py b/modules/processing/parsers/MACO/SquirrelWaffle.py deleted file mode 100644 index 0790a7b6653..00000000000 --- a/modules/processing/parsers/MACO/SquirrelWaffle.py +++ /dev/null @@ -1,26 +0,0 @@ -from maco.extractor import Extractor -from maco.model import ExtractorModel as MACOModel - -from modules.processing.parsers.CAPE.SquirrelWaffle import extract_config, rule_source - - -def convert_to_MACO(raw_config: dict): - if not raw_config: - return None - - parsed_result = MACOModel( - family="SquirrelWaffle", other=raw_config, http=[MACOModel.Http(uri=c2, usage="c2") for c2 in raw_config["URLs"]] - ) - - return parsed_result - - -class SquirrelWaffle(Extractor): - author = "kevoreilly" - family = "SquirrelWaffle" - last_modified = "2024-10-26" - sharing = "TLP:CLEAR" - yara_rule = rule_source - - def run(self, stream, matches): - return convert_to_MACO(extract_config(stream.read())) diff --git a/modules/processing/parsers/MACO/Stealc.py b/modules/processing/parsers/MACO/Stealc.py deleted file mode 100644 index 9cd38a935b7..00000000000 --- a/modules/processing/parsers/MACO/Stealc.py +++ /dev/null @@ -1,26 +0,0 @@ -from maco.extractor import Extractor -from maco.model import ExtractorModel as MACOModel - -from modules.processing.parsers.CAPE.Stealc import RULE_SOURCE, extract_config - - -def convert_to_MACO(raw_config: dict): - if not raw_config: - return None - - parsed_result = MACOModel( - family="Stealc", other=raw_config, http=[MACOModel.Http(uri=c2, usage="c2") for c2 in raw_config["C2"]] - ) - - return parsed_result - - -class Stealc(Extractor): - author = "kevoreilly" - family = "Stealc" - last_modified = "2024-10-26" - sharing = "TLP:CLEAR" - yara_rule = RULE_SOURCE - - def run(self, stream, matches): - return convert_to_MACO(extract_config(stream.read())) diff --git a/modules/processing/parsers/MACO/Strrat.py b/modules/processing/parsers/MACO/Strrat.py deleted file mode 100644 index 58a5d5f93d9..00000000000 --- a/modules/processing/parsers/MACO/Strrat.py +++ /dev/null @@ -1,23 +0,0 @@ -from maco.extractor import Extractor -from maco.model import ExtractorModel as MACOModel - -from modules.processing.parsers.CAPE.Strrat import extract_config - - -def convert_to_MACO(raw_config: dict): - if not raw_config: - return None - - parsed_result = MACOModel(family="Strrat", other=raw_config) - - return parsed_result - - -class Strrat(Extractor): - author = "kevoreilly" - family = "Strrat" - last_modified = "2024-10-26" - sharing = "TLP:CLEAR" - - def run(self, stream, matches): - return convert_to_MACO(extract_config(stream.read())) diff --git a/modules/processing/parsers/MACO/TSCookie.py b/modules/processing/parsers/MACO/TSCookie.py deleted file mode 100644 index 7344c47e381..00000000000 --- a/modules/processing/parsers/MACO/TSCookie.py +++ /dev/null @@ -1,26 +0,0 @@ -import os - -from maco.extractor import Extractor -from maco.model import ExtractorModel as MACOModel - -from modules.processing.parsers.CAPE.TSCookie import extract_config - - -def convert_to_MACO(raw_config: dict): - if not raw_config: - return None - - parsed_result = MACOModel(family="TSCookie", other=raw_config) - - return parsed_result - - -class TSCookie(Extractor): - author = "kevoreilly" - family = "TSCookie" - last_modified = "2024-10-26" - sharing = "TLP:CLEAR" - yara_rule = open(os.path.join(os.path.dirname(__file__).split("/modules", 1)[0], f"data/yara/CAPE/{family}.yar")).read() - - def 
run(self, stream, matches): - return convert_to_MACO(extract_config(stream.read())) diff --git a/modules/processing/parsers/MACO/TrickBot.py b/modules/processing/parsers/MACO/TrickBot.py deleted file mode 100644 index 5962d7b46d7..00000000000 --- a/modules/processing/parsers/MACO/TrickBot.py +++ /dev/null @@ -1,24 +0,0 @@ -from maco.extractor import Extractor -from maco.model import ExtractorModel as MACOModel - -from modules.processing.parsers.CAPE.TrickBot import extract_config, rule_source - - -def convert_to_MACO(raw_config: dict): - if not raw_config: - return None - - parsed_result = MACOModel(family="TrickBot", other=raw_config) - - return parsed_result - - -class TrickBot(Extractor): - author = "kevoreilly" - family = "TrickBot" - last_modified = "2024-10-26" - sharing = "TLP:CLEAR" - yara_rule = rule_source - - def run(self, stream, matches): - return convert_to_MACO(extract_config(stream.read())) diff --git a/modules/processing/parsers/MACO/UrsnifV3.py b/modules/processing/parsers/MACO/UrsnifV3.py deleted file mode 100644 index 2e8caefbdb0..00000000000 --- a/modules/processing/parsers/MACO/UrsnifV3.py +++ /dev/null @@ -1,26 +0,0 @@ -import os - -from maco.extractor import Extractor -from maco.model import ExtractorModel as MACOModel - -from modules.processing.parsers.CAPE.UrsnifV3 import extract_config - - -def convert_to_MACO(raw_config: dict): - if not raw_config: - return None - - parsed_result = MACOModel(family="UrsnifV3", other=raw_config) - - return parsed_result - - -class UrsnifV3(Extractor): - author = "kevoreilly" - family = "UrsnifV3" - last_modified = "2024-10-26" - sharing = "TLP:CLEAR" - yara_rule = open(os.path.join(os.path.dirname(__file__).split("/modules", 1)[0], f"data/yara/CAPE/{family}.yar")).read() - - def run(self, stream, matches): - return convert_to_MACO(extract_config(stream.read())) diff --git a/modules/processing/parsers/MACO/VenomRat.py b/modules/processing/parsers/MACO/VenomRat.py deleted file mode 100644 index de2f70ddd85..00000000000 --- a/modules/processing/parsers/MACO/VenomRat.py +++ /dev/null @@ -1,23 +0,0 @@ -from maco.extractor import Extractor -from maco.model import ExtractorModel as MACOModel - -from modules.processing.parsers.CAPE.VenomRAT import extract_config - - -def convert_to_MACO(raw_config: dict): - if not raw_config: - return None - - parsed_result = MACOModel(family="VenomRAT", other=raw_config) - - return parsed_result - - -class VenomRAT(Extractor): - author = "kevoreilly" - family = "VenomRAT" - last_modified = "2024-10-26" - sharing = "TLP:CLEAR" - - def run(self, stream, matches): - return convert_to_MACO(extract_config(stream.read())) diff --git a/modules/processing/parsers/MACO/WarzoneRAT.py b/modules/processing/parsers/MACO/WarzoneRAT.py deleted file mode 100644 index 186ed365448..00000000000 --- a/modules/processing/parsers/MACO/WarzoneRAT.py +++ /dev/null @@ -1,27 +0,0 @@ -from maco.extractor import Extractor -from maco.model import ExtractorModel as MACOModel - -from modules.processing.parsers.CAPE.WarzoneRAT import extract_config - - -def convert_to_MACO(raw_config: dict): - if not raw_config: - return None - - parsed_result = MACOModel(family="WarzoneRAT", other=raw_config) - - if "C2" in raw_config: - host, port = raw_config["C2"].split(":") - parsed_result.http.append(MACOModel.Http(hostname=host, port=port, usage="c2")) - - return parsed_result - - -class WarzoneRAT(Extractor): - author = "kevoreilly" - family = "WarzoneRAT" - last_modified = "2024-10-26" - sharing = "TLP:CLEAR" - - def run(self, stream, 
matches):
-        return convert_to_MACO(extract_config(stream.read()))
diff --git a/modules/processing/parsers/MACO/XWorm.py b/modules/processing/parsers/MACO/XWorm.py
deleted file mode 100644
index 8d81f728c21..00000000000
--- a/modules/processing/parsers/MACO/XWorm.py
+++ /dev/null
@@ -1,26 +0,0 @@
-import os
-
-from maco.extractor import Extractor
-from maco.model import ExtractorModel as MACOModel
-
-from modules.processing.parsers.CAPE.XWorm import extract_config
-
-
-def convert_to_MACO(raw_config: dict):
-    if not raw_config:
-        return None
-
-    parsed_result = MACOModel(family="XWorm", other=raw_config)
-
-    return parsed_result
-
-
-class XWorm(Extractor):
-    author = "kevoreilly"
-    family = "XWorm"
-    last_modified = "2024-10-26"
-    sharing = "TLP:CLEAR"
-    yara_rule = open(os.path.join(os.path.dirname(__file__).split("/modules", 1)[0], f"data/yara/CAPE/{family}.yar")).read()
-
-    def run(self, stream, matches):
-        return convert_to_MACO(extract_config(stream.read()))
diff --git a/modules/processing/parsers/MACO/XenoRAT.py b/modules/processing/parsers/MACO/XenoRAT.py
deleted file mode 100644
index 31fc541f702..00000000000
--- a/modules/processing/parsers/MACO/XenoRAT.py
+++ /dev/null
@@ -1,26 +0,0 @@
-import os
-
-from maco.extractor import Extractor
-from maco.model import ExtractorModel as MACOModel
-
-from modules.processing.parsers.CAPE.XenoRAT import extract_config
-
-
-def convert_to_MACO(raw_config: dict):
-    if not raw_config:
-        return None
-
-    parsed_result = MACOModel(family="XenoRAT", other=raw_config)
-
-    return parsed_result
-
-
-class XenoRAT(Extractor):
-    author = "kevoreilly"
-    family = "XenoRAT"
-    last_modified = "2024-10-26"
-    sharing = "TLP:CLEAR"
-    yara_rule = open(os.path.join(os.path.dirname(__file__).split("/modules", 1)[0], f"data/yara/CAPE/{family}.yar")).read()
-
-    def run(self, stream, matches):
-        return convert_to_MACO(extract_config(stream.read()))
diff --git a/modules/processing/parsers/MACO/Zloader.py b/modules/processing/parsers/MACO/Zloader.py
deleted file mode 100644
index a6e085f4ce0..00000000000
--- a/modules/processing/parsers/MACO/Zloader.py
+++ /dev/null
@@ -1,33 +0,0 @@
-from maco.extractor import Extractor
-from maco.model import ExtractorModel as MACOModel
-
-from modules.processing.parsers.CAPE.Zloader import extract_config, rule_source
-
-
-def convert_to_MACO(raw_config: dict):
-    if not raw_config:
-        return None
-
-    parsed_result = MACOModel(family="Zloader", other=raw_config)
-
-    if "Campaign ID" in raw_config:
-        parsed_result.campaign_id = [raw_config["Campaign ID"]]
-
-    if "RC4 key" in raw_config:
-        parsed_result.encryption = [MACOModel.Encryption(algorithm="RC4", key=raw_config["RC4 key"])]
-
-    for address in raw_config.get("address", []):
-        parsed_result.http.append(MACOModel.Http(uri=address))
-
-    return parsed_result
-
-
-class Zloader(Extractor):
-    author = "kevoreilly"
-    family = "Zloader"
-    last_modified = "2024-10-26"
-    sharing = "TLP:CLEAR"
-    yara_rule = rule_source
-
-    def run(self, stream, matches):
-        return convert_to_MACO(extract_config(stream.read()))
diff --git a/modules/processing/parsers/MACO/__init__.py b/modules/processing/parsers/MACO/__init__.py
deleted file mode 100644
index f39e5e8d683..00000000000
--- a/modules/processing/parsers/MACO/__init__.py
+++ /dev/null
@@ -1 +0,0 @@
-# Init
diff --git a/modules/processing/parsers/MACO/test_maco.py b/modules/processing/parsers/MACO/test_maco.py
deleted file mode 100644
index d502c95b81c..00000000000
--- a/modules/processing/parsers/MACO/test_maco.py
+++ /dev/null
@@ -1,10 +0,0 @@
-from maco.extractor import Extractor - - -class Test(Extractor): - author = "test" - family = "test" - last_modified = "2024-10-20" - - def run(self, stream, matches): - pass diff --git a/modules/processing/parsers/RATDecoders/__init__.py b/modules/processing/parsers/RATDecoders/__init__.py deleted file mode 100644 index f39e5e8d683..00000000000 --- a/modules/processing/parsers/RATDecoders/__init__.py +++ /dev/null @@ -1 +0,0 @@ -# Init diff --git a/modules/processing/parsers/RATDecoders/test_rats.py b/modules/processing/parsers/RATDecoders/test_rats.py deleted file mode 100644 index c75b0e1eacf..00000000000 --- a/modules/processing/parsers/RATDecoders/test_rats.py +++ /dev/null @@ -1,13 +0,0 @@ -from malwareconfig.common import Decoder - -# https://youtu.be/C_ijc7A5oAc?list=OLAK5uy_kGTSX7lmPmKwIVzgFLqd0x3dSF6HQhE-I - - -class TEST_RATS(Decoder): - decoder_name = "TestRats" - decoder__version = 1 - decoder_author = "doomedraven" - decoder_description = "Test module to ensure that framework loads properly." - - def __init__(self): - pass diff --git a/modules/processing/parsers/__init__.py b/modules/processing/parsers/__init__.py deleted file mode 100644 index f39e5e8d683..00000000000 --- a/modules/processing/parsers/__init__.py +++ /dev/null @@ -1 +0,0 @@ -# Init diff --git a/modules/processing/parsers/malduck/LICENSE b/modules/processing/parsers/malduck/LICENSE deleted file mode 100644 index f288702d2fa..00000000000 --- a/modules/processing/parsers/malduck/LICENSE +++ /dev/null @@ -1,674 +0,0 @@ - GNU GENERAL PUBLIC LICENSE - Version 3, 29 June 2007 - - Copyright (C) 2007 Free Software Foundation, Inc. - Everyone is permitted to copy and distribute verbatim copies - of this license document, but changing it is not allowed. - - Preamble - - The GNU General Public License is a free, copyleft license for -software and other kinds of works. - - The licenses for most software and other practical works are designed -to take away your freedom to share and change the works. By contrast, -the GNU General Public License is intended to guarantee your freedom to -share and change all versions of a program--to make sure it remains free -software for all its users. We, the Free Software Foundation, use the -GNU General Public License for most of our software; it applies also to -any other work released this way by its authors. You can apply it to -your programs, too. - - When we speak of free software, we are referring to freedom, not -price. Our General Public Licenses are designed to make sure that you -have the freedom to distribute copies of free software (and charge for -them if you wish), that you receive source code or can get it if you -want it, that you can change the software or use pieces of it in new -free programs, and that you know you can do these things. - - To protect your rights, we need to prevent others from denying you -these rights or asking you to surrender the rights. Therefore, you have -certain responsibilities if you distribute copies of the software, or if -you modify it: responsibilities to respect the freedom of others. - - For example, if you distribute copies of such a program, whether -gratis or for a fee, you must pass on to the recipients the same -freedoms that you received. You must make sure that they, too, receive -or can get the source code. And you must show them these terms so they -know their rights. 
- - Developers that use the GNU GPL protect your rights with two steps: -(1) assert copyright on the software, and (2) offer you this License -giving you legal permission to copy, distribute and/or modify it. - - For the developers' and authors' protection, the GPL clearly explains -that there is no warranty for this free software. For both users' and -authors' sake, the GPL requires that modified versions be marked as -changed, so that their problems will not be attributed erroneously to -authors of previous versions. - - Some devices are designed to deny users access to install or run -modified versions of the software inside them, although the manufacturer -can do so. This is fundamentally incompatible with the aim of -protecting users' freedom to change the software. The systematic -pattern of such abuse occurs in the area of products for individuals to -use, which is precisely where it is most unacceptable. Therefore, we -have designed this version of the GPL to prohibit the practice for those -products. If such problems arise substantially in other domains, we -stand ready to extend this provision to those domains in future versions -of the GPL, as needed to protect the freedom of users. - - Finally, every program is threatened constantly by software patents. -States should not allow patents to restrict development and use of -software on general-purpose computers, but in those that do, we wish to -avoid the special danger that patents applied to a free program could -make it effectively proprietary. To prevent this, the GPL assures that -patents cannot be used to render the program non-free. - - The precise terms and conditions for copying, distribution and -modification follow. - - TERMS AND CONDITIONS - - 0. Definitions. - - "This License" refers to version 3 of the GNU General Public License. - - "Copyright" also means copyright-like laws that apply to other kinds of -works, such as semiconductor masks. - - "The Program" refers to any copyrightable work licensed under this -License. Each licensee is addressed as "you". "Licensees" and -"recipients" may be individuals or organizations. - - To "modify" a work means to copy from or adapt all or part of the work -in a fashion requiring copyright permission, other than the making of an -exact copy. The resulting work is called a "modified version" of the -earlier work or a work "based on" the earlier work. - - A "covered work" means either the unmodified Program or a work based -on the Program. - - To "propagate" a work means to do anything with it that, without -permission, would make you directly or secondarily liable for -infringement under applicable copyright law, except executing it on a -computer or modifying a private copy. Propagation includes copying, -distribution (with or without modification), making available to the -public, and in some countries other activities as well. - - To "convey" a work means any kind of propagation that enables other -parties to make or receive copies. Mere interaction with a user through -a computer network, with no transfer of a copy, is not conveying. - - An interactive user interface displays "Appropriate Legal Notices" -to the extent that it includes a convenient and prominently visible -feature that (1) displays an appropriate copyright notice, and (2) -tells the user that there is no warranty for the work (except to the -extent that warranties are provided), that licensees may convey the -work under this License, and how to view a copy of this License. 
If -the interface presents a list of user commands or options, such as a -menu, a prominent item in the list meets this criterion. - - 1. Source Code. - - The "source code" for a work means the preferred form of the work -for making modifications to it. "Object code" means any non-source -form of a work. - - A "Standard Interface" means an interface that either is an official -standard defined by a recognized standards body, or, in the case of -interfaces specified for a particular programming language, one that -is widely used among developers working in that language. - - The "System Libraries" of an executable work include anything, other -than the work as a whole, that (a) is included in the normal form of -packaging a Major Component, but which is not part of that Major -Component, and (b) serves only to enable use of the work with that -Major Component, or to implement a Standard Interface for which an -implementation is available to the public in source code form. A -"Major Component", in this context, means a major essential component -(kernel, window system, and so on) of the specific operating system -(if any) on which the executable work runs, or a compiler used to -produce the work, or an object code interpreter used to run it. - - The "Corresponding Source" for a work in object code form means all -the source code needed to generate, install, and (for an executable -work) run the object code and to modify the work, including scripts to -control those activities. However, it does not include the work's -System Libraries, or general-purpose tools or generally available free -programs which are used unmodified in performing those activities but -which are not part of the work. For example, Corresponding Source -includes interface definition files associated with source files for -the work, and the source code for shared libraries and dynamically -linked subprograms that the work is specifically designed to require, -such as by intimate data communication or control flow between those -subprograms and other parts of the work. - - The Corresponding Source need not include anything that users -can regenerate automatically from other parts of the Corresponding -Source. - - The Corresponding Source for a work in source code form is that -same work. - - 2. Basic Permissions. - - All rights granted under this License are granted for the term of -copyright on the Program, and are irrevocable provided the stated -conditions are met. This License explicitly affirms your unlimited -permission to run the unmodified Program. The output from running a -covered work is covered by this License only if the output, given its -content, constitutes a covered work. This License acknowledges your -rights of fair use or other equivalent, as provided by copyright law. - - You may make, run and propagate covered works that you do not -convey, without conditions so long as your license otherwise remains -in force. You may convey covered works to others for the sole purpose -of having them make modifications exclusively for you, or provide you -with facilities for running those works, provided that you comply with -the terms of this License in conveying all material for which you do -not control copyright. Those thus making or running the covered works -for you must do so exclusively on your behalf, under your direction -and control, on terms that prohibit them from making any copies of -your copyrighted material outside their relationship with you. 
- - Conveying under any other circumstances is permitted solely under -the conditions stated below. Sublicensing is not allowed; section 10 -makes it unnecessary. - - 3. Protecting Users' Legal Rights From Anti-Circumvention Law. - - No covered work shall be deemed part of an effective technological -measure under any applicable law fulfilling obligations under article -11 of the WIPO copyright treaty adopted on 20 December 1996, or -similar laws prohibiting or restricting circumvention of such -measures. - - When you convey a covered work, you waive any legal power to forbid -circumvention of technological measures to the extent such circumvention -is effected by exercising rights under this License with respect to -the covered work, and you disclaim any intention to limit operation or -modification of the work as a means of enforcing, against the work's -users, your or third parties' legal rights to forbid circumvention of -technological measures. - - 4. Conveying Verbatim Copies. - - You may convey verbatim copies of the Program's source code as you -receive it, in any medium, provided that you conspicuously and -appropriately publish on each copy an appropriate copyright notice; -keep intact all notices stating that this License and any -non-permissive terms added in accord with section 7 apply to the code; -keep intact all notices of the absence of any warranty; and give all -recipients a copy of this License along with the Program. - - You may charge any price or no price for each copy that you convey, -and you may offer support or warranty protection for a fee. - - 5. Conveying Modified Source Versions. - - You may convey a work based on the Program, or the modifications to -produce it from the Program, in the form of source code under the -terms of section 4, provided that you also meet all of these conditions: - - a) The work must carry prominent notices stating that you modified - it, and giving a relevant date. - - b) The work must carry prominent notices stating that it is - released under this License and any conditions added under section - 7. This requirement modifies the requirement in section 4 to - "keep intact all notices". - - c) You must license the entire work, as a whole, under this - License to anyone who comes into possession of a copy. This - License will therefore apply, along with any applicable section 7 - additional terms, to the whole of the work, and all its parts, - regardless of how they are packaged. This License gives no - permission to license the work in any other way, but it does not - invalidate such permission if you have separately received it. - - d) If the work has interactive user interfaces, each must display - Appropriate Legal Notices; however, if the Program has interactive - interfaces that do not display Appropriate Legal Notices, your - work need not make them do so. - - A compilation of a covered work with other separate and independent -works, which are not by their nature extensions of the covered work, -and which are not combined with it such as to form a larger program, -in or on a volume of a storage or distribution medium, is called an -"aggregate" if the compilation and its resulting copyright are not -used to limit the access or legal rights of the compilation's users -beyond what the individual works permit. Inclusion of a covered work -in an aggregate does not cause this License to apply to the other -parts of the aggregate. - - 6. Conveying Non-Source Forms. 
- - You may convey a covered work in object code form under the terms -of sections 4 and 5, provided that you also convey the -machine-readable Corresponding Source under the terms of this License, -in one of these ways: - - a) Convey the object code in, or embodied in, a physical product - (including a physical distribution medium), accompanied by the - Corresponding Source fixed on a durable physical medium - customarily used for software interchange. - - b) Convey the object code in, or embodied in, a physical product - (including a physical distribution medium), accompanied by a - written offer, valid for at least three years and valid for as - long as you offer spare parts or customer support for that product - model, to give anyone who possesses the object code either (1) a - copy of the Corresponding Source for all the software in the - product that is covered by this License, on a durable physical - medium customarily used for software interchange, for a price no - more than your reasonable cost of physically performing this - conveying of source, or (2) access to copy the - Corresponding Source from a network server at no charge. - - c) Convey individual copies of the object code with a copy of the - written offer to provide the Corresponding Source. This - alternative is allowed only occasionally and noncommercially, and - only if you received the object code with such an offer, in accord - with subsection 6b. - - d) Convey the object code by offering access from a designated - place (gratis or for a charge), and offer equivalent access to the - Corresponding Source in the same way through the same place at no - further charge. You need not require recipients to copy the - Corresponding Source along with the object code. If the place to - copy the object code is a network server, the Corresponding Source - may be on a different server (operated by you or a third party) - that supports equivalent copying facilities, provided you maintain - clear directions next to the object code saying where to find the - Corresponding Source. Regardless of what server hosts the - Corresponding Source, you remain obligated to ensure that it is - available for as long as needed to satisfy these requirements. - - e) Convey the object code using peer-to-peer transmission, provided - you inform other peers where the object code and Corresponding - Source of the work are being offered to the general public at no - charge under subsection 6d. - - A separable portion of the object code, whose source code is excluded -from the Corresponding Source as a System Library, need not be -included in conveying the object code work. - - A "User Product" is either (1) a "consumer product", which means any -tangible personal property which is normally used for personal, family, -or household purposes, or (2) anything designed or sold for incorporation -into a dwelling. In determining whether a product is a consumer product, -doubtful cases shall be resolved in favor of coverage. For a particular -product received by a particular user, "normally used" refers to a -typical or common use of that class of product, regardless of the status -of the particular user or of the way in which the particular user -actually uses, or expects or is expected to use, the product. A product -is a consumer product regardless of whether the product has substantial -commercial, industrial or non-consumer uses, unless such uses represent -the only significant mode of use of the product. 
- - "Installation Information" for a User Product means any methods, -procedures, authorization keys, or other information required to install -and execute modified versions of a covered work in that User Product from -a modified version of its Corresponding Source. The information must -suffice to ensure that the continued functioning of the modified object -code is in no case prevented or interfered with solely because -modification has been made. - - If you convey an object code work under this section in, or with, or -specifically for use in, a User Product, and the conveying occurs as -part of a transaction in which the right of possession and use of the -User Product is transferred to the recipient in perpetuity or for a -fixed term (regardless of how the transaction is characterized), the -Corresponding Source conveyed under this section must be accompanied -by the Installation Information. But this requirement does not apply -if neither you nor any third party retains the ability to install -modified object code on the User Product (for example, the work has -been installed in ROM). - - The requirement to provide Installation Information does not include a -requirement to continue to provide support service, warranty, or updates -for a work that has been modified or installed by the recipient, or for -the User Product in which it has been modified or installed. Access to a -network may be denied when the modification itself materially and -adversely affects the operation of the network or violates the rules and -protocols for communication across the network. - - Corresponding Source conveyed, and Installation Information provided, -in accord with this section must be in a format that is publicly -documented (and with an implementation available to the public in -source code form), and must require no special password or key for -unpacking, reading or copying. - - 7. Additional Terms. - - "Additional permissions" are terms that supplement the terms of this -License by making exceptions from one or more of its conditions. -Additional permissions that are applicable to the entire Program shall -be treated as though they were included in this License, to the extent -that they are valid under applicable law. If additional permissions -apply only to part of the Program, that part may be used separately -under those permissions, but the entire Program remains governed by -this License without regard to the additional permissions. - - When you convey a copy of a covered work, you may at your option -remove any additional permissions from that copy, or from any part of -it. (Additional permissions may be written to require their own -removal in certain cases when you modify the work.) You may place -additional permissions on material, added by you to a covered work, -for which you have or can give appropriate copyright permission. 
- - Notwithstanding any other provision of this License, for material you -add to a covered work, you may (if authorized by the copyright holders of -that material) supplement the terms of this License with terms: - - a) Disclaiming warranty or limiting liability differently from the - terms of sections 15 and 16 of this License; or - - b) Requiring preservation of specified reasonable legal notices or - author attributions in that material or in the Appropriate Legal - Notices displayed by works containing it; or - - c) Prohibiting misrepresentation of the origin of that material, or - requiring that modified versions of such material be marked in - reasonable ways as different from the original version; or - - d) Limiting the use for publicity purposes of names of licensors or - authors of the material; or - - e) Declining to grant rights under trademark law for use of some - trade names, trademarks, or service marks; or - - f) Requiring indemnification of licensors and authors of that - material by anyone who conveys the material (or modified versions of - it) with contractual assumptions of liability to the recipient, for - any liability that these contractual assumptions directly impose on - those licensors and authors. - - All other non-permissive additional terms are considered "further -restrictions" within the meaning of section 10. If the Program as you -received it, or any part of it, contains a notice stating that it is -governed by this License along with a term that is a further -restriction, you may remove that term. If a license document contains -a further restriction but permits relicensing or conveying under this -License, you may add to a covered work material governed by the terms -of that license document, provided that the further restriction does -not survive such relicensing or conveying. - - If you add terms to a covered work in accord with this section, you -must place, in the relevant source files, a statement of the -additional terms that apply to those files, or a notice indicating -where to find the applicable terms. - - Additional terms, permissive or non-permissive, may be stated in the -form of a separately written license, or stated as exceptions; -the above requirements apply either way. - - 8. Termination. - - You may not propagate or modify a covered work except as expressly -provided under this License. Any attempt otherwise to propagate or -modify it is void, and will automatically terminate your rights under -this License (including any patent licenses granted under the third -paragraph of section 11). - - However, if you cease all violation of this License, then your -license from a particular copyright holder is reinstated (a) -provisionally, unless and until the copyright holder explicitly and -finally terminates your license, and (b) permanently, if the copyright -holder fails to notify you of the violation by some reasonable means -prior to 60 days after the cessation. - - Moreover, your license from a particular copyright holder is -reinstated permanently if the copyright holder notifies you of the -violation by some reasonable means, this is the first time you have -received notice of violation of this License (for any work) from that -copyright holder, and you cure the violation prior to 30 days after -your receipt of the notice. - - Termination of your rights under this section does not terminate the -licenses of parties who have received copies or rights from you under -this License. 
If your rights have been terminated and not permanently -reinstated, you do not qualify to receive new licenses for the same -material under section 10. - - 9. Acceptance Not Required for Having Copies. - - You are not required to accept this License in order to receive or -run a copy of the Program. Ancillary propagation of a covered work -occurring solely as a consequence of using peer-to-peer transmission -to receive a copy likewise does not require acceptance. However, -nothing other than this License grants you permission to propagate or -modify any covered work. These actions infringe copyright if you do -not accept this License. Therefore, by modifying or propagating a -covered work, you indicate your acceptance of this License to do so. - - 10. Automatic Licensing of Downstream Recipients. - - Each time you convey a covered work, the recipient automatically -receives a license from the original licensors, to run, modify and -propagate that work, subject to this License. You are not responsible -for enforcing compliance by third parties with this License. - - An "entity transaction" is a transaction transferring control of an -organization, or substantially all assets of one, or subdividing an -organization, or merging organizations. If propagation of a covered -work results from an entity transaction, each party to that -transaction who receives a copy of the work also receives whatever -licenses to the work the party's predecessor in interest had or could -give under the previous paragraph, plus a right to possession of the -Corresponding Source of the work from the predecessor in interest, if -the predecessor has it or can get it with reasonable efforts. - - You may not impose any further restrictions on the exercise of the -rights granted or affirmed under this License. For example, you may -not impose a license fee, royalty, or other charge for exercise of -rights granted under this License, and you may not initiate litigation -(including a cross-claim or counterclaim in a lawsuit) alleging that -any patent claim is infringed by making, using, selling, offering for -sale, or importing the Program or any portion of it. - - 11. Patents. - - A "contributor" is a copyright holder who authorizes use under this -License of the Program or a work on which the Program is based. The -work thus licensed is called the contributor's "contributor version". - - A contributor's "essential patent claims" are all patent claims -owned or controlled by the contributor, whether already acquired or -hereafter acquired, that would be infringed by some manner, permitted -by this License, of making, using, or selling its contributor version, -but do not include claims that would be infringed only as a -consequence of further modification of the contributor version. For -purposes of this definition, "control" includes the right to grant -patent sublicenses in a manner consistent with the requirements of -this License. - - Each contributor grants you a non-exclusive, worldwide, royalty-free -patent license under the contributor's essential patent claims, to -make, use, sell, offer for sale, import and otherwise run, modify and -propagate the contents of its contributor version. - - In the following three paragraphs, a "patent license" is any express -agreement or commitment, however denominated, not to enforce a patent -(such as an express permission to practice a patent or covenant not to -sue for patent infringement). 
To "grant" such a patent license to a -party means to make such an agreement or commitment not to enforce a -patent against the party. - - If you convey a covered work, knowingly relying on a patent license, -and the Corresponding Source of the work is not available for anyone -to copy, free of charge and under the terms of this License, through a -publicly available network server or other readily accessible means, -then you must either (1) cause the Corresponding Source to be so -available, or (2) arrange to deprive yourself of the benefit of the -patent license for this particular work, or (3) arrange, in a manner -consistent with the requirements of this License, to extend the patent -license to downstream recipients. "Knowingly relying" means you have -actual knowledge that, but for the patent license, your conveying the -covered work in a country, or your recipient's use of the covered work -in a country, would infringe one or more identifiable patents in that -country that you have reason to believe are valid. - - If, pursuant to or in connection with a single transaction or -arrangement, you convey, or propagate by procuring conveyance of, a -covered work, and grant a patent license to some of the parties -receiving the covered work authorizing them to use, propagate, modify -or convey a specific copy of the covered work, then the patent license -you grant is automatically extended to all recipients of the covered -work and works based on it. - - A patent license is "discriminatory" if it does not include within -the scope of its coverage, prohibits the exercise of, or is -conditioned on the non-exercise of one or more of the rights that are -specifically granted under this License. You may not convey a covered -work if you are a party to an arrangement with a third party that is -in the business of distributing software, under which you make payment -to the third party based on the extent of your activity of conveying -the work, and under which the third party grants, to any of the -parties who would receive the covered work from you, a discriminatory -patent license (a) in connection with copies of the covered work -conveyed by you (or copies made from those copies), or (b) primarily -for and in connection with specific products or compilations that -contain the covered work, unless you entered into that arrangement, -or that patent license was granted, prior to 28 March 2007. - - Nothing in this License shall be construed as excluding or limiting -any implied license or other defenses to infringement that may -otherwise be available to you under applicable patent law. - - 12. No Surrender of Others' Freedom. - - If conditions are imposed on you (whether by court order, agreement or -otherwise) that contradict the conditions of this License, they do not -excuse you from the conditions of this License. If you cannot convey a -covered work so as to satisfy simultaneously your obligations under this -License and any other pertinent obligations, then as a consequence you may -not convey it at all. For example, if you agree to terms that obligate you -to collect a royalty for further conveying from those to whom you convey -the Program, the only way you could satisfy both those terms and this -License would be to refrain entirely from conveying the Program. - - 13. Use with the GNU Affero General Public License. 
- - Notwithstanding any other provision of this License, you have -permission to link or combine any covered work with a work licensed -under version 3 of the GNU Affero General Public License into a single -combined work, and to convey the resulting work. The terms of this -License will continue to apply to the part which is the covered work, -but the special requirements of the GNU Affero General Public License, -section 13, concerning interaction through a network will apply to the -combination as such. - - 14. Revised Versions of this License. - - The Free Software Foundation may publish revised and/or new versions of -the GNU General Public License from time to time. Such new versions will -be similar in spirit to the present version, but may differ in detail to -address new problems or concerns. - - Each version is given a distinguishing version number. If the -Program specifies that a certain numbered version of the GNU General -Public License "or any later version" applies to it, you have the -option of following the terms and conditions either of that numbered -version or of any later version published by the Free Software -Foundation. If the Program does not specify a version number of the -GNU General Public License, you may choose any version ever published -by the Free Software Foundation. - - If the Program specifies that a proxy can decide which future -versions of the GNU General Public License can be used, that proxy's -public statement of acceptance of a version permanently authorizes you -to choose that version for the Program. - - Later license versions may give you additional or different -permissions. However, no additional obligations are imposed on any -author or copyright holder as a result of your choosing to follow a -later version. - - 15. Disclaimer of Warranty. - - THERE IS NO WARRANTY FOR THE PROGRAM, TO THE EXTENT PERMITTED BY -APPLICABLE LAW. EXCEPT WHEN OTHERWISE STATED IN WRITING THE COPYRIGHT -HOLDERS AND/OR OTHER PARTIES PROVIDE THE PROGRAM "AS IS" WITHOUT WARRANTY -OF ANY KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING, BUT NOT LIMITED TO, -THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR -PURPOSE. THE ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE OF THE PROGRAM -IS WITH YOU. SHOULD THE PROGRAM PROVE DEFECTIVE, YOU ASSUME THE COST OF -ALL NECESSARY SERVICING, REPAIR OR CORRECTION. - - 16. Limitation of Liability. - - IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING -WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MODIFIES AND/OR CONVEYS -THE PROGRAM AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES, INCLUDING ANY -GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING OUT OF THE -USE OR INABILITY TO USE THE PROGRAM (INCLUDING BUT NOT LIMITED TO LOSS OF -DATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY YOU OR THIRD -PARTIES OR A FAILURE OF THE PROGRAM TO OPERATE WITH ANY OTHER PROGRAMS), -EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE POSSIBILITY OF -SUCH DAMAGES. - - 17. Interpretation of Sections 15 and 16. - - If the disclaimer of warranty and limitation of liability provided -above cannot be given local legal effect according to their terms, -reviewing courts shall apply local law that most closely approximates -an absolute waiver of all civil liability in connection with the -Program, unless a warranty or assumption of liability accompanies a -copy of the Program in return for a fee. 
- - END OF TERMS AND CONDITIONS - - How to Apply These Terms to Your New Programs - - If you develop a new program, and you want it to be of the greatest -possible use to the public, the best way to achieve this is to make it -free software which everyone can redistribute and change under these terms. - - To do so, attach the following notices to the program. It is safest -to attach them to the start of each source file to most effectively -state the exclusion of warranty; and each file should have at least -the "copyright" line and a pointer to where the full notice is found. - - <one line to give the program's name and a brief idea of what it does.> - Copyright (C) <year> <name of author> - - This program is free software: you can redistribute it and/or modify - it under the terms of the GNU General Public License as published by - the Free Software Foundation, either version 3 of the License, or - (at your option) any later version. - - This program is distributed in the hope that it will be useful, - but WITHOUT ANY WARRANTY; without even the implied warranty of - MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - GNU General Public License for more details. - - You should have received a copy of the GNU General Public License - along with this program. If not, see <http://www.gnu.org/licenses/>. - -Also add information on how to contact you by electronic and paper mail. - - If the program does terminal interaction, make it output a short -notice like this when it starts in an interactive mode: - - <program> Copyright (C) <year> <name of author> - This program comes with ABSOLUTELY NO WARRANTY; for details type `show w'. - This is free software, and you are welcome to redistribute it - under certain conditions; type `show c' for details. - -The hypothetical commands `show w' and `show c' should show the appropriate -parts of the General Public License. Of course, your program's commands -might be different; for a GUI interface, you would use an "about box". - - You should also get your employer (if you work as a programmer) or school, -if any, to sign a "copyright disclaimer" for the program, if necessary. -For more information on this, and how to apply and follow the GNU GPL, see -<http://www.gnu.org/licenses/>. - - The GNU General Public License does not permit incorporating your program -into proprietary programs. If your program is a subroutine library, you -may consider it more useful to permit linking proprietary applications with -the library. If this is what you want to do, use the GNU Lesser General -Public License instead of this License. But first, please read -<http://www.gnu.org/philosophy/why-not-lgpl.html>. diff --git a/modules/processing/parsers/malduck/README.md b/modules/processing/parsers/malduck/README.md deleted file mode 100644 index 8abda618019..00000000000 --- a/modules/processing/parsers/malduck/README.md +++ /dev/null @@ -1,29 +0,0 @@ -:duck: Malduck -========= - -Malduck is your ducky companion in malware analysis journeys. It is mostly based on [Roach](https://github.com/hatching/roach) project, which derives many concepts from [mlib](https://github.com/mak/mlib) -library created by [Maciej Kotowicz](https://lokalhost.pl). The purpose of fork was to make Roach independent from [Cuckoo Sandbox](https://cuckoosandbox.org/) project, but still supporting its internal `procmem` format. - -Malduck provides many improvements resulting from CERT.pl codebase, making scripts written for malware analysis purposes much shorter and more powerful. - -Improvements -============ - -* Support for (non)memory-mapped PE images without header fix-up.
-* Searching for wildcarded byte sequences -* Support for x64 disassembly -* Fixed-precision integer types -* Many improvements in ProcessMemory - -Usage -========== - -Installing may be performed by running - -``` -pip install malduck -``` - -Usage documentation can be found [on readthedocs](https://malduck.readthedocs.io/en/latest/). - -![Co-financed by the Connecting Europe Facility by of the European Union](https://www.cert.pl/wp-content/uploads/2019/02/en_horizontal_cef_logo-1.png) diff --git a/modules/processing/parsers/malduck/__init__.py b/modules/processing/parsers/malduck/__init__.py deleted file mode 100644 index e69de29bb2d..00000000000 diff --git a/modules/processing/parsers/malduck/test_malduck.py b/modules/processing/parsers/malduck/test_malduck.py deleted file mode 100644 index 7d19b9a4bab..00000000000 --- a/modules/processing/parsers/malduck/test_malduck.py +++ /dev/null @@ -1,15 +0,0 @@ -from malduck.extractor import Extractor - -__author__ = "doomedraven" -__version__ = "1.0.0" - - -class TEST_MALDUCK(Extractor): - """ - TEST Configuration Extractor - """ - - family = "TEST_MALDUCK" - - def TEST_MALDUCK(self): - pass diff --git a/modules/processing/parsers/mwcp/SmokeLoader.py b/modules/processing/parsers/mwcp/SmokeLoader.py deleted file mode 100644 index 72543ddddec..00000000000 --- a/modules/processing/parsers/mwcp/SmokeLoader.py +++ /dev/null @@ -1,165 +0,0 @@ -# Copyright (C) 2018 Kevin O'Reilly (kevin.oreilly@contextis.co.uk) -# This program is free software: you can redistribute it and/or modify -# it under the terms of the GNU General Public License as published by -# the Free Software Foundation, either version 3 of the License, or -# (at your option) any later version. -# -# This program is distributed in the hope that it will be useful, -# but WITHOUT ANY WARRANTY; without even the implied warranty of -# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -# GNU General Public License for more details. -# -# You should have received a copy of the GNU General Public License -# along with this program. If not, see . - -import struct - -import pefile -import yara -from mwcp.parser import Parser - -rule_source = """ -rule SmokeLoader -{ - meta: - author = "kev" - description = "SmokeLoader C2 decryption function" - cape_type = "SmokeLoader Payload" - strings: - $decrypt64_1 = {44 0F B6 CF 48 8B D0 49 03 D9 4C 2B D8 8B 4B 01 41 8A 04 13 41 BA 04 00 00 00 0F C9 32 C1 C1 F9 08 49 FF CA 75 F6 F6 D0 88 02 48 FF C2 49 FF C9 75 DB 49 8B C0 48 8B 5C 24 30 48 83 C4 20 5F C3} - $decrypt64_2 = {40 84 FF 90 90 E8 00 00 00 00 5E 48 83 C6 1C 49 8B F8 A4 80 3E 00 75 FA 80 07 00 48 8B 5C 24 30 48 83 C4 20 5F C3} - $decrypt32_1 = {03 EE 8B D7 2B C7 8B F8 8B 4D 01 8A 04 17 6A 04 0F C9 5B 32 C1 C1 F9 08 4B 75 F8 F6 D0 88 02 42 4E 75 E5 8B 7C 24 14 8B C7 5F 5E 5D 5B 59 59 C3} - $ref64_1 = {40 53 48 83 EC 20 8B 05 ?? ?? ?? ?? 83 F8 ?? 75 27 33 C0 89 05 ?? ?? ?? ?? 84 C9 74 1B BB E8 03 00 00 B9 58 02 00 00 FF 15 ?? ?? ?? ?? 48 FF CB 75 F0 8B 05 ?? ?? ?? ?? 48 63 C8 48 8D 05} - $ref64_2 = {8B 05 ?? ?? ?? ?? 33 C9 83 F8 04 0F 44 C1 48 63 C8 89 05 ?? ?? ?? ?? 48 8D 05 ?? ?? ?? ?? 
48 8B 0C C8 E9} - $ref32_1 = {8A C1 8B 0D 70 6D 00 10 83 F9 02 75 27 33 C9 89 0D 70 6D 00 10 84 C0 74 1B 56 BE E8 03 00 00 68 58 02 00 00 FF 15 38 6E 00 10 4E 75 F2 8B 0D 70 6D 00 10 5E 8B 0C 8D} - condition: - (any of ($decrypt*)) and (any of ($ref*)) -} -""" - - -def yara_scan(raw_data, rule_name): - addresses = {} - yara_rules = yara.compile(source=rule_source) - matches = yara_rules.match(data=raw_data) - for match in matches: - if match.rule == "SmokeLoader": - for item in match.strings: - if item[1] == rule_name: - addresses[item[1]] = item[0] - return addresses - - -def xor_decode(buffer, key): - byte_key = 0xFF - for i in range(0, 4): - byte_key = byte_key ^ (key >> (i * 8) & 0xFF) - return "".join(chr(ord(x) ^ byte_key) for x in buffer) - - -class SmokeLoader(Parser): - - DESCRIPTION = "SmokeLoader configuration parser." - AUTHOR = "kevoreilly" - - def run(self): - filebuf = self.file_object.file_data - - try: - pe = pefile.PE(data=filebuf, fast_load=False) - image_base = pe.OPTIONAL_HEADER.ImageBase - except Exception: - image_base = 0 - - table_ref = yara_scan(filebuf, "$ref64_1") - if table_ref: - table_ref_offset = int(table_ref["$ref64_1"]) - table_delta = struct.unpack("i", filebuf[table_ref_offset + 62 : table_ref_offset + 66])[0] - table_offset = table_ref_offset + table_delta + 66 - - table_loop = True - while table_loop: - c2_offset = 0 - if image_base: - c2_rva = struct.unpack("Q", filebuf[table_offset : table_offset + 8])[0] - if not c2_rva: - table_loop = False - else: - c2_rva -= image_base - if c2_rva and c2_rva < 0x8000: - c2_offset = pe.get_offset_from_rva(c2_rva) - else: - table_loop = False - else: - c2_offset = struct.unpack("I", filebuf[table_offset : table_offset + 4])[0] & 0xFFFF - if c2_offset and c2_offset < 0x8000: - try: - c2_size = struct.unpack("B", filebuf[c2_offset : c2_offset + 1])[0] - c2_key = struct.unpack("I", filebuf[c2_offset + c2_size + 1 : c2_offset + c2_size + 5])[0] - c2_url = xor_decode(filebuf[c2_offset + 1 : c2_offset + c2_size + 1], c2_key).decode("ascii") - if c2_url: - self.reporter.add_metadata("address", c2_url) - except Exception: - table_loop = False - else: - table_loop = False - table_offset += 8 - return - else: - table_ref = yara_scan(filebuf, "$ref64_2") - if table_ref: - table_ref_offset = int(table_ref["$ref64_2"]) - table_delta = struct.unpack("i", filebuf[table_ref_offset + 26 : table_ref_offset + 30])[0] - table_offset = table_ref_offset + table_delta + 30 - - for index in range(0, 2): - if image_base: - c2_rva = struct.unpack("Q", filebuf[table_offset : table_offset + 8])[0] - image_base - c2_offset = pe.get_offset_from_rva(c2_rva) - else: - c2_offset = struct.unpack("I", filebuf[table_offset : table_offset + 4])[0] & 0xFFFF - c2_size = struct.unpack("B", filebuf[c2_offset : c2_offset + 1])[0] - c2_key = struct.unpack("I", filebuf[c2_offset + c2_size + 1 : c2_offset + c2_size + 5])[0] - try: - c2_url = xor_decode(filebuf[c2_offset + 1 : c2_offset + c2_size + 1], c2_key).decode("ascii") - if c2_url: - self.reporter.add_metadata("address", c2_url) - except Exception: - pass - table_offset += 8 - return - else: - table_ref = yara_scan(filebuf, "$ref32_1") - if table_ref: - table_ref_offset = int(table_ref["$ref32_1"]) - table_rva = struct.unpack("i", filebuf[table_ref_offset + 55 : table_ref_offset + 59])[0] - image_base - table_offset = pe.get_offset_from_rva(table_rva) - - table_loop = True - while table_loop: - c2_offset = 0 - if image_base: - c2_rva = struct.unpack("I", filebuf[table_offset : table_offset + 4])[0] - 
if not c2_rva: - table_loop = False - else: - c2_rva -= image_base - if c2_rva and c2_rva < 0x8000: - c2_offset = pe.get_offset_from_rva(c2_rva) - else: - table_loop = False - else: - c2_offset = struct.unpack("I", filebuf[table_offset : table_offset + 4])[0] & 0xFFFF - if c2_offset and c2_offset < 0x8000: - try: - c2_size = struct.unpack("B", filebuf[c2_offset : c2_offset + 1])[0] - c2_key = struct.unpack("I", filebuf[c2_offset + c2_size + 1 : c2_offset + c2_size + 5])[0] - c2_url = xor_decode(filebuf[c2_offset + 1 : c2_offset + c2_size + 1], c2_key).decode("ascii") - if c2_url: - self.reporter.add_metadata("address", c2_url) - except Exception: - table_loop = False - else: - table_loop = False - table_offset += 4 - return diff --git a/modules/processing/parsers/mwcp/__init__.py b/modules/processing/parsers/mwcp/__init__.py deleted file mode 100644 index e69de29bb2d..00000000000 diff --git a/modules/processing/parsers/mwcp/test_mwcp.py b/modules/processing/parsers/mwcp/test_mwcp.py deleted file mode 100644 index 6a64efb6c3c..00000000000 --- a/modules/processing/parsers/mwcp/test_mwcp.py +++ /dev/null @@ -1,9 +0,0 @@ -from mwcp.parser import Parser - - -class MWCP_TEST(Parser): - DESCRIPTION = "Test module to ensure that framework loads properly." - AUTHOR = "doomedraven" - - def run(self): - pass diff --git a/modules/processing/pcapng.py b/modules/processing/pcapng.py index c1868a57acc..f41e288cb97 100644 --- a/modules/processing/pcapng.py +++ b/modules/processing/pcapng.py @@ -80,6 +80,7 @@ def append_file_contents_to_file(self, file_with_contents, append_to_file): dst.write(src.read()) def generate_pcapng(self, sslkeylogfile_path): + # ToDo bail if file is empty cmd = [EDITCAP, "--inject-secrets", "tls," + sslkeylogfile_path, self.pcap_path, self.pcapng_path] log.debug("generating pcapng with command '%s", cmd) subprocess.check_call(cmd, timeout=EDITCAP_TIMEOUT) diff --git a/modules/processing/polarproxy.py b/modules/processing/polarproxy.py new file mode 100644 index 00000000000..26a03f4e4d0 --- /dev/null +++ b/modules/processing/polarproxy.py @@ -0,0 +1,90 @@ +# Copyright (C) 2010-2015 Cuckoo Foundation. +# This file is part of Cuckoo Sandbox - http://www.cuckoosandbox.org +# See the file 'docs/LICENSE' for copying permission. + +# Imports for the batch sort. +# http://stackoverflow.com/questions/10665925/how-to-sort-huge-files-with-python +# http://code.activestate.com/recipes/576755/ + +import logging +import os +import shutil +import subprocess +import sys +import tempfile + + +from lib.cuckoo.common.abstracts import Processing +from lib.cuckoo.common.config import Config +from lib.cuckoo.common.objects import File +from lib.cuckoo.common.path_utils import path_exists + +# required to work webgui +CUCKOO_ROOT = os.path.join(os.path.abspath(os.path.dirname(__file__)), "..", "..") +sys.path.append(CUCKOO_ROOT) + +log = logging.getLogger(__name__) +cfg = Config() +polarproxy_cfg = Config("polarproxy") + + +def run_subprocess(command_args, shell=False): + """Execute the subprocess, wait for completion. + + Return the exitcode (returncode), the stdout, and the stderr. 
+ """ + p = subprocess.Popen( + args=command_args, + shell=shell, + stdout=subprocess.PIPE, + stderr=subprocess.PIPE, + ) + stdout, stderr = p.communicate() + return p.returncode, stdout, stderr + + +class PolarProxyProcessor(Processing): + """Network analysis.""" + + key = "polarproxy" + order = 1 + + def run(self): + if not path_exists(self.pcap_path): + log.debug('The PCAP file does not exist at path "%s"', self.pcap_path) + return {} + + tls_pcap_path = os.path.join(self.analysis_path, "polarproxy", "tls.pcap") + if not path_exists(tls_pcap_path): + log.debug('The TLS PCAP file does not exist at path "%s"', tls_pcap_path) + return {} + + if not path_exists(polarproxy_cfg.cfg.mergecap): + log.debug('The mergecap application does not exist at path "%s"', polarproxy_cfg.cfg.mergecap) + return {} + + temp_dir = tempfile.TemporaryDirectory() + + tmp_pcap = os.path.join(temp_dir.name, "tmp.pcap") + + ret, stdout, stderr = run_subprocess([ + polarproxy_cfg.cfg.mergecap, + # Make snaplen consistent across all packets so wireshark doesn't freak out + "-s", "262144", + # Use pcap format instead of pcapng for Snort + "-F", "pcap", + # Destination file + "-w", tmp_pcap, + # Input files + self.pcap_path, + tls_pcap_path + ]) + + if ret == 0: + log.info("Creating PCAP with decrypted TLS streams") + shutil.move(tmp_pcap, self.pcap_path) + else: + log.warning("Failed to merge pcaps: %s", stderr.decode()) + + results = {"pcap_sha256": File(self.pcap_path).get_sha256()} + return results diff --git a/modules/processing/reversinglabs.py b/modules/processing/reversinglabs.py index 49e663d4924..a55a838f523 100644 --- a/modules/processing/reversinglabs.py +++ b/modules/processing/reversinglabs.py @@ -124,7 +124,7 @@ def run(self): return {} target = self.task["target"] - log.debug(f"Looking up: {target}") + log.debug("Looking up: %s", target) reversing_labs_response = reversing_labs_lookup(target) if "error" in reversing_labs_response: raise CuckooProcessingError(reversing_labs_response["msg"]) diff --git a/modules/processing/strace.py b/modules/processing/strace.py index d1027b79494..20e26d8a38d 100644 --- a/modules/processing/strace.py +++ b/modules/processing/strace.py @@ -229,68 +229,64 @@ def update_file_descriptors(self, fd_calls): Returns an updated process list where file-access related calls have the matching file descriptor at the time of it being opened. 
""" + if not self.options.get("update_file_descriptors"): + return # Default file descriptors - file_descriptors = [ - { - "fd": "0", + fd_lookup = { + "0": [{ "filename": "STDIN", "time_opened": "00:00:00.000000", "time_closed": None, - }, - { - "fd": "1", + }], + "1": [{ "filename": "STDOUT", "time_opened": "00:00:00.000000", "time_closed": None, - }, - { - "fd": "2", + }], + "2": [{ "filename": "STDERR", "time_opened": "00:00:00.000000", "time_closed": None, - }, - ] - + }] + } for fd_call in fd_calls: # Retrieve the relevant informaton from syscalls that open/duplicate/close file descriptors match fd_call["syscall"]: case syscall if syscall in ["open", "creat", "openat", "openat2"]: - file_descriptors.append( + fd_lookup.setdefault(fd_call["fd"], []).append( { - "fd": fd_call["fd"], "filename": fd_call["filename"], "time_opened": fd_call["time"], "time_closed": None, } ) case syscall if syscall in ["dup", "dup2", "dup3"]: - for fd in reversed(file_descriptors): - if fd["time_closed"] is None and fd_call["oldfd"] == fd["fd"]: - file_descriptors.append( + for fd in reversed(fd_lookup.get(fd_call["oldfd"], [])): + if fd["time_closed"] is None: + fd_lookup.setdefault(fd_call["fd"], []).append( { - "fd": fd_call["fd"], "filename": fd["filename"], "time_opened": fd_call["time"], "time_closed": None, } ) case "close": - for fd in reversed(file_descriptors): - if fd["time_closed"] is None and fd_call["fd"] == fd["fd"]: + for fd in reversed(fd_lookup.get(fd_call["fd"], [])): + if fd["time_closed"] is None: fd["time_closed"] = fd_call["time"] for process in self.results: - for call in process["calls"]: - if call["api"] in fd_syscalls: - # append filename to file descriptor according to relevant time that fd is opened - # if any unclosed file descriptor, assume that it is closed after process is finished - for fd in file_descriptors: - if ( - call["arguments"][0]["value"] == fd["fd"] - and fd["time_opened"] < call["timestamp"] - and (fd["time_closed"] is None or call["timestamp"] <= fd["time_closed"]) - ): - call["arguments"][0]["value"] += f' ({fd["filename"]})' + calls = [c for c in process["calls"] if c["api"] in fd_syscalls] + for call in calls: + # append filename to file descriptor according to relevant time that fd is opened + # if any unclosed file descriptor, assume that it is closed after process is finished + for fd in fd_lookup.get(call["arguments"][0]["value"], []): + if ( + fd["time_opened"] < call["timestamp"] + and (fd["time_closed"] is None or call["timestamp"] <= fd["time_closed"]) + ): + call["arguments"][0]["value"] += f' ({fd["filename"]})' + break def update_parent_ids(self, relations): """ diff --git a/modules/processing/url_analysis.py b/modules/processing/url_analysis.py index 230e30a492b..6db0158b54e 100644 --- a/modules/processing/url_analysis.py +++ b/modules/processing/url_analysis.py @@ -35,4 +35,5 @@ def run(self): if vt_details: self.results["url"].setdefault("virustotal", vt_details) + self.results["target"] = {"category": "url"} return target_info diff --git a/modules/reporting/browserext.py b/modules/reporting/browserext.py index 20872821dd8..3486ac3205f 100644 --- a/modules/reporting/browserext.py +++ b/modules/reporting/browserext.py @@ -23,5 +23,5 @@ def run(self, results): with open(browser_log_path, "r") as blp_fd: try: results["browser"]["requests"] = json.load(blp_fd) - except Exception as ex: - log.debug(f"error parsing browser requests json: {ex}") + except Exception as e: + log.debug("error parsing browser requests json: %s", str(e)) diff --git 
a/modules/reporting/callback.py b/modules/reporting/callback.py index 40fcfaedca5..448af1c95b5 100644 --- a/modules/reporting/callback.py +++ b/modules/reporting/callback.py @@ -24,12 +24,11 @@ def run(self, results): Database().set_status(task_id, TASK_REPORTED) for url in urls: try: - for value in (task_id, str(task_id)): - res = requests.post(url, data=json.dumps({"task_id": value}), timeout=20) - if res and res.ok: - log.debug("reported id: %d", task_id) - else: - log.error("failed to report %d", task_id) + res = requests.post(url, headers={"Content-Type": "application/json"}, data=json.dumps({"task_id": task_id}), timeout=20) + if res and res.ok: + log.debug("reported id: %d", task_id) + else: + log.error("failed to report %d", task_id) except requests.exceptions.ConnectTimeout: log.error("Timeout when calling to callback: %s", url) except Exception as e: diff --git a/modules/reporting/flare_capa_summary.py b/modules/reporting/flare_capa_summary.py deleted file mode 100644 index 74bd56c82b2..00000000000 --- a/modules/reporting/flare_capa_summary.py +++ /dev/null @@ -1,31 +0,0 @@ -# Copyright (C) 2019-2024 DoomedRaven -# This file is part of Cuckoo Sandbox - http://www.cuckoosandbox.org -# See the file 'docs/LICENSE' for copying permission. - -import logging - -from lib.cuckoo.common.abstracts import Report -from lib.cuckoo.common.integrations.capa import HAVE_FLARE_CAPA, flare_capa_details - -log = logging.getLogger(__name__) - - -def generate_cape_analysis_summary(results): - if not results.get("target"): - return {} - try: - return flare_capa_details(results["target"]["file"]["path"], "static", on_demand=True, backend="cape", results=results) - except Exception as e: - log.warning("Can't generate FLARE CAPA for %s: %s", results["target"]["file"]["path"], e) - - return {} - - -class CAPASummary(Report): - """Generate CAPE analysis summary by using FLARE CAPA""" - - def run(self, results): - if HAVE_FLARE_CAPA and self.options.enabled and not self.options.on_demand: - report = generate_cape_analysis_summary(results) - if report: - results["capa_summary"] = report diff --git a/modules/reporting/jsondump.py b/modules/reporting/jsondump.py index 665500b619c..50b2c262e45 100644 --- a/modules/reporting/jsondump.py +++ b/modules/reporting/jsondump.py @@ -4,6 +4,7 @@ import os +from lib.cuckoo.common.utils import create_zip from lib.cuckoo.common.abstracts import Report from lib.cuckoo.common.exceptions import CuckooReportError from lib.cuckoo.common.path_utils import path_write_file @@ -17,7 +18,6 @@ HAVE_ORJSON = False - class JsonDump(Report): """Saves analysis results in JSON format.""" @@ -48,5 +48,13 @@ def run(self, results): else: with open(path, "w") as report: json.dump(results, report, sort_keys=False, indent=int(indent), ensure_ascii=False) + + # useful if you frequently fetch zipped reports to not compress in memory all the time + if self.options.get("store_compressed") and os.path.exists(path): + zip_path = path + ".zip" + zipped_io = create_zip(path) + with open(zip_path, "wb") as f: + f.write(zipped_io.getvalue()) + except (UnicodeError, TypeError, IOError) as e: raise CuckooReportError(f"Failed to generate JSON report: {e}") diff --git a/modules/reporting/litereport.py b/modules/reporting/litereport.py index 63a6a784a2d..19f843c305e 100644 --- a/modules/reporting/litereport.py +++ b/modules/reporting/litereport.py @@ -4,6 +4,8 @@ import os +import chardet + from lib.cuckoo.common.abstracts import Report from lib.cuckoo.common.exceptions import CuckooReportError from 
lib.cuckoo.common.path_utils import path_write_file @@ -23,7 +25,11 @@ class LiteReport(Report): def default(self, obj): if isinstance(obj, bytes): - return obj.decode() + encoding = chardet.detect(obj)["encoding"] + if encoding: + return obj.decode(encoding, errors="replace") + else: + return obj.decode("utf-8", errors="replace") raise TypeError def run(self, results): diff --git a/modules/reporting/maec5.py b/modules/reporting/maec5.py index bb237e251c7..59958b076aa 100644 --- a/modules/reporting/maec5.py +++ b/modules/reporting/maec5.py @@ -284,7 +284,6 @@ def add_dropped_files(self): # Grab list of all dropped files- remember # package['observable_objects'] is a dict where the key is object-ID for f in self.results["dropped"]: - # Create a new Malware Instance for each dropped file malwareInstance = self.create_malware_instance(f) @@ -432,7 +431,7 @@ def create_network_obj(self, value, obj): elif re.match("^([0-9a-fA-F][0-9a-fA-F]:){5}([0-9a-fA-F][0-9a-fA-F])$", value): network_obj["type"] = "mac-addr" # Test for an IPv4 address - elif re.match("^(25[0-5]|2[0-4][0-9]|1[0-9][0-9]|[0-9]{1,2})(\.(25[0-5]|2[0-4][0-9]|1[0-9][0-9]|[0-9]{1,2})){3}$", value): + elif re.match(r"^(25[0-5]|2[0-4][0-9]|1[0-9][0-9]|[0-9]{1,2})(\.(25[0-5]|2[0-4][0-9]|1[0-9][0-9]|[0-9]{1,2})){3}$", value): network_obj["type"] = "ipv4-addr" obj["protocols"] = ["ipv4", "tcp"] else: diff --git a/modules/reporting/mongodb.py b/modules/reporting/mongodb.py index dc02f3ba5ea..dfe39c1a299 100644 --- a/modules/reporting/mongodb.py +++ b/modules/reporting/mongodb.py @@ -109,6 +109,13 @@ def run(self, results): # reporting modules. report = get_json_document(results, self.analysis_path) + mongo_delete_data(int(report["info"]["id"])) + log.debug("Deleted previous MongoDB data for Task %s", report["info"]["id"]) + + # trick for distributed api + if results.get("info", {}).get("options", {}).get("main_task_id", ""): + report["info"]["id"] = int(results["info"]["options"]["main_task_id"]) + if "network" not in report: report["network"] = {} @@ -117,13 +124,6 @@ def run(self, results): report["behavior"] = dict(report["behavior"]) report["behavior"]["processes"] = new_processes - # trick for distributed api - if results.get("info", {}).get("options", {}).get("main_task_id", ""): - report["info"]["id"] = int(results["info"]["options"]["main_task_id"]) - - mongo_delete_data(int(report["info"]["id"])) - log.debug("Deleted previous MongoDB data for Task %s", report["info"]["id"]) - ensure_valid_utf8(report) gc.collect() @@ -131,9 +131,13 @@ def run(self, results): try: mongo_insert_one("analysis", report) except OperationFailure as e: - # ToDo rewrite how children are stored - if str(e).startswith("BSONObj exceeds maximum nested object"): - log.debug("Deleting behavior process tree children from results.") + # Check for error codes indicating the BSON object was too large + # (10334 BSONObjectTooLarge) or the maximum nested object depth was + # exceeded (15 Overflow). 
+ if e.code in (10334, 15): + log.error("Got MongoDB OperationFailure, code %d", e.code) + # ToDo rewrite how children are stored + log.warning("Deleting behavior process tree children from results.") del report["behavior"]["processtree"][0]["children"] try: mongo_insert_one("analysis", report) @@ -141,6 +145,8 @@ def run(self, results): log.error("Deleting behavior process tree parent from results: %s", str(e)) del report["behavior"]["processtree"][0] mongo_insert_one("analysis", report) + else: + raise CuckooReportError("Failed inserting report in Mongo") from e except InvalidDocument as e: if str(e).startswith("cannot encode object") or "must not contain" in str(e): self.loop_saver(report) @@ -164,12 +170,12 @@ def run(self, results): for j, parent_dict in enumerate(report[parent_key]): child_key, csize = self.debug_dict_size(parent_dict)[0] if csize > size_filter: - log.warn("results['%s']['%s'] deleted due to size: %s", parent_key, child_key, csize) + log.warning("results['%s']['%s'] deleted due to size: %s", parent_key, child_key, csize) del report[parent_key][j][child_key] else: child_key, csize = self.debug_dict_size(report[parent_key])[0] if csize > size_filter: - log.warn("results['%s']['%s'] deleted due to size: %s", parent_key, child_key, csize) + log.warning("results['%s']['%s'] deleted due to size: %s", parent_key, child_key, csize) del report[parent_key][child_key] try: mongo_insert_one("analysis", report) diff --git a/modules/reporting/report_doc.py b/modules/reporting/report_doc.py index 83b8c6019bc..162b965a108 100644 --- a/modules/reporting/report_doc.py +++ b/modules/reporting/report_doc.py @@ -57,7 +57,13 @@ def get_json_document(results, analysis_path): # Create a copy of the dictionary. This is done in order to not modify # the original dictionary and possibly # compromise the following reporting modules. - report = copy.deepcopy(results) + try: + report = copy.deepcopy(results) + except AttributeError: + if "memory" in results: + del results["memory"] + log.error("Deleting Volatility results") + report = copy.deepcopy(results) if "network" not in report: report["network"] = {} @@ -119,10 +125,9 @@ def insert_calls(report, elastic_db=None, mongodb=False): chunk_id = None # If the chunk size is CHUNK_CALL_SIZE or if the loop is completed then store the chunk in DB. if len(chunk) == CHUNK_CALL_SIZE: - to_insert = {"pid": process["process_id"], "calls": chunk} + to_insert = {"pid": process["process_id"], "calls": chunk , "task_id": report["info"]["id"]} with suppress(Exception): chunk_id = mongo_insert_one("calls", to_insert).inserted_id - if chunk_id: chunks_ids.append(chunk_id) # Reset the chunk. @@ -133,10 +138,9 @@ def insert_calls(report, elastic_db=None, mongodb=False): # Store leftovers. if chunk: chunk_id = None - to_insert = {"pid": process["process_id"], "calls": chunk} + to_insert = {"pid": process["process_id"], "calls": chunk, "task_id": report["info"]["id"]} with suppress(Exception): chunk_id = mongo_insert_one("calls", to_insert).inserted_id - if chunk_id: chunks_ids.append(chunk_id) @@ -157,5 +161,4 @@ def gendata(p_call_chunks, process_id): # Add list of chunks. 
new_process["calls"] = chunks_ids new_processes.append(new_process) - return new_processes diff --git a/modules/reporting/tmpfsclean.py b/modules/reporting/tmpfsclean.py index 92e5b25e5f0..c05b15b56ff 100644 --- a/modules/reporting/tmpfsclean.py +++ b/modules/reporting/tmpfsclean.py @@ -10,6 +10,7 @@ class TMPFSCLEAN(Report): "Remove/save memdump" + order = 9998 def run(self, results): @@ -25,6 +26,8 @@ def run(self, results): log.debug("Deleting memdump: %s", src) if path_exists(src): path_delete(src) + if path_exists(f"{src}.strings"): + path_delete(f"{src}.strings") else: dest = get_memdump_path(results["info"]["id"], analysis_folder=True) log.debug("Storing memdump: %s", dest) diff --git a/modules/signatures/CAPE.py b/modules/signatures/CAPE.py index e03e40b0f0c..45b644d2a9c 100644 --- a/modules/signatures/CAPE.py +++ b/modules/signatures/CAPE.py @@ -140,7 +140,6 @@ def __init__(self, *args, **kwargs): filter_apinames = set(["NtAllocateVirtualMemory", "NtProtectVirtualMemory", "VirtualProtectEx"]) def on_call(self, call, process): - if process["process_name"] in ("WINWORD.EXE", "EXCEL.EXE", "POWERPNT.EXE"): return False if call["api"] == "NtAllocateVirtualMemory": @@ -571,7 +570,6 @@ def __init__(self, *args, **kwargs): self.transacted_hollowing = False def on_call(self, call, process): - if call["api"] == "RtlSetCurrentTransaction": self.transaction_set = True diff --git a/poetry.lock b/poetry.lock index 26063d23dd1..eba2a495f62 100644 --- a/poetry.lock +++ b/poetry.lock @@ -1,4 +1,4 @@ -# This file is automatically @generated by Poetry 1.8.4 and should not be changed by hand. +# This file is automatically @generated by Poetry 2.0.1 and should not be changed by hand. [[package]] name = "alembic" @@ -6,6 +6,7 @@ version = "1.9.4" description = "A database migration tool for SQLAlchemy." 
optional = false python-versions = ">=3.7" +groups = ["main"] files = [ {file = "alembic-1.9.4-py3-none-any.whl", hash = "sha256:6f1c2207369bf4f49f952057a33bb017fbe5c148c2a773b46906b806ea6e825f"}, {file = "alembic-1.9.4.tar.gz", hash = "sha256:4d3bd32ecdbb7bbfb48a9fe9e6d6fd6a831a1b59d03e26e292210237373e7db5"}, @@ -24,6 +25,7 @@ version = "0.7.0" description = "Reusable constraint types to use with typing.Annotated" optional = false python-versions = ">=3.8" +groups = ["main"] files = [ {file = "annotated_types-0.7.0-py3-none-any.whl", hash = "sha256:1f02e8b43a8fbbc3f3e0d4f0f4bfc8131bcb4eebe8849b8e5c773f3a1c582a53"}, {file = "annotated_types-0.7.0.tar.gz", hash = "sha256:aff07c09a53a08bc8cfccb9c85b05f1aa9a2a6f23728d790723543408344ce89"}, @@ -31,24 +33,25 @@ files = [ [[package]] name = "anyio" -version = "4.6.0" +version = "4.8.0" description = "High level compatibility layer for multiple asynchronous event loop implementations" optional = false python-versions = ">=3.9" +groups = ["main"] files = [ - {file = "anyio-4.6.0-py3-none-any.whl", hash = "sha256:c7d2e9d63e31599eeb636c8c5c03a7e108d73b345f064f1c19fdc87b79036a9a"}, - {file = "anyio-4.6.0.tar.gz", hash = "sha256:137b4559cbb034c477165047febb6ff83f390fc3b20bf181c1fc0a728cb8beeb"}, + {file = "anyio-4.8.0-py3-none-any.whl", hash = "sha256:b5011f270ab5eb0abf13385f851315585cc37ef330dd88e27ec3d34d651fd47a"}, + {file = "anyio-4.8.0.tar.gz", hash = "sha256:1d9fe889df5212298c0c0723fa20479d1b94883a2df44bd3897aa91083316f7a"}, ] [package.dependencies] exceptiongroup = {version = ">=1.0.2", markers = "python_version < \"3.11\""} idna = ">=2.8" sniffio = ">=1.1" -typing-extensions = {version = ">=4.1", markers = "python_version < \"3.11\""} +typing_extensions = {version = ">=4.5", markers = "python_version < \"3.13\""} [package.extras] -doc = ["Sphinx (>=7.4,<8.0)", "packaging", "sphinx-autodoc-typehints (>=1.2.0)", "sphinx-rtd-theme"] -test = ["anyio[trio]", "coverage[toml] (>=7)", "exceptiongroup (>=1.2.0)", "hypothesis (>=4.0)", "psutil (>=5.9)", "pytest (>=7.0)", "pytest-mock (>=3.6.1)", "trustme", "uvloop (>=0.21.0b1)"] +doc = ["Sphinx (>=7.4,<8.0)", "packaging", "sphinx-autodoc-typehints (>=1.2.0)", "sphinx_rtd_theme"] +test = ["anyio[trio]", "coverage[toml] (>=7)", "exceptiongroup (>=1.2.0)", "hypothesis (>=4.0)", "psutil (>=5.9)", "pytest (>=7.0)", "trustme", "truststore (>=0.9.1)", "uvloop (>=0.21)"] trio = ["trio (>=0.26.1)"] [[package]] @@ -57,6 +60,7 @@ version = "3.8.1" description = "ASGI specs, helper code, and adapters" optional = false python-versions = ">=3.8" +groups = ["main"] files = [ {file = "asgiref-3.8.1-py3-none-any.whl", hash = "sha256:3e1e3ecc849832fe52ccf2cb6686b7a55f82bb1d6aee72a58826471390335e47"}, {file = "asgiref-3.8.1.tar.gz", hash = "sha256:c343bd80a0bec947a9860adb4c432ffa7db769836c64238fc34bdc3fec84d590"}, @@ -70,19 +74,20 @@ tests = ["mypy (>=0.800)", "pytest", "pytest-asyncio"] [[package]] name = "attrs" -version = "24.2.0" +version = "25.1.0" description = "Classes Without Boilerplate" optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" +groups = ["main", "dev"] files = [ - {file = "attrs-24.2.0-py3-none-any.whl", hash = "sha256:81921eb96de3191c8258c199618104dd27ac608d9366f5e35d011eae1867ede2"}, - {file = "attrs-24.2.0.tar.gz", hash = "sha256:5cfb1b9148b5b086569baec03f20d7b6bf3bcacc9a42bebf87ffaaca362f6346"}, + {file = "attrs-25.1.0-py3-none-any.whl", hash = "sha256:c75a69e28a550a7e93789579c22aa26b0f5b83b75dc4e08fe092980051e1090a"}, + {file = "attrs-25.1.0.tar.gz", hash = 
"sha256:1c97078a80c814273a76b2a298a932eb681c87415c11dee0a6921de7f1b02c3e"}, ] [package.extras] benchmark = ["cloudpickle", "hypothesis", "mypy (>=1.11.1)", "pympler", "pytest (>=4.3.0)", "pytest-codspeed", "pytest-mypy-plugins", "pytest-xdist[psutil]"] cov = ["cloudpickle", "coverage[toml] (>=5.3)", "hypothesis", "mypy (>=1.11.1)", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins", "pytest-xdist[psutil]"] -dev = ["cloudpickle", "hypothesis", "mypy (>=1.11.1)", "pre-commit", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins", "pytest-xdist[psutil]"] +dev = ["cloudpickle", "hypothesis", "mypy (>=1.11.1)", "pre-commit-uv", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins", "pytest-xdist[psutil]"] docs = ["cogapp", "furo", "myst-parser", "sphinx", "sphinx-notfound-page", "sphinxcontrib-towncrier", "towncrier (<24.7)"] tests = ["cloudpickle", "hypothesis", "mypy (>=1.11.1)", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins", "pytest-xdist[psutil]"] tests-mypy = ["mypy (>=1.11.1)", "pytest-mypy-plugins"] @@ -93,6 +98,7 @@ version = "24.4.2" description = "WebSocket client & server library, WAMP real-time framework" optional = false python-versions = ">=3.9" +groups = ["main"] files = [ {file = "autobahn-24.4.2-py2.py3-none-any.whl", hash = "sha256:c56a2abe7ac78abbfb778c02892d673a4de58fd004d088cd7ab297db25918e81"}, {file = "autobahn-24.4.2.tar.gz", hash = "sha256:a2d71ef1b0cf780b6d11f8b205fd2c7749765e65795f2ea7d823796642ee92c9"}, @@ -122,6 +128,7 @@ version = "24.8.1" description = "Self-service finite-state machines for the programmer on the go." optional = false python-versions = ">=3.8" +groups = ["main"] files = [ {file = "Automat-24.8.1-py3-none-any.whl", hash = "sha256:bf029a7bc3da1e2c24da2343e7598affaa9f10bf0ab63ff808566ce90551e02a"}, {file = "automat-24.8.1.tar.gz", hash = "sha256:b34227cf63f6325b8ad2399ede780675083e439b20c323d376373d8ee6306d88"}, @@ -132,38 +139,37 @@ visualize = ["Twisted (>=16.1.1)", "graphviz (>0.5.1)"] [[package]] name = "bcrypt" -version = "4.2.0" +version = "4.2.1" description = "Modern password hashing for your software and your servers" optional = false python-versions = ">=3.7" -files = [ - {file = "bcrypt-4.2.0-cp37-abi3-macosx_10_12_universal2.whl", hash = "sha256:096a15d26ed6ce37a14c1ac1e48119660f21b24cba457f160a4b830f3fe6b5cb"}, - {file = "bcrypt-4.2.0-cp37-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c02d944ca89d9b1922ceb8a46460dd17df1ba37ab66feac4870f6862a1533c00"}, - {file = "bcrypt-4.2.0-cp37-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1d84cf6d877918620b687b8fd1bf7781d11e8a0998f576c7aa939776b512b98d"}, - {file = "bcrypt-4.2.0-cp37-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:1bb429fedbe0249465cdd85a58e8376f31bb315e484f16e68ca4c786dcc04291"}, - {file = "bcrypt-4.2.0-cp37-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:655ea221910bcac76ea08aaa76df427ef8625f92e55a8ee44fbf7753dbabb328"}, - {file = "bcrypt-4.2.0-cp37-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:1ee38e858bf5d0287c39b7a1fc59eec64bbf880c7d504d3a06a96c16e14058e7"}, - {file = "bcrypt-4.2.0-cp37-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:0da52759f7f30e83f1e30a888d9163a81353ef224d82dc58eb5bb52efcabc399"}, - {file = "bcrypt-4.2.0-cp37-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:3698393a1b1f1fd5714524193849d0c6d524d33523acca37cd28f02899285060"}, - {file = "bcrypt-4.2.0-cp37-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:762a2c5fb35f89606a9fde5e51392dad0cd1ab7ae64149a8b935fe8d79dd5ed7"}, - {file = 
"bcrypt-4.2.0-cp37-abi3-win32.whl", hash = "sha256:5a1e8aa9b28ae28020a3ac4b053117fb51c57a010b9f969603ed885f23841458"}, - {file = "bcrypt-4.2.0-cp37-abi3-win_amd64.whl", hash = "sha256:8f6ede91359e5df88d1f5c1ef47428a4420136f3ce97763e31b86dd8280fbdf5"}, - {file = "bcrypt-4.2.0-cp39-abi3-macosx_10_12_universal2.whl", hash = "sha256:c52aac18ea1f4a4f65963ea4f9530c306b56ccd0c6f8c8da0c06976e34a6e841"}, - {file = "bcrypt-4.2.0-cp39-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3bbbfb2734f0e4f37c5136130405332640a1e46e6b23e000eeff2ba8d005da68"}, - {file = "bcrypt-4.2.0-cp39-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3413bd60460f76097ee2e0a493ccebe4a7601918219c02f503984f0a7ee0aebe"}, - {file = "bcrypt-4.2.0-cp39-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:8d7bb9c42801035e61c109c345a28ed7e84426ae4865511eb82e913df18f58c2"}, - {file = "bcrypt-4.2.0-cp39-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:3d3a6d28cb2305b43feac298774b997e372e56c7c7afd90a12b3dc49b189151c"}, - {file = "bcrypt-4.2.0-cp39-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:9c1c4ad86351339c5f320ca372dfba6cb6beb25e8efc659bedd918d921956bae"}, - {file = "bcrypt-4.2.0-cp39-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:27fe0f57bb5573104b5a6de5e4153c60814c711b29364c10a75a54bb6d7ff48d"}, - {file = "bcrypt-4.2.0-cp39-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:8ac68872c82f1add6a20bd489870c71b00ebacd2e9134a8aa3f98a0052ab4b0e"}, - {file = "bcrypt-4.2.0-cp39-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:cb2a8ec2bc07d3553ccebf0746bbf3d19426d1c6d1adbd4fa48925f66af7b9e8"}, - {file = "bcrypt-4.2.0-cp39-abi3-win32.whl", hash = "sha256:77800b7147c9dc905db1cba26abe31e504d8247ac73580b4aa179f98e6608f34"}, - {file = "bcrypt-4.2.0-cp39-abi3-win_amd64.whl", hash = "sha256:61ed14326ee023917ecd093ee6ef422a72f3aec6f07e21ea5f10622b735538a9"}, - {file = "bcrypt-4.2.0-pp310-pypy310_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:39e1d30c7233cfc54f5c3f2c825156fe044efdd3e0b9d309512cc514a263ec2a"}, - {file = "bcrypt-4.2.0-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:f4f4acf526fcd1c34e7ce851147deedd4e26e6402369304220250598b26448db"}, - {file = "bcrypt-4.2.0-pp39-pypy39_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:1ff39b78a52cf03fdf902635e4c81e544714861ba3f0efc56558979dd4f09170"}, - {file = "bcrypt-4.2.0-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:373db9abe198e8e2c70d12b479464e0d5092cc122b20ec504097b5f2297ed184"}, - {file = "bcrypt-4.2.0.tar.gz", hash = "sha256:cf69eaf5185fd58f268f805b505ce31f9b9fc2d64b376642164e9244540c1221"}, +groups = ["main"] +files = [ + {file = "bcrypt-4.2.1-cp37-abi3-macosx_10_12_universal2.whl", hash = "sha256:1340411a0894b7d3ef562fb233e4b6ed58add185228650942bdc885362f32c17"}, + {file = "bcrypt-4.2.1-cp37-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b1ee315739bc8387aa36ff127afc99120ee452924e0df517a8f3e4c0187a0f5f"}, + {file = "bcrypt-4.2.1-cp37-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8dbd0747208912b1e4ce730c6725cb56c07ac734b3629b60d4398f082ea718ad"}, + {file = "bcrypt-4.2.1-cp37-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:aaa2e285be097050dba798d537b6efd9b698aa88eef52ec98d23dcd6d7cf6fea"}, + {file = "bcrypt-4.2.1-cp37-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:76d3e352b32f4eeb34703370e370997065d28a561e4a18afe4fef07249cb4396"}, + {file = "bcrypt-4.2.1-cp37-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:b7703ede632dc945ed1172d6f24e9f30f27b1b1a067f32f68bf169c5f08d0425"}, + {file = 
"bcrypt-4.2.1-cp37-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:89df2aea2c43be1e1fa066df5f86c8ce822ab70a30e4c210968669565c0f4685"}, + {file = "bcrypt-4.2.1-cp37-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:04e56e3fe8308a88b77e0afd20bec516f74aecf391cdd6e374f15cbed32783d6"}, + {file = "bcrypt-4.2.1-cp37-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:cfdf3d7530c790432046c40cda41dfee8c83e29482e6a604f8930b9930e94139"}, + {file = "bcrypt-4.2.1-cp37-abi3-win32.whl", hash = "sha256:adadd36274510a01f33e6dc08f5824b97c9580583bd4487c564fc4617b328005"}, + {file = "bcrypt-4.2.1-cp37-abi3-win_amd64.whl", hash = "sha256:8c458cd103e6c5d1d85cf600e546a639f234964d0228909d8f8dbeebff82d526"}, + {file = "bcrypt-4.2.1-cp39-abi3-macosx_10_12_universal2.whl", hash = "sha256:8ad2f4528cbf0febe80e5a3a57d7a74e6635e41af1ea5675282a33d769fba413"}, + {file = "bcrypt-4.2.1-cp39-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:909faa1027900f2252a9ca5dfebd25fc0ef1417943824783d1c8418dd7d6df4a"}, + {file = "bcrypt-4.2.1-cp39-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cde78d385d5e93ece5479a0a87f73cd6fa26b171c786a884f955e165032b262c"}, + {file = "bcrypt-4.2.1-cp39-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:533e7f3bcf2f07caee7ad98124fab7499cb3333ba2274f7a36cf1daee7409d99"}, + {file = "bcrypt-4.2.1-cp39-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:687cf30e6681eeda39548a93ce9bfbb300e48b4d445a43db4298d2474d2a1e54"}, + {file = "bcrypt-4.2.1-cp39-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:041fa0155c9004eb98a232d54da05c0b41d4b8e66b6fc3cb71b4b3f6144ba837"}, + {file = "bcrypt-4.2.1-cp39-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:f85b1ffa09240c89aa2e1ae9f3b1c687104f7b2b9d2098da4e923f1b7082d331"}, + {file = "bcrypt-4.2.1-cp39-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:c6f5fa3775966cca251848d4d5393ab016b3afed251163c1436fefdec3b02c84"}, + {file = "bcrypt-4.2.1-cp39-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:807261df60a8b1ccd13e6599c779014a362ae4e795f5c59747f60208daddd96d"}, + {file = "bcrypt-4.2.1-cp39-abi3-win32.whl", hash = "sha256:b588af02b89d9fad33e5f98f7838bf590d6d692df7153647724a7f20c186f6bf"}, + {file = "bcrypt-4.2.1-cp39-abi3-win_amd64.whl", hash = "sha256:e84e0e6f8e40a242b11bce56c313edc2be121cec3e0ec2d76fce01f6af33c07c"}, + {file = "bcrypt-4.2.1-pp310-pypy310_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:76132c176a6d9953cdc83c296aeaed65e1a708485fd55abf163e0d9f8f16ce0e"}, + {file = "bcrypt-4.2.1-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:e158009a54c4c8bc91d5e0da80920d048f918c61a581f0a63e4e93bb556d362f"}, + {file = "bcrypt-4.2.1.tar.gz", hash = "sha256:6765386e3ab87f569b276988742039baab087b2cdb01e809d74e74503c2faafe"}, ] [package.extras] @@ -176,6 +182,7 @@ version = "4.12.3" description = "Screen-scraping library" optional = false python-versions = ">=3.6.0" +groups = ["main"] files = [ {file = "beautifulsoup4-4.12.3-py3-none-any.whl", hash = "sha256:b80878c9f40111313e55da8ba20bdba06d8fa3969fc68304167741bbf9e082ed"}, {file = "beautifulsoup4-4.12.3.tar.gz", hash = "sha256:74e3d1928edc070d21748185c46e3fb33490f22f52a3addee9aee0f4f7781051"}, @@ -193,33 +200,34 @@ lxml = ["lxml"] [[package]] name = "black" -version = "24.8.0" +version = "24.10.0" description = "The uncompromising code formatter." 
optional = false -python-versions = ">=3.8" -files = [ - {file = "black-24.8.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:09cdeb74d494ec023ded657f7092ba518e8cf78fa8386155e4a03fdcc44679e6"}, - {file = "black-24.8.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:81c6742da39f33b08e791da38410f32e27d632260e599df7245cccee2064afeb"}, - {file = "black-24.8.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:707a1ca89221bc8a1a64fb5e15ef39cd755633daa672a9db7498d1c19de66a42"}, - {file = "black-24.8.0-cp310-cp310-win_amd64.whl", hash = "sha256:d6417535d99c37cee4091a2f24eb2b6d5ec42b144d50f1f2e436d9fe1916fe1a"}, - {file = "black-24.8.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:fb6e2c0b86bbd43dee042e48059c9ad7830abd5c94b0bc518c0eeec57c3eddc1"}, - {file = "black-24.8.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:837fd281f1908d0076844bc2b801ad2d369c78c45cf800cad7b61686051041af"}, - {file = "black-24.8.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:62e8730977f0b77998029da7971fa896ceefa2c4c4933fcd593fa599ecbf97a4"}, - {file = "black-24.8.0-cp311-cp311-win_amd64.whl", hash = "sha256:72901b4913cbac8972ad911dc4098d5753704d1f3c56e44ae8dce99eecb0e3af"}, - {file = "black-24.8.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:7c046c1d1eeb7aea9335da62472481d3bbf3fd986e093cffd35f4385c94ae368"}, - {file = "black-24.8.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:649f6d84ccbae73ab767e206772cc2d7a393a001070a4c814a546afd0d423aed"}, - {file = "black-24.8.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:2b59b250fdba5f9a9cd9d0ece6e6d993d91ce877d121d161e4698af3eb9c1018"}, - {file = "black-24.8.0-cp312-cp312-win_amd64.whl", hash = "sha256:6e55d30d44bed36593c3163b9bc63bf58b3b30e4611e4d88a0c3c239930ed5b2"}, - {file = "black-24.8.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:505289f17ceda596658ae81b61ebbe2d9b25aa78067035184ed0a9d855d18afd"}, - {file = "black-24.8.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:b19c9ad992c7883ad84c9b22aaa73562a16b819c1d8db7a1a1a49fb7ec13c7d2"}, - {file = "black-24.8.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:1f13f7f386f86f8121d76599114bb8c17b69d962137fc70efe56137727c7047e"}, - {file = "black-24.8.0-cp38-cp38-win_amd64.whl", hash = "sha256:f490dbd59680d809ca31efdae20e634f3fae27fba3ce0ba3208333b713bc3920"}, - {file = "black-24.8.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:eab4dd44ce80dea27dc69db40dab62d4ca96112f87996bca68cd75639aeb2e4c"}, - {file = "black-24.8.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:3c4285573d4897a7610054af5a890bde7c65cb466040c5f0c8b732812d7f0e5e"}, - {file = "black-24.8.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:9e84e33b37be070ba135176c123ae52a51f82306def9f7d063ee302ecab2cf47"}, - {file = "black-24.8.0-cp39-cp39-win_amd64.whl", hash = "sha256:73bbf84ed136e45d451a260c6b73ed674652f90a2b3211d6a35e78054563a9bb"}, - {file = "black-24.8.0-py3-none-any.whl", hash = "sha256:972085c618ee94f402da1af548a4f218c754ea7e5dc70acb168bfaca4c2542ed"}, - {file = "black-24.8.0.tar.gz", hash = "sha256:2500945420b6784c38b9ee885af039f5e7471ef284ab03fa35ecdde4688cd83f"}, +python-versions = ">=3.9" +groups = ["dev"] +files = [ + {file = "black-24.10.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:e6668650ea4b685440857138e5fe40cde4d652633b1bdffc62933d0db4ed9812"}, + {file = 
"black-24.10.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:1c536fcf674217e87b8cc3657b81809d3c085d7bf3ef262ead700da345bfa6ea"}, + {file = "black-24.10.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:649fff99a20bd06c6f727d2a27f401331dc0cc861fb69cde910fe95b01b5928f"}, + {file = "black-24.10.0-cp310-cp310-win_amd64.whl", hash = "sha256:fe4d6476887de70546212c99ac9bd803d90b42fc4767f058a0baa895013fbb3e"}, + {file = "black-24.10.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:5a2221696a8224e335c28816a9d331a6c2ae15a2ee34ec857dcf3e45dbfa99ad"}, + {file = "black-24.10.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:f9da3333530dbcecc1be13e69c250ed8dfa67f43c4005fb537bb426e19200d50"}, + {file = "black-24.10.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:4007b1393d902b48b36958a216c20c4482f601569d19ed1df294a496eb366392"}, + {file = "black-24.10.0-cp311-cp311-win_amd64.whl", hash = "sha256:394d4ddc64782e51153eadcaaca95144ac4c35e27ef9b0a42e121ae7e57a9175"}, + {file = "black-24.10.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:b5e39e0fae001df40f95bd8cc36b9165c5e2ea88900167bddf258bacef9bbdc3"}, + {file = "black-24.10.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:d37d422772111794b26757c5b55a3eade028aa3fde43121ab7b673d050949d65"}, + {file = "black-24.10.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:14b3502784f09ce2443830e3133dacf2c0110d45191ed470ecb04d0f5f6fcb0f"}, + {file = "black-24.10.0-cp312-cp312-win_amd64.whl", hash = "sha256:30d2c30dc5139211dda799758559d1b049f7f14c580c409d6ad925b74a4208a8"}, + {file = "black-24.10.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:1cbacacb19e922a1d75ef2b6ccaefcd6e93a2c05ede32f06a21386a04cedb981"}, + {file = "black-24.10.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:1f93102e0c5bb3907451063e08b9876dbeac810e7da5a8bfb7aeb5a9ef89066b"}, + {file = "black-24.10.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:ddacb691cdcdf77b96f549cf9591701d8db36b2f19519373d60d31746068dbf2"}, + {file = "black-24.10.0-cp313-cp313-win_amd64.whl", hash = "sha256:680359d932801c76d2e9c9068d05c6b107f2584b2a5b88831c83962eb9984c1b"}, + {file = "black-24.10.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:17374989640fbca88b6a448129cd1745c5eb8d9547b464f281b251dd00155ccd"}, + {file = "black-24.10.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:63f626344343083322233f175aaf372d326de8436f5928c042639a4afbbf1d3f"}, + {file = "black-24.10.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:ccfa1d0cb6200857f1923b602f978386a3a2758a65b52e0950299ea014be6800"}, + {file = "black-24.10.0-cp39-cp39-win_amd64.whl", hash = "sha256:2cd9c95431d94adc56600710f8813ee27eea544dd118d45896bb734e9d7a0dc7"}, + {file = "black-24.10.0-py3-none-any.whl", hash = "sha256:3bb2b7a1f7b685f85b11fed1ef10f8a9148bceb49853e47a294a3dd963c1dd7d"}, + {file = "black-24.10.0.tar.gz", hash = "sha256:846ea64c97afe3bc677b761787993be4991810ecc7a4a937816dd6bddedc4875"}, ] [package.dependencies] @@ -233,7 +241,7 @@ typing-extensions = {version = ">=4.0.1", markers = "python_version < \"3.11\""} [package.extras] colorama = ["colorama (>=0.4.3)"] -d = ["aiohttp (>=3.7.4)", "aiohttp (>=3.7.4,!=3.9.0)"] +d = ["aiohttp (>=3.10)"] jupyter = ["ipython (>=7.8.0)", "tokenize-rt (>=3.2.0)"] uvloop = ["uvloop (>=0.15.2)"] @@ -243,6 +251,7 @@ version = "0.0.1" 
description = "Dummy package for Beautiful Soup" optional = false python-versions = "*" +groups = ["main"] files = [ {file = "bs4-0.0.1.tar.gz", hash = "sha256:36ecea1fd7cc5c0c6e4a1ff075df26d50da647b75376626cc186e2212886dd3a"}, ] @@ -252,27 +261,62 @@ beautifulsoup4 = "*" [[package]] name = "cachetools" -version = "5.5.0" +version = "5.5.1" description = "Extensible memoizing collections and decorators" optional = false python-versions = ">=3.7" +groups = ["main"] +files = [ + {file = "cachetools-5.5.1-py3-none-any.whl", hash = "sha256:b76651fdc3b24ead3c648bbdeeb940c1b04d365b38b4af66788f9ec4a81d42bb"}, + {file = "cachetools-5.5.1.tar.gz", hash = "sha256:70f238fbba50383ef62e55c6aff6d9673175fe59f7c6782c7a0b9e38f4a9df95"}, +] + +[[package]] +name = "cape-parsers" +version = "0.1.36" +description = "CAPE: Malware Configuration Extraction" +optional = false +python-versions = "<4.0,>=3.10" +groups = ["main"] files = [ - {file = "cachetools-5.5.0-py3-none-any.whl", hash = "sha256:02134e8439cdc2ffb62023ce1debca2944c3f289d66bb17ead3ab3dede74b292"}, - {file = "cachetools-5.5.0.tar.gz", hash = "sha256:2cc24fb4cbe39633fb7badd9db9ca6295d766d9c2995f245725a46715d050f2a"}, + {file = "cape_parsers-0.1.36-py3-none-any.whl", hash = "sha256:c0944f8183aaf53d4a6c8f370423ce1134bdc4a7f7b23e37f92f0e061423e229"}, + {file = "cape_parsers-0.1.36.tar.gz", hash = "sha256:459db9a10a2c09a1dfb386dcec1b7968293d2c6eef417c4179022895e396ddc5"}, ] +[package.dependencies] +capstone = ">=4.0.2" +dncil = ">=1.0.2" +dnfile = ">=0.15.1" +netstruct = "1.1.2" +pefile = "*" +pycryptodomex = ">=3.20.0" +rat-king-parser = ">=4.1.0" +ruff = ">=0.7.2" +unicorn = "2.1.1" +yara-python = ">=4.5.1" + +[package.extras] +maco = ["maco (==1.1.8)"] + [[package]] name = "capstone" -version = "4.0.2" +version = "5.0.5" description = "Capstone disassembly engine" optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" +python-versions = ">=3.8" +groups = ["main"] files = [ - {file = "capstone-4.0.2-py2.py3-none-manylinux1_i686.whl", hash = "sha256:da442f979414cf27e4621e70e835880878c858ea438c4f0e957e132593579e37"}, - {file = "capstone-4.0.2-py2.py3-none-manylinux1_x86_64.whl", hash = "sha256:9d1a9096c5f875b11290317722ed44bb6e7c52e50cc79d791f142bce968c49aa"}, - {file = "capstone-4.0.2-py2.py3-none-win32.whl", hash = "sha256:c3d9b443d1adb40ee2d9a4e7341169b76476ddcf3a54c03793b16cdc7cd35c5a"}, - {file = "capstone-4.0.2-py2.py3-none-win_amd64.whl", hash = "sha256:0d65ffe8620920976ceadedc769f22318f6f150a592368d8a735612367ac8a1a"}, - {file = "capstone-4.0.2.tar.gz", hash = "sha256:2842913092c9b69fd903744bc1b87488e1451625460baac173056e1808ec1c66"}, + {file = "capstone-5.0.5-py3-none-macosx_10_9_universal2.whl", hash = "sha256:24db89d74b571659fe6212e756795cd5d394378c50e19e41dbcfb6c087c2f87d"}, + {file = "capstone-5.0.5-py3-none-macosx_10_9_x86_64.whl", hash = "sha256:15f4b85df176999bbf7eb3f53f0cf2cee728254600c1be21442e2581189309e9"}, + {file = "capstone-5.0.5-py3-none-macosx_11_0_arm64.whl", hash = "sha256:5416621ac2d243d89b788f1309b143ea1f400da3eb5c47c6a87f1add99732a83"}, + {file = "capstone-5.0.5-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6ef47da78f44de1cdff1519b360186681fca0097e92046a7d7203d56364f99da"}, + {file = "capstone-5.0.5-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:754968f057d9e5d9c383f2918a1d56d455bfb274bbf307f219180b16e6d5aaeb"}, + {file = "capstone-5.0.5-py3-none-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = 
"sha256:933797f7e2a257a77c3a699316deea92efa120a10d41e22725a96fc82f0a769e"}, + {file = "capstone-5.0.5-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:50b646f0c56b0cac5c993dde08b5e5eacf8b1f66031ec8d60154eae6e3c0645e"}, + {file = "capstone-5.0.5-py3-none-musllinux_1_2_i686.whl", hash = "sha256:cd35b666739d7b79066fc69fd0c145d5ceb6a4131df3db1225ec6dcfa3fe322f"}, + {file = "capstone-5.0.5-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:89dac65a1c84670ee30ccaf2ae688c4b27ad514d9dc8738a9826579051e29ecb"}, + {file = "capstone-5.0.5-py3-none-win_amd64.whl", hash = "sha256:a03b6b42b33bb0739b2436a555e699ac91cd1d1891134269b04e359b607e50e8"}, + {file = "capstone-5.0.5.tar.gz", hash = "sha256:32346f6019d5351adaaf584ffc60c1e40db6b47d1d049eb924f903eb2b073e87"}, ] [[package]] @@ -281,6 +325,8 @@ version = "1.2.2" description = "CaRT Neutering format" optional = false python-versions = ">=3.6" +groups = ["main"] +markers = "extra == \"maco\"" files = [ {file = "cart-1.2.2-py2.py3-none-any.whl", hash = "sha256:c111398038683c85d3edcadaa3b16183461907bdb613e05cbb60d381f2886309"}, ] @@ -290,13 +336,14 @@ pycryptodome = "*" [[package]] name = "certifi" -version = "2024.7.4" +version = "2025.1.31" description = "Python package for providing Mozilla's CA Bundle." optional = false python-versions = ">=3.6" +groups = ["main"] files = [ - {file = "certifi-2024.7.4-py3-none-any.whl", hash = "sha256:c198e21b1289c2ab85ee4e67bb4b4ef3ead0892059901a8d5b622f24a1101e90"}, - {file = "certifi-2024.7.4.tar.gz", hash = "sha256:5a1e7645bc0ec61a09e26c36f6106dd4cf40c6db3a1fb6352b0244e7fb057c7b"}, + {file = "certifi-2025.1.31-py3-none-any.whl", hash = "sha256:ca78db4565a652026a4db2bcdf68f2fb589ea80d0be70e03929ed730746b84fe"}, + {file = "certifi-2025.1.31.tar.gz", hash = "sha256:3d5da6925056f6f18f119200434a4780a94263f10d1c21d032a6f6b2baa20651"}, ] [[package]] @@ -305,6 +352,7 @@ version = "1.17.1" description = "Foreign Function Interface for Python calling C code." optional = false python-versions = ">=3.8" +groups = ["main"] files = [ {file = "cffi-1.17.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:df8b1c11f177bc2313ec4b2d46baec87a5f3e71fc8b45dab2ee7cae86d9aba14"}, {file = "cffi-1.17.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:8f2cdc858323644ab277e9bb925ad72ae0e67f69e804f4898c070998d50b1a67"}, @@ -384,6 +432,7 @@ version = "3.4.0" description = "Validate configuration and produce human readable error messages." optional = false python-versions = ">=3.8" +groups = ["dev"] files = [ {file = "cfgv-3.4.0-py2.py3-none-any.whl", hash = "sha256:b7265b1f29fd3316bfcd2b330d63d024f2bfd8bcb8b0272f8e19a504856c48f9"}, {file = "cfgv-3.4.0.tar.gz", hash = "sha256:e52591d4c5f5dead8e0f673fb16db7949d2cfb3f7da4582893288f0ded8fe560"}, @@ -395,6 +444,7 @@ version = "3.0.5" description = "Brings async, event-driven capabilities to Django. Django 2.2 and up only." 
optional = false python-versions = ">=3.7" +groups = ["main"] files = [ {file = "channels-3.0.5-py3-none-any.whl", hash = "sha256:3813b8025bf85509769793aca720e6c3b1c5bde1cb253a961252bf0242b60a26"}, {file = "channels-3.0.5.tar.gz", hash = "sha256:a3dc3339cc033e7c2afe083fb3dedf74fc5009815967e317e080e7bfdc92ea26"}, @@ -414,6 +464,7 @@ version = "4.0.0" description = "Universal encoding detector for Python 2 and 3" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" +groups = ["main"] files = [ {file = "chardet-4.0.0-py2.py3-none-any.whl", hash = "sha256:f864054d66fd9118f2e67044ac8981a54775ec5b67aed0441892edb553d21da5"}, {file = "chardet-4.0.0.tar.gz", hash = "sha256:0d6f53a15db4120f2b08c94f11e7d93d2c911ee118b6b30a04ec3ee8310179fa"}, @@ -421,112 +472,116 @@ files = [ [[package]] name = "charset-normalizer" -version = "3.3.2" +version = "3.4.1" description = "The Real First Universal Charset Detector. Open, modern and actively maintained alternative to Chardet." optional = false -python-versions = ">=3.7.0" -files = [ - {file = "charset-normalizer-3.3.2.tar.gz", hash = "sha256:f30c3cb33b24454a82faecaf01b19c18562b1e89558fb6c56de4d9118a032fd5"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:25baf083bf6f6b341f4121c2f3c548875ee6f5339300e08be3f2b2ba1721cdd3"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:06435b539f889b1f6f4ac1758871aae42dc3a8c0e24ac9e60c2384973ad73027"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:9063e24fdb1e498ab71cb7419e24622516c4a04476b17a2dab57e8baa30d6e03"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6897af51655e3691ff853668779c7bad41579facacf5fd7253b0133308cf000d"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1d3193f4a680c64b4b6a9115943538edb896edc190f0b222e73761716519268e"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:cd70574b12bb8a4d2aaa0094515df2463cb429d8536cfb6c7ce983246983e5a6"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8465322196c8b4d7ab6d1e049e4c5cb460d0394da4a27d23cc242fbf0034b6b5"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a9a8e9031d613fd2009c182b69c7b2c1ef8239a0efb1df3f7c8da66d5dd3d537"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:beb58fe5cdb101e3a055192ac291b7a21e3b7ef4f67fa1d74e331a7f2124341c"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:e06ed3eb3218bc64786f7db41917d4e686cc4856944f53d5bdf83a6884432e12"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:2e81c7b9c8979ce92ed306c249d46894776a909505d8f5a4ba55b14206e3222f"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:572c3763a264ba47b3cf708a44ce965d98555f618ca42c926a9c1616d8f34269"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:fd1abc0d89e30cc4e02e4064dc67fcc51bd941eb395c502aac3ec19fab46b519"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-win32.whl", hash = "sha256:3d47fa203a7bd9c5b6cee4736ee84ca03b8ef23193c0d1ca99b5089f72645c73"}, - {file = 
"charset_normalizer-3.3.2-cp310-cp310-win_amd64.whl", hash = "sha256:10955842570876604d404661fbccbc9c7e684caf432c09c715ec38fbae45ae09"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:802fe99cca7457642125a8a88a084cef28ff0cf9407060f7b93dca5aa25480db"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:573f6eac48f4769d667c4442081b1794f52919e7edada77495aaed9236d13a96"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:549a3a73da901d5bc3ce8d24e0600d1fa85524c10287f6004fbab87672bf3e1e"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f27273b60488abe721a075bcca6d7f3964f9f6f067c8c4c605743023d7d3944f"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1ceae2f17a9c33cb48e3263960dc5fc8005351ee19db217e9b1bb15d28c02574"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:65f6f63034100ead094b8744b3b97965785388f308a64cf8d7c34f2f2e5be0c4"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:753f10e867343b4511128c6ed8c82f7bec3bd026875576dfd88483c5c73b2fd8"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4a78b2b446bd7c934f5dcedc588903fb2f5eec172f3d29e52a9096a43722adfc"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:e537484df0d8f426ce2afb2d0f8e1c3d0b114b83f8850e5f2fbea0e797bd82ae"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:eb6904c354526e758fda7167b33005998fb68c46fbc10e013ca97f21ca5c8887"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:deb6be0ac38ece9ba87dea880e438f25ca3eddfac8b002a2ec3d9183a454e8ae"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:4ab2fe47fae9e0f9dee8c04187ce5d09f48eabe611be8259444906793ab7cbce"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:80402cd6ee291dcb72644d6eac93785fe2c8b9cb30893c1af5b8fdd753b9d40f"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-win32.whl", hash = "sha256:7cd13a2e3ddeed6913a65e66e94b51d80a041145a026c27e6bb76c31a853c6ab"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-win_amd64.whl", hash = "sha256:663946639d296df6a2bb2aa51b60a2454ca1cb29835324c640dafb5ff2131a77"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:0b2b64d2bb6d3fb9112bafa732def486049e63de9618b5843bcdd081d8144cd8"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:ddbb2551d7e0102e7252db79ba445cdab71b26640817ab1e3e3648dad515003b"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:55086ee1064215781fff39a1af09518bc9255b50d6333f2e4c74ca09fac6a8f6"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8f4a014bc36d3c57402e2977dada34f9c12300af536839dc38c0beab8878f38a"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a10af20b82360ab00827f916a6058451b723b4e65030c5a18577c8b2de5b3389"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash 
= "sha256:8d756e44e94489e49571086ef83b2bb8ce311e730092d2c34ca8f7d925cb20aa"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:90d558489962fd4918143277a773316e56c72da56ec7aa3dc3dbbe20fdfed15b"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6ac7ffc7ad6d040517be39eb591cac5ff87416c2537df6ba3cba3bae290c0fed"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:7ed9e526742851e8d5cc9e6cf41427dfc6068d4f5a3bb03659444b4cabf6bc26"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:8bdb58ff7ba23002a4c5808d608e4e6c687175724f54a5dade5fa8c67b604e4d"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:6b3251890fff30ee142c44144871185dbe13b11bab478a88887a639655be1068"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:b4a23f61ce87adf89be746c8a8974fe1c823c891d8f86eb218bb957c924bb143"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:efcb3f6676480691518c177e3b465bcddf57cea040302f9f4e6e191af91174d4"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-win32.whl", hash = "sha256:d965bba47ddeec8cd560687584e88cf699fd28f192ceb452d1d7ee807c5597b7"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-win_amd64.whl", hash = "sha256:96b02a3dc4381e5494fad39be677abcb5e6634bf7b4fa83a6dd3112607547001"}, - {file = "charset_normalizer-3.3.2-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:95f2a5796329323b8f0512e09dbb7a1860c46a39da62ecb2324f116fa8fdc85c"}, - {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c002b4ffc0be611f0d9da932eb0f704fe2602a9a949d1f738e4c34c75b0863d5"}, - {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a981a536974bbc7a512cf44ed14938cf01030a99e9b3a06dd59578882f06f985"}, - {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3287761bc4ee9e33561a7e058c72ac0938c4f57fe49a09eae428fd88aafe7bb6"}, - {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:42cb296636fcc8b0644486d15c12376cb9fa75443e00fb25de0b8602e64c1714"}, - {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0a55554a2fa0d408816b3b5cedf0045f4b8e1a6065aec45849de2d6f3f8e9786"}, - {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:c083af607d2515612056a31f0a8d9e0fcb5876b7bfc0abad3ecd275bc4ebc2d5"}, - {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:87d1351268731db79e0f8e745d92493ee2841c974128ef629dc518b937d9194c"}, - {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:bd8f7df7d12c2db9fab40bdd87a7c09b1530128315d047a086fa3ae3435cb3a8"}, - {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:c180f51afb394e165eafe4ac2936a14bee3eb10debc9d9e4db8958fe36afe711"}, - {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:8c622a5fe39a48f78944a87d4fb8a53ee07344641b0562c540d840748571b811"}, - {file = "charset_normalizer-3.3.2-cp37-cp37m-win32.whl", hash = 
"sha256:db364eca23f876da6f9e16c9da0df51aa4f104a972735574842618b8c6d999d4"}, - {file = "charset_normalizer-3.3.2-cp37-cp37m-win_amd64.whl", hash = "sha256:86216b5cee4b06df986d214f664305142d9c76df9b6512be2738aa72a2048f99"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:6463effa3186ea09411d50efc7d85360b38d5f09b870c48e4600f63af490e56a"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:6c4caeef8fa63d06bd437cd4bdcf3ffefe6738fb1b25951440d80dc7df8c03ac"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:37e55c8e51c236f95b033f6fb391d7d7970ba5fe7ff453dad675e88cf303377a"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fb69256e180cb6c8a894fee62b3afebae785babc1ee98b81cdf68bbca1987f33"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ae5f4161f18c61806f411a13b0310bea87f987c7d2ecdbdaad0e94eb2e404238"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b2b0a0c0517616b6869869f8c581d4eb2dd83a4d79e0ebcb7d373ef9956aeb0a"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:45485e01ff4d3630ec0d9617310448a8702f70e9c01906b0d0118bdf9d124cf2"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:eb00ed941194665c332bf8e078baf037d6c35d7c4f3102ea2d4f16ca94a26dc8"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:2127566c664442652f024c837091890cb1942c30937add288223dc895793f898"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:a50aebfa173e157099939b17f18600f72f84eed3049e743b68ad15bd69b6bf99"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:4d0d1650369165a14e14e1e47b372cfcb31d6ab44e6e33cb2d4e57265290044d"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:923c0c831b7cfcb071580d3f46c4baf50f174be571576556269530f4bbd79d04"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:06a81e93cd441c56a9b65d8e1d043daeb97a3d0856d177d5c90ba85acb3db087"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-win32.whl", hash = "sha256:6ef1d82a3af9d3eecdba2321dc1b3c238245d890843e040e41e470ffa64c3e25"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-win_amd64.whl", hash = "sha256:eb8821e09e916165e160797a6c17edda0679379a4be5c716c260e836e122f54b"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:c235ebd9baae02f1b77bcea61bce332cb4331dc3617d254df3323aa01ab47bd4"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:5b4c145409bef602a690e7cfad0a15a55c13320ff7a3ad7ca59c13bb8ba4d45d"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:68d1f8a9e9e37c1223b656399be5d6b448dea850bed7d0f87a8311f1ff3dabb0"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:22afcb9f253dac0696b5a4be4a1c0f8762f8239e21b99680099abd9b2b1b2269"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e27ad930a842b4c5eb8ac0016b0a54f5aebbe679340c26101df33424142c143c"}, - {file = 
"charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1f79682fbe303db92bc2b1136016a38a42e835d932bab5b3b1bfcfbf0640e519"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b261ccdec7821281dade748d088bb6e9b69e6d15b30652b74cbbac25e280b796"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:122c7fa62b130ed55f8f285bfd56d5f4b4a5b503609d181f9ad85e55c89f4185"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:d0eccceffcb53201b5bfebb52600a5fb483a20b61da9dbc885f8b103cbe7598c"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:9f96df6923e21816da7e0ad3fd47dd8f94b2a5ce594e00677c0013018b813458"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:7f04c839ed0b6b98b1a7501a002144b76c18fb1c1850c8b98d458ac269e26ed2"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:34d1c8da1e78d2e001f363791c98a272bb734000fcef47a491c1e3b0505657a8"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:ff8fa367d09b717b2a17a052544193ad76cd49979c805768879cb63d9ca50561"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-win32.whl", hash = "sha256:aed38f6e4fb3f5d6bf81bfa990a07806be9d83cf7bacef998ab1a9bd660a581f"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-win_amd64.whl", hash = "sha256:b01b88d45a6fcb69667cd6d2f7a9aeb4bf53760d7fc536bf679ec94fe9f3ff3d"}, - {file = "charset_normalizer-3.3.2-py3-none-any.whl", hash = "sha256:3e4d1f6587322d2788836a99c69062fbb091331ec940e02d12d179c1d53e25fc"}, +python-versions = ">=3.7" +groups = ["main"] +files = [ + {file = "charset_normalizer-3.4.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:91b36a978b5ae0ee86c394f5a54d6ef44db1de0815eb43de826d41d21e4af3de"}, + {file = "charset_normalizer-3.4.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7461baadb4dc00fd9e0acbe254e3d7d2112e7f92ced2adc96e54ef6501c5f176"}, + {file = "charset_normalizer-3.4.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e218488cd232553829be0664c2292d3af2eeeb94b32bea483cf79ac6a694e037"}, + {file = "charset_normalizer-3.4.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:80ed5e856eb7f30115aaf94e4a08114ccc8813e6ed1b5efa74f9f82e8509858f"}, + {file = "charset_normalizer-3.4.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b010a7a4fd316c3c484d482922d13044979e78d1861f0e0650423144c616a46a"}, + {file = "charset_normalizer-3.4.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4532bff1b8421fd0a320463030c7520f56a79c9024a4e88f01c537316019005a"}, + {file = "charset_normalizer-3.4.1-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:d973f03c0cb71c5ed99037b870f2be986c3c05e63622c017ea9816881d2dd247"}, + {file = "charset_normalizer-3.4.1-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:3a3bd0dcd373514dcec91c411ddb9632c0d7d92aed7093b8c3bbb6d69ca74408"}, + {file = "charset_normalizer-3.4.1-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:d9c3cdf5390dcd29aa8056d13e8e99526cda0305acc038b96b30352aff5ff2bb"}, + {file = "charset_normalizer-3.4.1-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:2bdfe3ac2e1bbe5b59a1a63721eb3b95fc9b6817ae4a46debbb4e11f6232428d"}, + {file = 
"charset_normalizer-3.4.1-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:eab677309cdb30d047996b36d34caeda1dc91149e4fdca0b1a039b3f79d9a807"}, + {file = "charset_normalizer-3.4.1-cp310-cp310-win32.whl", hash = "sha256:c0429126cf75e16c4f0ad00ee0eae4242dc652290f940152ca8c75c3a4b6ee8f"}, + {file = "charset_normalizer-3.4.1-cp310-cp310-win_amd64.whl", hash = "sha256:9f0b8b1c6d84c8034a44893aba5e767bf9c7a211e313a9605d9c617d7083829f"}, + {file = "charset_normalizer-3.4.1-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:8bfa33f4f2672964266e940dd22a195989ba31669bd84629f05fab3ef4e2d125"}, + {file = "charset_normalizer-3.4.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:28bf57629c75e810b6ae989f03c0828d64d6b26a5e205535585f96093e405ed1"}, + {file = "charset_normalizer-3.4.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f08ff5e948271dc7e18a35641d2f11a4cd8dfd5634f55228b691e62b37125eb3"}, + {file = "charset_normalizer-3.4.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:234ac59ea147c59ee4da87a0c0f098e9c8d169f4dc2a159ef720f1a61bbe27cd"}, + {file = "charset_normalizer-3.4.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fd4ec41f914fa74ad1b8304bbc634b3de73d2a0889bd32076342a573e0779e00"}, + {file = "charset_normalizer-3.4.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:eea6ee1db730b3483adf394ea72f808b6e18cf3cb6454b4d86e04fa8c4327a12"}, + {file = "charset_normalizer-3.4.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:c96836c97b1238e9c9e3fe90844c947d5afbf4f4c92762679acfe19927d81d77"}, + {file = "charset_normalizer-3.4.1-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:4d86f7aff21ee58f26dcf5ae81a9addbd914115cdebcbb2217e4f0ed8982e146"}, + {file = "charset_normalizer-3.4.1-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:09b5e6733cbd160dcc09589227187e242a30a49ca5cefa5a7edd3f9d19ed53fd"}, + {file = "charset_normalizer-3.4.1-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:5777ee0881f9499ed0f71cc82cf873d9a0ca8af166dfa0af8ec4e675b7df48e6"}, + {file = "charset_normalizer-3.4.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:237bdbe6159cff53b4f24f397d43c6336c6b0b42affbe857970cefbb620911c8"}, + {file = "charset_normalizer-3.4.1-cp311-cp311-win32.whl", hash = "sha256:8417cb1f36cc0bc7eaba8ccb0e04d55f0ee52df06df3ad55259b9a323555fc8b"}, + {file = "charset_normalizer-3.4.1-cp311-cp311-win_amd64.whl", hash = "sha256:d7f50a1f8c450f3925cb367d011448c39239bb3eb4117c36a6d354794de4ce76"}, + {file = "charset_normalizer-3.4.1-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:73d94b58ec7fecbc7366247d3b0b10a21681004153238750bb67bd9012414545"}, + {file = "charset_normalizer-3.4.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dad3e487649f498dd991eeb901125411559b22e8d7ab25d3aeb1af367df5efd7"}, + {file = "charset_normalizer-3.4.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c30197aa96e8eed02200a83fba2657b4c3acd0f0aa4bdc9f6c1af8e8962e0757"}, + {file = "charset_normalizer-3.4.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2369eea1ee4a7610a860d88f268eb39b95cb588acd7235e02fd5a5601773d4fa"}, + {file = "charset_normalizer-3.4.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bc2722592d8998c870fa4e290c2eec2c1569b87fe58618e67d38b4665dfa680d"}, + {file = 
"charset_normalizer-3.4.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ffc9202a29ab3920fa812879e95a9e78b2465fd10be7fcbd042899695d75e616"}, + {file = "charset_normalizer-3.4.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:804a4d582ba6e5b747c625bf1255e6b1507465494a40a2130978bda7b932c90b"}, + {file = "charset_normalizer-3.4.1-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:0f55e69f030f7163dffe9fd0752b32f070566451afe180f99dbeeb81f511ad8d"}, + {file = "charset_normalizer-3.4.1-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:c4c3e6da02df6fa1410a7680bd3f63d4f710232d3139089536310d027950696a"}, + {file = "charset_normalizer-3.4.1-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:5df196eb874dae23dcfb968c83d4f8fdccb333330fe1fc278ac5ceeb101003a9"}, + {file = "charset_normalizer-3.4.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:e358e64305fe12299a08e08978f51fc21fac060dcfcddd95453eabe5b93ed0e1"}, + {file = "charset_normalizer-3.4.1-cp312-cp312-win32.whl", hash = "sha256:9b23ca7ef998bc739bf6ffc077c2116917eabcc901f88da1b9856b210ef63f35"}, + {file = "charset_normalizer-3.4.1-cp312-cp312-win_amd64.whl", hash = "sha256:6ff8a4a60c227ad87030d76e99cd1698345d4491638dfa6673027c48b3cd395f"}, + {file = "charset_normalizer-3.4.1-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:aabfa34badd18f1da5ec1bc2715cadc8dca465868a4e73a0173466b688f29dda"}, + {file = "charset_normalizer-3.4.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:22e14b5d70560b8dd51ec22863f370d1e595ac3d024cb8ad7d308b4cd95f8313"}, + {file = "charset_normalizer-3.4.1-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8436c508b408b82d87dc5f62496973a1805cd46727c34440b0d29d8a2f50a6c9"}, + {file = "charset_normalizer-3.4.1-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2d074908e1aecee37a7635990b2c6d504cd4766c7bc9fc86d63f9c09af3fa11b"}, + {file = "charset_normalizer-3.4.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:955f8851919303c92343d2f66165294848d57e9bba6cf6e3625485a70a038d11"}, + {file = "charset_normalizer-3.4.1-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:44ecbf16649486d4aebafeaa7ec4c9fed8b88101f4dd612dcaf65d5e815f837f"}, + {file = "charset_normalizer-3.4.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:0924e81d3d5e70f8126529951dac65c1010cdf117bb75eb02dd12339b57749dd"}, + {file = "charset_normalizer-3.4.1-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:2967f74ad52c3b98de4c3b32e1a44e32975e008a9cd2a8cc8966d6a5218c5cb2"}, + {file = "charset_normalizer-3.4.1-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:c75cb2a3e389853835e84a2d8fb2b81a10645b503eca9bcb98df6b5a43eb8886"}, + {file = "charset_normalizer-3.4.1-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:09b26ae6b1abf0d27570633b2b078a2a20419c99d66fb2823173d73f188ce601"}, + {file = "charset_normalizer-3.4.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:fa88b843d6e211393a37219e6a1c1df99d35e8fd90446f1118f4216e307e48cd"}, + {file = "charset_normalizer-3.4.1-cp313-cp313-win32.whl", hash = "sha256:eb8178fe3dba6450a3e024e95ac49ed3400e506fd4e9e5c32d30adda88cbd407"}, + {file = "charset_normalizer-3.4.1-cp313-cp313-win_amd64.whl", hash = "sha256:b1ac5992a838106edb89654e0aebfc24f5848ae2547d22c2c3f66454daa11971"}, + {file = "charset_normalizer-3.4.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", 
hash = "sha256:f30bf9fd9be89ecb2360c7d94a711f00c09b976258846efe40db3d05828e8089"}, + {file = "charset_normalizer-3.4.1-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:97f68b8d6831127e4787ad15e6757232e14e12060bec17091b85eb1486b91d8d"}, + {file = "charset_normalizer-3.4.1-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7974a0b5ecd505609e3b19742b60cee7aa2aa2fb3151bc917e6e2646d7667dcf"}, + {file = "charset_normalizer-3.4.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fc54db6c8593ef7d4b2a331b58653356cf04f67c960f584edb7c3d8c97e8f39e"}, + {file = "charset_normalizer-3.4.1-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:311f30128d7d333eebd7896965bfcfbd0065f1716ec92bd5638d7748eb6f936a"}, + {file = "charset_normalizer-3.4.1-cp37-cp37m-musllinux_1_2_aarch64.whl", hash = "sha256:7d053096f67cd1241601111b698f5cad775f97ab25d81567d3f59219b5f1adbd"}, + {file = "charset_normalizer-3.4.1-cp37-cp37m-musllinux_1_2_i686.whl", hash = "sha256:807f52c1f798eef6cf26beb819eeb8819b1622ddfeef9d0977a8502d4db6d534"}, + {file = "charset_normalizer-3.4.1-cp37-cp37m-musllinux_1_2_ppc64le.whl", hash = "sha256:dccbe65bd2f7f7ec22c4ff99ed56faa1e9f785482b9bbd7c717e26fd723a1d1e"}, + {file = "charset_normalizer-3.4.1-cp37-cp37m-musllinux_1_2_s390x.whl", hash = "sha256:2fb9bd477fdea8684f78791a6de97a953c51831ee2981f8e4f583ff3b9d9687e"}, + {file = "charset_normalizer-3.4.1-cp37-cp37m-musllinux_1_2_x86_64.whl", hash = "sha256:01732659ba9b5b873fc117534143e4feefecf3b2078b0a6a2e925271bb6f4cfa"}, + {file = "charset_normalizer-3.4.1-cp37-cp37m-win32.whl", hash = "sha256:7a4f97a081603d2050bfaffdefa5b02a9ec823f8348a572e39032caa8404a487"}, + {file = "charset_normalizer-3.4.1-cp37-cp37m-win_amd64.whl", hash = "sha256:7b1bef6280950ee6c177b326508f86cad7ad4dff12454483b51d8b7d673a2c5d"}, + {file = "charset_normalizer-3.4.1-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:ecddf25bee22fe4fe3737a399d0d177d72bc22be6913acfab364b40bce1ba83c"}, + {file = "charset_normalizer-3.4.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8c60ca7339acd497a55b0ea5d506b2a2612afb2826560416f6894e8b5770d4a9"}, + {file = "charset_normalizer-3.4.1-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b7b2d86dd06bfc2ade3312a83a5c364c7ec2e3498f8734282c6c3d4b07b346b8"}, + {file = "charset_normalizer-3.4.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:dd78cfcda14a1ef52584dbb008f7ac81c1328c0f58184bf9a84c49c605002da6"}, + {file = "charset_normalizer-3.4.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6e27f48bcd0957c6d4cb9d6fa6b61d192d0b13d5ef563e5f2ae35feafc0d179c"}, + {file = "charset_normalizer-3.4.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:01ad647cdd609225c5350561d084b42ddf732f4eeefe6e678765636791e78b9a"}, + {file = "charset_normalizer-3.4.1-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:619a609aa74ae43d90ed2e89bdd784765de0a25ca761b93e196d938b8fd1dbbd"}, + {file = "charset_normalizer-3.4.1-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:89149166622f4db9b4b6a449256291dc87a99ee53151c74cbd82a53c8c2f6ccd"}, + {file = "charset_normalizer-3.4.1-cp38-cp38-musllinux_1_2_ppc64le.whl", hash = "sha256:7709f51f5f7c853f0fb938bcd3bc59cdfdc5203635ffd18bf354f6967ea0f824"}, + {file = "charset_normalizer-3.4.1-cp38-cp38-musllinux_1_2_s390x.whl", hash = 
"sha256:345b0426edd4e18138d6528aed636de7a9ed169b4aaf9d61a8c19e39d26838ca"}, + {file = "charset_normalizer-3.4.1-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:0907f11d019260cdc3f94fbdb23ff9125f6b5d1039b76003b5b0ac9d6a6c9d5b"}, + {file = "charset_normalizer-3.4.1-cp38-cp38-win32.whl", hash = "sha256:ea0d8d539afa5eb2728aa1932a988a9a7af94f18582ffae4bc10b3fbdad0626e"}, + {file = "charset_normalizer-3.4.1-cp38-cp38-win_amd64.whl", hash = "sha256:329ce159e82018d646c7ac45b01a430369d526569ec08516081727a20e9e4af4"}, + {file = "charset_normalizer-3.4.1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:b97e690a2118911e39b4042088092771b4ae3fc3aa86518f84b8cf6888dbdb41"}, + {file = "charset_normalizer-3.4.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:78baa6d91634dfb69ec52a463534bc0df05dbd546209b79a3880a34487f4b84f"}, + {file = "charset_normalizer-3.4.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1a2bc9f351a75ef49d664206d51f8e5ede9da246602dc2d2726837620ea034b2"}, + {file = "charset_normalizer-3.4.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:75832c08354f595c760a804588b9357d34ec00ba1c940c15e31e96d902093770"}, + {file = "charset_normalizer-3.4.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0af291f4fe114be0280cdd29d533696a77b5b49cfde5467176ecab32353395c4"}, + {file = "charset_normalizer-3.4.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0167ddc8ab6508fe81860a57dd472b2ef4060e8d378f0cc555707126830f2537"}, + {file = "charset_normalizer-3.4.1-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:2a75d49014d118e4198bcee5ee0a6f25856b29b12dbf7cd012791f8a6cc5c496"}, + {file = "charset_normalizer-3.4.1-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:363e2f92b0f0174b2f8238240a1a30142e3db7b957a5dd5689b0e75fb717cc78"}, + {file = "charset_normalizer-3.4.1-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:ab36c8eb7e454e34e60eb55ca5d241a5d18b2c6244f6827a30e451c42410b5f7"}, + {file = "charset_normalizer-3.4.1-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:4c0907b1928a36d5a998d72d64d8eaa7244989f7aaaf947500d3a800c83a3fd6"}, + {file = "charset_normalizer-3.4.1-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:04432ad9479fa40ec0f387795ddad4437a2b50417c69fa275e212933519ff294"}, + {file = "charset_normalizer-3.4.1-cp39-cp39-win32.whl", hash = "sha256:3bed14e9c89dcb10e8f3a29f9ccac4955aebe93c71ae803af79265c9ca5644c5"}, + {file = "charset_normalizer-3.4.1-cp39-cp39-win_amd64.whl", hash = "sha256:49402233c892a461407c512a19435d1ce275543138294f7ef013f0b63d5d3765"}, + {file = "charset_normalizer-3.4.1-py3-none-any.whl", hash = "sha256:d98b1668f06378c6dbefec3b92299716b931cd4e6061f3c875a71ced1780ab85"}, + {file = "charset_normalizer-3.4.1.tar.gz", hash = "sha256:44251f18cd68a75b56585dd00dae26183e102cd5e0f9f1466e6df5da2ed64ea3"}, ] [[package]] name = "click" -version = "8.1.7" +version = "8.1.8" description = "Composable command line interface toolkit" optional = false python-versions = ">=3.7" +groups = ["main", "dev"] files = [ - {file = "click-8.1.7-py3-none-any.whl", hash = "sha256:ae74fb96c20a0277a1d615f1e4d73c8414f5a98db8b799a7931d1582f3390c28"}, - {file = "click-8.1.7.tar.gz", hash = "sha256:ca9853ad459e787e2192211578cc907e7594e294c7ccc834310722b41b9ca6de"}, + {file = "click-8.1.8-py3-none-any.whl", hash = "sha256:63c132bbbed01578a06712a2d1f497bb62d9c1c0d329b7903a866228027263b2"}, + {file = "click-8.1.8.tar.gz", hash = 
"sha256:ed53c9d8990d83c2a27deae68e4ee337473f6330c040a31d4225c9574d16096a"}, ] [package.dependencies] @@ -538,10 +593,12 @@ version = "0.4.6" description = "Cross-platform colored terminal text." optional = false python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7" +groups = ["main", "dev"] files = [ {file = "colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6"}, {file = "colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44"}, ] +markers = {dev = "platform_system == \"Windows\" or sys_platform == \"win32\""} [[package]] name = "colorclass" @@ -549,6 +606,7 @@ version = "2.2.2" description = "Colorful worry-free console applications for Linux, Mac OS X, and Windows." optional = false python-versions = ">=2.6" +groups = ["main"] files = [ {file = "colorclass-2.2.2-py2.py3-none-any.whl", hash = "sha256:6f10c273a0ef7a1150b1120b6095cbdd68e5cf36dfd5d0fc957a2500bbf99a55"}, {file = "colorclass-2.2.2.tar.gz", hash = "sha256:6d4fe287766166a98ca7bc6f6312daf04a0481b1eda43e7173484051c0ab4366"}, @@ -560,6 +618,7 @@ version = "23.10.4" description = "Symbolic constants in Python" optional = false python-versions = ">=3.8" +groups = ["main"] files = [ {file = "constantly-23.10.4-py3-none-any.whl", hash = "sha256:3fd9b4d1c3dc1ec9757f3c52aef7e53ad9323dbe39f51dfd4c43853b68dfa3f9"}, {file = "constantly-23.10.4.tar.gz", hash = "sha256:aa92b70a33e2ac0bb33cd745eb61776594dc48764b06c35e0efd050b7f1c7cbd"}, @@ -567,83 +626,74 @@ files = [ [[package]] name = "coverage" -version = "7.6.1" +version = "7.6.10" description = "Code coverage measurement for Python" optional = false -python-versions = ">=3.8" -files = [ - {file = "coverage-7.6.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:b06079abebbc0e89e6163b8e8f0e16270124c154dc6e4a47b413dd538859af16"}, - {file = "coverage-7.6.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:cf4b19715bccd7ee27b6b120e7e9dd56037b9c0681dcc1adc9ba9db3d417fa36"}, - {file = "coverage-7.6.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e61c0abb4c85b095a784ef23fdd4aede7a2628478e7baba7c5e3deba61070a02"}, - {file = "coverage-7.6.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:fd21f6ae3f08b41004dfb433fa895d858f3f5979e7762d052b12aef444e29afc"}, - {file = "coverage-7.6.1-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8f59d57baca39b32db42b83b2a7ba6f47ad9c394ec2076b084c3f029b7afca23"}, - {file = "coverage-7.6.1-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:a1ac0ae2b8bd743b88ed0502544847c3053d7171a3cff9228af618a068ed9c34"}, - {file = "coverage-7.6.1-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:e6a08c0be454c3b3beb105c0596ebdc2371fab6bb90c0c0297f4e58fd7e1012c"}, - {file = "coverage-7.6.1-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:f5796e664fe802da4f57a168c85359a8fbf3eab5e55cd4e4569fbacecc903959"}, - {file = "coverage-7.6.1-cp310-cp310-win32.whl", hash = "sha256:7bb65125fcbef8d989fa1dd0e8a060999497629ca5b0efbca209588a73356232"}, - {file = "coverage-7.6.1-cp310-cp310-win_amd64.whl", hash = "sha256:3115a95daa9bdba70aea750db7b96b37259a81a709223c8448fa97727d546fe0"}, - {file = "coverage-7.6.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:7dea0889685db8550f839fa202744652e87c60015029ce3f60e006f8c4462c93"}, - {file = "coverage-7.6.1-cp311-cp311-macosx_11_0_arm64.whl", 
hash = "sha256:ed37bd3c3b063412f7620464a9ac1314d33100329f39799255fb8d3027da50d3"}, - {file = "coverage-7.6.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d85f5e9a5f8b73e2350097c3756ef7e785f55bd71205defa0bfdaf96c31616ff"}, - {file = "coverage-7.6.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9bc572be474cafb617672c43fe989d6e48d3c83af02ce8de73fff1c6bb3c198d"}, - {file = "coverage-7.6.1-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0c0420b573964c760df9e9e86d1a9a622d0d27f417e1a949a8a66dd7bcee7bc6"}, - {file = "coverage-7.6.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:1f4aa8219db826ce6be7099d559f8ec311549bfc4046f7f9fe9b5cea5c581c56"}, - {file = "coverage-7.6.1-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:fc5a77d0c516700ebad189b587de289a20a78324bc54baee03dd486f0855d234"}, - {file = "coverage-7.6.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:b48f312cca9621272ae49008c7f613337c53fadca647d6384cc129d2996d1133"}, - {file = "coverage-7.6.1-cp311-cp311-win32.whl", hash = "sha256:1125ca0e5fd475cbbba3bb67ae20bd2c23a98fac4e32412883f9bcbaa81c314c"}, - {file = "coverage-7.6.1-cp311-cp311-win_amd64.whl", hash = "sha256:8ae539519c4c040c5ffd0632784e21b2f03fc1340752af711f33e5be83a9d6c6"}, - {file = "coverage-7.6.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:95cae0efeb032af8458fc27d191f85d1717b1d4e49f7cb226cf526ff28179778"}, - {file = "coverage-7.6.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:5621a9175cf9d0b0c84c2ef2b12e9f5f5071357c4d2ea6ca1cf01814f45d2391"}, - {file = "coverage-7.6.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:260933720fdcd75340e7dbe9060655aff3af1f0c5d20f46b57f262ab6c86a5e8"}, - {file = "coverage-7.6.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:07e2ca0ad381b91350c0ed49d52699b625aab2b44b65e1b4e02fa9df0e92ad2d"}, - {file = "coverage-7.6.1-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c44fee9975f04b33331cb8eb272827111efc8930cfd582e0320613263ca849ca"}, - {file = "coverage-7.6.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:877abb17e6339d96bf08e7a622d05095e72b71f8afd8a9fefc82cf30ed944163"}, - {file = "coverage-7.6.1-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:3e0cadcf6733c09154b461f1ca72d5416635e5e4ec4e536192180d34ec160f8a"}, - {file = "coverage-7.6.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:c3c02d12f837d9683e5ab2f3d9844dc57655b92c74e286c262e0fc54213c216d"}, - {file = "coverage-7.6.1-cp312-cp312-win32.whl", hash = "sha256:e05882b70b87a18d937ca6768ff33cc3f72847cbc4de4491c8e73880766718e5"}, - {file = "coverage-7.6.1-cp312-cp312-win_amd64.whl", hash = "sha256:b5d7b556859dd85f3a541db6a4e0167b86e7273e1cdc973e5b175166bb634fdb"}, - {file = "coverage-7.6.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:a4acd025ecc06185ba2b801f2de85546e0b8ac787cf9d3b06e7e2a69f925b106"}, - {file = "coverage-7.6.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:a6d3adcf24b624a7b778533480e32434a39ad8fa30c315208f6d3e5542aeb6e9"}, - {file = "coverage-7.6.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d0c212c49b6c10e6951362f7c6df3329f04c2b1c28499563d4035d964ab8e08c"}, - {file = "coverage-7.6.1-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = 
"sha256:6e81d7a3e58882450ec4186ca59a3f20a5d4440f25b1cff6f0902ad890e6748a"}, - {file = "coverage-7.6.1-cp313-cp313-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:78b260de9790fd81e69401c2dc8b17da47c8038176a79092a89cb2b7d945d060"}, - {file = "coverage-7.6.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:a78d169acd38300060b28d600344a803628c3fd585c912cacc9ea8790fe96862"}, - {file = "coverage-7.6.1-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:2c09f4ce52cb99dd7505cd0fc8e0e37c77b87f46bc9c1eb03fe3bc9991085388"}, - {file = "coverage-7.6.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:6878ef48d4227aace338d88c48738a4258213cd7b74fd9a3d4d7582bb1d8a155"}, - {file = "coverage-7.6.1-cp313-cp313-win32.whl", hash = "sha256:44df346d5215a8c0e360307d46ffaabe0f5d3502c8a1cefd700b34baf31d411a"}, - {file = "coverage-7.6.1-cp313-cp313-win_amd64.whl", hash = "sha256:8284cf8c0dd272a247bc154eb6c95548722dce90d098c17a883ed36e67cdb129"}, - {file = "coverage-7.6.1-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:d3296782ca4eab572a1a4eca686d8bfb00226300dcefdf43faa25b5242ab8a3e"}, - {file = "coverage-7.6.1-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:502753043567491d3ff6d08629270127e0c31d4184c4c8d98f92c26f65019962"}, - {file = "coverage-7.6.1-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6a89ecca80709d4076b95f89f308544ec8f7b4727e8a547913a35f16717856cb"}, - {file = "coverage-7.6.1-cp313-cp313t-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a318d68e92e80af8b00fa99609796fdbcdfef3629c77c6283566c6f02c6d6704"}, - {file = "coverage-7.6.1-cp313-cp313t-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:13b0a73a0896988f053e4fbb7de6d93388e6dd292b0d87ee51d106f2c11b465b"}, - {file = "coverage-7.6.1-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:4421712dbfc5562150f7554f13dde997a2e932a6b5f352edcce948a815efee6f"}, - {file = "coverage-7.6.1-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:166811d20dfea725e2e4baa71fffd6c968a958577848d2131f39b60043400223"}, - {file = "coverage-7.6.1-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:225667980479a17db1048cb2bf8bfb39b8e5be8f164b8f6628b64f78a72cf9d3"}, - {file = "coverage-7.6.1-cp313-cp313t-win32.whl", hash = "sha256:170d444ab405852903b7d04ea9ae9b98f98ab6d7e63e1115e82620807519797f"}, - {file = "coverage-7.6.1-cp313-cp313t-win_amd64.whl", hash = "sha256:b9f222de8cded79c49bf184bdbc06630d4c58eec9459b939b4a690c82ed05657"}, - {file = "coverage-7.6.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:6db04803b6c7291985a761004e9060b2bca08da6d04f26a7f2294b8623a0c1a0"}, - {file = "coverage-7.6.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:f1adfc8ac319e1a348af294106bc6a8458a0f1633cc62a1446aebc30c5fa186a"}, - {file = "coverage-7.6.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a95324a9de9650a729239daea117df21f4b9868ce32e63f8b650ebe6cef5595b"}, - {file = "coverage-7.6.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b43c03669dc4618ec25270b06ecd3ee4fa94c7f9b3c14bae6571ca00ef98b0d3"}, - {file = "coverage-7.6.1-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8929543a7192c13d177b770008bc4e8119f2e1f881d563fc6b6305d2d0ebe9de"}, - {file = "coverage-7.6.1-cp38-cp38-musllinux_1_2_aarch64.whl", hash = 
"sha256:a09ece4a69cf399510c8ab25e0950d9cf2b42f7b3cb0374f95d2e2ff594478a6"}, - {file = "coverage-7.6.1-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:9054a0754de38d9dbd01a46621636689124d666bad1936d76c0341f7d71bf569"}, - {file = "coverage-7.6.1-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:0dbde0f4aa9a16fa4d754356a8f2e36296ff4d83994b2c9d8398aa32f222f989"}, - {file = "coverage-7.6.1-cp38-cp38-win32.whl", hash = "sha256:da511e6ad4f7323ee5702e6633085fb76c2f893aaf8ce4c51a0ba4fc07580ea7"}, - {file = "coverage-7.6.1-cp38-cp38-win_amd64.whl", hash = "sha256:3f1156e3e8f2872197af3840d8ad307a9dd18e615dc64d9ee41696f287c57ad8"}, - {file = "coverage-7.6.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:abd5fd0db5f4dc9289408aaf34908072f805ff7792632250dcb36dc591d24255"}, - {file = "coverage-7.6.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:547f45fa1a93154bd82050a7f3cddbc1a7a4dd2a9bf5cb7d06f4ae29fe94eaf8"}, - {file = "coverage-7.6.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:645786266c8f18a931b65bfcefdbf6952dd0dea98feee39bd188607a9d307ed2"}, - {file = "coverage-7.6.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9e0b2df163b8ed01d515807af24f63de04bebcecbd6c3bfeff88385789fdf75a"}, - {file = "coverage-7.6.1-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:609b06f178fe8e9f89ef676532760ec0b4deea15e9969bf754b37f7c40326dbc"}, - {file = "coverage-7.6.1-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:702855feff378050ae4f741045e19a32d57d19f3e0676d589df0575008ea5004"}, - {file = "coverage-7.6.1-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:2bdb062ea438f22d99cba0d7829c2ef0af1d768d1e4a4f528087224c90b132cb"}, - {file = "coverage-7.6.1-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:9c56863d44bd1c4fe2abb8a4d6f5371d197f1ac0ebdee542f07f35895fc07f36"}, - {file = "coverage-7.6.1-cp39-cp39-win32.whl", hash = "sha256:6e2cd258d7d927d09493c8df1ce9174ad01b381d4729a9d8d4e38670ca24774c"}, - {file = "coverage-7.6.1-cp39-cp39-win_amd64.whl", hash = "sha256:06a737c882bd26d0d6ee7269b20b12f14a8704807a01056c80bb881a4b2ce6ca"}, - {file = "coverage-7.6.1-pp38.pp39.pp310-none-any.whl", hash = "sha256:e9a6e0eb86070e8ccaedfbd9d38fec54864f3125ab95419970575b42af7541df"}, - {file = "coverage-7.6.1.tar.gz", hash = "sha256:953510dfb7b12ab69d20135a0662397f077c59b1e6379a768e97c59d852ee51d"}, +python-versions = ">=3.9" +groups = ["dev"] +files = [ + {file = "coverage-7.6.10-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:5c912978f7fbf47ef99cec50c4401340436d200d41d714c7a4766f377c5b7b78"}, + {file = "coverage-7.6.10-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:a01ec4af7dfeb96ff0078ad9a48810bb0cc8abcb0115180c6013a6b26237626c"}, + {file = "coverage-7.6.10-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a3b204c11e2b2d883946fe1d97f89403aa1811df28ce0447439178cc7463448a"}, + {file = "coverage-7.6.10-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:32ee6d8491fcfc82652a37109f69dee9a830e9379166cb73c16d8dc5c2915165"}, + {file = "coverage-7.6.10-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:675cefc4c06e3b4c876b85bfb7c59c5e2218167bbd4da5075cbe3b5790a28988"}, + {file = "coverage-7.6.10-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:f4f620668dbc6f5e909a0946a877310fb3d57aea8198bde792aae369ee1c23b5"}, + {file = 
"coverage-7.6.10-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:4eea95ef275de7abaef630c9b2c002ffbc01918b726a39f5a4353916ec72d2f3"}, + {file = "coverage-7.6.10-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:e2f0280519e42b0a17550072861e0bc8a80a0870de260f9796157d3fca2733c5"}, + {file = "coverage-7.6.10-cp310-cp310-win32.whl", hash = "sha256:bc67deb76bc3717f22e765ab3e07ee9c7a5e26b9019ca19a3b063d9f4b874244"}, + {file = "coverage-7.6.10-cp310-cp310-win_amd64.whl", hash = "sha256:0f460286cb94036455e703c66988851d970fdfd8acc2a1122ab7f4f904e4029e"}, + {file = "coverage-7.6.10-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:ea3c8f04b3e4af80e17bab607c386a830ffc2fb88a5484e1df756478cf70d1d3"}, + {file = "coverage-7.6.10-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:507a20fc863cae1d5720797761b42d2d87a04b3e5aeb682ef3b7332e90598f43"}, + {file = "coverage-7.6.10-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d37a84878285b903c0fe21ac8794c6dab58150e9359f1aaebbeddd6412d53132"}, + {file = "coverage-7.6.10-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a534738b47b0de1995f85f582d983d94031dffb48ab86c95bdf88dc62212142f"}, + {file = "coverage-7.6.10-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0d7a2bf79378d8fb8afaa994f91bfd8215134f8631d27eba3e0e2c13546ce994"}, + {file = "coverage-7.6.10-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:6713ba4b4ebc330f3def51df1d5d38fad60b66720948112f114968feb52d3f99"}, + {file = "coverage-7.6.10-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:ab32947f481f7e8c763fa2c92fd9f44eeb143e7610c4ca9ecd6a36adab4081bd"}, + {file = "coverage-7.6.10-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:7bbd8c8f1b115b892e34ba66a097b915d3871db7ce0e6b9901f462ff3a975377"}, + {file = "coverage-7.6.10-cp311-cp311-win32.whl", hash = "sha256:299e91b274c5c9cdb64cbdf1b3e4a8fe538a7a86acdd08fae52301b28ba297f8"}, + {file = "coverage-7.6.10-cp311-cp311-win_amd64.whl", hash = "sha256:489a01f94aa581dbd961f306e37d75d4ba16104bbfa2b0edb21d29b73be83609"}, + {file = "coverage-7.6.10-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:27c6e64726b307782fa5cbe531e7647aee385a29b2107cd87ba7c0105a5d3853"}, + {file = "coverage-7.6.10-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:c56e097019e72c373bae32d946ecf9858fda841e48d82df7e81c63ac25554078"}, + {file = "coverage-7.6.10-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c7827a5bc7bdb197b9e066cdf650b2887597ad124dd99777332776f7b7c7d0d0"}, + {file = "coverage-7.6.10-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:204a8238afe787323a8b47d8be4df89772d5c1e4651b9ffa808552bdf20e1d50"}, + {file = "coverage-7.6.10-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e67926f51821b8e9deb6426ff3164870976fe414d033ad90ea75e7ed0c2e5022"}, + {file = "coverage-7.6.10-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:e78b270eadb5702938c3dbe9367f878249b5ef9a2fcc5360ac7bff694310d17b"}, + {file = "coverage-7.6.10-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:714f942b9c15c3a7a5fe6876ce30af831c2ad4ce902410b7466b662358c852c0"}, + {file = "coverage-7.6.10-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:abb02e2f5a3187b2ac4cd46b8ced85a0858230b577ccb2c62c81482ca7d18852"}, + {file = "coverage-7.6.10-cp312-cp312-win32.whl", hash = 
"sha256:55b201b97286cf61f5e76063f9e2a1d8d2972fc2fcfd2c1272530172fd28c359"}, + {file = "coverage-7.6.10-cp312-cp312-win_amd64.whl", hash = "sha256:e4ae5ac5e0d1e4edfc9b4b57b4cbecd5bc266a6915c500f358817a8496739247"}, + {file = "coverage-7.6.10-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:05fca8ba6a87aabdd2d30d0b6c838b50510b56cdcfc604d40760dae7153b73d9"}, + {file = "coverage-7.6.10-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:9e80eba8801c386f72e0712a0453431259c45c3249f0009aff537a517b52942b"}, + {file = "coverage-7.6.10-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a372c89c939d57abe09e08c0578c1d212e7a678135d53aa16eec4430adc5e690"}, + {file = "coverage-7.6.10-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ec22b5e7fe7a0fa8509181c4aac1db48f3dd4d3a566131b313d1efc102892c18"}, + {file = "coverage-7.6.10-cp313-cp313-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:26bcf5c4df41cad1b19c84af71c22cbc9ea9a547fc973f1f2cc9a290002c8b3c"}, + {file = "coverage-7.6.10-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:4e4630c26b6084c9b3cb53b15bd488f30ceb50b73c35c5ad7871b869cb7365fd"}, + {file = "coverage-7.6.10-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:2396e8116db77789f819d2bc8a7e200232b7a282c66e0ae2d2cd84581a89757e"}, + {file = "coverage-7.6.10-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:79109c70cc0882e4d2d002fe69a24aa504dec0cc17169b3c7f41a1d341a73694"}, + {file = "coverage-7.6.10-cp313-cp313-win32.whl", hash = "sha256:9e1747bab246d6ff2c4f28b4d186b205adced9f7bd9dc362051cc37c4a0c7bd6"}, + {file = "coverage-7.6.10-cp313-cp313-win_amd64.whl", hash = "sha256:254f1a3b1eef5f7ed23ef265eaa89c65c8c5b6b257327c149db1ca9d4a35f25e"}, + {file = "coverage-7.6.10-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:2ccf240eb719789cedbb9fd1338055de2761088202a9a0b73032857e53f612fe"}, + {file = "coverage-7.6.10-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:0c807ca74d5a5e64427c8805de15b9ca140bba13572d6d74e262f46f50b13273"}, + {file = "coverage-7.6.10-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2bcfa46d7709b5a7ffe089075799b902020b62e7ee56ebaed2f4bdac04c508d8"}, + {file = "coverage-7.6.10-cp313-cp313t-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4e0de1e902669dccbf80b0415fb6b43d27edca2fbd48c74da378923b05316098"}, + {file = "coverage-7.6.10-cp313-cp313t-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3f7b444c42bbc533aaae6b5a2166fd1a797cdb5eb58ee51a92bee1eb94a1e1cb"}, + {file = "coverage-7.6.10-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:b330368cb99ef72fcd2dc3ed260adf67b31499584dc8a20225e85bfe6f6cfed0"}, + {file = "coverage-7.6.10-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:9a7cfb50515f87f7ed30bc882f68812fd98bc2852957df69f3003d22a2aa0abf"}, + {file = "coverage-7.6.10-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:6f93531882a5f68c28090f901b1d135de61b56331bba82028489bc51bdd818d2"}, + {file = "coverage-7.6.10-cp313-cp313t-win32.whl", hash = "sha256:89d76815a26197c858f53c7f6a656686ec392b25991f9e409bcef020cd532312"}, + {file = "coverage-7.6.10-cp313-cp313t-win_amd64.whl", hash = "sha256:54a5f0f43950a36312155dae55c505a76cd7f2b12d26abeebbe7a0b36dbc868d"}, + {file = "coverage-7.6.10-cp39-cp39-macosx_10_9_x86_64.whl", hash = 
"sha256:656c82b8a0ead8bba147de9a89bda95064874c91a3ed43a00e687f23cc19d53a"}, + {file = "coverage-7.6.10-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:ccc2b70a7ed475c68ceb548bf69cec1e27305c1c2606a5eb7c3afff56a1b3b27"}, + {file = "coverage-7.6.10-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a5e37dc41d57ceba70956fa2fc5b63c26dba863c946ace9705f8eca99daecdc4"}, + {file = "coverage-7.6.10-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0aa9692b4fdd83a4647eeb7db46410ea1322b5ed94cd1715ef09d1d5922ba87f"}, + {file = "coverage-7.6.10-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:aa744da1820678b475e4ba3dfd994c321c5b13381d1041fe9c608620e6676e25"}, + {file = "coverage-7.6.10-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:c0b1818063dc9e9d838c09e3a473c1422f517889436dd980f5d721899e66f315"}, + {file = "coverage-7.6.10-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:59af35558ba08b758aec4d56182b222976330ef8d2feacbb93964f576a7e7a90"}, + {file = "coverage-7.6.10-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:7ed2f37cfce1ce101e6dffdfd1c99e729dd2ffc291d02d3e2d0af8b53d13840d"}, + {file = "coverage-7.6.10-cp39-cp39-win32.whl", hash = "sha256:4bcc276261505d82f0ad426870c3b12cb177752834a633e737ec5ee79bbdff18"}, + {file = "coverage-7.6.10-cp39-cp39-win_amd64.whl", hash = "sha256:457574f4599d2b00f7f637a0700a6422243b3565509457b2dbd3f50703e11f59"}, + {file = "coverage-7.6.10-pp39.pp310-none-any.whl", hash = "sha256:fd34e7b3405f0cc7ab03d54a334c17a9e802897580d964bd8c2001f4b9fd488f"}, + {file = "coverage-7.6.10.tar.gz", hash = "sha256:7fb105327c8f8f0682e29843e2ff96af9dcbe5bab8eeb4b398c6a33a16d80a23"}, ] [package.dependencies] @@ -652,15 +702,32 @@ tomli = {version = "*", optional = true, markers = "python_full_version <= \"3.1 [package.extras] toml = ["tomli"] +[[package]] +name = "crispy-bootstrap4" +version = "2024.10" +description = "Bootstrap4 template pack for django-crispy-forms" +optional = false +python-versions = ">=3.8" +groups = ["main"] +files = [ + {file = "crispy-bootstrap4-2024.10.tar.gz", hash = "sha256:503e8922b0f3b5262a6fdf303a3a94eb2a07514812f1ca130b88f7c02dd25e2b"}, + {file = "crispy_bootstrap4-2024.10-py3-none-any.whl", hash = "sha256:138a97884044ae4c4799c80595b36c42066e4e933431e2e971611e251c84f96c"}, +] + +[package.dependencies] +django = ">=4.2" +django-crispy-forms = ">=2.3" + [[package]] name = "crudini" -version = "0.9.4" +version = "0.9.5" description = "A utility for manipulating ini files" optional = false python-versions = "*" +groups = ["main"] files = [ - {file = "crudini-0.9.4-py2.py3-none-any.whl", hash = "sha256:639beb4649be5108bc00dc0947b5641995f40bab7814cbbb3e16e2082905b9c6"}, - {file = "crudini-0.9.4.tar.gz", hash = "sha256:6fd0eb341b6cbd91e1883030ea9f2102c1c95619eb563af7ddabc2161e019f6b"}, + {file = "crudini-0.9.5-py2.py3-none-any.whl", hash = "sha256:84bc208dc7d89571bdc3c99274259d0b32d6b3a692d4255524f2eb4b64e9195c"}, + {file = "crudini-0.9.5.tar.gz", hash = "sha256:59ae650f45af82a64afc33eb876909ee0c4888dc4e8711ef59731c1edfda5e24"}, ] [package.dependencies] @@ -668,51 +735,56 @@ iniparse = ">=0.5" [[package]] name = "cryptography" -version = "43.0.1" +version = "44.0.1" description = "cryptography is a package which provides cryptographic recipes and primitives to Python developers." 
optional = false -python-versions = ">=3.7" -files = [ - {file = "cryptography-43.0.1-cp37-abi3-macosx_10_9_universal2.whl", hash = "sha256:8385d98f6a3bf8bb2d65a73e17ed87a3ba84f6991c155691c51112075f9ffc5d"}, - {file = "cryptography-43.0.1-cp37-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:27e613d7077ac613e399270253259d9d53872aaf657471473ebfc9a52935c062"}, - {file = "cryptography-43.0.1-cp37-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:68aaecc4178e90719e95298515979814bda0cbada1256a4485414860bd7ab962"}, - {file = "cryptography-43.0.1-cp37-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:de41fd81a41e53267cb020bb3a7212861da53a7d39f863585d13ea11049cf277"}, - {file = "cryptography-43.0.1-cp37-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:f98bf604c82c416bc829e490c700ca1553eafdf2912a91e23a79d97d9801372a"}, - {file = "cryptography-43.0.1-cp37-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:61ec41068b7b74268fa86e3e9e12b9f0c21fcf65434571dbb13d954bceb08042"}, - {file = "cryptography-43.0.1-cp37-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:014f58110f53237ace6a408b5beb6c427b64e084eb451ef25a28308270086494"}, - {file = "cryptography-43.0.1-cp37-abi3-win32.whl", hash = "sha256:2bd51274dcd59f09dd952afb696bf9c61a7a49dfc764c04dd33ef7a6b502a1e2"}, - {file = "cryptography-43.0.1-cp37-abi3-win_amd64.whl", hash = "sha256:666ae11966643886c2987b3b721899d250855718d6d9ce41b521252a17985f4d"}, - {file = "cryptography-43.0.1-cp39-abi3-macosx_10_9_universal2.whl", hash = "sha256:ac119bb76b9faa00f48128b7f5679e1d8d437365c5d26f1c2c3f0da4ce1b553d"}, - {file = "cryptography-43.0.1-cp39-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1bbcce1a551e262dfbafb6e6252f1ae36a248e615ca44ba302df077a846a8806"}, - {file = "cryptography-43.0.1-cp39-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:58d4e9129985185a06d849aa6df265bdd5a74ca6e1b736a77959b498e0505b85"}, - {file = "cryptography-43.0.1-cp39-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:d03a475165f3134f773d1388aeb19c2d25ba88b6a9733c5c590b9ff7bbfa2e0c"}, - {file = "cryptography-43.0.1-cp39-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:511f4273808ab590912a93ddb4e3914dfd8a388fed883361b02dea3791f292e1"}, - {file = "cryptography-43.0.1-cp39-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:80eda8b3e173f0f247f711eef62be51b599b5d425c429b5d4ca6a05e9e856baa"}, - {file = "cryptography-43.0.1-cp39-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:38926c50cff6f533f8a2dae3d7f19541432610d114a70808f0926d5aaa7121e4"}, - {file = "cryptography-43.0.1-cp39-abi3-win32.whl", hash = "sha256:a575913fb06e05e6b4b814d7f7468c2c660e8bb16d8d5a1faf9b33ccc569dd47"}, - {file = "cryptography-43.0.1-cp39-abi3-win_amd64.whl", hash = "sha256:d75601ad10b059ec832e78823b348bfa1a59f6b8d545db3a24fd44362a1564cb"}, - {file = "cryptography-43.0.1-pp310-pypy310_pp73-macosx_10_9_x86_64.whl", hash = "sha256:ea25acb556320250756e53f9e20a4177515f012c9eaea17eb7587a8c4d8ae034"}, - {file = "cryptography-43.0.1-pp310-pypy310_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:c1332724be35d23a854994ff0b66530119500b6053d0bd3363265f7e5e77288d"}, - {file = "cryptography-43.0.1-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:fba1007b3ef89946dbbb515aeeb41e30203b004f0b4b00e5e16078b518563289"}, - {file = "cryptography-43.0.1-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:5b43d1ea6b378b54a1dc99dd8a2b5be47658fe9a7ce0a58ff0b55f4b43ef2b84"}, - {file = "cryptography-43.0.1-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = 
"sha256:88cce104c36870d70c49c7c8fd22885875d950d9ee6ab54df2745f83ba0dc365"}, - {file = "cryptography-43.0.1-pp39-pypy39_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:9d3cdb25fa98afdd3d0892d132b8d7139e2c087da1712041f6b762e4f807cc96"}, - {file = "cryptography-43.0.1-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:e710bf40870f4db63c3d7d929aa9e09e4e7ee219e703f949ec4073b4294f6172"}, - {file = "cryptography-43.0.1-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:7c05650fe8023c5ed0d46793d4b7d7e6cd9c04e68eabe5b0aeea836e37bdcec2"}, - {file = "cryptography-43.0.1.tar.gz", hash = "sha256:203e92a75716d8cfb491dc47c79e17d0d9207ccffcbcb35f598fbe463ae3444d"}, +python-versions = "!=3.9.0,!=3.9.1,>=3.7" +groups = ["main"] +files = [ + {file = "cryptography-44.0.1-cp37-abi3-macosx_10_9_universal2.whl", hash = "sha256:bf688f615c29bfe9dfc44312ca470989279f0e94bb9f631f85e3459af8efc009"}, + {file = "cryptography-44.0.1-cp37-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dd7c7e2d71d908dc0f8d2027e1604102140d84b155e658c20e8ad1304317691f"}, + {file = "cryptography-44.0.1-cp37-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:887143b9ff6bad2b7570da75a7fe8bbf5f65276365ac259a5d2d5147a73775f2"}, + {file = "cryptography-44.0.1-cp37-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:322eb03ecc62784536bc173f1483e76747aafeb69c8728df48537eb431cd1911"}, + {file = "cryptography-44.0.1-cp37-abi3-manylinux_2_28_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:21377472ca4ada2906bc313168c9dc7b1d7ca417b63c1c3011d0c74b7de9ae69"}, + {file = "cryptography-44.0.1-cp37-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:df978682c1504fc93b3209de21aeabf2375cb1571d4e61907b3e7a2540e83026"}, + {file = "cryptography-44.0.1-cp37-abi3-manylinux_2_34_aarch64.whl", hash = "sha256:eb3889330f2a4a148abead555399ec9a32b13b7c8ba969b72d8e500eb7ef84cd"}, + {file = "cryptography-44.0.1-cp37-abi3-manylinux_2_34_x86_64.whl", hash = "sha256:8e6a85a93d0642bd774460a86513c5d9d80b5c002ca9693e63f6e540f1815ed0"}, + {file = "cryptography-44.0.1-cp37-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:6f76fdd6fd048576a04c5210d53aa04ca34d2ed63336d4abd306d0cbe298fddf"}, + {file = "cryptography-44.0.1-cp37-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:6c8acf6f3d1f47acb2248ec3ea261171a671f3d9428e34ad0357148d492c7864"}, + {file = "cryptography-44.0.1-cp37-abi3-win32.whl", hash = "sha256:24979e9f2040c953a94bf3c6782e67795a4c260734e5264dceea65c8f4bae64a"}, + {file = "cryptography-44.0.1-cp37-abi3-win_amd64.whl", hash = "sha256:fd0ee90072861e276b0ff08bd627abec29e32a53b2be44e41dbcdf87cbee2b00"}, + {file = "cryptography-44.0.1-cp39-abi3-macosx_10_9_universal2.whl", hash = "sha256:a2d8a7045e1ab9b9f803f0d9531ead85f90c5f2859e653b61497228b18452008"}, + {file = "cryptography-44.0.1-cp39-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b8272f257cf1cbd3f2e120f14c68bff2b6bdfcc157fafdee84a1b795efd72862"}, + {file = "cryptography-44.0.1-cp39-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1e8d181e90a777b63f3f0caa836844a1182f1f265687fac2115fcf245f5fbec3"}, + {file = "cryptography-44.0.1-cp39-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:436df4f203482f41aad60ed1813811ac4ab102765ecae7a2bbb1dbb66dcff5a7"}, + {file = "cryptography-44.0.1-cp39-abi3-manylinux_2_28_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:4f422e8c6a28cf8b7f883eb790695d6d45b0c385a2583073f3cec434cc705e1a"}, + {file = "cryptography-44.0.1-cp39-abi3-manylinux_2_28_x86_64.whl", hash = 
"sha256:72198e2b5925155497a5a3e8c216c7fb3e64c16ccee11f0e7da272fa93b35c4c"}, + {file = "cryptography-44.0.1-cp39-abi3-manylinux_2_34_aarch64.whl", hash = "sha256:2a46a89ad3e6176223b632056f321bc7de36b9f9b93b2cc1cccf935a3849dc62"}, + {file = "cryptography-44.0.1-cp39-abi3-manylinux_2_34_x86_64.whl", hash = "sha256:53f23339864b617a3dfc2b0ac8d5c432625c80014c25caac9082314e9de56f41"}, + {file = "cryptography-44.0.1-cp39-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:888fcc3fce0c888785a4876ca55f9f43787f4c5c1cc1e2e0da71ad481ff82c5b"}, + {file = "cryptography-44.0.1-cp39-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:00918d859aa4e57db8299607086f793fa7813ae2ff5a4637e318a25ef82730f7"}, + {file = "cryptography-44.0.1-cp39-abi3-win32.whl", hash = "sha256:9b336599e2cb77b1008cb2ac264b290803ec5e8e89d618a5e978ff5eb6f715d9"}, + {file = "cryptography-44.0.1-cp39-abi3-win_amd64.whl", hash = "sha256:e403f7f766ded778ecdb790da786b418a9f2394f36e8cc8b796cc056ab05f44f"}, + {file = "cryptography-44.0.1-pp310-pypy310_pp73-macosx_10_9_x86_64.whl", hash = "sha256:1f9a92144fa0c877117e9748c74501bea842f93d21ee00b0cf922846d9d0b183"}, + {file = "cryptography-44.0.1-pp310-pypy310_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:610a83540765a8d8ce0f351ce42e26e53e1f774a6efb71eb1b41eb01d01c3d12"}, + {file = "cryptography-44.0.1-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:5fed5cd6102bb4eb843e3315d2bf25fede494509bddadb81e03a859c1bc17b83"}, + {file = "cryptography-44.0.1-pp310-pypy310_pp73-manylinux_2_34_aarch64.whl", hash = "sha256:f4daefc971c2d1f82f03097dc6f216744a6cd2ac0f04c68fb935ea2ba2a0d420"}, + {file = "cryptography-44.0.1-pp310-pypy310_pp73-manylinux_2_34_x86_64.whl", hash = "sha256:94f99f2b943b354a5b6307d7e8d19f5c423a794462bde2bf310c770ba052b1c4"}, + {file = "cryptography-44.0.1-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:d9c5b9f698a83c8bd71e0f4d3f9f839ef244798e5ffe96febfa9714717db7af7"}, + {file = "cryptography-44.0.1.tar.gz", hash = "sha256:f51f5705ab27898afda1aaa430f34ad90dc117421057782022edf0600bec5f14"}, ] [package.dependencies] cffi = {version = ">=1.12", markers = "platform_python_implementation != \"PyPy\""} [package.extras] -docs = ["sphinx (>=5.3.0)", "sphinx-rtd-theme (>=1.1.1)"] -docstest = ["pyenchant (>=1.6.11)", "readme-renderer", "sphinxcontrib-spelling (>=4.0.1)"] -nox = ["nox"] -pep8test = ["check-sdist", "click", "mypy", "ruff"] -sdist = ["build"] +docs = ["sphinx (>=5.3.0)", "sphinx-rtd-theme (>=3.0.0)"] +docstest = ["pyenchant (>=3)", "readme-renderer (>=30.0)", "sphinxcontrib-spelling (>=7.3.1)"] +nox = ["nox (>=2024.4.15)", "nox[uv] (>=2024.3.2)"] +pep8test = ["check-sdist", "click (>=8.0.1)", "mypy (>=1.4)", "ruff (>=0.3.6)"] +sdist = ["build (>=1.0.0)"] ssh = ["bcrypt (>=3.1.5)"] -test = ["certifi", "cryptography-vectors (==43.0.1)", "pretend", "pytest (>=6.2.0)", "pytest-benchmark", "pytest-cov", "pytest-xdist"] +test = ["certifi (>=2024)", "cryptography-vectors (==44.0.1)", "pretend (>=0.7)", "pytest (>=7.4.0)", "pytest-benchmark (>=4.0)", "pytest-cov (>=2.10.1)", "pytest-xdist (>=3.5.0)"] test-randomorder = ["pytest-randomly"] [[package]] @@ -721,6 +793,7 @@ version = "0.3.0" description = "Python interface to c++filt / abi::__cxa_demangle" optional = false python-versions = "*" +groups = ["main"] files = [ {file = "cxxfilt-0.3.0-py2.py3-none-any.whl", hash = "sha256:774e85a8d0157775ed43276d89397d924b104135762d86b3a95f81f203094e07"}, {file = "cxxfilt-0.3.0.tar.gz", hash = "sha256:7df6464ba5e8efbf0d8974c0b2c78b32546676f06059a83515dbdfa559b34214"}, @@ -731,50 +804,78 @@ 
test = ["pytest (>=3.0.0)"] [[package]] name = "cython" -version = "0.29.24" -description = "The Cython compiler for writing C extensions for the Python language." -optional = false -python-versions = ">=2.6, !=3.0.*, !=3.1.*, !=3.2.*" -files = [ - {file = "Cython-0.29.24-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:6a2cf2ccccc25413864928dfd730c29db6f63eaf98206c1e600003a445ca7f58"}, - {file = "Cython-0.29.24-cp27-cp27m-manylinux1_i686.whl", hash = "sha256:b28f92e617f540d3f21f8fd479a9c6491be920ffff672a4c61b7fc4d7f749f39"}, - {file = "Cython-0.29.24-cp27-cp27m-manylinux1_x86_64.whl", hash = "sha256:37bcfa5df2a3009f49624695d917c3804fccbdfcdc5eda6378754a879711a4d5"}, - {file = "Cython-0.29.24-cp27-cp27m-win32.whl", hash = "sha256:9164aeef1af6f837e4fc20402a31d256188ba4d535e262c6cb78caf57ad744f8"}, - {file = "Cython-0.29.24-cp27-cp27m-win_amd64.whl", hash = "sha256:73ac33a4379056a02031baa4def255717fadb9181b5ac2b244792d53eae1c925"}, - {file = "Cython-0.29.24-cp27-cp27mu-manylinux1_i686.whl", hash = "sha256:09ac3087ac7a3d489ebcb3fb8402e00c13d1a3a1c6bc73fd3b0d756a3e341e79"}, - {file = "Cython-0.29.24-cp27-cp27mu-manylinux1_x86_64.whl", hash = "sha256:774cb8fd931ee1ba52c472bc1c19077cd6895c1b24014ae07bb27df59aed5ebe"}, - {file = "Cython-0.29.24-cp34-cp34m-win32.whl", hash = "sha256:5dd56d0be50073f0e54825a8bc3393852de0eed126339ecbca0ae149dba55cfc"}, - {file = "Cython-0.29.24-cp34-cp34m-win_amd64.whl", hash = "sha256:88dc3c250dec280b0489a83950b15809762e27232f4799b1b8d0bad503f5ab84"}, - {file = "Cython-0.29.24-cp35-cp35m-manylinux1_i686.whl", hash = "sha256:5fa12ebafc2f688ea6d26ab6d1d2e634a9872509ba7135b902bb0d8b368fb04b"}, - {file = "Cython-0.29.24-cp35-cp35m-manylinux1_x86_64.whl", hash = "sha256:60c958bcab0ff315b4036a949bed1c65334e1f6a69e17e9966d742febb59043a"}, - {file = "Cython-0.29.24-cp35-cp35m-win32.whl", hash = "sha256:166f9f29cd0058ce1a14a7b3a2458b849ed34b1ec5fd4108af3fdd2c24afcbb0"}, - {file = "Cython-0.29.24-cp35-cp35m-win_amd64.whl", hash = "sha256:76cbca0188d278e93d12ebdaf5990678e6e436485fdfad49dbe9b07717d41a3c"}, - {file = "Cython-0.29.24-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:f2e9381497b12e8f622af620bde0d1d094035d79b899abb2ddd3a7891f535083"}, - {file = "Cython-0.29.24-cp36-cp36m-manylinux1_i686.whl", hash = "sha256:d8d1a087f35e39384303f5e6b75d465d6f29d746d7138eae9d3b6e8e6f769eae"}, - {file = "Cython-0.29.24-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:112efa54a58293a4fb0acf0dd8e5b3736e95b595eee24dd88615648e445abe41"}, - {file = "Cython-0.29.24-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4cf4452f0e4d50e11701bca38f3857fe6fa16593e7fd6a4d5f7be66f611b7da2"}, - {file = "Cython-0.29.24-cp36-cp36m-win32.whl", hash = "sha256:854fe2193d3ad4c8b61932ff54d6dbe10c5fa8749eb8958d72cc0ab28243f833"}, - {file = "Cython-0.29.24-cp36-cp36m-win_amd64.whl", hash = "sha256:84826ec1c11cda56261a252ddecac0c7d6b02e47e81b94f40b27b4c23c29c17c"}, - {file = "Cython-0.29.24-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:6ade74eece909fd3a437d9a5084829180751d7ade118e281e9824dd75eafaff2"}, - {file = "Cython-0.29.24-cp37-cp37m-manylinux1_i686.whl", hash = "sha256:0a142c6b862e6ed6b02209d543062c038c110585b5e32d1ad7c9717af4f07e41"}, - {file = "Cython-0.29.24-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:10cb3def9774fa99e4583617a5616874aed3255dc241fd1f4a3c2978c78e1c53"}, - {file = "Cython-0.29.24-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2f41ef7edd76dd23315925e003f0c58c8585f3ab24be6885c4b3f60e77c82746"}, - {file = 
"Cython-0.29.24-cp37-cp37m-win32.whl", hash = "sha256:821c2d416ad7d006b069657ee1034c0e0cb45bdbe9ab6ab631e8c495dfcfa4ac"}, - {file = "Cython-0.29.24-cp37-cp37m-win_amd64.whl", hash = "sha256:2d9e61ed1056a3b6a4b9156b62297ad18b357a7948e57a2f49b061217696567e"}, - {file = "Cython-0.29.24-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:55b0ee28c2c8118bfb3ad9b25cf7a6cbd724e442ea96956e32ccd908d5e3e043"}, - {file = "Cython-0.29.24-cp38-cp38-manylinux1_i686.whl", hash = "sha256:eb2843f8cc01c645725e6fc690a84e99cdb266ce8ebe427cf3a680ff09f876aa"}, - {file = "Cython-0.29.24-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:661dbdea519d9cfb288867252b75fef73ffa8e8bb674cec27acf70646afb369b"}, - {file = "Cython-0.29.24-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bc05de569f811be1fcfde6756c9048ae518f0c4b6d9f8f024752c5365d934cac"}, - {file = "Cython-0.29.24-cp38-cp38-win32.whl", hash = "sha256:a102cfa795c6b3b81a29bdb9dbec545367cd7f353c03e6f30a056fdfefd92854"}, - {file = "Cython-0.29.24-cp38-cp38-win_amd64.whl", hash = "sha256:416046a98255eff97ec02077d20ebeaae52682dfca1c35aadf31260442b92514"}, - {file = "Cython-0.29.24-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:ad43e684ade673565f6f9d6638015112f6c7f11aa2a632167b79014f613f0f5f"}, - {file = "Cython-0.29.24-cp39-cp39-manylinux1_i686.whl", hash = "sha256:afb521523cb46ddaa8d269b421f88ea2731fee05e65b952b96d4db760f5a2a1c"}, - {file = "Cython-0.29.24-cp39-cp39-manylinux1_x86_64.whl", hash = "sha256:0d414458cb22f8a90d64260da6dace5d5fcebde43f31be52ca51f818c46db8cb"}, - {file = "Cython-0.29.24-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8cb87777e82d1996aef6c146560a19270684271c9c669ba62ac6803b3cd2ff82"}, - {file = "Cython-0.29.24-cp39-cp39-win32.whl", hash = "sha256:91339ee4b465924a3ea4b2a9cec7f7227bc4cadf673ce859d24c2b9ef60b1214"}, - {file = "Cython-0.29.24-cp39-cp39-win_amd64.whl", hash = "sha256:5fb977945a2111f6b64501fdf7ed0ec162cc502b84457fd648d6a558ea8de0d6"}, - {file = "Cython-0.29.24-py2.py3-none-any.whl", hash = "sha256:f96411f0120b5cae483923aaacd2872af8709be4b46522daedc32f051d778385"}, - {file = "Cython-0.29.24.tar.gz", hash = "sha256:cdf04d07c3600860e8c2ebaad4e8f52ac3feb212453c1764a49ac08c827e8443"}, +version = "3.0.11" +description = "The Cython compiler for writing C extensions in the Python language." 
+optional = false +python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,>=2.7" +groups = ["main"] +files = [ + {file = "Cython-3.0.11-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:44292aae17524abb4b70a25111fe7dec1a0ad718711d47e3786a211d5408fdaa"}, + {file = "Cython-3.0.11-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a75d45fbc20651c1b72e4111149fed3b33d270b0a4fb78328c54d965f28d55e1"}, + {file = "Cython-3.0.11-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d89a82937ce4037f092e9848a7bbcc65bc8e9fc9aef2bb74f5c15e7d21a73080"}, + {file = "Cython-3.0.11-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2a8ea2e7e2d3bc0d8630dafe6c4a5a89485598ff8a61885b74f8ed882597efd5"}, + {file = "Cython-3.0.11-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:cee29846471ce60226b18e931d8c1c66a158db94853e3e79bc2da9bd22345008"}, + {file = "Cython-3.0.11-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:eeb6860b0f4bfa402de8929833fe5370fa34069c7ebacb2d543cb017f21fb891"}, + {file = "Cython-3.0.11-cp310-cp310-win32.whl", hash = "sha256:3699391125ab344d8d25438074d1097d9ba0fb674d0320599316cfe7cf5f002a"}, + {file = "Cython-3.0.11-cp310-cp310-win_amd64.whl", hash = "sha256:d02f4ebe15aac7cdacce1a628e556c1983f26d140fd2e0ac5e0a090e605a2d38"}, + {file = "Cython-3.0.11-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:75ba1c70b6deeaffbac123856b8d35f253da13552207aa969078611c197377e4"}, + {file = "Cython-3.0.11-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:af91497dc098718e634d6ec8f91b182aea6bb3690f333fc9a7777bc70abe8810"}, + {file = "Cython-3.0.11-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3999fb52d3328a6a5e8c63122b0a8bd110dfcdb98dda585a3def1426b991cba7"}, + {file = "Cython-3.0.11-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d566a4e09b8979be8ab9f843bac0dd216c81f5e5f45661a9b25cd162ed80508c"}, + {file = "Cython-3.0.11-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:46aec30f217bdf096175a1a639203d44ac73a36fe7fa3dd06bd012e8f39eca0f"}, + {file = "Cython-3.0.11-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:ddd1fe25af330f4e003421636746a546474e4ccd8f239f55d2898d80983d20ed"}, + {file = "Cython-3.0.11-cp311-cp311-win32.whl", hash = "sha256:221de0b48bf387f209003508e602ce839a80463522fc6f583ad3c8d5c890d2c1"}, + {file = "Cython-3.0.11-cp311-cp311-win_amd64.whl", hash = "sha256:3ff8ac1f0ecd4f505db4ab051e58e4531f5d098b6ac03b91c3b902e8d10c67b3"}, + {file = "Cython-3.0.11-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:11996c40c32abf843ba652a6d53cb15944c88d91f91fc4e6f0028f5df8a8f8a1"}, + {file = "Cython-3.0.11-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:63f2c892e9f9c1698ecfee78205541623eb31cd3a1b682668be7ac12de94aa8e"}, + {file = "Cython-3.0.11-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8b14c24f1dc4c4c9d997cca8d1b7fb01187a218aab932328247dcf5694a10102"}, + {file = "Cython-3.0.11-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c8eed5c015685106db15dd103fd040948ddca9197b1dd02222711815ea782a27"}, + {file = "Cython-3.0.11-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:780f89c95b8aec1e403005b3bf2f0a2afa060b3eba168c86830f079339adad89"}, + {file = "Cython-3.0.11-cp312-cp312-musllinux_1_2_x86_64.whl", hash = 
"sha256:a690f2ff460682ea985e8d38ec541be97e0977fa0544aadc21efc116ff8d7579"}, + {file = "Cython-3.0.11-cp312-cp312-win32.whl", hash = "sha256:2252b5aa57621848e310fe7fa6f7dce5f73aa452884a183d201a8bcebfa05a00"}, + {file = "Cython-3.0.11-cp312-cp312-win_amd64.whl", hash = "sha256:da394654c6da15c1d37f0b7ec5afd325c69a15ceafee2afba14b67a5df8a82c8"}, + {file = "Cython-3.0.11-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:4341d6a64d47112884e0bcf31e6c075268220ee4cd02223047182d4dda94d637"}, + {file = "Cython-3.0.11-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:351955559b37e6c98b48aecb178894c311be9d731b297782f2b78d111f0c9015"}, + {file = "Cython-3.0.11-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9c02361af9bfa10ff1ccf967fc75159e56b1c8093caf565739ed77a559c1f29f"}, + {file = "Cython-3.0.11-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6823aef13669a32caf18bbb036de56065c485d9f558551a9b55061acf9c4c27f"}, + {file = "Cython-3.0.11-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:6fb68cef33684f8cc97987bee6ae919eee7e18ee6a3ad7ed9516b8386ef95ae6"}, + {file = "Cython-3.0.11-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:790263b74432cb997740d73665f4d8d00b9cd1cecbdd981d93591ddf993d4f12"}, + {file = "Cython-3.0.11-cp313-cp313-win32.whl", hash = "sha256:e6dd395d1a704e34a9fac00b25f0036dce6654c6b898be6f872ac2bb4f2eda48"}, + {file = "Cython-3.0.11-cp313-cp313-win_amd64.whl", hash = "sha256:52186101d51497519e99b60d955fd5cb3bf747c67f00d742e70ab913f1e42d31"}, + {file = "Cython-3.0.11-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:c69d5cad51388522b98a99b4be1b77316de85b0c0523fa865e0ea58bbb622e0a"}, + {file = "Cython-3.0.11-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8acdc87e9009110adbceb7569765eb0980129055cc954c62f99fe9f094c9505e"}, + {file = "Cython-3.0.11-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1dd47865f4c0a224da73acf83d113f93488d17624e2457dce1753acdfb1cc40c"}, + {file = "Cython-3.0.11-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:301bde949b4f312a1c70e214b0c3bc51a3f955d466010d2f68eb042df36447b0"}, + {file = "Cython-3.0.11-cp36-cp36m-musllinux_1_2_aarch64.whl", hash = "sha256:f3953d2f504176f929862e5579cfc421860c33e9707f585d70d24e1096accdf7"}, + {file = "Cython-3.0.11-cp36-cp36m-musllinux_1_2_x86_64.whl", hash = "sha256:3f2b062f6df67e8a56c75e500ca330cf62c85ac26dd7fd006f07ef0f83aebfa3"}, + {file = "Cython-3.0.11-cp36-cp36m-win32.whl", hash = "sha256:c3d68751668c66c7a140b6023dba5d5d507f72063407bb609d3a5b0f3b8dfbe4"}, + {file = "Cython-3.0.11-cp36-cp36m-win_amd64.whl", hash = "sha256:bcd29945fafd12484cf37b1d84f12f0e7a33ba3eac5836531c6bd5283a6b3a0c"}, + {file = "Cython-3.0.11-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:4e9a8d92978b15a0c7ca7f98447c6c578dc8923a0941d9d172d0b077cb69c576"}, + {file = "Cython-3.0.11-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:421017466e9260aca86823974e26e158e6358622f27c0f4da9c682f3b6d2e624"}, + {file = "Cython-3.0.11-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d80a7232938d523c1a12f6b1794ab5efb1ae77ad3fde79de4bb558d8ab261619"}, + {file = "Cython-3.0.11-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:bfa550d9ae39e827a6e7198076df763571cb53397084974a6948af558355e028"}, + {file = 
"Cython-3.0.11-cp37-cp37m-musllinux_1_2_aarch64.whl", hash = "sha256:aedceb6090a60854b31bf9571dc55f642a3fa5b91f11b62bcef167c52cac93d8"}, + {file = "Cython-3.0.11-cp37-cp37m-musllinux_1_2_x86_64.whl", hash = "sha256:473d35681d9f93ce380e6a7c8feb2d65fc6333bd7117fbc62989e404e241dbb0"}, + {file = "Cython-3.0.11-cp37-cp37m-win32.whl", hash = "sha256:3379c6521e25aa6cd7703bb7d635eaca75c0f9c7f1b0fdd6dd15a03bfac5f68d"}, + {file = "Cython-3.0.11-cp37-cp37m-win_amd64.whl", hash = "sha256:14701edb3107a5d9305a82d9d646c4f28bfecbba74b26cc1ee2f4be08f602057"}, + {file = "Cython-3.0.11-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:598699165cfa7c6d69513ee1bffc9e1fdd63b00b624409174c388538aa217975"}, + {file = "Cython-3.0.11-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a0583076c4152b417a3a8a5d81ec02f58c09b67d3f22d5857e64c8734ceada8c"}, + {file = "Cython-3.0.11-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:52205347e916dd65d2400b977df4c697390c3aae0e96275a438cc4ae85dadc08"}, + {file = "Cython-3.0.11-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:989899a85f0d9a57cebb508bd1f194cb52f0e3f7e22ac259f33d148d6422375c"}, + {file = "Cython-3.0.11-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:53b6072a89049a991d07f42060f65398448365c59c9cb515c5925b9bdc9d71f8"}, + {file = "Cython-3.0.11-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:f988f7f8164a6079c705c39e2d75dbe9967e3dacafe041420d9af7b9ee424162"}, + {file = "Cython-3.0.11-cp38-cp38-win32.whl", hash = "sha256:a1f4cbc70f6b7f0c939522118820e708e0d490edca42d852fa8004ec16780be2"}, + {file = "Cython-3.0.11-cp38-cp38-win_amd64.whl", hash = "sha256:187685e25e037320cae513b8cc4bf9dbc4465c037051aede509cbbf207524de2"}, + {file = "Cython-3.0.11-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:0fc6fdd6fa493be7bdda22355689d5446ac944cd71286f6f44a14b0d67ee3ff5"}, + {file = "Cython-3.0.11-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0b1d1f6f94cc5d42a4591f6d60d616786b9cd15576b112bc92a23131fcf38020"}, + {file = "Cython-3.0.11-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b4ab2b92a3e6ed552adbe9350fd2ef3aa0cc7853cf91569f9dbed0c0699bbeab"}, + {file = "Cython-3.0.11-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:104d6f2f2c827ccc5e9e42c80ef6773a6aa94752fe6bc5b24a4eab4306fb7f07"}, + {file = "Cython-3.0.11-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:13062ce556a1e98d2821f7a0253b50569fdc98c36efd6653a65b21e3f8bbbf5f"}, + {file = "Cython-3.0.11-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:525d09b3405534763fa73bd78c8e51ac8264036ce4c16d37dfd1555a7da6d3a7"}, + {file = "Cython-3.0.11-cp39-cp39-win32.whl", hash = "sha256:b8c7e514075696ca0f60c337f9e416e61d7ccbc1aa879a56c39181ed90ec3059"}, + {file = "Cython-3.0.11-cp39-cp39-win_amd64.whl", hash = "sha256:8948802e1f5677a673ea5d22a1e7e273ca5f83e7a452786ca286eebf97cee67c"}, + {file = "Cython-3.0.11-py2.py3-none-any.whl", hash = "sha256:0e25f6425ad4a700d7f77cd468da9161e63658837d1bc34861a9861a4ef6346d"}, + {file = "cython-3.0.11.tar.gz", hash = "sha256:7146dd2af8682b4ca61331851e6aebce9fe5158e75300343f80c07ca80b1faff"}, ] [[package]] @@ -783,6 +884,7 @@ version = "3.0.2" description = "Django ASGI (HTTP/WebSocket) server" optional = false python-versions = ">=3.6" +groups = ["main"] files = [ {file = "daphne-3.0.2-py3-none-any.whl", hash = 
"sha256:a9af943c79717bc52fe64a3c236ae5d3adccc8b5be19c881b442d2c3db233393"}, {file = "daphne-3.0.2.tar.gz", hash = "sha256:76ffae916ba3aa66b46996c14fa713e46004788167a4873d647544e750e0e99f"}, @@ -796,73 +898,32 @@ twisted = {version = ">=18.7", extras = ["tls"]} [package.extras] tests = ["hypothesis (==4.23)", "pytest (>=3.10,<4.0)", "pytest-asyncio (>=0.8,<1.0)"] -[[package]] -name = "defusedxml" -version = "0.7.1" -description = "XML bomb protection for Python stdlib modules" -optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" -files = [ - {file = "defusedxml-0.7.1-py2.py3-none-any.whl", hash = "sha256:a352e7e428770286cc899e2542b6cdaedb2b4953ff269a210103ec58f6198a61"}, - {file = "defusedxml-0.7.1.tar.gz", hash = "sha256:1bb3032db185915b62d7c6209c5a8792be6a32ab2fedacc84e01b52c51aa3e69"}, -] - -[[package]] -name = "die-python" -version = "0.1.0" -description = "Python bindings for Detect It Easy (DIE)." -optional = false -python-versions = ">=3.8" -files = [ - {file = "die_python-0.1.0-cp310-cp310-macosx_13_0_x86_64.whl", hash = "sha256:afd9ccfb75adae7e785ac52287b6a017d31a46285cb64e060d6a2a34ad0c381c"}, - {file = "die_python-0.1.0-cp310-cp310-manylinux_2_28_x86_64.whl", hash = "sha256:613b9edba2337179df1b52ea97dfdf2ba5da4da1864a3887ec098ffd274540a5"}, - {file = "die_python-0.1.0-cp310-cp310-win_amd64.whl", hash = "sha256:4293a7ef32ff539db9abb011192b1ac007229ebb4f5f70cc09902ceea4a2635b"}, - {file = "die_python-0.1.0-cp311-cp311-macosx_13_0_x86_64.whl", hash = "sha256:2e4d49917ab727f3e89e42713b0b71ce5e45e29c1ad7885d5a92d2a5555fde40"}, - {file = "die_python-0.1.0-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:5a0b9a46f8007d77939415422d573c5e659ce712b6cea2ed9f2c825580073ba4"}, - {file = "die_python-0.1.0-cp311-cp311-win_amd64.whl", hash = "sha256:215896714c97b8b679685d2593afe0508f7af234630b0e591f5ad5ca133106b6"}, - {file = "die_python-0.1.0-cp312-abi3-macosx_13_0_x86_64.whl", hash = "sha256:ef51d19d7d91a8bd65abf56ff01adff6cc5289fd8ada2c4362398a76d7d490f4"}, - {file = "die_python-0.1.0-cp312-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:ec8756cd695415b0fe8eccb6b2475ba9bff1a5f677dc1e845325ceed7e7c2512"}, - {file = "die_python-0.1.0-cp312-abi3-win_amd64.whl", hash = "sha256:a1a6932a1fdda99f37676d7d3925595eb44f362632f2168163f14f5637f2734a"}, - {file = "die_python-0.1.0-cp38-cp38-macosx_13_0_x86_64.whl", hash = "sha256:f09c98d928a0f32c72b5bbed90fb40ba1afcb81b3ab3e362104a6cb23738b4ae"}, - {file = "die_python-0.1.0-cp38-cp38-manylinux_2_28_x86_64.whl", hash = "sha256:31d064d6b7a31ac22529ad7cdf3be1de4e6dee7b005efdd9224e8a395f3b2b8e"}, - {file = "die_python-0.1.0-cp38-cp38-win_amd64.whl", hash = "sha256:21f58eb5da2dabc06c9487705e272cb89a7436f2637a6f79d839b9cb3cb1726c"}, - {file = "die_python-0.1.0-cp39-cp39-macosx_13_0_x86_64.whl", hash = "sha256:721ab6005cab42dee9b318a1f2a7c8e4c3dc6e5290e20a3cb716991f6ddd75e0"}, - {file = "die_python-0.1.0-cp39-cp39-manylinux_2_28_x86_64.whl", hash = "sha256:6babc19fac759724b316bc1ee4d23ce03235c4aac9dccfb6f174281ebfd353c5"}, - {file = "die_python-0.1.0-cp39-cp39-win_amd64.whl", hash = "sha256:1013431cc76e9de762f0d7c375699b7a5d6ba43cd812664fa9d891300476ba26"}, -] - -[package.dependencies] -nanobind = "*" -setuptools = "*" -wheel = "*" - -[package.extras] -tests = ["beautifulsoup4", "black", "lxml", "pytest"] - [[package]] name = "distlib" -version = "0.3.8" +version = "0.3.9" description = "Distribution utilities" optional = false python-versions = "*" +groups = ["dev"] files = [ - {file = 
"distlib-0.3.8-py2.py3-none-any.whl", hash = "sha256:034db59a0b96f8ca18035f36290806a9a6e6bd9d1ff91e45a7f172eb17e51784"}, - {file = "distlib-0.3.8.tar.gz", hash = "sha256:1530ea13e350031b6312d8580ddb6b27a104275a31106523b8f123787f494f64"}, + {file = "distlib-0.3.9-py2.py3-none-any.whl", hash = "sha256:47f8c22fd27c27e25a65601af709b38e4f0a45ea4fc2e710f65755fa8caaaf87"}, + {file = "distlib-0.3.9.tar.gz", hash = "sha256:a60f20dea646b8a33f3e7772f74dc0b2d0772d2837ee1342a00645c81edf9403"}, ] [[package]] name = "django" -version = "4.2.16" +version = "5.1.9" description = "A high-level Python web framework that encourages rapid development and clean, pragmatic design." optional = false -python-versions = ">=3.8" +python-versions = ">=3.10" +groups = ["main"] files = [ - {file = "Django-4.2.16-py3-none-any.whl", hash = "sha256:1ddc333a16fc139fd253035a1606bb24261951bbc3a6ca256717fa06cc41a898"}, - {file = "Django-4.2.16.tar.gz", hash = "sha256:6f1616c2786c408ce86ab7e10f792b8f15742f7b7b7460243929cb371e7f1dad"}, + {file = "django-5.1.9-py3-none-any.whl", hash = "sha256:2fd1d4a0a66a5ba702699eb692e75b0d828b73cc2f4e1fc4b6a854a918967411"}, + {file = "django-5.1.9.tar.gz", hash = "sha256:565881bdd0eb67da36442e9ac788bda90275386b549070d70aee86327781a4fc"}, ] [package.dependencies] -asgiref = ">=3.6.0,<4" +asgiref = ">=3.8.1,<4" sqlparse = ">=0.3.1" tzdata = {version = "*", markers = "sys_platform == \"win32\""} @@ -872,59 +933,70 @@ bcrypt = ["bcrypt"] [[package]] name = "django-allauth" -version = "0.54.0" +version = "65.3.1" description = "Integrated set of Django applications addressing authentication, registration, account management as well as 3rd party (social) account authentication." optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" +groups = ["main"] files = [ - {file = "django-allauth-0.54.0.tar.gz", hash = "sha256:120e265f802b65738899c6cb627b827fde46a4d03067034c633f516c2adf3e3e"}, + {file = "django_allauth-65.3.1.tar.gz", hash = "sha256:e02e951b71a2753a746459f2efa114c7c72bf2cef6887dbe8607a577c0350587"}, ] [package.dependencies] -Django = ">=2.0" -pyjwt = {version = ">=1.7", extras = ["crypto"]} -python3-openid = ">=3.0.8" -requests = "*" -requests-oauthlib = ">=0.3.0" +asgiref = ">=3.8.1" +Django = ">=4.2.16" + +[package.extras] +mfa = ["fido2 (>=1.1.2)", "qrcode (>=7.0.0)"] +openid = ["python3-openid (>=3.0.8)"] +saml = ["python3-saml (>=1.15.0,<2.0.0)"] +socialaccount = ["pyjwt[crypto] (>=1.7)", "requests (>=2.0.0)", "requests-oauthlib (>=0.3.0)"] +steam = ["python3-openid (>=3.0.8)"] [[package]] name = "django-crispy-forms" -version = "1.14.0" +version = "2.3" description = "Best way to have Django DRY forms" optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" +groups = ["main"] files = [ - {file = "django-crispy-forms-1.14.0.tar.gz", hash = "sha256:35887b8851a931374dd697207a8f56c57a9c5cb9dbf0b9fa54314da5666cea5b"}, - {file = "django_crispy_forms-1.14.0-py3-none-any.whl", hash = "sha256:bc4d2037f6de602d39c0bc452ac3029d1f5d65e88458872cc4dbc01c3a400604"}, + {file = "django_crispy_forms-2.3-py3-none-any.whl", hash = "sha256:efc4c31e5202bbec6af70d383a35e12fc80ea769d464fb0e7fe21768bb138a20"}, + {file = "django_crispy_forms-2.3.tar.gz", hash = "sha256:2db17ae08527201be1273f0df789e5f92819e23dd28fec69cffba7f3762e1a38"}, ] +[package.dependencies] +django = ">=4.2" + [[package]] name = "django-csp" -version = "3.7" +version = "3.8" description = "Django Content Security Policy support." 
optional = false python-versions = "*" +groups = ["main"] files = [ - {file = "django_csp-3.7-py2.py3-none-any.whl", hash = "sha256:01443a07723f9a479d498bd7bb63571aaa771e690f64bde515db6cdb76e8041a"}, - {file = "django_csp-3.7.tar.gz", hash = "sha256:01eda02ad3f10261c74131cdc0b5a6a62b7c7ad4fd017fbefb7a14776e0a9727"}, + {file = "django_csp-3.8-py3-none-any.whl", hash = "sha256:19b2978b03fcd73517d7d67acbc04fbbcaec0facc3e83baa502965892d1e0719"}, + {file = "django_csp-3.8.tar.gz", hash = "sha256:ef0f1a9f7d8da68ae6e169c02e9ac661c0ecf04db70e0d1d85640512a68471c0"}, ] [package.dependencies] -Django = ">=1.8" +Django = ">=3.2" [package.extras] jinja2 = ["jinja2 (>=2.9.6)"] -tests = ["jinja2 (>=2.9.6)", "mock (==1.0.1)", "pep8 (==1.4.6)", "pytest (<4.0)", "pytest-django", "pytest-flakes (==1.0.1)", "pytest-pep8 (==1.0.6)", "six (==1.12.0)"] +tests = ["jinja2 (>=2.9.6)", "pytest", "pytest-cov", "pytest-django", "pytest-ruff"] [[package]] name = "django-extensions" -version = "3.2.1" +version = "3.2.3" description = "Extensions for Django" optional = false python-versions = ">=3.6" +groups = ["main"] files = [ - {file = "django-extensions-3.2.1.tar.gz", hash = "sha256:2a4f4d757be2563cd1ff7cfdf2e57468f5f931cc88b23cf82ca75717aae504a4"}, - {file = "django_extensions-3.2.1-py3-none-any.whl", hash = "sha256:421464be390289513f86cb5e18eb43e5dc1de8b4c27ba9faa3b91261b0d67e09"}, + {file = "django-extensions-3.2.3.tar.gz", hash = "sha256:44d27919d04e23b3f40231c4ab7af4e61ce832ef46d610cc650d53e68328410a"}, + {file = "django_extensions-3.2.3-py3-none-any.whl", hash = "sha256:9600b7562f79a92cbf1fde6403c04fee314608fefbb595502e34383ae8203401"}, ] [package.dependencies] @@ -932,24 +1004,26 @@ Django = ">=3.2" [[package]] name = "django-ratelimit" -version = "3.0.1" +version = "4.1.0" description = "Cache-based rate-limiting for Django." optional = false -python-versions = ">=3.4" +python-versions = ">=3.7" +groups = ["main"] files = [ - {file = "django-ratelimit-3.0.1.tar.gz", hash = "sha256:73223d860abd5c5d7b9a807fabb39a6220068129b514be8d78044b52607ab154"}, - {file = "django_ratelimit-3.0.1-py2.py3-none-any.whl", hash = "sha256:857e797f23de948b204a31dba9d88aea3ce731b7a5d926d0240c772e19b5486f"}, + {file = "django-ratelimit-4.1.0.tar.gz", hash = "sha256:555943b283045b917ad59f196829530d63be2a39adb72788d985b90c81ba808b"}, + {file = "django_ratelimit-4.1.0-py2.py3-none-any.whl", hash = "sha256:d047a31cf94d83ef1465d7543ca66c6fc16695559b5f8d814d1b51df15110b92"}, ] [[package]] name = "django-recaptcha" -version = "3.0.0" +version = "4.0.0" description = "Django recaptcha form field/widget app." optional = false python-versions = "*" +groups = ["main"] files = [ - {file = "django-recaptcha-3.0.0.tar.gz", hash = "sha256:253197051288923cae675d7eff91b619e3775311292a5dbaf27a8a55ffebc670"}, - {file = "django_recaptcha-3.0.0-py3-none-any.whl", hash = "sha256:1aed69fd6ac8fd9e99e52665392ae6748f8b6339ace656fad779fe0c6c915a52"}, + {file = "django-recaptcha-4.0.0.tar.gz", hash = "sha256:5316438f97700c431d65351470d1255047e3f2cd9af0f2f13592b637dad9213e"}, + {file = "django_recaptcha-4.0.0-py3-none-any.whl", hash = "sha256:0d912d5c7c009df4e47accd25029133d47a74342dbd2a8edc2877b6bffa971a3"}, ] [package.dependencies] @@ -961,6 +1035,7 @@ version = "1.2.1" description = "This Django app allows you to export certain settings to your templates." 
optional = false python-versions = "*" +groups = ["main"] files = [ {file = "django-settings-export-1.2.1.tar.gz", hash = "sha256:fceeae49fc597f654c1217415d8e049fc81c930b7154f5d8f28c432db738ff79"}, ] @@ -974,6 +1049,7 @@ version = "3.15.2" description = "Web APIs for Django, made easy." optional = false python-versions = ">=3.8" +groups = ["main"] files = [ {file = "djangorestframework-3.15.2-py3-none-any.whl", hash = "sha256:2b8871b062ba1aefc2de01f773875441a961fefbf79f5eed1e32b2f096944b20"}, {file = "djangorestframework-3.15.2.tar.gz", hash = "sha256:36fe88cd2d6c6bec23dca9804bab2ba5517a8bb9d8f47ebc68981b56840107ad"}, @@ -988,6 +1064,7 @@ version = "1.0.2" description = "The FLARE team's open-source library to disassemble Common Intermediate Language (CIL) instructions." optional = false python-versions = ">=3.7" +groups = ["main"] files = [ {file = "dncil-1.0.2-py3-none-any.whl", hash = "sha256:69d389e9b850fa9afa2e37ca252b01476379991eee88fd33ab76f924d36dd68d"}, {file = "dncil-1.0.2.tar.gz", hash = "sha256:1557675c2d1351d3260509881cff0383309f81cda4944ed2c3f5cc352953aa15"}, @@ -998,13 +1075,14 @@ dev = ["black (==22.12.0)", "dnfile (==0.12.0)", "hexdump (==3.3.0)", "isort (== [[package]] name = "dnfile" -version = "0.15.0" +version = "0.15.1" description = "Parse .NET executable files." optional = false python-versions = ">=3.8" +groups = ["main"] files = [ - {file = "dnfile-0.15.0-py3-none-any.whl", hash = "sha256:d60239de76035bed22f3c131925cf8d484f44a9da03c19659a01a615309add55"}, - {file = "dnfile-0.15.0.tar.gz", hash = "sha256:e4ae8803a59d8f845c11524e8b007104b43c90adc2fb0a81dcdc2972c47dfc80"}, + {file = "dnfile-0.15.1-py3-none-any.whl", hash = "sha256:585c8e3e4a29824402430a0a8b7e7ae82c040fc17eeb3a06758fdceebe2d923e"}, + {file = "dnfile-0.15.1.tar.gz", hash = "sha256:1529cf0f976b1382f60a3c56b2e0def90f3486e41193ffd34677e74563c8426c"}, ] [package.dependencies] @@ -1012,21 +1090,22 @@ pefile = ">=2019.4.18" [[package]] name = "dnspython" -version = "2.6.1" +version = "2.7.0" description = "DNS toolkit" optional = false -python-versions = ">=3.8" +python-versions = ">=3.9" +groups = ["main"] files = [ - {file = "dnspython-2.6.1-py3-none-any.whl", hash = "sha256:5ef3b9680161f6fa89daf8ad451b5f1a33b18ae8a1c6778cdf4b43f08c0a6e50"}, - {file = "dnspython-2.6.1.tar.gz", hash = "sha256:e8f0f9c23a7b7cb99ded64e6c3a6f3e701d78f50c55e002b839dea7225cff7cc"}, + {file = "dnspython-2.7.0-py3-none-any.whl", hash = "sha256:b4c34b7d10b51bcc3a5071e7b8dee77939f1e878477eeecc965e9835f63c6c86"}, + {file = "dnspython-2.7.0.tar.gz", hash = "sha256:ce9c432eda0dc91cf618a5cedf1a4e142651196bbcd2c80e89ed5a907e5cfaf1"}, ] [package.extras] -dev = ["black (>=23.1.0)", "coverage (>=7.0)", "flake8 (>=7)", "mypy (>=1.8)", "pylint (>=3)", "pytest (>=7.4)", "pytest-cov (>=4.1.0)", "sphinx (>=7.2.0)", "twine (>=4.0.0)", "wheel (>=0.42.0)"] -dnssec = ["cryptography (>=41)"] +dev = ["black (>=23.1.0)", "coverage (>=7.0)", "flake8 (>=7)", "hypercorn (>=0.16.0)", "mypy (>=1.8)", "pylint (>=3)", "pytest (>=7.4)", "pytest-cov (>=4.1.0)", "quart-trio (>=0.11.0)", "sphinx (>=7.2.0)", "sphinx-rtd-theme (>=2.0.0)", "twine (>=4.0.0)", "wheel (>=0.42.0)"] +dnssec = ["cryptography (>=43)"] doh = ["h2 (>=4.1.0)", "httpcore (>=1.0.0)", "httpx (>=0.26.0)"] -doq = ["aioquic (>=0.9.25)"] -idna = ["idna (>=3.6)"] +doq = ["aioquic (>=1.0.0)"] +idna = ["idna (>=3.7)"] trio = ["trio (>=0.23)"] wmi = ["wmi (>=1.5.1)"] @@ -1036,6 +1115,7 @@ version = "1.9.6" description = "fast, simple packet creation / parsing, with definitions for the basic TCP/IP 
protocols" optional = false python-versions = "*" +groups = ["main"] files = [ {file = "dpkt-1.9.6-py3-none-any.whl", hash = "sha256:b1739c594297b2b6321dfbf133e3f8dcf54c0ef54cb5739d204331d34a0d8fe4"}, {file = "dpkt-1.9.6.tar.gz", hash = "sha256:b5737010fd420d142e02ed04fa616edd1fc05e414980baef594f72287c875eef"}, @@ -1047,17 +1127,32 @@ version = "0.98.3" description = "EasyGUI is a module for very simple, very easy GUI programming in Python. EasyGUI is different from other GUI generators in that EasyGUI is NOT event-driven. Instead, all GUI interactions are invoked by simple function calls." optional = false python-versions = "*" +groups = ["main"] files = [ {file = "easygui-0.98.3-py2.py3-none-any.whl", hash = "sha256:33498710c68b5376b459cd3fc48d1d1f33822139eb3ed01defbc0528326da3ba"}, {file = "easygui-0.98.3.tar.gz", hash = "sha256:d653ff79ee1f42f63b5a090f2f98ce02335d86ad8963b3ce2661805cafe99a04"}, ] +[[package]] +name = "editorconfig" +version = "0.17.0" +description = "EditorConfig File Locator and Interpreter for Python" +optional = false +python-versions = "*" +groups = ["main"] +files = [ + {file = "EditorConfig-0.17.0-py3-none-any.whl", hash = "sha256:fe491719c5f65959ec00b167d07740e7ffec9a3f362038c72b289330b9991dfc"}, + {file = "editorconfig-0.17.0.tar.gz", hash = "sha256:8739052279699840065d3a9f5c125d7d5a98daeefe53b0e5274261d77cb49aa2"}, +] + [[package]] name = "exceptiongroup" version = "1.2.2" description = "Backport of PEP 654 (exception groups)" optional = false python-versions = ">=3.7" +groups = ["main", "dev"] +markers = "python_version < \"3.11\"" files = [ {file = "exceptiongroup-1.2.2-py3-none-any.whl", hash = "sha256:3111b9d131c238bec2f8f516e123e14ba243563fb135d3fe885990585aa7795b"}, {file = "exceptiongroup-1.2.2.tar.gz", hash = "sha256:47c2edf7c6738fafb49fd34290706d1a1a2f4d1c6df275526b62cbb4aa5393cc"}, @@ -1072,6 +1167,7 @@ version = "2.1.1" description = "execnet: rapid multi-Python deployment" optional = false python-versions = ">=3.8" +groups = ["dev"] files = [ {file = "execnet-2.1.1-py3-none-any.whl", hash = "sha256:26dee51f1b80cebd6d0ca8e74dd8745419761d3bef34163928cbebbdc4749fdc"}, {file = "execnet-2.1.1.tar.gz", hash = "sha256:5189b52c6121c24feae288166ab41b32549c7e2348652736540b9e6e7d4e72e3"}, @@ -1082,29 +1178,31 @@ testing = ["hatch", "pre-commit", "pytest", "tox"] [[package]] name = "filelock" -version = "3.16.1" +version = "3.17.0" description = "A platform independent file lock." 
optional = false -python-versions = ">=3.8" +python-versions = ">=3.9" +groups = ["main", "dev"] files = [ - {file = "filelock-3.16.1-py3-none-any.whl", hash = "sha256:2082e5703d51fbf98ea75855d9d5527e33d8ff23099bec374a134febee6946b0"}, - {file = "filelock-3.16.1.tar.gz", hash = "sha256:c249fbfcd5db47e5e2d6d62198e565475ee65e4831e2561c8e313fa7eb961435"}, + {file = "filelock-3.17.0-py3-none-any.whl", hash = "sha256:533dc2f7ba78dc2f0f531fc6c4940addf7b70a481e269a5a3b93be94ffbe8338"}, + {file = "filelock-3.17.0.tar.gz", hash = "sha256:ee4e77401ef576ebb38cd7f13b9b28893194acc20a8e68e18730ba9c0e54660e"}, ] [package.extras] -docs = ["furo (>=2024.8.6)", "sphinx (>=8.0.2)", "sphinx-autodoc-typehints (>=2.4.1)"] -testing = ["covdefaults (>=2.3)", "coverage (>=7.6.1)", "diff-cover (>=9.2)", "pytest (>=8.3.3)", "pytest-asyncio (>=0.24)", "pytest-cov (>=5)", "pytest-mock (>=3.14)", "pytest-timeout (>=2.3.1)", "virtualenv (>=20.26.4)"] +docs = ["furo (>=2024.8.6)", "sphinx (>=8.1.3)", "sphinx-autodoc-typehints (>=3)"] +testing = ["covdefaults (>=2.3)", "coverage (>=7.6.10)", "diff-cover (>=9.2.1)", "pytest (>=8.3.4)", "pytest-asyncio (>=0.25.2)", "pytest-cov (>=6)", "pytest-mock (>=3.14)", "pytest-timeout (>=2.3.1)", "virtualenv (>=20.28.1)"] typing = ["typing-extensions (>=4.12.2)"] [[package]] name = "flare-capa" -version = "7.3.0" +version = "9.1.0" description = "The FLARE team's open-source tool to identify capabilities in executable files." optional = false -python-versions = ">=3.8" +python-versions = ">=3.10" +groups = ["main"] files = [ - {file = "flare_capa-7.3.0-py3-none-any.whl", hash = "sha256:f7a7f35f4dce1aca723fcc792a6afbc384a696d889ef891649dc823d948e43ff"}, - {file = "flare_capa-7.3.0.tar.gz", hash = "sha256:21be4c9ce0af093bb0590ec7fd807096483d16c68753a375576420fe8ebcfecf"}, + {file = "flare_capa-9.1.0-py3-none-any.whl", hash = "sha256:9e3214c66dd13c90e379c74d56a99fee2634c14980d55395535f3a39fe9159bc"}, + {file = "flare_capa-9.1.0.tar.gz", hash = "sha256:7df5c8033b9ea568569611b5f1948b8257c055a191a01ab1e67ce3e407c86e21"}, ] [package.dependencies] @@ -1114,7 +1212,7 @@ dnfile = ">=0.15.0" humanize = ">=4" ida-settings = ">=2" msgspec = ">=0.18.6" -networkx = ">=3,<3.2" +networkx = ">=3" pefile = ">=2023.2.7" protobuf = ">=5" pydantic = ">=2" @@ -1122,18 +1220,14 @@ pyelftools = ">=0.31" pyyaml = ">=6" rich = ">=13" "ruamel.yaml" = ">=0.18" -tabulate = ">=0.9" -termcolor = ">=2" -tqdm = ">=4" viv-utils = {version = ">=0.7.9", extras = ["flirt"]} vivisect = ">=1.1.1" -wcwidth = ">=0.2" xmltodict = ">=0.13.0" [package.extras] -build = ["build (==1.2.2)", "pyinstaller (==6.10.0)", "setuptools (==70.0.0)"] -dev = ["PyGithub (==2.4.0)", "black (==24.8.0)", "deptry (==0.20.0)", "flake8 (==7.1.1)", "flake8-bugbear (==24.8.19)", "flake8-comprehensions (==3.15.0)", "flake8-copyright (==0.2.4)", "flake8-encodings (==0.5.1)", "flake8-logging-format (==0.9.0)", "flake8-no-implicit-concat (==0.3.5)", "flake8-print (==5.0.0)", "flake8-simplify (==0.21.0)", "flake8-todos (==0.3.1)", "flake8-use-pathlib (==0.3.0)", "isort (==5.13.2)", "mypy (==1.11.2)", "mypy-protobuf (==3.6.0)", "pre-commit (==3.5.0)", "pytest (==8.0.0)", "pytest-cov (==5.0.0)", "pytest-instafail (==0.5.0)", "pytest-sugar (==1.0.0)", "ruff (==0.6.4)", "types-PyYAML (==6.0.8)", "types-backports (==0.1.3)", "types-colorama (==0.4.15.11)", "types-protobuf (==5.27.0.20240907)", "types-psutil (==6.0.0.20240901)", "types-requests (==2.32.0.20240712)", "types-tabulate (==0.9.0.20240106)", "types-termcolor (==1.1.4)"] -scripts = ["jschema-to-python 
(==1.2.3)", "psutil (==6.0.0)", "requests (==2.32.3)", "sarif-om (==1.0.4)", "stix2 (==3.0.1)"] +build = ["build (==1.2.2)", "pyinstaller (==6.12.0)", "setuptools (==75.8.0)"] +dev = ["PyGithub (==2.6.0)", "black (==25.1.0)", "deptry (==0.23.0)", "flake8 (==7.1.1)", "flake8-bugbear (==24.12.12)", "flake8-comprehensions (==3.16.0)", "flake8-copyright (==0.2.4)", "flake8-encodings (==0.5.1)", "flake8-logging-format (==0.9.0)", "flake8-no-implicit-concat (==0.3.5)", "flake8-print (==5.0.0)", "flake8-simplify (==0.21.0)", "flake8-todos (==0.3.1)", "flake8-use-pathlib (==0.3.0)", "isort (==6.0.0)", "mypy (==1.15.0)", "mypy-protobuf (==3.6.0)", "pre-commit (==4.1.0)", "pytest (==8.0.0)", "pytest-instafail (==0.5.0)", "pytest-sugar (==1.0.0)", "ruff (==0.9.2)", "types-PyYAML (==6.0.8)", "types-backports (==0.1.3)", "types-colorama (==0.4.15.11)", "types-protobuf (==5.29.1.20241207)", "types-psutil (==6.1.0.20241102)", "types_requests (==2.32.0.20240712)"] +scripts = ["jschema_to_python (==1.2.3)", "psutil (==7.0.0)", "requests (==2.32.3)", "sarif_om (==1.0.4)", "stix2 (==3.0.1)"] [[package]] name = "freezegun" @@ -1141,6 +1235,7 @@ version = "1.5.1" description = "Let your Python tests travel through time" optional = false python-versions = ">=3.7" +groups = ["dev"] files = [ {file = "freezegun-1.5.1-py3-none-any.whl", hash = "sha256:bf111d7138a8abe55ab48a71755673dbaa4ab87f4cff5634a4442dfec34c15f1"}, {file = "freezegun-1.5.1.tar.gz", hash = "sha256:b29dedfcda6d5e8e083ce71b2b542753ad48cfec44037b3fc79702e2980a89e9"}, @@ -1155,6 +1250,7 @@ version = "4.3.5" description = "Python module which allows you to specify timeouts when calling any existing function. Also provides support for stoppable-threads" optional = false python-versions = "*" +groups = ["dev"] files = [ {file = "func_timeout-4.3.5.tar.gz", hash = "sha256:74cd3c428ec94f4edfba81f9b2f14904846d5ffccc27c92433b8b5939b5575dd"}, ] @@ -1165,6 +1261,7 @@ version = "2.0" description = "A fancy and practical functional tools" optional = false python-versions = "*" +groups = ["main"] files = [ {file = "funcy-2.0-py2.py3-none-any.whl", hash = "sha256:53df23c8bb1651b12f095df764bfb057935d49537a56de211b098f4c79614bb0"}, {file = "funcy-2.0.tar.gz", hash = "sha256:3963315d59d41c6f30c04bc910e10ab50a3ac4a225868bfa96feed133df075cb"}, @@ -1176,6 +1273,7 @@ version = "1.0.0" description = "Clean single-source support for Python 3 and 2" optional = false python-versions = ">=2.6, !=3.0.*, !=3.1.*, !=3.2.*" +groups = ["main"] files = [ {file = "future-1.0.0-py3-none-any.whl", hash = "sha256:929292d34f5872e70396626ef385ec22355a1fae8ad29e1a734c3e43f9fbc216"}, {file = "future-1.0.0.tar.gz", hash = "sha256:bd2968309307861edae1458a4f8a4f3598c03be43b97521076aebf5d94c07b05"}, @@ -1187,6 +1285,7 @@ version = "24.2.1" description = "Coroutine-based network library" optional = false python-versions = ">=3.8" +groups = ["main"] files = [ {file = "gevent-24.2.1-cp310-cp310-macosx_11_0_universal2.whl", hash = "sha256:6f947a9abc1a129858391b3d9334c45041c08a0f23d14333d5b844b6e5c17a07"}, {file = "gevent-24.2.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bde283313daf0b34a8d1bab30325f5cb0f4e11b5869dbe5bc61f8fe09a8f66f3"}, @@ -1253,6 +1352,7 @@ version = "3.0.3" description = "Lightweight in-process concurrent programming" optional = false python-versions = ">=3.7" +groups = ["main"] files = [ {file = "greenlet-3.0.3-cp310-cp310-macosx_11_0_universal2.whl", hash = "sha256:9da2bd29ed9e4f15955dd1595ad7bc9320308a3b766ef7f837e23ad4b4aac31a"}, {file = 
"greenlet-3.0.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d353cadd6083fdb056bb46ed07e4340b0869c305c8ca54ef9da3421acbdf6881"}, @@ -1320,13 +1420,14 @@ test = ["objgraph", "psutil"] [[package]] name = "gunicorn" -version = "22.0.0" +version = "23.0.0" description = "WSGI HTTP Server for UNIX" optional = false python-versions = ">=3.7" +groups = ["main"] files = [ - {file = "gunicorn-22.0.0-py3-none-any.whl", hash = "sha256:350679f91b24062c86e386e198a15438d53a7a8207235a78ba1b53df4c4378d9"}, - {file = "gunicorn-22.0.0.tar.gz", hash = "sha256:4a0b436239ff76fb33f11c07a16482c521a7e09c1ce3cc293c2330afe01bec63"}, + {file = "gunicorn-23.0.0-py3-none-any.whl", hash = "sha256:ec400d38950de4dfd418cff8328b2c8faed0edb0d517d3394e457c317908ca4d"}, + {file = "gunicorn-23.0.0.tar.gz", hash = "sha256:f014447a0101dc57e294f6c18ca6b40227a4c90e9bdb586042628030cba004ec"}, ] [package.dependencies] @@ -1345,6 +1446,7 @@ version = "0.14.0" description = "A pure-Python, bring-your-own-I/O implementation of HTTP/1.1" optional = false python-versions = ">=3.7" +groups = ["main"] files = [ {file = "h11-0.14.0-py3-none-any.whl", hash = "sha256:e3fe4ac4b851c468cc8363d500db52c2ead036020723024a109d37346efaa761"}, {file = "h11-0.14.0.tar.gz", hash = "sha256:8f19fbbe99e72420ff35c00b27a34cb9937e902a8b810e2c88300c6f0a3b699d"}, @@ -1356,67 +1458,77 @@ version = "1.1.4" description = "HTTP client mock for Python" optional = false python-versions = ">=3" +groups = ["dev"] files = [ {file = "httpretty-1.1.4.tar.gz", hash = "sha256:20de0e5dd5a18292d36d928cc3d6e52f8b2ac73daec40d41eb62dee154933b68"}, ] [[package]] name = "httptools" -version = "0.6.1" +version = "0.6.4" description = "A collection of framework independent HTTP protocol utils." optional = false python-versions = ">=3.8.0" -files = [ - {file = "httptools-0.6.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:d2f6c3c4cb1948d912538217838f6e9960bc4a521d7f9b323b3da579cd14532f"}, - {file = "httptools-0.6.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:00d5d4b68a717765b1fabfd9ca755bd12bf44105eeb806c03d1962acd9b8e563"}, - {file = "httptools-0.6.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:639dc4f381a870c9ec860ce5c45921db50205a37cc3334e756269736ff0aac58"}, - {file = "httptools-0.6.1-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e57997ac7fb7ee43140cc03664de5f268813a481dff6245e0075925adc6aa185"}, - {file = "httptools-0.6.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:0ac5a0ae3d9f4fe004318d64b8a854edd85ab76cffbf7ef5e32920faef62f142"}, - {file = "httptools-0.6.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:3f30d3ce413088a98b9db71c60a6ada2001a08945cb42dd65a9a9fe228627658"}, - {file = "httptools-0.6.1-cp310-cp310-win_amd64.whl", hash = "sha256:1ed99a373e327f0107cb513b61820102ee4f3675656a37a50083eda05dc9541b"}, - {file = "httptools-0.6.1-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:7a7ea483c1a4485c71cb5f38be9db078f8b0e8b4c4dc0210f531cdd2ddac1ef1"}, - {file = "httptools-0.6.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:85ed077c995e942b6f1b07583e4eb0a8d324d418954fc6af913d36db7c05a5a0"}, - {file = "httptools-0.6.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8b0bb634338334385351a1600a73e558ce619af390c2b38386206ac6a27fecfc"}, - {file = "httptools-0.6.1-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:7d9ceb2c957320def533671fc9c715a80c47025139c8d1f3797477decbc6edd2"}, - {file = "httptools-0.6.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:4f0f8271c0a4db459f9dc807acd0eadd4839934a4b9b892f6f160e94da309837"}, - {file = "httptools-0.6.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:6a4f5ccead6d18ec072ac0b84420e95d27c1cdf5c9f1bc8fbd8daf86bd94f43d"}, - {file = "httptools-0.6.1-cp311-cp311-win_amd64.whl", hash = "sha256:5cceac09f164bcba55c0500a18fe3c47df29b62353198e4f37bbcc5d591172c3"}, - {file = "httptools-0.6.1-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:75c8022dca7935cba14741a42744eee13ba05db00b27a4b940f0d646bd4d56d0"}, - {file = "httptools-0.6.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:48ed8129cd9a0d62cf4d1575fcf90fb37e3ff7d5654d3a5814eb3d55f36478c2"}, - {file = "httptools-0.6.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6f58e335a1402fb5a650e271e8c2d03cfa7cea46ae124649346d17bd30d59c90"}, - {file = "httptools-0.6.1-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:93ad80d7176aa5788902f207a4e79885f0576134695dfb0fefc15b7a4648d503"}, - {file = "httptools-0.6.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:9bb68d3a085c2174c2477eb3ffe84ae9fb4fde8792edb7bcd09a1d8467e30a84"}, - {file = "httptools-0.6.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:b512aa728bc02354e5ac086ce76c3ce635b62f5fbc32ab7082b5e582d27867bb"}, - {file = "httptools-0.6.1-cp312-cp312-win_amd64.whl", hash = "sha256:97662ce7fb196c785344d00d638fc9ad69e18ee4bfb4000b35a52efe5adcc949"}, - {file = "httptools-0.6.1-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:8e216a038d2d52ea13fdd9b9c9c7459fb80d78302b257828285eca1c773b99b3"}, - {file = "httptools-0.6.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:3e802e0b2378ade99cd666b5bffb8b2a7cc8f3d28988685dc300469ea8dd86cb"}, - {file = "httptools-0.6.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4bd3e488b447046e386a30f07af05f9b38d3d368d1f7b4d8f7e10af85393db97"}, - {file = "httptools-0.6.1-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fe467eb086d80217b7584e61313ebadc8d187a4d95bb62031b7bab4b205c3ba3"}, - {file = "httptools-0.6.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:3c3b214ce057c54675b00108ac42bacf2ab8f85c58e3f324a4e963bbc46424f4"}, - {file = "httptools-0.6.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:8ae5b97f690badd2ca27cbf668494ee1b6d34cf1c464271ef7bfa9ca6b83ffaf"}, - {file = "httptools-0.6.1-cp38-cp38-win_amd64.whl", hash = "sha256:405784577ba6540fa7d6ff49e37daf104e04f4b4ff2d1ac0469eaa6a20fde084"}, - {file = "httptools-0.6.1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:95fb92dd3649f9cb139e9c56604cc2d7c7bf0fc2e7c8d7fbd58f96e35eddd2a3"}, - {file = "httptools-0.6.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:dcbab042cc3ef272adc11220517278519adf8f53fd3056d0e68f0a6f891ba94e"}, - {file = "httptools-0.6.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0cf2372e98406efb42e93bfe10f2948e467edfd792b015f1b4ecd897903d3e8d"}, - {file = "httptools-0.6.1-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:678fcbae74477a17d103b7cae78b74800d795d702083867ce160fc202104d0da"}, - {file = "httptools-0.6.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = 
"sha256:e0b281cf5a125c35f7f6722b65d8542d2e57331be573e9e88bc8b0115c4a7a81"}, - {file = "httptools-0.6.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:95658c342529bba4e1d3d2b1a874db16c7cca435e8827422154c9da76ac4e13a"}, - {file = "httptools-0.6.1-cp39-cp39-win_amd64.whl", hash = "sha256:7ebaec1bf683e4bf5e9fbb49b8cc36da482033596a415b3e4ebab5a4c0d7ec5e"}, - {file = "httptools-0.6.1.tar.gz", hash = "sha256:c6e26c30455600b95d94b1b836085138e82f177351454ee841c148f93a9bad5a"}, +groups = ["main"] +files = [ + {file = "httptools-0.6.4-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:3c73ce323711a6ffb0d247dcd5a550b8babf0f757e86a52558fe5b86d6fefcc0"}, + {file = "httptools-0.6.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:345c288418f0944a6fe67be8e6afa9262b18c7626c3ef3c28adc5eabc06a68da"}, + {file = "httptools-0.6.4-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:deee0e3343f98ee8047e9f4c5bc7cedbf69f5734454a94c38ee829fb2d5fa3c1"}, + {file = "httptools-0.6.4-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ca80b7485c76f768a3bc83ea58373f8db7b015551117375e4918e2aa77ea9b50"}, + {file = "httptools-0.6.4-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:90d96a385fa941283ebd231464045187a31ad932ebfa541be8edf5b3c2328959"}, + {file = "httptools-0.6.4-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:59e724f8b332319e2875efd360e61ac07f33b492889284a3e05e6d13746876f4"}, + {file = "httptools-0.6.4-cp310-cp310-win_amd64.whl", hash = "sha256:c26f313951f6e26147833fc923f78f95604bbec812a43e5ee37f26dc9e5a686c"}, + {file = "httptools-0.6.4-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:f47f8ed67cc0ff862b84a1189831d1d33c963fb3ce1ee0c65d3b0cbe7b711069"}, + {file = "httptools-0.6.4-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:0614154d5454c21b6410fdf5262b4a3ddb0f53f1e1721cfd59d55f32138c578a"}, + {file = "httptools-0.6.4-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f8787367fbdfccae38e35abf7641dafc5310310a5987b689f4c32cc8cc3ee975"}, + {file = "httptools-0.6.4-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:40b0f7fe4fd38e6a507bdb751db0379df1e99120c65fbdc8ee6c1d044897a636"}, + {file = "httptools-0.6.4-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:40a5ec98d3f49904b9fe36827dcf1aadfef3b89e2bd05b0e35e94f97c2b14721"}, + {file = "httptools-0.6.4-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:dacdd3d10ea1b4ca9df97a0a303cbacafc04b5cd375fa98732678151643d4988"}, + {file = "httptools-0.6.4-cp311-cp311-win_amd64.whl", hash = "sha256:288cd628406cc53f9a541cfaf06041b4c71d751856bab45e3702191f931ccd17"}, + {file = "httptools-0.6.4-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:df017d6c780287d5c80601dafa31f17bddb170232d85c066604d8558683711a2"}, + {file = "httptools-0.6.4-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:85071a1e8c2d051b507161f6c3e26155b5c790e4e28d7f236422dbacc2a9cc44"}, + {file = "httptools-0.6.4-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:69422b7f458c5af875922cdb5bd586cc1f1033295aa9ff63ee196a87519ac8e1"}, + {file = "httptools-0.6.4-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:16e603a3bff50db08cd578d54f07032ca1631450ceb972c2f834c2b860c28ea2"}, + {file = "httptools-0.6.4-cp312-cp312-musllinux_1_2_aarch64.whl", hash = 
"sha256:ec4f178901fa1834d4a060320d2f3abc5c9e39766953d038f1458cb885f47e81"}, + {file = "httptools-0.6.4-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:f9eb89ecf8b290f2e293325c646a211ff1c2493222798bb80a530c5e7502494f"}, + {file = "httptools-0.6.4-cp312-cp312-win_amd64.whl", hash = "sha256:db78cb9ca56b59b016e64b6031eda5653be0589dba2b1b43453f6e8b405a0970"}, + {file = "httptools-0.6.4-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:ade273d7e767d5fae13fa637f4d53b6e961fb7fd93c7797562663f0171c26660"}, + {file = "httptools-0.6.4-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:856f4bc0478ae143bad54a4242fccb1f3f86a6e1be5548fecfd4102061b3a083"}, + {file = "httptools-0.6.4-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:322d20ea9cdd1fa98bd6a74b77e2ec5b818abdc3d36695ab402a0de8ef2865a3"}, + {file = "httptools-0.6.4-cp313-cp313-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4d87b29bd4486c0093fc64dea80231f7c7f7eb4dc70ae394d70a495ab8436071"}, + {file = "httptools-0.6.4-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:342dd6946aa6bda4b8f18c734576106b8a31f2fe31492881a9a160ec84ff4bd5"}, + {file = "httptools-0.6.4-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:4b36913ba52008249223042dca46e69967985fb4051951f94357ea681e1f5dc0"}, + {file = "httptools-0.6.4-cp313-cp313-win_amd64.whl", hash = "sha256:28908df1b9bb8187393d5b5db91435ccc9c8e891657f9cbb42a2541b44c82fc8"}, + {file = "httptools-0.6.4-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:d3f0d369e7ffbe59c4b6116a44d6a8eb4783aae027f2c0b366cf0aa964185dba"}, + {file = "httptools-0.6.4-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:94978a49b8f4569ad607cd4946b759d90b285e39c0d4640c6b36ca7a3ddf2efc"}, + {file = "httptools-0.6.4-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:40dc6a8e399e15ea525305a2ddba998b0af5caa2566bcd79dcbe8948181eeaff"}, + {file = "httptools-0.6.4-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ab9ba8dcf59de5181f6be44a77458e45a578fc99c31510b8c65b7d5acc3cf490"}, + {file = "httptools-0.6.4-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:fc411e1c0a7dcd2f902c7c48cf079947a7e65b5485dea9decb82b9105ca71a43"}, + {file = "httptools-0.6.4-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:d54efd20338ac52ba31e7da78e4a72570cf729fac82bc31ff9199bedf1dc7440"}, + {file = "httptools-0.6.4-cp38-cp38-win_amd64.whl", hash = "sha256:df959752a0c2748a65ab5387d08287abf6779ae9165916fe053e68ae1fbdc47f"}, + {file = "httptools-0.6.4-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:85797e37e8eeaa5439d33e556662cc370e474445d5fab24dcadc65a8ffb04003"}, + {file = "httptools-0.6.4-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:db353d22843cf1028f43c3651581e4bb49374d85692a85f95f7b9a130e1b2cab"}, + {file = "httptools-0.6.4-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d1ffd262a73d7c28424252381a5b854c19d9de5f56f075445d33919a637e3547"}, + {file = "httptools-0.6.4-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:703c346571fa50d2e9856a37d7cd9435a25e7fd15e236c397bf224afaa355fe9"}, + {file = "httptools-0.6.4-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:aafe0f1918ed07b67c1e838f950b1c1fabc683030477e60b335649b8020e1076"}, + {file = "httptools-0.6.4-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:0e563e54979e97b6d13f1bbc05a96109923e76b901f786a5eae36e99c01237bd"}, + 
{file = "httptools-0.6.4-cp39-cp39-win_amd64.whl", hash = "sha256:b799de31416ecc589ad79dd85a0b2657a8fe39327944998dea368c1d4c9e55e6"}, + {file = "httptools-0.6.4.tar.gz", hash = "sha256:4e93eee4add6493b59a5c514da98c939b244fce4a0d8879cd3f466562f4b7d5c"}, ] [package.extras] -test = ["Cython (>=0.29.24,<0.30.0)"] +test = ["Cython (>=0.29.24)"] [[package]] name = "humanize" -version = "4.10.0" +version = "4.11.0" description = "Python humanize utilities" optional = false -python-versions = ">=3.8" +python-versions = ">=3.9" +groups = ["main"] files = [ - {file = "humanize-4.10.0-py3-none-any.whl", hash = "sha256:39e7ccb96923e732b5c2e27aeaa3b10a8dfeeba3eb965ba7b74a3eb0e30040a6"}, - {file = "humanize-4.10.0.tar.gz", hash = "sha256:06b6eb0293e4b85e8d385397c5868926820db32b9b654b932f57fa41c23c9978"}, + {file = "humanize-4.11.0-py3-none-any.whl", hash = "sha256:b53caaec8532bcb2fff70c8826f904c35943f8cecaca29d272d9df38092736c0"}, + {file = "humanize-4.11.0.tar.gz", hash = "sha256:e66f36020a2d5a974c504bd2555cf770621dbdbb6d82f94a6857c0b1ea2608be"}, ] [package.extras] @@ -1428,6 +1540,7 @@ version = "21.0.0" description = "A featureful, immutable, and correct URL for Python." optional = false python-versions = ">=2.6, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" +groups = ["main"] files = [ {file = "hyperlink-21.0.0-py2.py3-none-any.whl", hash = "sha256:e6b14c37ecb73e89c77d78cdb4c2cc8f3fb59a885c5b3f819ff4ed80f25af1b4"}, {file = "hyperlink-21.0.0.tar.gz", hash = "sha256:427af957daa58bc909471c6c40f74c5450fa123dd093fc53efd2e91d2705a56b"}, @@ -1442,6 +1555,7 @@ version = "3.0" description = "Humane API for storing and accessing persistent data in IDA Pro databases" optional = false python-versions = "*" +groups = ["main"] files = [ {file = "ida-netnode-3.0.tar.gz", hash = "sha256:b7e1f2fbf57e3e104ed779d58e5f3f050aa48cce581bab8bf14ccee7a315e32e"}, {file = "ida_netnode-3.0-py3-none-any.whl", hash = "sha256:b9d117703e076c9d219c9337d0b43d935b5102b5cb677835f7c9776a9f0340a5"}, @@ -1456,6 +1570,7 @@ version = "2.1.0" description = "Fetch and set configuration values in IDA Pro IDAPython scripts" optional = false python-versions = "*" +groups = ["main"] files = [ {file = "ida-settings-2.1.0.tar.gz", hash = "sha256:1371730b4e64bf388845b65a41bea094ebde8a5d3a05bdf4edf2f42aaba83262"}, {file = "ida_settings-2.1.0-py2-none-any.whl", hash = "sha256:eab913b31ed0565aacea6d31976696c33b60c7e7bce3fccb75f32bef3878fc61"}, @@ -1467,13 +1582,14 @@ six = "*" [[package]] name = "identify" -version = "2.6.1" +version = "2.6.6" description = "File identification library for Python" optional = false -python-versions = ">=3.8" +python-versions = ">=3.9" +groups = ["dev"] files = [ - {file = "identify-2.6.1-py2.py3-none-any.whl", hash = "sha256:53863bcac7caf8d2ed85bd20312ea5dcfc22226800f6d6881f232d861db5a8f0"}, - {file = "identify-2.6.1.tar.gz", hash = "sha256:91478c5fb7c3aac5ff7bf9b4344f803843dc586832d5f110d672b19aa1984c98"}, + {file = "identify-2.6.6-py2.py3-none-any.whl", hash = "sha256:cbd1810bce79f8b671ecb20f53ee0ae8e86ae84b557de31d89709dc2a48ba881"}, + {file = "identify-2.6.6.tar.gz", hash = "sha256:7bec12768ed44ea4761efb47806f0a41f86e7c0a5fdf5950d4648c90eca7e251"}, ] [package.extras] @@ -1485,6 +1601,7 @@ version = "3.10" description = "Internationalized Domain Names in Applications (IDNA)" optional = false python-versions = ">=3.6" +groups = ["main"] files = [ {file = "idna-3.10-py3-none-any.whl", hash = "sha256:946d195a0d259cbba61165e88e65941f16e9b36ea6ddb97f00452bae8b1287d3"}, {file = "idna-3.10.tar.gz", hash = 
"sha256:12f65c9b470abda6dc35cf8e63cc574b1c52b11df2c86030af0ac09b01b13ea9"}, @@ -1499,6 +1616,7 @@ version = "24.7.2" description = "A small library that versions your Python projects." optional = false python-versions = ">=3.8" +groups = ["main"] files = [ {file = "incremental-24.7.2-py3-none-any.whl", hash = "sha256:8cb2c3431530bec48ad70513931a760f446ad6c25e8333ca5d95e24b0ed7b8fe"}, {file = "incremental-24.7.2.tar.gz", hash = "sha256:fb4f1d47ee60efe87d4f6f0ebb5f70b9760db2b2574c59c8e8912be4ebd464c9"}, @@ -1517,6 +1635,7 @@ version = "2.0.0" description = "brain-dead simple config-ini parsing" optional = false python-versions = ">=3.7" +groups = ["dev"] files = [ {file = "iniconfig-2.0.0-py3-none-any.whl", hash = "sha256:b6a85871a79d2e3b22d2d1b94ac2824226a63c6b741c88f7ae975f18b6778374"}, {file = "iniconfig-2.0.0.tar.gz", hash = "sha256:2d91e135bf72d31a410b17c16da610a82cb55f6b0477d1a902134b24a455b8b3"}, @@ -1528,6 +1647,7 @@ version = "0.5" description = "Accessing and Modifying INI files" optional = false python-versions = "*" +groups = ["main"] files = [ {file = "iniparse-0.5-py2-none-any.whl", hash = "sha256:88ca60473b1637055a937933d48840be1b1b6835f381a6158ef118a532583675"}, {file = "iniparse-0.5-py3-none-any.whl", hash = "sha256:db6ef1d8a02395448e0e7b17ac0aa28b8d338b632bbd1ffca08c02ddae32cf97"}, @@ -1543,6 +1663,7 @@ version = "3.1.0" description = "Editable interval tree data structure for Python 2 and 3" optional = false python-versions = "*" +groups = ["main"] files = [ {file = "intervaltree-3.1.0.tar.gz", hash = "sha256:902b1b88936918f9b2a19e0e5eb7ccb430ae45cde4f39ea4b36932920d33952d"}, ] @@ -1556,6 +1677,7 @@ version = "5.13.2" description = "A Python utility / library to sort Python imports." optional = false python-versions = ">=3.8.0" +groups = ["dev"] files = [ {file = "isort-5.13.2-py3-none-any.whl", hash = "sha256:8ca5e72a8d85860d5a3fa69b8745237f2939afe12dbf656afbcb47fe72d947a6"}, {file = "isort-5.13.2.tar.gz", hash = "sha256:48fdfcb9face5d58a4f6dde2e72a1fb8dcaf8ab26f95ab49fab84c2ddefb0109"}, @@ -1566,13 +1688,14 @@ colors = ["colorama (>=0.4.6)"] [[package]] name = "jinja2" -version = "3.1.4" +version = "3.1.6" description = "A very fast and expressive template engine." optional = false python-versions = ">=3.7" +groups = ["main"] files = [ - {file = "jinja2-3.1.4-py3-none-any.whl", hash = "sha256:bc5dd2abb727a5319567b7a813e6a2e7318c39f4f487cfe6c89c6f9c7d25197d"}, - {file = "jinja2-3.1.4.tar.gz", hash = "sha256:4a3aee7acbbe7303aede8e9648d13b8bf88a429282aa6122a993f0ac800cb369"}, + {file = "jinja2-3.1.6-py3-none-any.whl", hash = "sha256:85ece4451f492d0c13c5dd7c13a64681a86afae63a5f347908daf103ce6d2f67"}, + {file = "jinja2-3.1.6.tar.gz", hash = "sha256:0137fb05990d35f1275a587e9aee6d56da821fc83491a0fb838183be43f66d6d"}, ] [package.dependencies] @@ -1581,23 +1704,199 @@ MarkupSafe = ">=2.0" [package.extras] i18n = ["Babel (>=2.7)"] +[[package]] +name = "jsbeautifier" +version = "1.15.1" +description = "JavaScript unobfuscator and beautifier." 
+optional = false +python-versions = "*" +groups = ["main"] +files = [ + {file = "jsbeautifier-1.15.1.tar.gz", hash = "sha256:ebd733b560704c602d744eafc839db60a1ee9326e30a2a80c4adb8718adc1b24"}, +] + +[package.dependencies] +editorconfig = ">=0.12.2" +six = ">=1.13.0" + [[package]] name = "lnkparse3" -version = "1.2.0" +version = "1.5.0" description = "Windows Shortcut file (LNK) parser" optional = false python-versions = "*" +groups = ["main"] files = [ - {file = "LnkParse3-1.2.0-py3-none-any.whl", hash = "sha256:b97f9a3dfffa62ecbd5f1f6561d8b5b75b0045241482b4a980657d5aac696ee3"}, - {file = "LnkParse3-1.2.0.tar.gz", hash = "sha256:102b2aba6c2896127cb719f814a8579210368f9277fd5ec0d0151fe070166e1d"}, + {file = "LnkParse3-1.5.0-py3-none-any.whl", hash = "sha256:56b549389254f4d25375621249aa3a8c31f1dabf375e88bf7dc8c73a0f4f8f1e"}, + {file = "lnkparse3-1.5.0.tar.gz", hash = "sha256:3ecbd8f4107be07b8e8d7b770daa53271abf66222ee892618d30f86952e1121a"}, ] +[package.dependencies] +pyyaml = "*" + +[[package]] +name = "lxml" +version = "5.3.0" +description = "Powerful and Pythonic XML processing library combining libxml2/libxslt with the ElementTree API." +optional = false +python-versions = ">=3.6" +groups = ["main"] +files = [ + {file = "lxml-5.3.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:dd36439be765e2dde7660212b5275641edbc813e7b24668831a5c8ac91180656"}, + {file = "lxml-5.3.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:ae5fe5c4b525aa82b8076c1a59d642c17b6e8739ecf852522c6321852178119d"}, + {file = "lxml-5.3.0-cp310-cp310-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:501d0d7e26b4d261fca8132854d845e4988097611ba2531408ec91cf3fd9d20a"}, + {file = "lxml-5.3.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fb66442c2546446944437df74379e9cf9e9db353e61301d1a0e26482f43f0dd8"}, + {file = "lxml-5.3.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9e41506fec7a7f9405b14aa2d5c8abbb4dbbd09d88f9496958b6d00cb4d45330"}, + {file = "lxml-5.3.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f7d4a670107d75dfe5ad080bed6c341d18c4442f9378c9f58e5851e86eb79965"}, + {file = "lxml-5.3.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:41ce1f1e2c7755abfc7e759dc34d7d05fd221723ff822947132dc934d122fe22"}, + {file = "lxml-5.3.0-cp310-cp310-manylinux_2_28_aarch64.whl", hash = "sha256:44264ecae91b30e5633013fb66f6ddd05c006d3e0e884f75ce0b4755b3e3847b"}, + {file = "lxml-5.3.0-cp310-cp310-manylinux_2_28_ppc64le.whl", hash = "sha256:3c174dc350d3ec52deb77f2faf05c439331d6ed5e702fc247ccb4e6b62d884b7"}, + {file = "lxml-5.3.0-cp310-cp310-manylinux_2_28_s390x.whl", hash = "sha256:2dfab5fa6a28a0b60a20638dc48e6343c02ea9933e3279ccb132f555a62323d8"}, + {file = "lxml-5.3.0-cp310-cp310-manylinux_2_28_x86_64.whl", hash = "sha256:b1c8c20847b9f34e98080da785bb2336ea982e7f913eed5809e5a3c872900f32"}, + {file = "lxml-5.3.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:2c86bf781b12ba417f64f3422cfc302523ac9cd1d8ae8c0f92a1c66e56ef2e86"}, + {file = "lxml-5.3.0-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:c162b216070f280fa7da844531169be0baf9ccb17263cf5a8bf876fcd3117fa5"}, + {file = "lxml-5.3.0-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:36aef61a1678cb778097b4a6eeae96a69875d51d1e8f4d4b491ab3cfb54b5a03"}, + {file = "lxml-5.3.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:f65e5120863c2b266dbcc927b306c5b78e502c71edf3295dfcb9501ec96e5fc7"}, + {file = 
"lxml-5.3.0-cp310-cp310-win32.whl", hash = "sha256:ef0c1fe22171dd7c7c27147f2e9c3e86f8bdf473fed75f16b0c2e84a5030ce80"}, + {file = "lxml-5.3.0-cp310-cp310-win_amd64.whl", hash = "sha256:052d99051e77a4f3e8482c65014cf6372e61b0a6f4fe9edb98503bb5364cfee3"}, + {file = "lxml-5.3.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:74bcb423462233bc5d6066e4e98b0264e7c1bed7541fff2f4e34fe6b21563c8b"}, + {file = "lxml-5.3.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:a3d819eb6f9b8677f57f9664265d0a10dd6551d227afb4af2b9cd7bdc2ccbf18"}, + {file = "lxml-5.3.0-cp311-cp311-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5b8f5db71b28b8c404956ddf79575ea77aa8b1538e8b2ef9ec877945b3f46442"}, + {file = "lxml-5.3.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2c3406b63232fc7e9b8783ab0b765d7c59e7c59ff96759d8ef9632fca27c7ee4"}, + {file = "lxml-5.3.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:2ecdd78ab768f844c7a1d4a03595038c166b609f6395e25af9b0f3f26ae1230f"}, + {file = "lxml-5.3.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:168f2dfcfdedf611eb285efac1516c8454c8c99caf271dccda8943576b67552e"}, + {file = "lxml-5.3.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:aa617107a410245b8660028a7483b68e7914304a6d4882b5ff3d2d3eb5948d8c"}, + {file = "lxml-5.3.0-cp311-cp311-manylinux_2_28_aarch64.whl", hash = "sha256:69959bd3167b993e6e710b99051265654133a98f20cec1d9b493b931942e9c16"}, + {file = "lxml-5.3.0-cp311-cp311-manylinux_2_28_ppc64le.whl", hash = "sha256:bd96517ef76c8654446fc3db9242d019a1bb5fe8b751ba414765d59f99210b79"}, + {file = "lxml-5.3.0-cp311-cp311-manylinux_2_28_s390x.whl", hash = "sha256:ab6dd83b970dc97c2d10bc71aa925b84788c7c05de30241b9e96f9b6d9ea3080"}, + {file = "lxml-5.3.0-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:eec1bb8cdbba2925bedc887bc0609a80e599c75b12d87ae42ac23fd199445654"}, + {file = "lxml-5.3.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:6a7095eeec6f89111d03dabfe5883a1fd54da319c94e0fb104ee8f23616b572d"}, + {file = "lxml-5.3.0-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:6f651ebd0b21ec65dfca93aa629610a0dbc13dbc13554f19b0113da2e61a4763"}, + {file = "lxml-5.3.0-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:f422a209d2455c56849442ae42f25dbaaba1c6c3f501d58761c619c7836642ec"}, + {file = "lxml-5.3.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:62f7fdb0d1ed2065451f086519865b4c90aa19aed51081979ecd05a21eb4d1be"}, + {file = "lxml-5.3.0-cp311-cp311-win32.whl", hash = "sha256:c6379f35350b655fd817cd0d6cbeef7f265f3ae5fedb1caae2eb442bbeae9ab9"}, + {file = "lxml-5.3.0-cp311-cp311-win_amd64.whl", hash = "sha256:9c52100e2c2dbb0649b90467935c4b0de5528833c76a35ea1a2691ec9f1ee7a1"}, + {file = "lxml-5.3.0-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:e99f5507401436fdcc85036a2e7dc2e28d962550afe1cbfc07c40e454256a859"}, + {file = "lxml-5.3.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:384aacddf2e5813a36495233b64cb96b1949da72bef933918ba5c84e06af8f0e"}, + {file = "lxml-5.3.0-cp312-cp312-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:874a216bf6afaf97c263b56371434e47e2c652d215788396f60477540298218f"}, + {file = "lxml-5.3.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:65ab5685d56914b9a2a34d67dd5488b83213d680b0c5d10b47f81da5a16b0b0e"}, + {file = 
"lxml-5.3.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:aac0bbd3e8dd2d9c45ceb82249e8bdd3ac99131a32b4d35c8af3cc9db1657179"}, + {file = "lxml-5.3.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b369d3db3c22ed14c75ccd5af429086f166a19627e84a8fdade3f8f31426e52a"}, + {file = "lxml-5.3.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c24037349665434f375645fa9d1f5304800cec574d0310f618490c871fd902b3"}, + {file = "lxml-5.3.0-cp312-cp312-manylinux_2_28_aarch64.whl", hash = "sha256:62d172f358f33a26d6b41b28c170c63886742f5b6772a42b59b4f0fa10526cb1"}, + {file = "lxml-5.3.0-cp312-cp312-manylinux_2_28_ppc64le.whl", hash = "sha256:c1f794c02903c2824fccce5b20c339a1a14b114e83b306ff11b597c5f71a1c8d"}, + {file = "lxml-5.3.0-cp312-cp312-manylinux_2_28_s390x.whl", hash = "sha256:5d6a6972b93c426ace71e0be9a6f4b2cfae9b1baed2eed2006076a746692288c"}, + {file = "lxml-5.3.0-cp312-cp312-manylinux_2_28_x86_64.whl", hash = "sha256:3879cc6ce938ff4eb4900d901ed63555c778731a96365e53fadb36437a131a99"}, + {file = "lxml-5.3.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:74068c601baff6ff021c70f0935b0c7bc528baa8ea210c202e03757c68c5a4ff"}, + {file = "lxml-5.3.0-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:ecd4ad8453ac17bc7ba3868371bffb46f628161ad0eefbd0a855d2c8c32dd81a"}, + {file = "lxml-5.3.0-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:7e2f58095acc211eb9d8b5771bf04df9ff37d6b87618d1cbf85f92399c98dae8"}, + {file = "lxml-5.3.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:e63601ad5cd8f860aa99d109889b5ac34de571c7ee902d6812d5d9ddcc77fa7d"}, + {file = "lxml-5.3.0-cp312-cp312-win32.whl", hash = "sha256:17e8d968d04a37c50ad9c456a286b525d78c4a1c15dd53aa46c1d8e06bf6fa30"}, + {file = "lxml-5.3.0-cp312-cp312-win_amd64.whl", hash = "sha256:c1a69e58a6bb2de65902051d57fde951febad631a20a64572677a1052690482f"}, + {file = "lxml-5.3.0-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:8c72e9563347c7395910de6a3100a4840a75a6f60e05af5e58566868d5eb2d6a"}, + {file = "lxml-5.3.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:e92ce66cd919d18d14b3856906a61d3f6b6a8500e0794142338da644260595cd"}, + {file = "lxml-5.3.0-cp313-cp313-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1d04f064bebdfef9240478f7a779e8c5dc32b8b7b0b2fc6a62e39b928d428e51"}, + {file = "lxml-5.3.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5c2fb570d7823c2bbaf8b419ba6e5662137f8166e364a8b2b91051a1fb40ab8b"}, + {file = "lxml-5.3.0-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:0c120f43553ec759f8de1fee2f4794452b0946773299d44c36bfe18e83caf002"}, + {file = "lxml-5.3.0-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:562e7494778a69086f0312ec9689f6b6ac1c6b65670ed7d0267e49f57ffa08c4"}, + {file = "lxml-5.3.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:423b121f7e6fa514ba0c7918e56955a1d4470ed35faa03e3d9f0e3baa4c7e492"}, + {file = "lxml-5.3.0-cp313-cp313-manylinux_2_28_aarch64.whl", hash = "sha256:c00f323cc00576df6165cc9d21a4c21285fa6b9989c5c39830c3903dc4303ef3"}, + {file = "lxml-5.3.0-cp313-cp313-manylinux_2_28_ppc64le.whl", hash = "sha256:1fdc9fae8dd4c763e8a31e7630afef517eab9f5d5d31a278df087f307bf601f4"}, + {file = "lxml-5.3.0-cp313-cp313-manylinux_2_28_s390x.whl", hash = "sha256:658f2aa69d31e09699705949b5fc4719cbecbd4a97f9656a232e7d6c7be1a367"}, + {file = 
"lxml-5.3.0-cp313-cp313-manylinux_2_28_x86_64.whl", hash = "sha256:1473427aff3d66a3fa2199004c3e601e6c4500ab86696edffdbc84954c72d832"}, + {file = "lxml-5.3.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:a87de7dd873bf9a792bf1e58b1c3887b9264036629a5bf2d2e6579fe8e73edff"}, + {file = "lxml-5.3.0-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:0d7b36afa46c97875303a94e8f3ad932bf78bace9e18e603f2085b652422edcd"}, + {file = "lxml-5.3.0-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:cf120cce539453ae086eacc0130a324e7026113510efa83ab42ef3fcfccac7fb"}, + {file = "lxml-5.3.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:df5c7333167b9674aa8ae1d4008fa4bc17a313cc490b2cca27838bbdcc6bb15b"}, + {file = "lxml-5.3.0-cp313-cp313-win32.whl", hash = "sha256:c802e1c2ed9f0c06a65bc4ed0189d000ada8049312cfeab6ca635e39c9608957"}, + {file = "lxml-5.3.0-cp313-cp313-win_amd64.whl", hash = "sha256:406246b96d552e0503e17a1006fd27edac678b3fcc9f1be71a2f94b4ff61528d"}, + {file = "lxml-5.3.0-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:8f0de2d390af441fe8b2c12626d103540b5d850d585b18fcada58d972b74a74e"}, + {file = "lxml-5.3.0-cp36-cp36m-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1afe0a8c353746e610bd9031a630a95bcfb1a720684c3f2b36c4710a0a96528f"}, + {file = "lxml-5.3.0-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:56b9861a71575f5795bde89256e7467ece3d339c9b43141dbdd54544566b3b94"}, + {file = "lxml-5.3.0-cp36-cp36m-manylinux_2_28_x86_64.whl", hash = "sha256:9fb81d2824dff4f2e297a276297e9031f46d2682cafc484f49de182aa5e5df99"}, + {file = "lxml-5.3.0-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:2c226a06ecb8cdef28845ae976da407917542c5e6e75dcac7cc33eb04aaeb237"}, + {file = "lxml-5.3.0-cp36-cp36m-musllinux_1_2_x86_64.whl", hash = "sha256:7d3d1ca42870cdb6d0d29939630dbe48fa511c203724820fc0fd507b2fb46577"}, + {file = "lxml-5.3.0-cp36-cp36m-win32.whl", hash = "sha256:094cb601ba9f55296774c2d57ad68730daa0b13dc260e1f941b4d13678239e70"}, + {file = "lxml-5.3.0-cp36-cp36m-win_amd64.whl", hash = "sha256:eafa2c8658f4e560b098fe9fc54539f86528651f61849b22111a9b107d18910c"}, + {file = "lxml-5.3.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:cb83f8a875b3d9b458cada4f880fa498646874ba4011dc974e071a0a84a1b033"}, + {file = "lxml-5.3.0-cp37-cp37m-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:25f1b69d41656b05885aa185f5fdf822cb01a586d1b32739633679699f220391"}, + {file = "lxml-5.3.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:23e0553b8055600b3bf4a00b255ec5c92e1e4aebf8c2c09334f8368e8bd174d6"}, + {file = "lxml-5.3.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9ada35dd21dc6c039259596b358caab6b13f4db4d4a7f8665764d616daf9cc1d"}, + {file = "lxml-5.3.0-cp37-cp37m-manylinux_2_28_aarch64.whl", hash = "sha256:81b4e48da4c69313192d8c8d4311e5d818b8be1afe68ee20f6385d0e96fc9512"}, + {file = "lxml-5.3.0-cp37-cp37m-manylinux_2_28_x86_64.whl", hash = "sha256:2bc9fd5ca4729af796f9f59cd8ff160fe06a474da40aca03fcc79655ddee1a8b"}, + {file = "lxml-5.3.0-cp37-cp37m-musllinux_1_2_aarch64.whl", hash = "sha256:07da23d7ee08577760f0a71d67a861019103e4812c87e2fab26b039054594cc5"}, + {file = "lxml-5.3.0-cp37-cp37m-musllinux_1_2_x86_64.whl", hash = "sha256:ea2e2f6f801696ad7de8aec061044d6c8c0dd4037608c7cab38a9a4d316bfb11"}, + {file = "lxml-5.3.0-cp37-cp37m-win32.whl", hash = 
"sha256:5c54afdcbb0182d06836cc3d1be921e540be3ebdf8b8a51ee3ef987537455f84"}, + {file = "lxml-5.3.0-cp37-cp37m-win_amd64.whl", hash = "sha256:f2901429da1e645ce548bf9171784c0f74f0718c3f6150ce166be39e4dd66c3e"}, + {file = "lxml-5.3.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:c56a1d43b2f9ee4786e4658c7903f05da35b923fb53c11025712562d5cc02753"}, + {file = "lxml-5.3.0-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6ee8c39582d2652dcd516d1b879451500f8db3fe3607ce45d7c5957ab2596040"}, + {file = "lxml-5.3.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0fdf3a3059611f7585a78ee10399a15566356116a4288380921a4b598d807a22"}, + {file = "lxml-5.3.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:146173654d79eb1fc97498b4280c1d3e1e5d58c398fa530905c9ea50ea849b22"}, + {file = "lxml-5.3.0-cp38-cp38-manylinux_2_28_aarch64.whl", hash = "sha256:0a7056921edbdd7560746f4221dca89bb7a3fe457d3d74267995253f46343f15"}, + {file = "lxml-5.3.0-cp38-cp38-manylinux_2_28_x86_64.whl", hash = "sha256:9e4b47ac0f5e749cfc618efdf4726269441014ae1d5583e047b452a32e221920"}, + {file = "lxml-5.3.0-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:f914c03e6a31deb632e2daa881fe198461f4d06e57ac3d0e05bbcab8eae01945"}, + {file = "lxml-5.3.0-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:213261f168c5e1d9b7535a67e68b1f59f92398dd17a56d934550837143f79c42"}, + {file = "lxml-5.3.0-cp38-cp38-win32.whl", hash = "sha256:218c1b2e17a710e363855594230f44060e2025b05c80d1f0661258142b2add2e"}, + {file = "lxml-5.3.0-cp38-cp38-win_amd64.whl", hash = "sha256:315f9542011b2c4e1d280e4a20ddcca1761993dda3afc7a73b01235f8641e903"}, + {file = "lxml-5.3.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:1ffc23010330c2ab67fac02781df60998ca8fe759e8efde6f8b756a20599c5de"}, + {file = "lxml-5.3.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:2b3778cb38212f52fac9fe913017deea2fdf4eb1a4f8e4cfc6b009a13a6d3fcc"}, + {file = "lxml-5.3.0-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4b0c7a688944891086ba192e21c5229dea54382f4836a209ff8d0a660fac06be"}, + {file = "lxml-5.3.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:747a3d3e98e24597981ca0be0fd922aebd471fa99d0043a3842d00cdcad7ad6a"}, + {file = "lxml-5.3.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:86a6b24b19eaebc448dc56b87c4865527855145d851f9fc3891673ff97950540"}, + {file = "lxml-5.3.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b11a5d918a6216e521c715b02749240fb07ae5a1fefd4b7bf12f833bc8b4fe70"}, + {file = "lxml-5.3.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:68b87753c784d6acb8a25b05cb526c3406913c9d988d51f80adecc2b0775d6aa"}, + {file = "lxml-5.3.0-cp39-cp39-manylinux_2_28_aarch64.whl", hash = "sha256:109fa6fede314cc50eed29e6e56c540075e63d922455346f11e4d7a036d2b8cf"}, + {file = "lxml-5.3.0-cp39-cp39-manylinux_2_28_ppc64le.whl", hash = "sha256:02ced472497b8362c8e902ade23e3300479f4f43e45f4105c85ef43b8db85229"}, + {file = "lxml-5.3.0-cp39-cp39-manylinux_2_28_s390x.whl", hash = "sha256:6b038cc86b285e4f9fea2ba5ee76e89f21ed1ea898e287dc277a25884f3a7dfe"}, + {file = "lxml-5.3.0-cp39-cp39-manylinux_2_28_x86_64.whl", hash = "sha256:7437237c6a66b7ca341e868cda48be24b8701862757426852c9b3186de1da8a2"}, + {file = "lxml-5.3.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = 
"sha256:7f41026c1d64043a36fda21d64c5026762d53a77043e73e94b71f0521939cc71"}, + {file = "lxml-5.3.0-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:482c2f67761868f0108b1743098640fbb2a28a8e15bf3f47ada9fa59d9fe08c3"}, + {file = "lxml-5.3.0-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:1483fd3358963cc5c1c9b122c80606a3a79ee0875bcac0204149fa09d6ff2727"}, + {file = "lxml-5.3.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:2dec2d1130a9cda5b904696cec33b2cfb451304ba9081eeda7f90f724097300a"}, + {file = "lxml-5.3.0-cp39-cp39-win32.whl", hash = "sha256:a0eabd0a81625049c5df745209dc7fcef6e2aea7793e5f003ba363610aa0a3ff"}, + {file = "lxml-5.3.0-cp39-cp39-win_amd64.whl", hash = "sha256:89e043f1d9d341c52bf2af6d02e6adde62e0a46e6755d5eb60dc6e4f0b8aeca2"}, + {file = "lxml-5.3.0-pp310-pypy310_pp73-macosx_10_15_x86_64.whl", hash = "sha256:7b1cd427cb0d5f7393c31b7496419da594fe600e6fdc4b105a54f82405e6626c"}, + {file = "lxml-5.3.0-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:51806cfe0279e06ed8500ce19479d757db42a30fd509940b1701be9c86a5ff9a"}, + {file = "lxml-5.3.0-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ee70d08fd60c9565ba8190f41a46a54096afa0eeb8f76bd66f2c25d3b1b83005"}, + {file = "lxml-5.3.0-pp310-pypy310_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:8dc2c0395bea8254d8daebc76dcf8eb3a95ec2a46fa6fae5eaccee366bfe02ce"}, + {file = "lxml-5.3.0-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:6ba0d3dcac281aad8a0e5b14c7ed6f9fa89c8612b47939fc94f80b16e2e9bc83"}, + {file = "lxml-5.3.0-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:6e91cf736959057f7aac7adfc83481e03615a8e8dd5758aa1d95ea69e8931dba"}, + {file = "lxml-5.3.0-pp37-pypy37_pp73-macosx_10_9_x86_64.whl", hash = "sha256:94d6c3782907b5e40e21cadf94b13b0842ac421192f26b84c45f13f3c9d5dc27"}, + {file = "lxml-5.3.0-pp37-pypy37_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c300306673aa0f3ed5ed9372b21867690a17dba38c68c44b287437c362ce486b"}, + {file = "lxml-5.3.0-pp37-pypy37_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:78d9b952e07aed35fe2e1a7ad26e929595412db48535921c5013edc8aa4a35ce"}, + {file = "lxml-5.3.0-pp37-pypy37_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:01220dca0d066d1349bd6a1726856a78f7929f3878f7e2ee83c296c69495309e"}, + {file = "lxml-5.3.0-pp37-pypy37_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:2d9b8d9177afaef80c53c0a9e30fa252ff3036fb1c6494d427c066a4ce6a282f"}, + {file = "lxml-5.3.0-pp37-pypy37_pp73-win_amd64.whl", hash = "sha256:20094fc3f21ea0a8669dc4c61ed7fa8263bd37d97d93b90f28fc613371e7a875"}, + {file = "lxml-5.3.0-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:ace2c2326a319a0bb8a8b0e5b570c764962e95818de9f259ce814ee666603f19"}, + {file = "lxml-5.3.0-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:92e67a0be1639c251d21e35fe74df6bcc40cba445c2cda7c4a967656733249e2"}, + {file = "lxml-5.3.0-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dd5350b55f9fecddc51385463a4f67a5da829bc741e38cf689f38ec9023f54ab"}, + {file = "lxml-5.3.0-pp38-pypy38_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:4c1fefd7e3d00921c44dc9ca80a775af49698bbfd92ea84498e56acffd4c5469"}, + {file = "lxml-5.3.0-pp38-pypy38_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:71a8dd38fbd2f2319136d4ae855a7078c69c9a38ae06e0c17c73fd70fc6caad8"}, + {file = "lxml-5.3.0-pp38-pypy38_pp73-win_amd64.whl", hash = 
"sha256:97acf1e1fd66ab53dacd2c35b319d7e548380c2e9e8c54525c6e76d21b1ae3b1"}, + {file = "lxml-5.3.0-pp39-pypy39_pp73-macosx_10_15_x86_64.whl", hash = "sha256:68934b242c51eb02907c5b81d138cb977b2129a0a75a8f8b60b01cb8586c7b21"}, + {file = "lxml-5.3.0-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b710bc2b8292966b23a6a0121f7a6c51d45d2347edcc75f016ac123b8054d3f2"}, + {file = "lxml-5.3.0-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:18feb4b93302091b1541221196a2155aa296c363fd233814fa11e181adebc52f"}, + {file = "lxml-5.3.0-pp39-pypy39_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:3eb44520c4724c2e1a57c0af33a379eee41792595023f367ba3952a2d96c2aab"}, + {file = "lxml-5.3.0-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:609251a0ca4770e5a8768ff902aa02bf636339c5a93f9349b48eb1f606f7f3e9"}, + {file = "lxml-5.3.0-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:516f491c834eb320d6c843156440fe7fc0d50b33e44387fcec5b02f0bc118a4c"}, + {file = "lxml-5.3.0.tar.gz", hash = "sha256:4e109ca30d1edec1ac60cdbe341905dc3b8f55b16855e03a54aaf59e51ec8c6f"}, +] + +[package.extras] +cssselect = ["cssselect (>=0.7)"] +html-clean = ["lxml-html-clean"] +html5 = ["html5lib"] +htmlsoup = ["BeautifulSoup4"] +source = ["Cython (>=3.0.11)"] + [[package]] name = "maco" version = "1.1.8" description = "" optional = false python-versions = ">=3.8" +groups = ["main"] +markers = "extra == \"maco\"" files = [ {file = "maco-1.1.8-py3-none-any.whl", hash = "sha256:ab2d1d8e846c0abc455d16f718ba71dda5492ddc22533484156090aa4439fb06"}, {file = "maco-1.1.8.tar.gz", hash = "sha256:e0985efdf645d3c55e3d4d4f2bf40b8d2260fa4add608bb8e8fdefba0500cb4a"}, @@ -1611,13 +1910,14 @@ yara-python = "*" [[package]] name = "mako" -version = "1.3.5" +version = "1.3.8" description = "A super-fast templating language that borrows the best ideas from the existing templating languages." optional = false python-versions = ">=3.8" +groups = ["main"] files = [ - {file = "Mako-1.3.5-py3-none-any.whl", hash = "sha256:260f1dbc3a519453a9c856dedfe4beb4e50bd5a26d96386cb6c80856556bb91a"}, - {file = "Mako-1.3.5.tar.gz", hash = "sha256:48dbc20568c1d276a2698b36d968fa76161bf127194907ea6fc594fa81f943bc"}, + {file = "Mako-1.3.8-py3-none-any.whl", hash = "sha256:42f48953c7eb91332040ff567eb7eea69b22e7a4affbc5ba8e845e8f730f6627"}, + {file = "mako-1.3.8.tar.gz", hash = "sha256:577b97e414580d3e088d47c2dbbe9594aa7a5146ed2875d4dfa9075af2dd3cc8"}, ] [package.dependencies] @@ -1634,6 +1934,7 @@ version = "3.0.0" description = "Python port of markdown-it. Markdown parsing, done right!" optional = false python-versions = ">=3.8" +groups = ["main", "dev"] files = [ {file = "markdown-it-py-3.0.0.tar.gz", hash = "sha256:e3f60a94fa066dc52ec76661e37c851cb232d92f9886b15cb560aaada2df8feb"}, {file = "markdown_it_py-3.0.0-py3-none-any.whl", hash = "sha256:355216845c60bd96232cd8d8c40e8f9765cc86f46880e43a8fd22dc1a1a8cab1"}, @@ -1654,139 +1955,171 @@ testing = ["coverage", "pytest", "pytest-cov", "pytest-regressions"] [[package]] name = "markupsafe" -version = "2.1.5" +version = "3.0.2" description = "Safely add untrusted strings to HTML/XML markup." 
optional = false -python-versions = ">=3.7" -files = [ - {file = "MarkupSafe-2.1.5-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:a17a92de5231666cfbe003f0e4b9b3a7ae3afb1ec2845aadc2bacc93ff85febc"}, - {file = "MarkupSafe-2.1.5-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:72b6be590cc35924b02c78ef34b467da4ba07e4e0f0454a2c5907f473fc50ce5"}, - {file = "MarkupSafe-2.1.5-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e61659ba32cf2cf1481e575d0462554625196a1f2fc06a1c777d3f48e8865d46"}, - {file = "MarkupSafe-2.1.5-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2174c595a0d73a3080ca3257b40096db99799265e1c27cc5a610743acd86d62f"}, - {file = "MarkupSafe-2.1.5-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ae2ad8ae6ebee9d2d94b17fb62763125f3f374c25618198f40cbb8b525411900"}, - {file = "MarkupSafe-2.1.5-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:075202fa5b72c86ad32dc7d0b56024ebdbcf2048c0ba09f1cde31bfdd57bcfff"}, - {file = "MarkupSafe-2.1.5-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:598e3276b64aff0e7b3451b72e94fa3c238d452e7ddcd893c3ab324717456bad"}, - {file = "MarkupSafe-2.1.5-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:fce659a462a1be54d2ffcacea5e3ba2d74daa74f30f5f143fe0c58636e355fdd"}, - {file = "MarkupSafe-2.1.5-cp310-cp310-win32.whl", hash = "sha256:d9fad5155d72433c921b782e58892377c44bd6252b5af2f67f16b194987338a4"}, - {file = "MarkupSafe-2.1.5-cp310-cp310-win_amd64.whl", hash = "sha256:bf50cd79a75d181c9181df03572cdce0fbb75cc353bc350712073108cba98de5"}, - {file = "MarkupSafe-2.1.5-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:629ddd2ca402ae6dbedfceeba9c46d5f7b2a61d9749597d4307f943ef198fc1f"}, - {file = "MarkupSafe-2.1.5-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:5b7b716f97b52c5a14bffdf688f971b2d5ef4029127f1ad7a513973cfd818df2"}, - {file = "MarkupSafe-2.1.5-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6ec585f69cec0aa07d945b20805be741395e28ac1627333b1c5b0105962ffced"}, - {file = "MarkupSafe-2.1.5-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b91c037585eba9095565a3556f611e3cbfaa42ca1e865f7b8015fe5c7336d5a5"}, - {file = "MarkupSafe-2.1.5-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7502934a33b54030eaf1194c21c692a534196063db72176b0c4028e140f8f32c"}, - {file = "MarkupSafe-2.1.5-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:0e397ac966fdf721b2c528cf028494e86172b4feba51d65f81ffd65c63798f3f"}, - {file = "MarkupSafe-2.1.5-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:c061bb86a71b42465156a3ee7bd58c8c2ceacdbeb95d05a99893e08b8467359a"}, - {file = "MarkupSafe-2.1.5-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:3a57fdd7ce31c7ff06cdfbf31dafa96cc533c21e443d57f5b1ecc6cdc668ec7f"}, - {file = "MarkupSafe-2.1.5-cp311-cp311-win32.whl", hash = "sha256:397081c1a0bfb5124355710fe79478cdbeb39626492b15d399526ae53422b906"}, - {file = "MarkupSafe-2.1.5-cp311-cp311-win_amd64.whl", hash = "sha256:2b7c57a4dfc4f16f7142221afe5ba4e093e09e728ca65c51f5620c9aaeb9a617"}, - {file = "MarkupSafe-2.1.5-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:8dec4936e9c3100156f8a2dc89c4b88d5c435175ff03413b443469c7c8c5f4d1"}, - {file = "MarkupSafe-2.1.5-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:3c6b973f22eb18a789b1460b4b91bf04ae3f0c4234a0a6aa6b0a92f6f7b951d4"}, - {file = 
"MarkupSafe-2.1.5-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ac07bad82163452a6884fe8fa0963fb98c2346ba78d779ec06bd7a6262132aee"}, - {file = "MarkupSafe-2.1.5-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f5dfb42c4604dddc8e4305050aa6deb084540643ed5804d7455b5df8fe16f5e5"}, - {file = "MarkupSafe-2.1.5-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ea3d8a3d18833cf4304cd2fc9cbb1efe188ca9b5efef2bdac7adc20594a0e46b"}, - {file = "MarkupSafe-2.1.5-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:d050b3361367a06d752db6ead6e7edeb0009be66bc3bae0ee9d97fb326badc2a"}, - {file = "MarkupSafe-2.1.5-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:bec0a414d016ac1a18862a519e54b2fd0fc8bbfd6890376898a6c0891dd82e9f"}, - {file = "MarkupSafe-2.1.5-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:58c98fee265677f63a4385256a6d7683ab1832f3ddd1e66fe948d5880c21a169"}, - {file = "MarkupSafe-2.1.5-cp312-cp312-win32.whl", hash = "sha256:8590b4ae07a35970728874632fed7bd57b26b0102df2d2b233b6d9d82f6c62ad"}, - {file = "MarkupSafe-2.1.5-cp312-cp312-win_amd64.whl", hash = "sha256:823b65d8706e32ad2df51ed89496147a42a2a6e01c13cfb6ffb8b1e92bc910bb"}, - {file = "MarkupSafe-2.1.5-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:c8b29db45f8fe46ad280a7294f5c3ec36dbac9491f2d1c17345be8e69cc5928f"}, - {file = "MarkupSafe-2.1.5-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ec6a563cff360b50eed26f13adc43e61bc0c04d94b8be985e6fb24b81f6dcfdf"}, - {file = "MarkupSafe-2.1.5-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a549b9c31bec33820e885335b451286e2969a2d9e24879f83fe904a5ce59d70a"}, - {file = "MarkupSafe-2.1.5-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4f11aa001c540f62c6166c7726f71f7573b52c68c31f014c25cc7901deea0b52"}, - {file = "MarkupSafe-2.1.5-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:7b2e5a267c855eea6b4283940daa6e88a285f5f2a67f2220203786dfa59b37e9"}, - {file = "MarkupSafe-2.1.5-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:2d2d793e36e230fd32babe143b04cec8a8b3eb8a3122d2aceb4a371e6b09b8df"}, - {file = "MarkupSafe-2.1.5-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:ce409136744f6521e39fd8e2a24c53fa18ad67aa5bc7c2cf83645cce5b5c4e50"}, - {file = "MarkupSafe-2.1.5-cp37-cp37m-win32.whl", hash = "sha256:4096e9de5c6fdf43fb4f04c26fb114f61ef0bf2e5604b6ee3019d51b69e8c371"}, - {file = "MarkupSafe-2.1.5-cp37-cp37m-win_amd64.whl", hash = "sha256:4275d846e41ecefa46e2015117a9f491e57a71ddd59bbead77e904dc02b1bed2"}, - {file = "MarkupSafe-2.1.5-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:656f7526c69fac7f600bd1f400991cc282b417d17539a1b228617081106feb4a"}, - {file = "MarkupSafe-2.1.5-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:97cafb1f3cbcd3fd2b6fbfb99ae11cdb14deea0736fc2b0952ee177f2b813a46"}, - {file = "MarkupSafe-2.1.5-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1f3fbcb7ef1f16e48246f704ab79d79da8a46891e2da03f8783a5b6fa41a9532"}, - {file = "MarkupSafe-2.1.5-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fa9db3f79de01457b03d4f01b34cf91bc0048eb2c3846ff26f66687c2f6d16ab"}, - {file = "MarkupSafe-2.1.5-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ffee1f21e5ef0d712f9033568f8344d5da8cc2869dbd08d87c84656e6a2d2f68"}, - {file = 
"MarkupSafe-2.1.5-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:5dedb4db619ba5a2787a94d877bc8ffc0566f92a01c0ef214865e54ecc9ee5e0"}, - {file = "MarkupSafe-2.1.5-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:30b600cf0a7ac9234b2638fbc0fb6158ba5bdcdf46aeb631ead21248b9affbc4"}, - {file = "MarkupSafe-2.1.5-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:8dd717634f5a044f860435c1d8c16a270ddf0ef8588d4887037c5028b859b0c3"}, - {file = "MarkupSafe-2.1.5-cp38-cp38-win32.whl", hash = "sha256:daa4ee5a243f0f20d528d939d06670a298dd39b1ad5f8a72a4275124a7819eff"}, - {file = "MarkupSafe-2.1.5-cp38-cp38-win_amd64.whl", hash = "sha256:619bc166c4f2de5caa5a633b8b7326fbe98e0ccbfacabd87268a2b15ff73a029"}, - {file = "MarkupSafe-2.1.5-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:7a68b554d356a91cce1236aa7682dc01df0edba8d043fd1ce607c49dd3c1edcf"}, - {file = "MarkupSafe-2.1.5-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:db0b55e0f3cc0be60c1f19efdde9a637c32740486004f20d1cff53c3c0ece4d2"}, - {file = "MarkupSafe-2.1.5-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3e53af139f8579a6d5f7b76549125f0d94d7e630761a2111bc431fd820e163b8"}, - {file = "MarkupSafe-2.1.5-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:17b950fccb810b3293638215058e432159d2b71005c74371d784862b7e4683f3"}, - {file = "MarkupSafe-2.1.5-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4c31f53cdae6ecfa91a77820e8b151dba54ab528ba65dfd235c80b086d68a465"}, - {file = "MarkupSafe-2.1.5-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:bff1b4290a66b490a2f4719358c0cdcd9bafb6b8f061e45c7a2460866bf50c2e"}, - {file = "MarkupSafe-2.1.5-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:bc1667f8b83f48511b94671e0e441401371dfd0f0a795c7daa4a3cd1dde55bea"}, - {file = "MarkupSafe-2.1.5-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:5049256f536511ee3f7e1b3f87d1d1209d327e818e6ae1365e8653d7e3abb6a6"}, - {file = "MarkupSafe-2.1.5-cp39-cp39-win32.whl", hash = "sha256:00e046b6dd71aa03a41079792f8473dc494d564611a8f89bbbd7cb93295ebdcf"}, - {file = "MarkupSafe-2.1.5-cp39-cp39-win_amd64.whl", hash = "sha256:fa173ec60341d6bb97a89f5ea19c85c5643c1e7dedebc22f5181eb73573142c5"}, - {file = "MarkupSafe-2.1.5.tar.gz", hash = "sha256:d283d37a890ba4c1ae73ffadf8046435c76e7bc2247bbb63c00bd1a709c6544b"}, +python-versions = ">=3.9" +groups = ["main"] +files = [ + {file = "MarkupSafe-3.0.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:7e94c425039cde14257288fd61dcfb01963e658efbc0ff54f5306b06054700f8"}, + {file = "MarkupSafe-3.0.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:9e2d922824181480953426608b81967de705c3cef4d1af983af849d7bd619158"}, + {file = "MarkupSafe-3.0.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:38a9ef736c01fccdd6600705b09dc574584b89bea478200c5fbf112a6b0d5579"}, + {file = "MarkupSafe-3.0.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bbcb445fa71794da8f178f0f6d66789a28d7319071af7a496d4d507ed566270d"}, + {file = "MarkupSafe-3.0.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:57cb5a3cf367aeb1d316576250f65edec5bb3be939e9247ae594b4bcbc317dfb"}, + {file = "MarkupSafe-3.0.2-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:3809ede931876f5b2ec92eef964286840ed3540dadf803dd570c3b7e13141a3b"}, + {file = "MarkupSafe-3.0.2-cp310-cp310-musllinux_1_2_i686.whl", hash = 
"sha256:e07c3764494e3776c602c1e78e298937c3315ccc9043ead7e685b7f2b8d47b3c"}, + {file = "MarkupSafe-3.0.2-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:b424c77b206d63d500bcb69fa55ed8d0e6a3774056bdc4839fc9298a7edca171"}, + {file = "MarkupSafe-3.0.2-cp310-cp310-win32.whl", hash = "sha256:fcabf5ff6eea076f859677f5f0b6b5c1a51e70a376b0579e0eadef8db48c6b50"}, + {file = "MarkupSafe-3.0.2-cp310-cp310-win_amd64.whl", hash = "sha256:6af100e168aa82a50e186c82875a5893c5597a0c1ccdb0d8b40240b1f28b969a"}, + {file = "MarkupSafe-3.0.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:9025b4018f3a1314059769c7bf15441064b2207cb3f065e6ea1e7359cb46db9d"}, + {file = "MarkupSafe-3.0.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:93335ca3812df2f366e80509ae119189886b0f3c2b81325d39efdb84a1e2ae93"}, + {file = "MarkupSafe-3.0.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2cb8438c3cbb25e220c2ab33bb226559e7afb3baec11c4f218ffa7308603c832"}, + {file = "MarkupSafe-3.0.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a123e330ef0853c6e822384873bef7507557d8e4a082961e1defa947aa59ba84"}, + {file = "MarkupSafe-3.0.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1e084f686b92e5b83186b07e8a17fc09e38fff551f3602b249881fec658d3eca"}, + {file = "MarkupSafe-3.0.2-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:d8213e09c917a951de9d09ecee036d5c7d36cb6cb7dbaece4c71a60d79fb9798"}, + {file = "MarkupSafe-3.0.2-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:5b02fb34468b6aaa40dfc198d813a641e3a63b98c2b05a16b9f80b7ec314185e"}, + {file = "MarkupSafe-3.0.2-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:0bff5e0ae4ef2e1ae4fdf2dfd5b76c75e5c2fa4132d05fc1b0dabcd20c7e28c4"}, + {file = "MarkupSafe-3.0.2-cp311-cp311-win32.whl", hash = "sha256:6c89876f41da747c8d3677a2b540fb32ef5715f97b66eeb0c6b66f5e3ef6f59d"}, + {file = "MarkupSafe-3.0.2-cp311-cp311-win_amd64.whl", hash = "sha256:70a87b411535ccad5ef2f1df5136506a10775d267e197e4cf531ced10537bd6b"}, + {file = "MarkupSafe-3.0.2-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:9778bd8ab0a994ebf6f84c2b949e65736d5575320a17ae8984a77fab08db94cf"}, + {file = "MarkupSafe-3.0.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:846ade7b71e3536c4e56b386c2a47adf5741d2d8b94ec9dc3e92e5e1ee1e2225"}, + {file = "MarkupSafe-3.0.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1c99d261bd2d5f6b59325c92c73df481e05e57f19837bdca8413b9eac4bd8028"}, + {file = "MarkupSafe-3.0.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e17c96c14e19278594aa4841ec148115f9c7615a47382ecb6b82bd8fea3ab0c8"}, + {file = "MarkupSafe-3.0.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:88416bd1e65dcea10bc7569faacb2c20ce071dd1f87539ca2ab364bf6231393c"}, + {file = "MarkupSafe-3.0.2-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:2181e67807fc2fa785d0592dc2d6206c019b9502410671cc905d132a92866557"}, + {file = "MarkupSafe-3.0.2-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:52305740fe773d09cffb16f8ed0427942901f00adedac82ec8b67752f58a1b22"}, + {file = "MarkupSafe-3.0.2-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:ad10d3ded218f1039f11a75f8091880239651b52e9bb592ca27de44eed242a48"}, + {file = "MarkupSafe-3.0.2-cp312-cp312-win32.whl", hash = "sha256:0f4ca02bea9a23221c0182836703cbf8930c5e9454bacce27e767509fa286a30"}, + {file = 
"MarkupSafe-3.0.2-cp312-cp312-win_amd64.whl", hash = "sha256:8e06879fc22a25ca47312fbe7c8264eb0b662f6db27cb2d3bbbc74b1df4b9b87"}, + {file = "MarkupSafe-3.0.2-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:ba9527cdd4c926ed0760bc301f6728ef34d841f405abf9d4f959c478421e4efd"}, + {file = "MarkupSafe-3.0.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:f8b3d067f2e40fe93e1ccdd6b2e1d16c43140e76f02fb1319a05cf2b79d99430"}, + {file = "MarkupSafe-3.0.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:569511d3b58c8791ab4c2e1285575265991e6d8f8700c7be0e88f86cb0672094"}, + {file = "MarkupSafe-3.0.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:15ab75ef81add55874e7ab7055e9c397312385bd9ced94920f2802310c930396"}, + {file = "MarkupSafe-3.0.2-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f3818cb119498c0678015754eba762e0d61e5b52d34c8b13d770f0719f7b1d79"}, + {file = "MarkupSafe-3.0.2-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:cdb82a876c47801bb54a690c5ae105a46b392ac6099881cdfb9f6e95e4014c6a"}, + {file = "MarkupSafe-3.0.2-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:cabc348d87e913db6ab4aa100f01b08f481097838bdddf7c7a84b7575b7309ca"}, + {file = "MarkupSafe-3.0.2-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:444dcda765c8a838eaae23112db52f1efaf750daddb2d9ca300bcae1039adc5c"}, + {file = "MarkupSafe-3.0.2-cp313-cp313-win32.whl", hash = "sha256:bcf3e58998965654fdaff38e58584d8937aa3096ab5354d493c77d1fdd66d7a1"}, + {file = "MarkupSafe-3.0.2-cp313-cp313-win_amd64.whl", hash = "sha256:e6a2a455bd412959b57a172ce6328d2dd1f01cb2135efda2e4576e8a23fa3b0f"}, + {file = "MarkupSafe-3.0.2-cp313-cp313t-macosx_10_13_universal2.whl", hash = "sha256:b5a6b3ada725cea8a5e634536b1b01c30bcdcd7f9c6fff4151548d5bf6b3a36c"}, + {file = "MarkupSafe-3.0.2-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:a904af0a6162c73e3edcb969eeeb53a63ceeb5d8cf642fade7d39e7963a22ddb"}, + {file = "MarkupSafe-3.0.2-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4aa4e5faecf353ed117801a068ebab7b7e09ffb6e1d5e412dc852e0da018126c"}, + {file = "MarkupSafe-3.0.2-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c0ef13eaeee5b615fb07c9a7dadb38eac06a0608b41570d8ade51c56539e509d"}, + {file = "MarkupSafe-3.0.2-cp313-cp313t-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d16a81a06776313e817c951135cf7340a3e91e8c1ff2fac444cfd75fffa04afe"}, + {file = "MarkupSafe-3.0.2-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:6381026f158fdb7c72a168278597a5e3a5222e83ea18f543112b2662a9b699c5"}, + {file = "MarkupSafe-3.0.2-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:3d79d162e7be8f996986c064d1c7c817f6df3a77fe3d6859f6f9e7be4b8c213a"}, + {file = "MarkupSafe-3.0.2-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:131a3c7689c85f5ad20f9f6fb1b866f402c445b220c19fe4308c0b147ccd2ad9"}, + {file = "MarkupSafe-3.0.2-cp313-cp313t-win32.whl", hash = "sha256:ba8062ed2cf21c07a9e295d5b8a2a5ce678b913b45fdf68c32d95d6c1291e0b6"}, + {file = "MarkupSafe-3.0.2-cp313-cp313t-win_amd64.whl", hash = "sha256:e444a31f8db13eb18ada366ab3cf45fd4b31e4db1236a4448f68778c1d1a5a2f"}, + {file = "MarkupSafe-3.0.2-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:eaa0a10b7f72326f1372a713e73c3f739b524b3af41feb43e4921cb529f5929a"}, + {file = "MarkupSafe-3.0.2-cp39-cp39-macosx_11_0_arm64.whl", hash = 
"sha256:48032821bbdf20f5799ff537c7ac3d1fba0ba032cfc06194faffa8cda8b560ff"}, + {file = "MarkupSafe-3.0.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1a9d3f5f0901fdec14d8d2f66ef7d035f2157240a433441719ac9a3fba440b13"}, + {file = "MarkupSafe-3.0.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:88b49a3b9ff31e19998750c38e030fc7bb937398b1f78cfa599aaef92d693144"}, + {file = "MarkupSafe-3.0.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:cfad01eed2c2e0c01fd0ecd2ef42c492f7f93902e39a42fc9ee1692961443a29"}, + {file = "MarkupSafe-3.0.2-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:1225beacc926f536dc82e45f8a4d68502949dc67eea90eab715dea3a21c1b5f0"}, + {file = "MarkupSafe-3.0.2-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:3169b1eefae027567d1ce6ee7cae382c57fe26e82775f460f0b2778beaad66c0"}, + {file = "MarkupSafe-3.0.2-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:eb7972a85c54febfb25b5c4b4f3af4dcc731994c7da0d8a0b4a6eb0640e1d178"}, + {file = "MarkupSafe-3.0.2-cp39-cp39-win32.whl", hash = "sha256:8c4e8c3ce11e1f92f6536ff07154f9d49677ebaaafc32db9db4620bc11ed480f"}, + {file = "MarkupSafe-3.0.2-cp39-cp39-win_amd64.whl", hash = "sha256:6e296a513ca3d94054c2c881cc913116e90fd030ad1c656b3869762b754f5f8a"}, + {file = "markupsafe-3.0.2.tar.gz", hash = "sha256:ee55d3edf80167e48ea11a923c7386f4669df67d7994554387f84e7d8b0a2bf0"}, ] [[package]] name = "maxminddb" -version = "2.5.1" +version = "2.6.3" description = "Reader for the MaxMind DB format" optional = false python-versions = ">=3.8" -files = [ - {file = "maxminddb-2.5.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:62e93a8e99937bf4307eeece3ca37e1161325ebf9363c4ce195410fb5daf64a0"}, - {file = "maxminddb-2.5.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ea2e27a507b53dfbf2ba2ba85c98682a1ad2dac3f9941a7bffa5cb86150d0c47"}, - {file = "maxminddb-2.5.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a01b0341bd6bee431bb8c07c7ac0ed221250c7390b125c025b7d57578e78e8a3"}, - {file = "maxminddb-2.5.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:607344b1079ea647629bf962dcea7580ec864faaad3f5aae650e2e8652121d89"}, - {file = "maxminddb-2.5.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:2c2901daebd7c8a702302315e7a58cdc38e626406ad4a05b4d48634897d5f5a3"}, - {file = "maxminddb-2.5.1-cp310-cp310-win32.whl", hash = "sha256:7805ae8c9de433c38939ada2e376706a9f6740239f61fd445927b88f5b42c267"}, - {file = "maxminddb-2.5.1-cp310-cp310-win_amd64.whl", hash = "sha256:f1e5bd58b71f322dc6c16a95a129433b1bc229d4b714f870a61c2367425396ee"}, - {file = "maxminddb-2.5.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:b0bbbd58b300aaddf985f763720bdebba9f7a73168ff9f57168117f630ad1c06"}, - {file = "maxminddb-2.5.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5a6751e2e89d62d53217870bcc2a8c887dc56ae370ba1b74e52e880761916e54"}, - {file = "maxminddb-2.5.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2ecb1be961f1969be047d07743093f0dcf2f6d4ec3a06a4555587f380a96f6e7"}, - {file = "maxminddb-2.5.1-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:1e091c2b44673c218ee2df23adbc0b6d04fd5c646cfcb6c6fe26fb849434812a"}, - {file = "maxminddb-2.5.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:e09b295c401c104ae0e30f66c1a3f3c2aa4ba2cbe12a787576499356a5a4d6c1"}, - {file = 
"maxminddb-2.5.1-cp311-cp311-win32.whl", hash = "sha256:3d52c693baf07bba897d109b0ecb067f21fd0cc0fb266d67db456e85b80d699e"}, - {file = "maxminddb-2.5.1-cp311-cp311-win_amd64.whl", hash = "sha256:4c67621e842c415ce336ab019a9f087305dfcf24c095b68b8e9d27848f6f6d91"}, - {file = "maxminddb-2.5.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:17ea454f61631b9815d420d48d00663f8718fc7de30be53ffcec0f73989475eb"}, - {file = "maxminddb-2.5.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ef4d508c899ce0f37de731340759c68bfd1102a39a873675c71fae2c8d71ad97"}, - {file = "maxminddb-2.5.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c4e5ca423b1e310f0327536f5ed1a2c6e08d83289a7f909e021590b0b477cae2"}, - {file = "maxminddb-2.5.1-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:0a21abd85e10e5e0f60244b49c3db17e7e48befd4972e62a62833d91e2acbb49"}, - {file = "maxminddb-2.5.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:85a302d79577efe5bc308647394ffdc535dd5f062644c41103604ccf24931a05"}, - {file = "maxminddb-2.5.1-cp312-cp312-win32.whl", hash = "sha256:dd28c434fb44f825dde6a75df2c338d44645791b03480af66a4d993f93801e10"}, - {file = "maxminddb-2.5.1-cp312-cp312-win_amd64.whl", hash = "sha256:b477852cf1741d9187b021e23723e64b063794bbf946a9b5b84cc222f3caf58a"}, - {file = "maxminddb-2.5.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:a1e1a19f9740f586362f47862d0095b54d50b9d465babcaa8a563746132fe5be"}, - {file = "maxminddb-2.5.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d654895b546a47e85f2e071b98e377a60bb03cd643b9423017fa66fcd5adedce"}, - {file = "maxminddb-2.5.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0702da59b9670a72761b65cb1a52bc3032d8f6799bdab641cb8350ad5740580b"}, - {file = "maxminddb-2.5.1-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:2e20a70c1545d6626dcd4ce2d7ecf3d566d978ea64cb37e7952f93baff66b812"}, - {file = "maxminddb-2.5.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:0cbd272db3202e948c9088e48dec62add071a47971d84ceb11d2cb2880f83e5a"}, - {file = "maxminddb-2.5.1-cp38-cp38-win32.whl", hash = "sha256:fbd01fc7d7b5b2befe914e8cdb5ed3a1c5476e57b765197cceff8d897f33d012"}, - {file = "maxminddb-2.5.1-cp38-cp38-win_amd64.whl", hash = "sha256:fe0af3ba9e1a78ed5f2ad32fc18d18b78ef233e7d0c627e1a77a525a7eb0c241"}, - {file = "maxminddb-2.5.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:5d772be68cce812f7c4b15ae8c68e624c8b88ff83071e3903ca5b5f55e343c25"}, - {file = "maxminddb-2.5.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:910e7b3ad87d5352ed3f496bd42bffbf9f896245278b0d8e76afa1382e42a7ae"}, - {file = "maxminddb-2.5.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:892c11a8694394e97d3ac0f8d5974ea588c732d14e721f22095c58b4f584c144"}, - {file = "maxminddb-2.5.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:3ce1f42bdfce7b86cb5a56cba730fed611fb879d867e6024f0d520257bef6891"}, - {file = "maxminddb-2.5.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:6667948e7501a513caef90edda2d367865097239d4c2381eb3998e9905af7209"}, - {file = "maxminddb-2.5.1-cp39-cp39-win32.whl", hash = "sha256:500d321bdefe4dcd351e4390a79b7786aab49b0536bedfa0788e5ffb0e91e421"}, - {file = "maxminddb-2.5.1-cp39-cp39-win_amd64.whl", hash = "sha256:93f7055779caf7753810f1e2c6444af6d727393fd116ffa0767fbd54fb8c9bbf"}, - {file = 
"maxminddb-2.5.1-pp310-pypy310_pp73-macosx_10_9_x86_64.whl", hash = "sha256:8cee4315da7cdd3f2a18f1ab1418953a7a9eda65e63095b01f03c7d3645d633e"}, - {file = "maxminddb-2.5.1-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c97eac5af102cede4b5f57cecb25e8f949fa4e4a8d812bed575539951c60ecaf"}, - {file = "maxminddb-2.5.1-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:526744b12075051fa20979090c111cc3a42a3b55e2714818270c7b84a41a8cfe"}, - {file = "maxminddb-2.5.1-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:fad45cd2f2e3c5fbebacb8d172a60fb22443222e549bf740a0bc7eeb849e5ce7"}, - {file = "maxminddb-2.5.1-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:8b98ed5c34955c48e72d35daed713ba4a6833a8a6d1204e79d2c85e644049792"}, - {file = "maxminddb-2.5.1-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:639aee8abd63a95baa12b94b6f3a842d51877d631879c7d08c98c68dc44a84c3"}, - {file = "maxminddb-2.5.1-pp38-pypy38_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2a7a73ab4bbc16b81983531c99fa102a0c7dae459db958c17fea48c981f5e764"}, - {file = "maxminddb-2.5.1-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:aae262da1940a67c3ba765c49e2308947ce68ff647f87630002c306433a98ca1"}, - {file = "maxminddb-2.5.1-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:b223c53077a736c304b63cf5afceb928975fbd12ddae5afd6b71370bab7b4700"}, - {file = "maxminddb-2.5.1-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:969d0057ea5472e0b574c5293c4f3ecf49585362351c543e8ea55dc48b60f1eb"}, - {file = "maxminddb-2.5.1-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d4d36cf3d390f02d2bdf53d9efefb92be7bd70e07a5a86cdb79020c48c2d81b7"}, - {file = "maxminddb-2.5.1-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:188173c07dce0692fd5660a6eb7ea8c126d7b3a4b61496c8a8ee9e8b10186ff5"}, - {file = "maxminddb-2.5.1.tar.gz", hash = "sha256:4807d374e645bd68334e4f487ba85a27189dbc1267a98e644aa686a7927e0559"}, +groups = ["main"] +files = [ + {file = "maxminddb-2.6.3-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:d69c5493c81f11bca90961b4dfa028c031aa8e7bb156653edf242a03dfc51561"}, + {file = "maxminddb-2.6.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:6cc002099c9e1637309df772789a36db9a4601c4623dd1ace8145d057358c20b"}, + {file = "maxminddb-2.6.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ef41bfe15692fe15e1799d600366a0faa3673a0d7d7dbe6a305ec3a5b6f07708"}, + {file = "maxminddb-2.6.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:46bdc8dc528a2f64ef34182bf40084e05410344d40097c1e93554d732dfb0e15"}, + {file = "maxminddb-2.6.3-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:98258a295149aadf96ed8d667468722b248fe47bb991891ad01cfa8cb9e9684a"}, + {file = "maxminddb-2.6.3-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:0dd55d2498d287b6cfd6b857deed9070e53c4b22a1acd69615e88dec92d95fb3"}, + {file = "maxminddb-2.6.3-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:e38a449890a976365da1f2c927ac076838aa2715b464593080075a18ae4e0dc8"}, + {file = "maxminddb-2.6.3-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:a70d46337c9497a5b3329d9c7fa7f45be33243ffad04924b8f06ffe41a136279"}, + {file = "maxminddb-2.6.3-cp310-cp310-win32.whl", hash = 
"sha256:45da7549c952f88da39c9f440cb3fa2abbd7472571597699467641af88512730"}, + {file = "maxminddb-2.6.3-cp310-cp310-win_amd64.whl", hash = "sha256:6c977da32cc72784980da1928a79d38b3e9fe83faa9a40ea9bae598a6bf2f7bb"}, + {file = "maxminddb-2.6.3-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:27ba5e22bd09fe324f0a4c5ed97e73c1c7c3ab7e3bae4e1e6fcaa15f175b9f5a"}, + {file = "maxminddb-2.6.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:fa36f1ca12fd3a37ad758afd0666457a749b2c4b16db0eb3f8c953f55ae6325d"}, + {file = "maxminddb-2.6.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:83d2324788a31a28bbb38b0dbdece5826f56db4df6e1538cf6f4b72f6a3da66c"}, + {file = "maxminddb-2.6.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d4bac2b7b7609bed8dcf6beef1ef4a1e411e9e39c311070ffc2ace80d6de6444"}, + {file = "maxminddb-2.6.3-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:8868580f34b483d5b74edd4270db417e211906d57fb13bbeeb11ea8d5cd01829"}, + {file = "maxminddb-2.6.3-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:b29cea50b191784e2242227e0fac5bc985972b3849f97fe96c7f37fb7a7426d7"}, + {file = "maxminddb-2.6.3-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:4d470fc4f9c5ed8854a945dc5ea56b2f0644a5c3e5872d0e579d66a5a9238d7f"}, + {file = "maxminddb-2.6.3-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:2849357de35bfed0011ad1ff14a83c946273ae8c75a8867612d22f559df70e7d"}, + {file = "maxminddb-2.6.3-cp311-cp311-win32.whl", hash = "sha256:39254e173af7b0018c1508c2dd68ecda0c043032176140cfe917587e2d082f42"}, + {file = "maxminddb-2.6.3-cp311-cp311-win_amd64.whl", hash = "sha256:489c5ae835198a228380b83cc537a5ffb1911f1579d7545baf097e4a8eefcd9a"}, + {file = "maxminddb-2.6.3-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:2b0fef825b23df047876d2056cbb69fb8d8e4b965f744f674be75e16fb86a52e"}, + {file = "maxminddb-2.6.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:a38faf03db15cc285009c0ddaacd04071b84ebd8ff7d773f700c7def695a291c"}, + {file = "maxminddb-2.6.3-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:edab18a50470031fc8447bcd9285c9f5f952abef2b6db5579fe50665bdcda941"}, + {file = "maxminddb-2.6.3-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:415dd5de87adc7640d3da2a8e7cf19a313c1a715cb84a3433f0e3b2d27665319"}, + {file = "maxminddb-2.6.3-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d78a02b70ededb3ba7317c24266217d7b68283e3be04cad0c34ee446a0217ee0"}, + {file = "maxminddb-2.6.3-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:4b80275603bba6a95ed69d859d184dfa60bfd8e83cd4c8b722d7f7eaa9d95f8f"}, + {file = "maxminddb-2.6.3-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:a6868438d1771c0bd0bbc95d84480c1ae04df72a85879e1ada42762250a00f59"}, + {file = "maxminddb-2.6.3-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:efd875d43c4207fb90e10d582e4394d8a04f7b55c83c4d6bc0593a7be450e04f"}, + {file = "maxminddb-2.6.3-cp312-cp312-win32.whl", hash = "sha256:aadb9d12e887a1f52e8214e539e5d78338356fad4ef2a51931f6f7dbe56c2228"}, + {file = "maxminddb-2.6.3-cp312-cp312-win_amd64.whl", hash = "sha256:7d6024d1e40244b5549c5e6063af109399a2f89503a24916b5139c4d0657f1c8"}, + {file = "maxminddb-2.6.3-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:9580b2cd017185db07baacd9d629ca01f3fe6f236528681c88a0209725376e9c"}, + {file = "maxminddb-2.6.3-cp313-cp313-macosx_11_0_arm64.whl", hash = 
"sha256:47828bed767b82c219ba7aa65f0cb03d7f7443d7270259ce931e133a40691d34"}, + {file = "maxminddb-2.6.3-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:77112cb1a2e381de42c443d1bf222c58b9da203183bb2008dd370c3d2a587a4e"}, + {file = "maxminddb-2.6.3-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:448d062e95242e3088df85fe7ed3f2890a9f4aea924bde336e9ff5d2337ca5fd"}, + {file = "maxminddb-2.6.3-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a59d72bf373c61da156fd43e2be6da802f68370a50a2205de84ee76916e05f9f"}, + {file = "maxminddb-2.6.3-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:e867852037a8a26a24cfcf31b697dce63d488e1617af244c2895568d8f6c7a31"}, + {file = "maxminddb-2.6.3-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:5a1586260eac831d61c2665b26ca1ae3ad00caca57c8031346767f4527025311"}, + {file = "maxminddb-2.6.3-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:6eb23f842a72ab3096f9f9b1c292f4feb55a8d758567cb6d77637c2257a3187c"}, + {file = "maxminddb-2.6.3-cp313-cp313-win32.whl", hash = "sha256:acf46e20709a27d2b519669888e3f53a37bc4204b98a0c690664c48ff8cb1364"}, + {file = "maxminddb-2.6.3-cp313-cp313-win_amd64.whl", hash = "sha256:3015afb00e6168837938dbe5fda40ace37442c22b292ccee27c1690fbf6078ed"}, + {file = "maxminddb-2.6.3-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:9d913971187326e59be8a63068128b6439f6717b13c7c451e6d9e1723286d9ff"}, + {file = "maxminddb-2.6.3-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:89afed255ac3652db7f91d8f6b278a4c490c47283ddbff5589c22cfdef4b8453"}, + {file = "maxminddb-2.6.3-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:01b143a38ae38c71ebc9028d67bbcb05c1b954e0f3a28c508eaee46833807903"}, + {file = "maxminddb-2.6.3-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:deebf098c79ce031069fec1d7202cba0e766b3f12adbb631d16223174994724a"}, + {file = "maxminddb-2.6.3-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f49eefddad781e088969188c606b7988a7da27592590f6c4cc2b64fd2a85ff28"}, + {file = "maxminddb-2.6.3-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:da584edc3e4465f5417a48602ed7e2bee4f2a7a2b43fcf2c40728cfc9f9fd5aa"}, + {file = "maxminddb-2.6.3-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:e5a8cfe71db548aa9a520a3f7e92430b6b7900affadef3b0c83c530c759dd12f"}, + {file = "maxminddb-2.6.3-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:daa20961ad0fb550038c02dbf76a04e1c1958a3b899fa14a7c412aed67380812"}, + {file = "maxminddb-2.6.3-cp38-cp38-win32.whl", hash = "sha256:6480ca47db4d8d09296c268e8ff4e6f4c1d455773a67233c9f899dfa6af3e6c6"}, + {file = "maxminddb-2.6.3-cp38-cp38-win_amd64.whl", hash = "sha256:0348c8dadef9493dbcd45f032ae271c7fd2216ed4bb4bab0aff371ffc522f871"}, + {file = "maxminddb-2.6.3-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:1bc2edcef76ce54d4df04f58aec98f4df0377f37aae2217587bfecd663ed5c66"}, + {file = "maxminddb-2.6.3-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:1f0b78c40a12588e9e0ca0ffe5306b6dea028dcd21f2c120d1ceb328a3307a98"}, + {file = "maxminddb-2.6.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f06e9c908a9270e882f0d23f041a9674680a7a110412b453f902d22323f86d38"}, + {file = "maxminddb-2.6.3-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:46096646c284835c8a580ec2ccbf0d6d5398191531fa543bb0437983c75cb7ba"}, + {file = 
"maxminddb-2.6.3-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0d82fbddf3a88e6aa6181bd16bc08a6939d6353f97f143eeddec16bc5394e361"}, + {file = "maxminddb-2.6.3-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:6136dc8ad8c8f7e95a7d84174a990c1b47d5e641e3a3a8ae67d7bde625342dbb"}, + {file = "maxminddb-2.6.3-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:01773fee182cc36f6d38c277936accf7c85b8f4c20d13bb630666f6b3f087ad8"}, + {file = "maxminddb-2.6.3-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:890dd845e371f67edef7b19a2866191d9fff85faf88f4b4c416a0aaa37204416"}, + {file = "maxminddb-2.6.3-cp39-cp39-win32.whl", hash = "sha256:4e0865069ef76b4f3eb862c042b107088171cbf43fea3dcaae0dd7253effe6e3"}, + {file = "maxminddb-2.6.3-cp39-cp39-win_amd64.whl", hash = "sha256:7c3209d7a4b2f50d4b28a1d886d95b19094cdc840208e69dbbc40cae2c1cc65b"}, + {file = "maxminddb-2.6.3-pp310-pypy310_pp73-macosx_10_15_x86_64.whl", hash = "sha256:b4729936fedb4793d9162b92d6de63e267e388c8938e19e700120b6df6a6ae6c"}, + {file = "maxminddb-2.6.3-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:6887315de47f3a9840df19f498a4e68723c160c9448d276c3ef454531555778e"}, + {file = "maxminddb-2.6.3-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:34943b4b724a35ef63ec40dcf894a100575d233b23b6cd4f8224017ea1195265"}, + {file = "maxminddb-2.6.3-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2d25acb42ef8829e8e3491b6b3b4ced9dbb4eea6c4ec24afdc4028051e7b8803"}, + {file = "maxminddb-2.6.3-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a23c7c88f9df0727a3e56f2385ec19fb5f61bb46dcbebb6ddc5c948cf0b73b0a"}, + {file = "maxminddb-2.6.3-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:85763c19246dce43044be58cb9119579c2efd0b85a7b79d865b741a698866488"}, + {file = "maxminddb-2.6.3-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:7c5d15a0546821a7e9104b71ca701c01462390d0a1bee5cad75f583cf26c400b"}, + {file = "maxminddb-2.6.3-pp38-pypy38_pp73-macosx_11_0_arm64.whl", hash = "sha256:de8415538d778ae4f4bb40e2cee9581e2d5c860abdbdbba1458953f5b314a6b0"}, + {file = "maxminddb-2.6.3-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:81340e52c743cdc3c0f4a9f45f9cf4e3c2ae87bf4bbb34613c5059a5b829eb65"}, + {file = "maxminddb-2.6.3-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0b140c1db0c218f485b033b51a086d98d57f55f4a4c2b1cb72fe6a5e1e57359a"}, + {file = "maxminddb-2.6.3-pp38-pypy38_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:8ec674a2c2e4b47ab9f582460670a5c1d7725b1cbf16e6cbb94de1ae51ee9edf"}, + {file = "maxminddb-2.6.3-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:e28622fd7c4ccd298c3f630161d0801182eb38038ca01319693a70264de40b89"}, + {file = "maxminddb-2.6.3-pp39-pypy39_pp73-macosx_10_15_x86_64.whl", hash = "sha256:b09bb7bb98418a620b1ec1881d1594c02e715a68cdc925781de1e79b39cefe77"}, + {file = "maxminddb-2.6.3-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:a6597599cde3916730d69b023045e6c22ff1c076d9cad7fb63641d36d01e3e93"}, + {file = "maxminddb-2.6.3-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4e7e6d5f3c1aa6350303edab8f0dd471e616d69b5d47ff5ecbf2c7c82998b9c6"}, + {file = "maxminddb-2.6.3-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:536a39fb917a44b1cd037da624e3d11d49898b5579dfc00c4d7103a057dc51ab"}, + {file = "maxminddb-2.6.3-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a9ebd373a4ef69218bfbce93e9b97f583cfe681b28d4e32e0d64f76ded148fba"}, + {file = "maxminddb-2.6.3-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:e441478922c2d311b8bc96f35d6e78306802774149fc20d07d96cc5c3b57dd02"}, + {file = "maxminddb-2.6.3.tar.gz", hash = "sha256:d2c3806baa7aa047aa1bac7419e7e353db435f88f09d51106a84dbacf645d254"}, ] -[package.dependencies] -setuptools = ">=68.2.2" - [[package]] name = "mdurl" version = "0.1.2" description = "Markdown URL utilities" optional = false python-versions = ">=3.7" +groups = ["main", "dev"] files = [ {file = "mdurl-0.1.2-py3-none-any.whl", hash = "sha256:84008a41e51615a49fc9966191ff91509e3c40b939176e643fd50a5c2196b8f8"}, {file = "mdurl-0.1.2.tar.gz", hash = "sha256:bb413d29f5eea38f31dd4754dd7377d4465116fb207585f97bf925588687c1ba"}, @@ -1798,6 +2131,7 @@ version = "1.0.8" description = "MessagePack serializer" optional = false python-versions = ">=3.8" +groups = ["main"] files = [ {file = "msgpack-1.0.8-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:505fe3d03856ac7d215dbe005414bc28505d26f0c128906037e66d98c4e95868"}, {file = "msgpack-1.0.8-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:e6b7842518a63a9f17107eb176320960ec095a8ee3b4420b5f688e24bf50c53c"}, @@ -1859,54 +2193,55 @@ files = [ [[package]] name = "msgspec" -version = "0.18.6" +version = "0.19.0" description = "A fast serialization and validation library, with builtin support for JSON, MessagePack, YAML, and TOML." optional = false -python-versions = ">=3.8" -files = [ - {file = "msgspec-0.18.6-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:77f30b0234eceeff0f651119b9821ce80949b4d667ad38f3bfed0d0ebf9d6d8f"}, - {file = "msgspec-0.18.6-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:1a76b60e501b3932782a9da039bd1cd552b7d8dec54ce38332b87136c64852dd"}, - {file = "msgspec-0.18.6-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:06acbd6edf175bee0e36295d6b0302c6de3aaf61246b46f9549ca0041a9d7177"}, - {file = "msgspec-0.18.6-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:40a4df891676d9c28a67c2cc39947c33de516335680d1316a89e8f7218660410"}, - {file = "msgspec-0.18.6-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:a6896f4cd5b4b7d688018805520769a8446df911eb93b421c6c68155cdf9dd5a"}, - {file = "msgspec-0.18.6-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:3ac4dd63fd5309dd42a8c8c36c1563531069152be7819518be0a9d03be9788e4"}, - {file = "msgspec-0.18.6-cp310-cp310-win_amd64.whl", hash = "sha256:fda4c357145cf0b760000c4ad597e19b53adf01382b711f281720a10a0fe72b7"}, - {file = "msgspec-0.18.6-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:e77e56ffe2701e83a96e35770c6adb655ffc074d530018d1b584a8e635b4f36f"}, - {file = "msgspec-0.18.6-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:d5351afb216b743df4b6b147691523697ff3a2fc5f3d54f771e91219f5c23aaa"}, - {file = "msgspec-0.18.6-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c3232fabacef86fe8323cecbe99abbc5c02f7698e3f5f2e248e3480b66a3596b"}, - {file = "msgspec-0.18.6-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e3b524df6ea9998bbc99ea6ee4d0276a101bcc1aa8d14887bb823914d9f60d07"}, - {file = "msgspec-0.18.6-cp311-cp311-musllinux_1_1_aarch64.whl", hash = 
"sha256:37f67c1d81272131895bb20d388dd8d341390acd0e192a55ab02d4d6468b434c"}, - {file = "msgspec-0.18.6-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:d0feb7a03d971c1c0353de1a8fe30bb6579c2dc5ccf29b5f7c7ab01172010492"}, - {file = "msgspec-0.18.6-cp311-cp311-win_amd64.whl", hash = "sha256:41cf758d3f40428c235c0f27bc6f322d43063bc32da7b9643e3f805c21ed57b4"}, - {file = "msgspec-0.18.6-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:d86f5071fe33e19500920333c11e2267a31942d18fed4d9de5bc2fbab267d28c"}, - {file = "msgspec-0.18.6-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:ce13981bfa06f5eb126a3a5a38b1976bddb49a36e4f46d8e6edecf33ccf11df1"}, - {file = "msgspec-0.18.6-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e97dec6932ad5e3ee1e3c14718638ba333befc45e0661caa57033cd4cc489466"}, - {file = "msgspec-0.18.6-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ad237100393f637b297926cae1868b0d500f764ccd2f0623a380e2bcfb2809ca"}, - {file = "msgspec-0.18.6-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:db1d8626748fa5d29bbd15da58b2d73af25b10aa98abf85aab8028119188ed57"}, - {file = "msgspec-0.18.6-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:d70cb3d00d9f4de14d0b31d38dfe60c88ae16f3182988246a9861259c6722af6"}, - {file = "msgspec-0.18.6-cp312-cp312-win_amd64.whl", hash = "sha256:1003c20bfe9c6114cc16ea5db9c5466e49fae3d7f5e2e59cb70693190ad34da0"}, - {file = "msgspec-0.18.6-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:f7d9faed6dfff654a9ca7d9b0068456517f63dbc3aa704a527f493b9200b210a"}, - {file = "msgspec-0.18.6-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:9da21f804c1a1471f26d32b5d9bc0480450ea77fbb8d9db431463ab64aaac2cf"}, - {file = "msgspec-0.18.6-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:46eb2f6b22b0e61c137e65795b97dc515860bf6ec761d8fb65fdb62aa094ba61"}, - {file = "msgspec-0.18.6-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c8355b55c80ac3e04885d72db515817d9fbb0def3bab936bba104e99ad22cf46"}, - {file = "msgspec-0.18.6-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:9080eb12b8f59e177bd1eb5c21e24dd2ba2fa88a1dbc9a98e05ad7779b54c681"}, - {file = "msgspec-0.18.6-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:cc001cf39becf8d2dcd3f413a4797c55009b3a3cdbf78a8bf5a7ca8fdb76032c"}, - {file = "msgspec-0.18.6-cp38-cp38-win_amd64.whl", hash = "sha256:fac5834e14ac4da1fca373753e0c4ec9c8069d1fe5f534fa5208453b6065d5be"}, - {file = "msgspec-0.18.6-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:974d3520fcc6b824a6dedbdf2b411df31a73e6e7414301abac62e6b8d03791b4"}, - {file = "msgspec-0.18.6-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:fd62e5818731a66aaa8e9b0a1e5543dc979a46278da01e85c3c9a1a4f047ef7e"}, - {file = "msgspec-0.18.6-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7481355a1adcf1f08dedd9311193c674ffb8bf7b79314b4314752b89a2cf7f1c"}, - {file = "msgspec-0.18.6-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6aa85198f8f154cf35d6f979998f6dadd3dc46a8a8c714632f53f5d65b315c07"}, - {file = "msgspec-0.18.6-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:0e24539b25c85c8f0597274f11061c102ad6b0c56af053373ba4629772b407be"}, - {file = "msgspec-0.18.6-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:c61ee4d3be03ea9cd089f7c8e36158786cd06e51fbb62529276452bbf2d52ece"}, - {file = "msgspec-0.18.6-cp39-cp39-win_amd64.whl", hash = "sha256:b5c390b0b0b7da879520d4ae26044d74aeee5144f83087eb7842ba59c02bc090"}, - {file = 
"msgspec-0.18.6.tar.gz", hash = "sha256:a59fc3b4fcdb972d09138cb516dbde600c99d07c38fd9372a6ef500d2d031b4e"}, +python-versions = ">=3.9" +groups = ["main"] +files = [ + {file = "msgspec-0.19.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:d8dd848ee7ca7c8153462557655570156c2be94e79acec3561cf379581343259"}, + {file = "msgspec-0.19.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:0553bbc77662e5708fe66aa75e7bd3e4b0f209709c48b299afd791d711a93c36"}, + {file = "msgspec-0.19.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fe2c4bf29bf4e89790b3117470dea2c20b59932772483082c468b990d45fb947"}, + {file = "msgspec-0.19.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:00e87ecfa9795ee5214861eab8326b0e75475c2e68a384002aa135ea2a27d909"}, + {file = "msgspec-0.19.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:3c4ec642689da44618f68c90855a10edbc6ac3ff7c1d94395446c65a776e712a"}, + {file = "msgspec-0.19.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:2719647625320b60e2d8af06b35f5b12d4f4d281db30a15a1df22adb2295f633"}, + {file = "msgspec-0.19.0-cp310-cp310-win_amd64.whl", hash = "sha256:695b832d0091edd86eeb535cd39e45f3919f48d997685f7ac31acb15e0a2ed90"}, + {file = "msgspec-0.19.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:aa77046904db764b0462036bc63ef71f02b75b8f72e9c9dd4c447d6da1ed8f8e"}, + {file = "msgspec-0.19.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:047cfa8675eb3bad68722cfe95c60e7afabf84d1bd8938979dd2b92e9e4a9551"}, + {file = "msgspec-0.19.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e78f46ff39a427e10b4a61614a2777ad69559cc8d603a7c05681f5a595ea98f7"}, + {file = "msgspec-0.19.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6c7adf191e4bd3be0e9231c3b6dc20cf1199ada2af523885efc2ed218eafd011"}, + {file = "msgspec-0.19.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:f04cad4385e20be7c7176bb8ae3dca54a08e9756cfc97bcdb4f18560c3042063"}, + {file = "msgspec-0.19.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:45c8fb410670b3b7eb884d44a75589377c341ec1392b778311acdbfa55187716"}, + {file = "msgspec-0.19.0-cp311-cp311-win_amd64.whl", hash = "sha256:70eaef4934b87193a27d802534dc466778ad8d536e296ae2f9334e182ac27b6c"}, + {file = "msgspec-0.19.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:f98bd8962ad549c27d63845b50af3f53ec468b6318400c9f1adfe8b092d7b62f"}, + {file = "msgspec-0.19.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:43bbb237feab761b815ed9df43b266114203f53596f9b6e6f00ebd79d178cdf2"}, + {file = "msgspec-0.19.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4cfc033c02c3e0aec52b71710d7f84cb3ca5eb407ab2ad23d75631153fdb1f12"}, + {file = "msgspec-0.19.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d911c442571605e17658ca2b416fd8579c5050ac9adc5e00c2cb3126c97f73bc"}, + {file = "msgspec-0.19.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:757b501fa57e24896cf40a831442b19a864f56d253679f34f260dcb002524a6c"}, + {file = "msgspec-0.19.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:5f0f65f29b45e2816d8bded36e6b837a4bf5fb60ec4bc3c625fa2c6da4124537"}, + {file = "msgspec-0.19.0-cp312-cp312-win_amd64.whl", hash = "sha256:067f0de1c33cfa0b6a8206562efdf6be5985b988b53dd244a8e06f993f27c8c0"}, + {file = "msgspec-0.19.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:f12d30dd6266557aaaf0aa0f9580a9a8fbeadfa83699c487713e355ec5f0bd86"}, + {file = 
"msgspec-0.19.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:82b2c42c1b9ebc89e822e7e13bbe9d17ede0c23c187469fdd9505afd5a481314"}, + {file = "msgspec-0.19.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:19746b50be214a54239aab822964f2ac81e38b0055cca94808359d779338c10e"}, + {file = "msgspec-0.19.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:60ef4bdb0ec8e4ad62e5a1f95230c08efb1f64f32e6e8dd2ced685bcc73858b5"}, + {file = "msgspec-0.19.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:ac7f7c377c122b649f7545810c6cd1b47586e3aa3059126ce3516ac7ccc6a6a9"}, + {file = "msgspec-0.19.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:a5bc1472223a643f5ffb5bf46ccdede7f9795078194f14edd69e3aab7020d327"}, + {file = "msgspec-0.19.0-cp313-cp313-win_amd64.whl", hash = "sha256:317050bc0f7739cb30d257ff09152ca309bf5a369854bbf1e57dffc310c1f20f"}, + {file = "msgspec-0.19.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:15c1e86fff77184c20a2932cd9742bf33fe23125fa3fcf332df9ad2f7d483044"}, + {file = "msgspec-0.19.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:3b5541b2b3294e5ffabe31a09d604e23a88533ace36ac288fa32a420aa38d229"}, + {file = "msgspec-0.19.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0f5c043ace7962ef188746e83b99faaa9e3e699ab857ca3f367b309c8e2c6b12"}, + {file = "msgspec-0.19.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ca06aa08e39bf57e39a258e1996474f84d0dd8130d486c00bec26d797b8c5446"}, + {file = "msgspec-0.19.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:e695dad6897896e9384cf5e2687d9ae9feaef50e802f93602d35458e20d1fb19"}, + {file = "msgspec-0.19.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:3be5c02e1fee57b54130316a08fe40cca53af92999a302a6054cd451700ea7db"}, + {file = "msgspec-0.19.0-cp39-cp39-win_amd64.whl", hash = "sha256:0684573a821be3c749912acf5848cce78af4298345cb2d7a8b8948a0a5a27cfe"}, + {file = "msgspec-0.19.0.tar.gz", hash = "sha256:604037e7cd475345848116e89c553aa9a233259733ab51986ac924ab1b976f8e"}, ] [package.extras] -dev = ["attrs", "coverage", "furo", "gcovr", "ipython", "msgpack", "mypy", "pre-commit", "pyright", "pytest", "pyyaml", "sphinx", "sphinx-copybutton", "sphinx-design", "tomli", "tomli-w"] +dev = ["attrs", "coverage", "eval-type-backport", "furo", "ipython", "msgpack", "mypy", "pre-commit", "pyright", "pytest", "pyyaml", "sphinx", "sphinx-copybutton", "sphinx-design", "tomli", "tomli_w"] doc = ["furo", "ipython", "sphinx", "sphinx-copybutton", "sphinx-design"] -test = ["attrs", "msgpack", "mypy", "pyright", "pytest", "pyyaml", "tomli", "tomli-w"] -toml = ["tomli", "tomli-w"] +test = ["attrs", "eval-type-backport", "msgpack", "pytest", "pyyaml", "tomli", "tomli_w"] +toml = ["tomli", "tomli_w"] yaml = ["pyyaml"] [[package]] @@ -1915,6 +2250,8 @@ version = "5.4.2" description = "Python tool and library for decrypting and encrypting MS Office files using a password or other keys" optional = false python-versions = "<4.0,>=3.8" +groups = ["main"] +markers = "platform_python_implementation != \"PyPy\" or platform_system != \"Windows\" and platform_system != \"Darwin\"" files = [ {file = "msoffcrypto_tool-5.4.2-py3-none-any.whl", hash = "sha256:274fe2181702d1e5a107ec1b68a4c9fea997a44972ae1cc9ae0cb4f6a50fef0e"}, {file = "msoffcrypto_tool-5.4.2.tar.gz", hash = "sha256:44b545adba0407564a0cc3d6dde6ca36b7c0fdf352b85bca51618fa1d4817370"}, @@ -1924,54 +2261,108 @@ files = [ cryptography = ">=39.0" olefile = ">=0.46" +[[package]] +name = "mypy" 
+version = "1.14.1" +description = "Optional static typing for Python" +optional = false +python-versions = ">=3.8" +groups = ["dev"] +files = [ + {file = "mypy-1.14.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:52686e37cf13d559f668aa398dd7ddf1f92c5d613e4f8cb262be2fb4fedb0fcb"}, + {file = "mypy-1.14.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:1fb545ca340537d4b45d3eecdb3def05e913299ca72c290326be19b3804b39c0"}, + {file = "mypy-1.14.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:90716d8b2d1f4cd503309788e51366f07c56635a3309b0f6a32547eaaa36a64d"}, + {file = "mypy-1.14.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:2ae753f5c9fef278bcf12e1a564351764f2a6da579d4a81347e1d5a15819997b"}, + {file = "mypy-1.14.1-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:e0fe0f5feaafcb04505bcf439e991c6d8f1bf8b15f12b05feeed96e9e7bf1427"}, + {file = "mypy-1.14.1-cp310-cp310-win_amd64.whl", hash = "sha256:7d54bd85b925e501c555a3227f3ec0cfc54ee8b6930bd6141ec872d1c572f81f"}, + {file = "mypy-1.14.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:f995e511de847791c3b11ed90084a7a0aafdc074ab88c5a9711622fe4751138c"}, + {file = "mypy-1.14.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:d64169ec3b8461311f8ce2fd2eb5d33e2d0f2c7b49116259c51d0d96edee48d1"}, + {file = "mypy-1.14.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:ba24549de7b89b6381b91fbc068d798192b1b5201987070319889e93038967a8"}, + {file = "mypy-1.14.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:183cf0a45457d28ff9d758730cd0210419ac27d4d3f285beda038c9083363b1f"}, + {file = "mypy-1.14.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:f2a0ecc86378f45347f586e4163d1769dd81c5a223d577fe351f26b179e148b1"}, + {file = "mypy-1.14.1-cp311-cp311-win_amd64.whl", hash = "sha256:ad3301ebebec9e8ee7135d8e3109ca76c23752bac1e717bc84cd3836b4bf3eae"}, + {file = "mypy-1.14.1-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:30ff5ef8519bbc2e18b3b54521ec319513a26f1bba19a7582e7b1f58a6e69f14"}, + {file = "mypy-1.14.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:cb9f255c18052343c70234907e2e532bc7e55a62565d64536dbc7706a20b78b9"}, + {file = "mypy-1.14.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:8b4e3413e0bddea671012b063e27591b953d653209e7a4fa5e48759cda77ca11"}, + {file = "mypy-1.14.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:553c293b1fbdebb6c3c4030589dab9fafb6dfa768995a453d8a5d3b23784af2e"}, + {file = "mypy-1.14.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:fad79bfe3b65fe6a1efaed97b445c3d37f7be9fdc348bdb2d7cac75579607c89"}, + {file = "mypy-1.14.1-cp312-cp312-win_amd64.whl", hash = "sha256:8fa2220e54d2946e94ab6dbb3ba0a992795bd68b16dc852db33028df2b00191b"}, + {file = "mypy-1.14.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:92c3ed5afb06c3a8e188cb5da4984cab9ec9a77ba956ee419c68a388b4595255"}, + {file = "mypy-1.14.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:dbec574648b3e25f43d23577309b16534431db4ddc09fda50841f1e34e64ed34"}, + {file = "mypy-1.14.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:8c6d94b16d62eb3e947281aa7347d78236688e21081f11de976376cf010eb31a"}, + {file = 
"mypy-1.14.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:d4b19b03fdf54f3c5b2fa474c56b4c13c9dbfb9a2db4370ede7ec11a2c5927d9"}, + {file = "mypy-1.14.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:0c911fde686394753fff899c409fd4e16e9b294c24bfd5e1ea4675deae1ac6fd"}, + {file = "mypy-1.14.1-cp313-cp313-win_amd64.whl", hash = "sha256:8b21525cb51671219f5307be85f7e646a153e5acc656e5cebf64bfa076c50107"}, + {file = "mypy-1.14.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:7084fb8f1128c76cd9cf68fe5971b37072598e7c31b2f9f95586b65c741a9d31"}, + {file = "mypy-1.14.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:8f845a00b4f420f693f870eaee5f3e2692fa84cc8514496114649cfa8fd5e2c6"}, + {file = "mypy-1.14.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:44bf464499f0e3a2d14d58b54674dee25c031703b2ffc35064bd0df2e0fac319"}, + {file = "mypy-1.14.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:c99f27732c0b7dc847adb21c9d47ce57eb48fa33a17bc6d7d5c5e9f9e7ae5bac"}, + {file = "mypy-1.14.1-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:bce23c7377b43602baa0bd22ea3265c49b9ff0b76eb315d6c34721af4cdf1d9b"}, + {file = "mypy-1.14.1-cp38-cp38-win_amd64.whl", hash = "sha256:8edc07eeade7ebc771ff9cf6b211b9a7d93687ff892150cb5692e4f4272b0837"}, + {file = "mypy-1.14.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:3888a1816d69f7ab92092f785a462944b3ca16d7c470d564165fe703b0970c35"}, + {file = "mypy-1.14.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:46c756a444117c43ee984bd055db99e498bc613a70bbbc120272bd13ca579fbc"}, + {file = "mypy-1.14.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:27fc248022907e72abfd8e22ab1f10e903915ff69961174784a3900a8cba9ad9"}, + {file = "mypy-1.14.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:499d6a72fb7e5de92218db961f1a66d5f11783f9ae549d214617edab5d4dbdbb"}, + {file = "mypy-1.14.1-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:57961db9795eb566dc1d1b4e9139ebc4c6b0cb6e7254ecde69d1552bf7613f60"}, + {file = "mypy-1.14.1-cp39-cp39-win_amd64.whl", hash = "sha256:07ba89fdcc9451f2ebb02853deb6aaaa3d2239a236669a63ab3801bbf923ef5c"}, + {file = "mypy-1.14.1-py3-none-any.whl", hash = "sha256:b66a60cc4073aeb8ae00057f9c1f64d49e90f918fbcef9a977eb121da8b8f1d1"}, + {file = "mypy-1.14.1.tar.gz", hash = "sha256:7ec88144fe9b510e8475ec2f5f251992690fcf89ccb4500b214b4226abcd32d6"}, +] + +[package.dependencies] +mypy_extensions = ">=1.0.0" +tomli = {version = ">=1.1.0", markers = "python_version < \"3.11\""} +typing_extensions = ">=4.6.0" + +[package.extras] +dmypy = ["psutil (>=4.0)"] +faster-cache = ["orjson"] +install-types = ["pip"] +mypyc = ["setuptools (>=50)"] +reports = ["lxml"] + [[package]] name = "mypy-extensions" version = "1.0.0" description = "Type system extensions for programs checked with the mypy type checker." 
optional = false python-versions = ">=3.5" +groups = ["dev"] files = [ {file = "mypy_extensions-1.0.0-py3-none-any.whl", hash = "sha256:4392f6c0eb8a5668a69e23d168ffa70f0be9ccfd32b5cc2d26a34ae5b844552d"}, {file = "mypy_extensions-1.0.0.tar.gz", hash = "sha256:75dbf8955dc00442a438fc4d0666508a9a97b6bd41aa2f0ffe9d2f2725af0782"}, ] -[[package]] -name = "nanobind" -version = "2.1.0" -description = "nanobind: tiny and efficient C++/Python bindings" -optional = false -python-versions = "*" -files = [ - {file = "nanobind-2.1.0-py3-none-any.whl", hash = "sha256:a613a2ce750fee63f03dc8a36593be2bdc2929cb4cea56b38fafeb74b85c3a5f"}, -] - [[package]] name = "netstruct" version = "1.1.2" description = "Packed binary data for networking." optional = false python-versions = "*" +groups = ["main"] files = [ {file = "netstruct-1.1.2.zip", hash = "sha256:70b6a5c73f5bbc7ab57b019369642adfb34dd8af41b948c400ce95f952b7df9a"}, ] [[package]] name = "networkx" -version = "3.1" +version = "3.4.2" description = "Python package for creating and manipulating graphs and networks" optional = false -python-versions = ">=3.8" +python-versions = ">=3.10" +groups = ["main"] files = [ - {file = "networkx-3.1-py3-none-any.whl", hash = "sha256:4f33f68cb2afcf86f28a45f43efc27a9386b535d567d2127f8f61d51dec58d36"}, - {file = "networkx-3.1.tar.gz", hash = "sha256:de346335408f84de0eada6ff9fafafff9bcda11f0a0dfaa931133debb146ab61"}, + {file = "networkx-3.4.2-py3-none-any.whl", hash = "sha256:df5d4365b724cf81b8c6a7312509d0c22386097011ad1abe274afd5e9d3bbc5f"}, + {file = "networkx-3.4.2.tar.gz", hash = "sha256:307c3669428c5362aab27c8a1260aa8f47c4e91d3891f48be0141738d8d053e1"}, ] [package.extras] -default = ["matplotlib (>=3.4)", "numpy (>=1.20)", "pandas (>=1.3)", "scipy (>=1.8)"] -developer = ["mypy (>=1.1)", "pre-commit (>=3.2)"] -doc = ["nb2plots (>=0.6)", "numpydoc (>=1.5)", "pillow (>=9.4)", "pydata-sphinx-theme (>=0.13)", "sphinx (>=6.1)", "sphinx-gallery (>=0.12)", "texext (>=0.6.7)"] -extra = ["lxml (>=4.6)", "pydot (>=1.4.2)", "pygraphviz (>=1.10)", "sympy (>=1.10)"] -test = ["codecov (>=2.1)", "pytest (>=7.2)", "pytest-cov (>=4.0)"] +default = ["matplotlib (>=3.7)", "numpy (>=1.24)", "pandas (>=2.0)", "scipy (>=1.10,!=1.11.0,!=1.11.1)"] +developer = ["changelist (==0.5)", "mypy (>=1.1)", "pre-commit (>=3.2)", "rtoml"] +doc = ["intersphinx-registry", "myst-nb (>=1.1)", "numpydoc (>=1.8.0)", "pillow (>=9.4)", "pydata-sphinx-theme (>=0.15)", "sphinx (>=7.3)", "sphinx-gallery (>=0.16)", "texext (>=0.6.7)"] +example = ["cairocffi (>=1.7)", "contextily (>=1.6)", "igraph (>=0.11)", "momepy (>=0.7.2)", "osmnx (>=1.9)", "scikit-learn (>=1.5)", "seaborn (>=0.13)"] +extra = ["lxml (>=4.6)", "pydot (>=3.0.1)", "pygraphviz (>=1.14)", "sympy (>=1.10)"] +test = ["pytest (>=7.2)", "pytest-cov (>=4.0)"] [[package]] name = "nodeenv" @@ -1979,46 +2370,37 @@ version = "1.9.1" description = "Node.js virtual environment builder" optional = false python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7" +groups = ["dev"] files = [ {file = "nodeenv-1.9.1-py2.py3-none-any.whl", hash = "sha256:ba11c9782d29c27c70ffbdda2d7415098754709be8a7056d79a737cd901155c9"}, {file = "nodeenv-1.9.1.tar.gz", hash = "sha256:6ec12890a2dab7946721edbfbcd91f3319c6ccc9aec47be7c7e6b7011ee6645f"}, ] -[[package]] -name = "oauthlib" -version = "3.2.2" -description = "A generic, spec-compliant, thorough implementation of the OAuth request-signing logic" -optional = false -python-versions = ">=3.6" -files = [ - {file = "oauthlib-3.2.2-py3-none-any.whl", hash = 
"sha256:8139f29aac13e25d502680e9e19963e83f16838d48a0d71c287fe40e7067fbca"}, - {file = "oauthlib-3.2.2.tar.gz", hash = "sha256:9859c40929662bec5d64f34d01c99e093149682a3f38915dc0655d5a633dd918"}, -] - -[package.extras] -rsa = ["cryptography (>=3.0.0)"] -signals = ["blinker (>=1.4.0)"] -signedtoken = ["cryptography (>=3.0.0)", "pyjwt (>=2.0.0,<3)"] - [[package]] name = "olefile" -version = "0.46" +version = "0.47" description = "Python package to parse, read and write Microsoft OLE2 files (Structured Storage or Compound Document, Microsoft Office)" optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" +groups = ["main"] files = [ - {file = "olefile-0.46.zip", hash = "sha256:133b031eaf8fd2c9399b78b8bc5b8fcbe4c31e85295749bb17a87cba8f3c3964"}, + {file = "olefile-0.47-py2.py3-none-any.whl", hash = "sha256:543c7da2a7adadf21214938bb79c83ea12b473a4b6ee4ad4bf854e7715e13d1f"}, + {file = "olefile-0.47.zip", hash = "sha256:599383381a0bf3dfbd932ca0ca6515acd174ed48870cbf7fee123d698c192c1c"}, ] +[package.extras] +tests = ["pytest", "pytest-cov"] + [[package]] name = "oletools" -version = "0.60" +version = "0.60.2" description = "Python tools to analyze security characteristics of MS Office and OLE files (also called Structured Storage, Compound File Binary Format or Compound Document File Format), for Malware Analysis and Incident Response #DFIR" optional = false python-versions = "*" +groups = ["main"] files = [ - {file = "oletools-0.60-py2.py3-none-any.whl", hash = "sha256:bad54d3ced34f3475a5bffc0122f8481c66c3f3e09ad946dbda6ec80b75f72cb"}, - {file = "oletools-0.60.zip", hash = "sha256:dfad0328ac83b4f8db9f47e706cbd64db739ae4ebf9d98b2dcc465728a35f4a6"}, + {file = "oletools-0.60.2-py2.py3-none-any.whl", hash = "sha256:72ad8bd748fd0c4e7b5b4733af770d11543ebb2bf2697455f99f975fcd50cc96"}, + {file = "oletools-0.60.2.zip", hash = "sha256:ad452099f4695ffd8855113f453348200d195ee9fa341a09e197d66ee7e0b2c3"}, ] [package.dependencies] @@ -2027,90 +2409,122 @@ easygui = "*" msoffcrypto-tool = {version = "*", markers = "platform_python_implementation != \"PyPy\" or python_version >= \"3\" and (platform_system != \"Windows\" and platform_system != \"Darwin\")"} olefile = ">=0.46" pcodedmp = ">=1.2.5" -pyparsing = ">=2.1.0,<3" +pyparsing = ">=2.1.0,<4" [package.extras] full = ["XLMMacroDeobfuscator"] [[package]] name = "orjson" -version = "3.9.15" +version = "3.10.15" description = "Fast, correct Python JSON library supporting dataclasses, datetimes, and numpy" optional = false python-versions = ">=3.8" -files = [ - {file = "orjson-3.9.15-cp310-cp310-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:d61f7ce4727a9fa7680cd6f3986b0e2c732639f46a5e0156e550e35258aa313a"}, - {file = "orjson-3.9.15-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4feeb41882e8aa17634b589533baafdceb387e01e117b1ec65534ec724023d04"}, - {file = "orjson-3.9.15-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:fbbeb3c9b2edb5fd044b2a070f127a0ac456ffd079cb82746fc84af01ef021a4"}, - {file = "orjson-3.9.15-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b66bcc5670e8a6b78f0313bcb74774c8291f6f8aeef10fe70e910b8040f3ab75"}, - {file = "orjson-3.9.15-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2973474811db7b35c30248d1129c64fd2bdf40d57d84beed2a9a379a6f57d0ab"}, - {file = 
"orjson-3.9.15-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9fe41b6f72f52d3da4db524c8653e46243c8c92df826ab5ffaece2dba9cccd58"}, - {file = "orjson-3.9.15-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:4228aace81781cc9d05a3ec3a6d2673a1ad0d8725b4e915f1089803e9efd2b99"}, - {file = "orjson-3.9.15-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:6f7b65bfaf69493c73423ce9db66cfe9138b2f9ef62897486417a8fcb0a92bfe"}, - {file = "orjson-3.9.15-cp310-none-win32.whl", hash = "sha256:2d99e3c4c13a7b0fb3792cc04c2829c9db07838fb6973e578b85c1745e7d0ce7"}, - {file = "orjson-3.9.15-cp310-none-win_amd64.whl", hash = "sha256:b725da33e6e58e4a5d27958568484aa766e825e93aa20c26c91168be58e08cbb"}, - {file = "orjson-3.9.15-cp311-cp311-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:c8e8fe01e435005d4421f183038fc70ca85d2c1e490f51fb972db92af6e047c2"}, - {file = "orjson-3.9.15-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:87f1097acb569dde17f246faa268759a71a2cb8c96dd392cd25c668b104cad2f"}, - {file = "orjson-3.9.15-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:ff0f9913d82e1d1fadbd976424c316fbc4d9c525c81d047bbdd16bd27dd98cfc"}, - {file = "orjson-3.9.15-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8055ec598605b0077e29652ccfe9372247474375e0e3f5775c91d9434e12d6b1"}, - {file = "orjson-3.9.15-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d6768a327ea1ba44c9114dba5fdda4a214bdb70129065cd0807eb5f010bfcbb5"}, - {file = "orjson-3.9.15-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:12365576039b1a5a47df01aadb353b68223da413e2e7f98c02403061aad34bde"}, - {file = "orjson-3.9.15-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:71c6b009d431b3839d7c14c3af86788b3cfac41e969e3e1c22f8a6ea13139404"}, - {file = "orjson-3.9.15-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:e18668f1bd39e69b7fed19fa7cd1cd110a121ec25439328b5c89934e6d30d357"}, - {file = "orjson-3.9.15-cp311-none-win32.whl", hash = "sha256:62482873e0289cf7313461009bf62ac8b2e54bc6f00c6fabcde785709231a5d7"}, - {file = "orjson-3.9.15-cp311-none-win_amd64.whl", hash = "sha256:b3d336ed75d17c7b1af233a6561cf421dee41d9204aa3cfcc6c9c65cd5bb69a8"}, - {file = "orjson-3.9.15-cp312-cp312-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:82425dd5c7bd3adfe4e94c78e27e2fa02971750c2b7ffba648b0f5d5cc016a73"}, - {file = "orjson-3.9.15-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2c51378d4a8255b2e7c1e5cc430644f0939539deddfa77f6fac7b56a9784160a"}, - {file = "orjson-3.9.15-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:6ae4e06be04dc00618247c4ae3f7c3e561d5bc19ab6941427f6d3722a0875ef7"}, - {file = "orjson-3.9.15-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:bcef128f970bb63ecf9a65f7beafd9b55e3aaf0efc271a4154050fc15cdb386e"}, - {file = "orjson-3.9.15-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b72758f3ffc36ca566ba98a8e7f4f373b6c17c646ff8ad9b21ad10c29186f00d"}, - {file = "orjson-3.9.15-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:10c57bc7b946cf2efa67ac55766e41764b66d40cbd9489041e637c1304400494"}, - {file = "orjson-3.9.15-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:946c3a1ef25338e78107fba746f299f926db408d34553b4754e90a7de1d44068"}, - {file = 
"orjson-3.9.15-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:2f256d03957075fcb5923410058982aea85455d035607486ccb847f095442bda"}, - {file = "orjson-3.9.15-cp312-none-win_amd64.whl", hash = "sha256:5bb399e1b49db120653a31463b4a7b27cf2fbfe60469546baf681d1b39f4edf2"}, - {file = "orjson-3.9.15-cp38-cp38-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:b17f0f14a9c0ba55ff6279a922d1932e24b13fc218a3e968ecdbf791b3682b25"}, - {file = "orjson-3.9.15-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7f6cbd8e6e446fb7e4ed5bac4661a29e43f38aeecbf60c4b900b825a353276a1"}, - {file = "orjson-3.9.15-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:76bc6356d07c1d9f4b782813094d0caf1703b729d876ab6a676f3aaa9a47e37c"}, - {file = "orjson-3.9.15-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:fdfa97090e2d6f73dced247a2f2d8004ac6449df6568f30e7fa1a045767c69a6"}, - {file = "orjson-3.9.15-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7413070a3e927e4207d00bd65f42d1b780fb0d32d7b1d951f6dc6ade318e1b5a"}, - {file = "orjson-3.9.15-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9cf1596680ac1f01839dba32d496136bdd5d8ffb858c280fa82bbfeb173bdd40"}, - {file = "orjson-3.9.15-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:809d653c155e2cc4fd39ad69c08fdff7f4016c355ae4b88905219d3579e31eb7"}, - {file = "orjson-3.9.15-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:920fa5a0c5175ab14b9c78f6f820b75804fb4984423ee4c4f1e6d748f8b22bc1"}, - {file = "orjson-3.9.15-cp38-none-win32.whl", hash = "sha256:2b5c0f532905e60cf22a511120e3719b85d9c25d0e1c2a8abb20c4dede3b05a5"}, - {file = "orjson-3.9.15-cp38-none-win_amd64.whl", hash = "sha256:67384f588f7f8daf040114337d34a5188346e3fae6c38b6a19a2fe8c663a2f9b"}, - {file = "orjson-3.9.15-cp39-cp39-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:6fc2fe4647927070df3d93f561d7e588a38865ea0040027662e3e541d592811e"}, - {file = "orjson-3.9.15-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:34cbcd216e7af5270f2ffa63a963346845eb71e174ea530867b7443892d77180"}, - {file = "orjson-3.9.15-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:f541587f5c558abd93cb0de491ce99a9ef8d1ae29dd6ab4dbb5a13281ae04cbd"}, - {file = "orjson-3.9.15-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:92255879280ef9c3c0bcb327c5a1b8ed694c290d61a6a532458264f887f052cb"}, - {file = "orjson-3.9.15-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:05a1f57fb601c426635fcae9ddbe90dfc1ed42245eb4c75e4960440cac667262"}, - {file = "orjson-3.9.15-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ede0bde16cc6e9b96633df1631fbcd66491d1063667f260a4f2386a098393790"}, - {file = "orjson-3.9.15-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:e88b97ef13910e5f87bcbc4dd7979a7de9ba8702b54d3204ac587e83639c0c2b"}, - {file = "orjson-3.9.15-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:57d5d8cf9c27f7ef6bc56a5925c7fbc76b61288ab674eb352c26ac780caa5b10"}, - {file = "orjson-3.9.15-cp39-none-win32.whl", hash = "sha256:001f4eb0ecd8e9ebd295722d0cbedf0748680fb9998d3993abaed2f40587257a"}, - {file = "orjson-3.9.15-cp39-none-win_amd64.whl", hash = "sha256:ea0b183a5fe6b2b45f3b854b0d19c4e932d6f5934ae1f723b07cf9560edd4ec7"}, - {file = "orjson-3.9.15.tar.gz", hash = "sha256:95cae920959d772f30ab36d3b25f83bb0f3be671e986c72ce22f8fa700dae061"}, +groups = 
["main"] +files = [ + {file = "orjson-3.10.15-cp310-cp310-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:552c883d03ad185f720d0c09583ebde257e41b9521b74ff40e08b7dec4559c04"}, + {file = "orjson-3.10.15-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:616e3e8d438d02e4854f70bfdc03a6bcdb697358dbaa6bcd19cbe24d24ece1f8"}, + {file = "orjson-3.10.15-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:7c2c79fa308e6edb0ffab0a31fd75a7841bf2a79a20ef08a3c6e3b26814c8ca8"}, + {file = "orjson-3.10.15-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:73cb85490aa6bf98abd20607ab5c8324c0acb48d6da7863a51be48505646c814"}, + {file = "orjson-3.10.15-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:763dadac05e4e9d2bc14938a45a2d0560549561287d41c465d3c58aec818b164"}, + {file = "orjson-3.10.15-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a330b9b4734f09a623f74a7490db713695e13b67c959713b78369f26b3dee6bf"}, + {file = "orjson-3.10.15-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:a61a4622b7ff861f019974f73d8165be1bd9a0855e1cad18ee167acacabeb061"}, + {file = "orjson-3.10.15-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:acd271247691574416b3228db667b84775c497b245fa275c6ab90dc1ffbbd2b3"}, + {file = "orjson-3.10.15-cp310-cp310-musllinux_1_2_armv7l.whl", hash = "sha256:e4759b109c37f635aa5c5cc93a1b26927bfde24b254bcc0e1149a9fada253d2d"}, + {file = "orjson-3.10.15-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:9e992fd5cfb8b9f00bfad2fd7a05a4299db2bbe92e6440d9dd2fab27655b3182"}, + {file = "orjson-3.10.15-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:f95fb363d79366af56c3f26b71df40b9a583b07bbaaf5b317407c4d58497852e"}, + {file = "orjson-3.10.15-cp310-cp310-win32.whl", hash = "sha256:f9875f5fea7492da8ec2444839dcc439b0ef298978f311103d0b7dfd775898ab"}, + {file = "orjson-3.10.15-cp310-cp310-win_amd64.whl", hash = "sha256:17085a6aa91e1cd70ca8533989a18b5433e15d29c574582f76f821737c8d5806"}, + {file = "orjson-3.10.15-cp311-cp311-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:c4cc83960ab79a4031f3119cc4b1a1c627a3dc09df125b27c4201dff2af7eaa6"}, + {file = "orjson-3.10.15-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ddbeef2481d895ab8be5185f2432c334d6dec1f5d1933a9c83014d188e102cef"}, + {file = "orjson-3.10.15-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:9e590a0477b23ecd5b0ac865b1b907b01b3c5535f5e8a8f6ab0e503efb896334"}, + {file = "orjson-3.10.15-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a6be38bd103d2fd9bdfa31c2720b23b5d47c6796bcb1d1b598e3924441b4298d"}, + {file = "orjson-3.10.15-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:ff4f6edb1578960ed628a3b998fa54d78d9bb3e2eb2cfc5c2a09732431c678d0"}, + {file = "orjson-3.10.15-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b0482b21d0462eddd67e7fce10b89e0b6ac56570424662b685a0d6fccf581e13"}, + {file = "orjson-3.10.15-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:bb5cc3527036ae3d98b65e37b7986a918955f85332c1ee07f9d3f82f3a6899b5"}, + {file = "orjson-3.10.15-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:d569c1c462912acdd119ccbf719cf7102ea2c67dd03b99edcb1a3048651ac96b"}, + {file = "orjson-3.10.15-cp311-cp311-musllinux_1_2_armv7l.whl", hash = 
"sha256:1e6d33efab6b71d67f22bf2962895d3dc6f82a6273a965fab762e64fa90dc399"}, + {file = "orjson-3.10.15-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:c33be3795e299f565681d69852ac8c1bc5c84863c0b0030b2b3468843be90388"}, + {file = "orjson-3.10.15-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:eea80037b9fae5339b214f59308ef0589fc06dc870578b7cce6d71eb2096764c"}, + {file = "orjson-3.10.15-cp311-cp311-win32.whl", hash = "sha256:d5ac11b659fd798228a7adba3e37c010e0152b78b1982897020a8e019a94882e"}, + {file = "orjson-3.10.15-cp311-cp311-win_amd64.whl", hash = "sha256:cf45e0214c593660339ef63e875f32ddd5aa3b4adc15e662cdb80dc49e194f8e"}, + {file = "orjson-3.10.15-cp312-cp312-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:9d11c0714fc85bfcf36ada1179400862da3288fc785c30e8297844c867d7505a"}, + {file = "orjson-3.10.15-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dba5a1e85d554e3897fa9fe6fbcff2ed32d55008973ec9a2b992bd9a65d2352d"}, + {file = "orjson-3.10.15-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:7723ad949a0ea502df656948ddd8b392780a5beaa4c3b5f97e525191b102fff0"}, + {file = "orjson-3.10.15-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:6fd9bc64421e9fe9bd88039e7ce8e58d4fead67ca88e3a4014b143cec7684fd4"}, + {file = "orjson-3.10.15-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:dadba0e7b6594216c214ef7894c4bd5f08d7c0135f4dd0145600be4fbcc16767"}, + {file = "orjson-3.10.15-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b48f59114fe318f33bbaee8ebeda696d8ccc94c9e90bc27dbe72153094e26f41"}, + {file = "orjson-3.10.15-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:035fb83585e0f15e076759b6fedaf0abb460d1765b6a36f48018a52858443514"}, + {file = "orjson-3.10.15-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:d13b7fe322d75bf84464b075eafd8e7dd9eae05649aa2a5354cfa32f43c59f17"}, + {file = "orjson-3.10.15-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:7066b74f9f259849629e0d04db6609db4cf5b973248f455ba5d3bd58a4daaa5b"}, + {file = "orjson-3.10.15-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:88dc3f65a026bd3175eb157fea994fca6ac7c4c8579fc5a86fc2114ad05705b7"}, + {file = "orjson-3.10.15-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:b342567e5465bd99faa559507fe45e33fc76b9fb868a63f1642c6bc0735ad02a"}, + {file = "orjson-3.10.15-cp312-cp312-win32.whl", hash = "sha256:0a4f27ea5617828e6b58922fdbec67b0aa4bb844e2d363b9244c47fa2180e665"}, + {file = "orjson-3.10.15-cp312-cp312-win_amd64.whl", hash = "sha256:ef5b87e7aa9545ddadd2309efe6824bd3dd64ac101c15dae0f2f597911d46eaa"}, + {file = "orjson-3.10.15-cp313-cp313-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:bae0e6ec2b7ba6895198cd981b7cca95d1487d0147c8ed751e5632ad16f031a6"}, + {file = "orjson-3.10.15-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f93ce145b2db1252dd86af37d4165b6faa83072b46e3995ecc95d4b2301b725a"}, + {file = "orjson-3.10.15-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:7c203f6f969210128af3acae0ef9ea6aab9782939f45f6fe02d05958fe761ef9"}, + {file = "orjson-3.10.15-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8918719572d662e18b8af66aef699d8c21072e54b6c82a3f8f6404c1f5ccd5e0"}, + {file = "orjson-3.10.15-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = 
"sha256:f71eae9651465dff70aa80db92586ad5b92df46a9373ee55252109bb6b703307"}, + {file = "orjson-3.10.15-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e117eb299a35f2634e25ed120c37c641398826c2f5a3d3cc39f5993b96171b9e"}, + {file = "orjson-3.10.15-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:13242f12d295e83c2955756a574ddd6741c81e5b99f2bef8ed8d53e47a01e4b7"}, + {file = "orjson-3.10.15-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:7946922ada8f3e0b7b958cc3eb22cfcf6c0df83d1fe5521b4a100103e3fa84c8"}, + {file = "orjson-3.10.15-cp313-cp313-musllinux_1_2_armv7l.whl", hash = "sha256:b7155eb1623347f0f22c38c9abdd738b287e39b9982e1da227503387b81b34ca"}, + {file = "orjson-3.10.15-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:208beedfa807c922da4e81061dafa9c8489c6328934ca2a562efa707e049e561"}, + {file = "orjson-3.10.15-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:eca81f83b1b8c07449e1d6ff7074e82e3fd6777e588f1a6632127f286a968825"}, + {file = "orjson-3.10.15-cp313-cp313-win32.whl", hash = "sha256:c03cd6eea1bd3b949d0d007c8d57049aa2b39bd49f58b4b2af571a5d3833d890"}, + {file = "orjson-3.10.15-cp313-cp313-win_amd64.whl", hash = "sha256:fd56a26a04f6ba5fb2045b0acc487a63162a958ed837648c5781e1fe3316cfbf"}, + {file = "orjson-3.10.15-cp38-cp38-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:5e8afd6200e12771467a1a44e5ad780614b86abb4b11862ec54861a82d677746"}, + {file = "orjson-3.10.15-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:da9a18c500f19273e9e104cca8c1f0b40a6470bcccfc33afcc088045d0bf5ea6"}, + {file = "orjson-3.10.15-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:bb00b7bfbdf5d34a13180e4805d76b4567025da19a197645ca746fc2fb536586"}, + {file = "orjson-3.10.15-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:33aedc3d903378e257047fee506f11e0833146ca3e57a1a1fb0ddb789876c1e1"}, + {file = "orjson-3.10.15-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:dd0099ae6aed5eb1fc84c9eb72b95505a3df4267e6962eb93cdd5af03be71c98"}, + {file = "orjson-3.10.15-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7c864a80a2d467d7786274fce0e4f93ef2a7ca4ff31f7fc5634225aaa4e9e98c"}, + {file = "orjson-3.10.15-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:c25774c9e88a3e0013d7d1a6c8056926b607a61edd423b50eb5c88fd7f2823ae"}, + {file = "orjson-3.10.15-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:e78c211d0074e783d824ce7bb85bf459f93a233eb67a5b5003498232ddfb0e8a"}, + {file = "orjson-3.10.15-cp38-cp38-musllinux_1_2_armv7l.whl", hash = "sha256:43e17289ffdbbac8f39243916c893d2ae41a2ea1a9cbb060a56a4d75286351ae"}, + {file = "orjson-3.10.15-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:781d54657063f361e89714293c095f506c533582ee40a426cb6489c48a637b81"}, + {file = "orjson-3.10.15-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:6875210307d36c94873f553786a808af2788e362bd0cf4c8e66d976791e7b528"}, + {file = "orjson-3.10.15-cp38-cp38-win32.whl", hash = "sha256:305b38b2b8f8083cc3d618927d7f424349afce5975b316d33075ef0f73576b60"}, + {file = "orjson-3.10.15-cp38-cp38-win_amd64.whl", hash = "sha256:5dd9ef1639878cc3efffed349543cbf9372bdbd79f478615a1c633fe4e4180d1"}, + {file = "orjson-3.10.15-cp39-cp39-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:ffe19f3e8d68111e8644d4f4e267a069ca427926855582ff01fc012496d19969"}, + {file = 
"orjson-3.10.15-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d433bf32a363823863a96561a555227c18a522a8217a6f9400f00ddc70139ae2"}, + {file = "orjson-3.10.15-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:da03392674f59a95d03fa5fb9fe3a160b0511ad84b7a3914699ea5a1b3a38da2"}, + {file = "orjson-3.10.15-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3a63bb41559b05360ded9132032239e47983a39b151af1201f07ec9370715c82"}, + {file = "orjson-3.10.15-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3766ac4702f8f795ff3fa067968e806b4344af257011858cc3d6d8721588b53f"}, + {file = "orjson-3.10.15-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7a1c73dcc8fadbd7c55802d9aa093b36878d34a3b3222c41052ce6b0fc65f8e8"}, + {file = "orjson-3.10.15-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:b299383825eafe642cbab34be762ccff9fd3408d72726a6b2a4506d410a71ab3"}, + {file = "orjson-3.10.15-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:abc7abecdbf67a173ef1316036ebbf54ce400ef2300b4e26a7b843bd446c2480"}, + {file = "orjson-3.10.15-cp39-cp39-musllinux_1_2_armv7l.whl", hash = "sha256:3614ea508d522a621384c1d6639016a5a2e4f027f3e4a1c93a51867615d28829"}, + {file = "orjson-3.10.15-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:295c70f9dc154307777ba30fe29ff15c1bcc9dfc5c48632f37d20a607e9ba85a"}, + {file = "orjson-3.10.15-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:63309e3ff924c62404923c80b9e2048c1f74ba4b615e7584584389ada50ed428"}, + {file = "orjson-3.10.15-cp39-cp39-win32.whl", hash = "sha256:a2f708c62d026fb5340788ba94a55c23df4e1869fec74be455e0b2f5363b8507"}, + {file = "orjson-3.10.15-cp39-cp39-win_amd64.whl", hash = "sha256:efcf6c735c3d22ef60c4aa27a5238f1a477df85e9b15f2142f9d669beb2d13fd"}, + {file = "orjson-3.10.15.tar.gz", hash = "sha256:05ca7fe452a2e9d8d9d706a2984c95b9c2ebc5db417ce0b7a49b91d50642a23e"}, ] [[package]] name = "packaging" -version = "23.1" +version = "24.2" description = "Core utilities for Python packages" optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" +groups = ["main", "dev"] files = [ - {file = "packaging-23.1-py3-none-any.whl", hash = "sha256:994793af429502c4ea2ebf6bf664629d07c1a9fe974af92966e4b8d2df7edc61"}, - {file = "packaging-23.1.tar.gz", hash = "sha256:a392980d2b6cffa644431898be54b0045151319d1e7ec34f0cfed48767dd334f"}, + {file = "packaging-24.2-py3-none-any.whl", hash = "sha256:09abb1bccd265c01f4a3aa3f7a7db064b36514d2cba19a2f694fe6150451a759"}, + {file = "packaging-24.2.tar.gz", hash = "sha256:c228a6dc5e932d346bc5739379109d49e8853dd8223571c7c5b55260edc0b97f"}, ] [[package]] name = "paramiko" -version = "3.4.0" +version = "3.5.0" description = "SSH2 protocol library" optional = false python-versions = ">=3.6" +groups = ["main"] files = [ - {file = "paramiko-3.4.0-py3-none-any.whl", hash = "sha256:43f0b51115a896f9c00f59618023484cb3a14b98bbceab43394a39c6739b7ee7"}, - {file = "paramiko-3.4.0.tar.gz", hash = "sha256:aac08f26a31dc4dffd92821527d1682d99d52f9ef6851968114a8728f3c274d3"}, + {file = "paramiko-3.5.0-py3-none-any.whl", hash = "sha256:1fedf06b085359051cd7d0d270cebe19e755a8a921cc2ddbfa647fb0cd7d68f9"}, + {file = "paramiko-3.5.0.tar.gz", hash = "sha256:ad11e540da4f55cedda52931f1a3f812a8238a7af7f62a60de538cd80bb28124"}, ] [package.dependencies] @@ -2129,6 +2543,7 @@ version = "0.12.1" description = "Utility library for gitignore style pattern matching of file paths." 
optional = false python-versions = ">=3.8" +groups = ["dev"] files = [ {file = "pathspec-0.12.1-py3-none-any.whl", hash = "sha256:a0d503e138a4c123b27490a4f7beda6a01c6f288df0e4a8b79c7eb0dc7b4cc08"}, {file = "pathspec-0.12.1.tar.gz", hash = "sha256:a482d51503a1ab33b1c67a6c3813a26953dbdc71c31dacaef9a838c4e29f5712"}, @@ -2140,6 +2555,7 @@ version = "1.2.6" description = "A VBA p-code disassembler" optional = false python-versions = "*" +groups = ["main"] files = [ {file = "pcodedmp-1.2.6-py2.py3-none-any.whl", hash = "sha256:4441f7c0ab4cbda27bd4668db3b14f36261d86e5059ce06c0828602cbe1c4278"}, {file = "pcodedmp-1.2.6.tar.gz", hash = "sha256:025f8c809a126f45a082ffa820893e6a8d990d9d7ddb68694b5a9f0a6dbcd955"}, @@ -2151,21 +2567,46 @@ win-unicode-console = {version = "*", markers = "platform_system == \"Windows\" [[package]] name = "pebble" -version = "4.6.3" +version = "5.1.0" description = "Threading and multiprocessing eye-candy." optional = false -python-versions = "*" +python-versions = ">=3.8" +groups = ["main"] files = [ - {file = "Pebble-4.6.3-py2.py3-none-any.whl", hash = "sha256:46e02767b239a29b8150466514fabb5c6632bea8c9b7456dfdb715f4636fc8a3"}, - {file = "Pebble-4.6.3.tar.gz", hash = "sha256:694e1105db888f3576b8f00662f90b057cf3780e6f8b7f57955a568008d0f497"}, + {file = "Pebble-5.1.0-py3-none-any.whl", hash = "sha256:530a398299ecd3a4ed1baf2e4b8045d8280b1e665560b0b409f8d8e58db60111"}, + {file = "Pebble-5.1.0.tar.gz", hash = "sha256:5c30376f1827b21ecec4126ff90e7f22ad5501cac1ff2b32c86ff2601681f932"}, ] +[[package]] +name = "peepdf-3" +version = "5.0.0" +description = "A Python 3 tool to explore, analyse, and disassemble PDF files" +optional = false +python-versions = ">=3.9" +groups = ["main"] +files = [ + {file = "peepdf_3-5.0.0-py3-none-any.whl", hash = "sha256:2c9220c39c1ffc7d1db4268874d5c68329e9f9f331643b0bb1e6e84798439bd8"}, + {file = "peepdf_3-5.0.0.tar.gz", hash = "sha256:1c6dbb873be22633535bb7a0edfcc005781837869bb5d5b99c9a36591d1e9e95"}, +] + +[package.dependencies] +colorama = "*" +jsbeautifier = "*" +lxml = "*" +Pillow = "*" +prettytable = ">=3.12.0" +pypdf = "*" +pythonaes = "*" +requests = "*" +STPyV8 = "*" + [[package]] name = "pefile" version = "2024.8.26" description = "Python PE parsing module" optional = false python-versions = ">=3.6.0" +groups = ["main"] files = [ {file = "pefile-2024.8.26-py3-none-any.whl", hash = "sha256:76f8b485dcd3b1bb8166f1128d395fa3d87af26360c2358fb75b80019b957c6f"}, {file = "pefile-2024.8.26.tar.gz", hash = "sha256:3ff6c5d8b43e8c37bb6e6dd5085658d658a7a0bdcd20b6a07b1fcfc1c4e9d632"}, @@ -2173,98 +2614,90 @@ files = [ [[package]] name = "pillow" -version = "10.4.0" +version = "11.1.0" description = "Python Imaging Library (Fork)" optional = false -python-versions = ">=3.8" -files = [ - {file = "pillow-10.4.0-cp310-cp310-macosx_10_10_x86_64.whl", hash = "sha256:4d9667937cfa347525b319ae34375c37b9ee6b525440f3ef48542fcf66f2731e"}, - {file = "pillow-10.4.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:543f3dc61c18dafb755773efc89aae60d06b6596a63914107f75459cf984164d"}, - {file = "pillow-10.4.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7928ecbf1ece13956b95d9cbcfc77137652b02763ba384d9ab508099a2eca856"}, - {file = "pillow-10.4.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e4d49b85c4348ea0b31ea63bc75a9f3857869174e2bf17e7aba02945cd218e6f"}, - {file = "pillow-10.4.0-cp310-cp310-manylinux_2_28_aarch64.whl", hash = "sha256:6c762a5b0997f5659a5ef2266abc1d8851ad7749ad9a6a5506eb23d314e4f46b"}, - 
{file = "pillow-10.4.0-cp310-cp310-manylinux_2_28_x86_64.whl", hash = "sha256:a985e028fc183bf12a77a8bbf36318db4238a3ded7fa9df1b9a133f1cb79f8fc"}, - {file = "pillow-10.4.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:812f7342b0eee081eaec84d91423d1b4650bb9828eb53d8511bcef8ce5aecf1e"}, - {file = "pillow-10.4.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:ac1452d2fbe4978c2eec89fb5a23b8387aba707ac72810d9490118817d9c0b46"}, - {file = "pillow-10.4.0-cp310-cp310-win32.whl", hash = "sha256:bcd5e41a859bf2e84fdc42f4edb7d9aba0a13d29a2abadccafad99de3feff984"}, - {file = "pillow-10.4.0-cp310-cp310-win_amd64.whl", hash = "sha256:ecd85a8d3e79cd7158dec1c9e5808e821feea088e2f69a974db5edf84dc53141"}, - {file = "pillow-10.4.0-cp310-cp310-win_arm64.whl", hash = "sha256:ff337c552345e95702c5fde3158acb0625111017d0e5f24bf3acdb9cc16b90d1"}, - {file = "pillow-10.4.0-cp311-cp311-macosx_10_10_x86_64.whl", hash = "sha256:0a9ec697746f268507404647e531e92889890a087e03681a3606d9b920fbee3c"}, - {file = "pillow-10.4.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:dfe91cb65544a1321e631e696759491ae04a2ea11d36715eca01ce07284738be"}, - {file = "pillow-10.4.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5dc6761a6efc781e6a1544206f22c80c3af4c8cf461206d46a1e6006e4429ff3"}, - {file = "pillow-10.4.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5e84b6cc6a4a3d76c153a6b19270b3526a5a8ed6b09501d3af891daa2a9de7d6"}, - {file = "pillow-10.4.0-cp311-cp311-manylinux_2_28_aarch64.whl", hash = "sha256:bbc527b519bd3aa9d7f429d152fea69f9ad37c95f0b02aebddff592688998abe"}, - {file = "pillow-10.4.0-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:76a911dfe51a36041f2e756b00f96ed84677cdeb75d25c767f296c1c1eda1319"}, - {file = "pillow-10.4.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:59291fb29317122398786c2d44427bbd1a6d7ff54017075b22be9d21aa59bd8d"}, - {file = "pillow-10.4.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:416d3a5d0e8cfe4f27f574362435bc9bae57f679a7158e0096ad2beb427b8696"}, - {file = "pillow-10.4.0-cp311-cp311-win32.whl", hash = "sha256:7086cc1d5eebb91ad24ded9f58bec6c688e9f0ed7eb3dbbf1e4800280a896496"}, - {file = "pillow-10.4.0-cp311-cp311-win_amd64.whl", hash = "sha256:cbed61494057c0f83b83eb3a310f0bf774b09513307c434d4366ed64f4128a91"}, - {file = "pillow-10.4.0-cp311-cp311-win_arm64.whl", hash = "sha256:f5f0c3e969c8f12dd2bb7e0b15d5c468b51e5017e01e2e867335c81903046a22"}, - {file = "pillow-10.4.0-cp312-cp312-macosx_10_10_x86_64.whl", hash = "sha256:673655af3eadf4df6b5457033f086e90299fdd7a47983a13827acf7459c15d94"}, - {file = "pillow-10.4.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:866b6942a92f56300012f5fbac71f2d610312ee65e22f1aa2609e491284e5597"}, - {file = "pillow-10.4.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:29dbdc4207642ea6aad70fbde1a9338753d33fb23ed6956e706936706f52dd80"}, - {file = "pillow-10.4.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bf2342ac639c4cf38799a44950bbc2dfcb685f052b9e262f446482afaf4bffca"}, - {file = "pillow-10.4.0-cp312-cp312-manylinux_2_28_aarch64.whl", hash = "sha256:f5b92f4d70791b4a67157321c4e8225d60b119c5cc9aee8ecf153aace4aad4ef"}, - {file = "pillow-10.4.0-cp312-cp312-manylinux_2_28_x86_64.whl", hash = "sha256:86dcb5a1eb778d8b25659d5e4341269e8590ad6b4e8b44d9f4b07f8d136c414a"}, - {file = "pillow-10.4.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:780c072c2e11c9b2c7ca37f9a2ee8ba66f44367ac3e5c7832afcfe5104fd6d1b"}, - {file 
= "pillow-10.4.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:37fb69d905be665f68f28a8bba3c6d3223c8efe1edf14cc4cfa06c241f8c81d9"}, - {file = "pillow-10.4.0-cp312-cp312-win32.whl", hash = "sha256:7dfecdbad5c301d7b5bde160150b4db4c659cee2b69589705b6f8a0c509d9f42"}, - {file = "pillow-10.4.0-cp312-cp312-win_amd64.whl", hash = "sha256:1d846aea995ad352d4bdcc847535bd56e0fd88d36829d2c90be880ef1ee4668a"}, - {file = "pillow-10.4.0-cp312-cp312-win_arm64.whl", hash = "sha256:e553cad5179a66ba15bb18b353a19020e73a7921296a7979c4a2b7f6a5cd57f9"}, - {file = "pillow-10.4.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:8bc1a764ed8c957a2e9cacf97c8b2b053b70307cf2996aafd70e91a082e70df3"}, - {file = "pillow-10.4.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:6209bb41dc692ddfee4942517c19ee81b86c864b626dbfca272ec0f7cff5d9fb"}, - {file = "pillow-10.4.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bee197b30783295d2eb680b311af15a20a8b24024a19c3a26431ff83eb8d1f70"}, - {file = "pillow-10.4.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1ef61f5dd14c300786318482456481463b9d6b91ebe5ef12f405afbba77ed0be"}, - {file = "pillow-10.4.0-cp313-cp313-manylinux_2_28_aarch64.whl", hash = "sha256:297e388da6e248c98bc4a02e018966af0c5f92dfacf5a5ca22fa01cb3179bca0"}, - {file = "pillow-10.4.0-cp313-cp313-manylinux_2_28_x86_64.whl", hash = "sha256:e4db64794ccdf6cb83a59d73405f63adbe2a1887012e308828596100a0b2f6cc"}, - {file = "pillow-10.4.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:bd2880a07482090a3bcb01f4265f1936a903d70bc740bfcb1fd4e8a2ffe5cf5a"}, - {file = "pillow-10.4.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:4b35b21b819ac1dbd1233317adeecd63495f6babf21b7b2512d244ff6c6ce309"}, - {file = "pillow-10.4.0-cp313-cp313-win32.whl", hash = "sha256:551d3fd6e9dc15e4c1eb6fc4ba2b39c0c7933fa113b220057a34f4bb3268a060"}, - {file = "pillow-10.4.0-cp313-cp313-win_amd64.whl", hash = "sha256:030abdbe43ee02e0de642aee345efa443740aa4d828bfe8e2eb11922ea6a21ea"}, - {file = "pillow-10.4.0-cp313-cp313-win_arm64.whl", hash = "sha256:5b001114dd152cfd6b23befeb28d7aee43553e2402c9f159807bf55f33af8a8d"}, - {file = "pillow-10.4.0-cp38-cp38-macosx_10_10_x86_64.whl", hash = "sha256:8d4d5063501b6dd4024b8ac2f04962d661222d120381272deea52e3fc52d3736"}, - {file = "pillow-10.4.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:7c1ee6f42250df403c5f103cbd2768a28fe1a0ea1f0f03fe151c8741e1469c8b"}, - {file = "pillow-10.4.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b15e02e9bb4c21e39876698abf233c8c579127986f8207200bc8a8f6bb27acf2"}, - {file = "pillow-10.4.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7a8d4bade9952ea9a77d0c3e49cbd8b2890a399422258a77f357b9cc9be8d680"}, - {file = "pillow-10.4.0-cp38-cp38-manylinux_2_28_aarch64.whl", hash = "sha256:43efea75eb06b95d1631cb784aa40156177bf9dd5b4b03ff38979e048258bc6b"}, - {file = "pillow-10.4.0-cp38-cp38-manylinux_2_28_x86_64.whl", hash = "sha256:950be4d8ba92aca4b2bb0741285a46bfae3ca699ef913ec8416c1b78eadd64cd"}, - {file = "pillow-10.4.0-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:d7480af14364494365e89d6fddc510a13e5a2c3584cb19ef65415ca57252fb84"}, - {file = "pillow-10.4.0-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:73664fe514b34c8f02452ffb73b7a92c6774e39a647087f83d67f010eb9a0cf0"}, - {file = "pillow-10.4.0-cp38-cp38-win32.whl", hash = "sha256:e88d5e6ad0d026fba7bdab8c3f225a69f063f116462c49892b0149e21b6c0a0e"}, - {file = 
"pillow-10.4.0-cp38-cp38-win_amd64.whl", hash = "sha256:5161eef006d335e46895297f642341111945e2c1c899eb406882a6c61a4357ab"}, - {file = "pillow-10.4.0-cp39-cp39-macosx_10_10_x86_64.whl", hash = "sha256:0ae24a547e8b711ccaaf99c9ae3cd975470e1a30caa80a6aaee9a2f19c05701d"}, - {file = "pillow-10.4.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:298478fe4f77a4408895605f3482b6cc6222c018b2ce565c2b6b9c354ac3229b"}, - {file = "pillow-10.4.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:134ace6dc392116566980ee7436477d844520a26a4b1bd4053f6f47d096997fd"}, - {file = "pillow-10.4.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:930044bb7679ab003b14023138b50181899da3f25de50e9dbee23b61b4de2126"}, - {file = "pillow-10.4.0-cp39-cp39-manylinux_2_28_aarch64.whl", hash = "sha256:c76e5786951e72ed3686e122d14c5d7012f16c8303a674d18cdcd6d89557fc5b"}, - {file = "pillow-10.4.0-cp39-cp39-manylinux_2_28_x86_64.whl", hash = "sha256:b2724fdb354a868ddf9a880cb84d102da914e99119211ef7ecbdc613b8c96b3c"}, - {file = "pillow-10.4.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:dbc6ae66518ab3c5847659e9988c3b60dc94ffb48ef9168656e0019a93dbf8a1"}, - {file = "pillow-10.4.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:06b2f7898047ae93fad74467ec3d28fe84f7831370e3c258afa533f81ef7f3df"}, - {file = "pillow-10.4.0-cp39-cp39-win32.whl", hash = "sha256:7970285ab628a3779aecc35823296a7869f889b8329c16ad5a71e4901a3dc4ef"}, - {file = "pillow-10.4.0-cp39-cp39-win_amd64.whl", hash = "sha256:961a7293b2457b405967af9c77dcaa43cc1a8cd50d23c532e62d48ab6cdd56f5"}, - {file = "pillow-10.4.0-cp39-cp39-win_arm64.whl", hash = "sha256:32cda9e3d601a52baccb2856b8ea1fc213c90b340c542dcef77140dfa3278a9e"}, - {file = "pillow-10.4.0-pp310-pypy310_pp73-macosx_10_15_x86_64.whl", hash = "sha256:5b4815f2e65b30f5fbae9dfffa8636d992d49705723fe86a3661806e069352d4"}, - {file = "pillow-10.4.0-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:8f0aef4ef59694b12cadee839e2ba6afeab89c0f39a3adc02ed51d109117b8da"}, - {file = "pillow-10.4.0-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9f4727572e2918acaa9077c919cbbeb73bd2b3ebcfe033b72f858fc9fbef0026"}, - {file = "pillow-10.4.0-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ff25afb18123cea58a591ea0244b92eb1e61a1fd497bf6d6384f09bc3262ec3e"}, - {file = "pillow-10.4.0-pp310-pypy310_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:dc3e2db6ba09ffd7d02ae9141cfa0ae23393ee7687248d46a7507b75d610f4f5"}, - {file = "pillow-10.4.0-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:02a2be69f9c9b8c1e97cf2713e789d4e398c751ecfd9967c18d0ce304efbf885"}, - {file = "pillow-10.4.0-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:0755ffd4a0c6f267cccbae2e9903d95477ca2f77c4fcf3a3a09570001856c8a5"}, - {file = "pillow-10.4.0-pp39-pypy39_pp73-macosx_10_15_x86_64.whl", hash = "sha256:a02364621fe369e06200d4a16558e056fe2805d3468350df3aef21e00d26214b"}, - {file = "pillow-10.4.0-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:1b5dea9831a90e9d0721ec417a80d4cbd7022093ac38a568db2dd78363b00908"}, - {file = "pillow-10.4.0-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9b885f89040bb8c4a1573566bbb2f44f5c505ef6e74cec7ab9068c900047f04b"}, - {file = "pillow-10.4.0-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:87dd88ded2e6d74d31e1e0a99a726a6765cda32d00ba72dc37f0651f306daaa8"}, - {file = 
"pillow-10.4.0-pp39-pypy39_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:2db98790afc70118bd0255c2eeb465e9767ecf1f3c25f9a1abb8ffc8cfd1fe0a"}, - {file = "pillow-10.4.0-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:f7baece4ce06bade126fb84b8af1c33439a76d8a6fd818970215e0560ca28c27"}, - {file = "pillow-10.4.0-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:cfdd747216947628af7b259d274771d84db2268ca062dd5faf373639d00113a3"}, - {file = "pillow-10.4.0.tar.gz", hash = "sha256:166c1cd4d24309b30d61f79f4a9114b7b2313d7450912277855ff5dfd7cd4a06"}, +python-versions = ">=3.9" +groups = ["main"] +files = [ + {file = "pillow-11.1.0-cp310-cp310-macosx_10_10_x86_64.whl", hash = "sha256:e1abe69aca89514737465752b4bcaf8016de61b3be1397a8fc260ba33321b3a8"}, + {file = "pillow-11.1.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:c640e5a06869c75994624551f45e5506e4256562ead981cce820d5ab39ae2192"}, + {file = "pillow-11.1.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a07dba04c5e22824816b2615ad7a7484432d7f540e6fa86af60d2de57b0fcee2"}, + {file = "pillow-11.1.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e267b0ed063341f3e60acd25c05200df4193e15a4a5807075cd71225a2386e26"}, + {file = "pillow-11.1.0-cp310-cp310-manylinux_2_28_aarch64.whl", hash = "sha256:bd165131fd51697e22421d0e467997ad31621b74bfc0b75956608cb2906dda07"}, + {file = "pillow-11.1.0-cp310-cp310-manylinux_2_28_x86_64.whl", hash = "sha256:abc56501c3fd148d60659aae0af6ddc149660469082859fa7b066a298bde9482"}, + {file = "pillow-11.1.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:54ce1c9a16a9561b6d6d8cb30089ab1e5eb66918cb47d457bd996ef34182922e"}, + {file = "pillow-11.1.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:73ddde795ee9b06257dac5ad42fcb07f3b9b813f8c1f7f870f402f4dc54b5269"}, + {file = "pillow-11.1.0-cp310-cp310-win32.whl", hash = "sha256:3a5fe20a7b66e8135d7fd617b13272626a28278d0e578c98720d9ba4b2439d49"}, + {file = "pillow-11.1.0-cp310-cp310-win_amd64.whl", hash = "sha256:b6123aa4a59d75f06e9dd3dac5bf8bc9aa383121bb3dd9a7a612e05eabc9961a"}, + {file = "pillow-11.1.0-cp310-cp310-win_arm64.whl", hash = "sha256:a76da0a31da6fcae4210aa94fd779c65c75786bc9af06289cd1c184451ef7a65"}, + {file = "pillow-11.1.0-cp311-cp311-macosx_10_10_x86_64.whl", hash = "sha256:e06695e0326d05b06833b40b7ef477e475d0b1ba3a6d27da1bb48c23209bf457"}, + {file = "pillow-11.1.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:96f82000e12f23e4f29346e42702b6ed9a2f2fea34a740dd5ffffcc8c539eb35"}, + {file = "pillow-11.1.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a3cd561ded2cf2bbae44d4605837221b987c216cff94f49dfeed63488bb228d2"}, + {file = "pillow-11.1.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f189805c8be5ca5add39e6f899e6ce2ed824e65fb45f3c28cb2841911da19070"}, + {file = "pillow-11.1.0-cp311-cp311-manylinux_2_28_aarch64.whl", hash = "sha256:dd0052e9db3474df30433f83a71b9b23bd9e4ef1de13d92df21a52c0303b8ab6"}, + {file = "pillow-11.1.0-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:837060a8599b8f5d402e97197d4924f05a2e0d68756998345c829c33186217b1"}, + {file = "pillow-11.1.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:aa8dd43daa836b9a8128dbe7d923423e5ad86f50a7a14dc688194b7be5c0dea2"}, + {file = "pillow-11.1.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:0a2f91f8a8b367e7a57c6e91cd25af510168091fb89ec5146003e424e1558a96"}, + {file = "pillow-11.1.0-cp311-cp311-win32.whl", hash = 
"sha256:c12fc111ef090845de2bb15009372175d76ac99969bdf31e2ce9b42e4b8cd88f"}, + {file = "pillow-11.1.0-cp311-cp311-win_amd64.whl", hash = "sha256:fbd43429d0d7ed6533b25fc993861b8fd512c42d04514a0dd6337fb3ccf22761"}, + {file = "pillow-11.1.0-cp311-cp311-win_arm64.whl", hash = "sha256:f7955ecf5609dee9442cbface754f2c6e541d9e6eda87fad7f7a989b0bdb9d71"}, + {file = "pillow-11.1.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:2062ffb1d36544d42fcaa277b069c88b01bb7298f4efa06731a7fd6cc290b81a"}, + {file = "pillow-11.1.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:a85b653980faad27e88b141348707ceeef8a1186f75ecc600c395dcac19f385b"}, + {file = "pillow-11.1.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9409c080586d1f683df3f184f20e36fb647f2e0bc3988094d4fd8c9f4eb1b3b3"}, + {file = "pillow-11.1.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7fdadc077553621911f27ce206ffcbec7d3f8d7b50e0da39f10997e8e2bb7f6a"}, + {file = "pillow-11.1.0-cp312-cp312-manylinux_2_28_aarch64.whl", hash = "sha256:93a18841d09bcdd774dcdc308e4537e1f867b3dec059c131fde0327899734aa1"}, + {file = "pillow-11.1.0-cp312-cp312-manylinux_2_28_x86_64.whl", hash = "sha256:9aa9aeddeed452b2f616ff5507459e7bab436916ccb10961c4a382cd3e03f47f"}, + {file = "pillow-11.1.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:3cdcdb0b896e981678eee140d882b70092dac83ac1cdf6b3a60e2216a73f2b91"}, + {file = "pillow-11.1.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:36ba10b9cb413e7c7dfa3e189aba252deee0602c86c309799da5a74009ac7a1c"}, + {file = "pillow-11.1.0-cp312-cp312-win32.whl", hash = "sha256:cfd5cd998c2e36a862d0e27b2df63237e67273f2fc78f47445b14e73a810e7e6"}, + {file = "pillow-11.1.0-cp312-cp312-win_amd64.whl", hash = "sha256:a697cd8ba0383bba3d2d3ada02b34ed268cb548b369943cd349007730c92bddf"}, + {file = "pillow-11.1.0-cp312-cp312-win_arm64.whl", hash = "sha256:4dd43a78897793f60766563969442020e90eb7847463eca901e41ba186a7d4a5"}, + {file = "pillow-11.1.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:ae98e14432d458fc3de11a77ccb3ae65ddce70f730e7c76140653048c71bfcbc"}, + {file = "pillow-11.1.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:cc1331b6d5a6e144aeb5e626f4375f5b7ae9934ba620c0ac6b3e43d5e683a0f0"}, + {file = "pillow-11.1.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:758e9d4ef15d3560214cddbc97b8ef3ef86ce04d62ddac17ad39ba87e89bd3b1"}, + {file = "pillow-11.1.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b523466b1a31d0dcef7c5be1f20b942919b62fd6e9a9be199d035509cbefc0ec"}, + {file = "pillow-11.1.0-cp313-cp313-manylinux_2_28_aarch64.whl", hash = "sha256:9044b5e4f7083f209c4e35aa5dd54b1dd5b112b108648f5c902ad586d4f945c5"}, + {file = "pillow-11.1.0-cp313-cp313-manylinux_2_28_x86_64.whl", hash = "sha256:3764d53e09cdedd91bee65c2527815d315c6b90d7b8b79759cc48d7bf5d4f114"}, + {file = "pillow-11.1.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:31eba6bbdd27dde97b0174ddf0297d7a9c3a507a8a1480e1e60ef914fe23d352"}, + {file = "pillow-11.1.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:b5d658fbd9f0d6eea113aea286b21d3cd4d3fd978157cbf2447a6035916506d3"}, + {file = "pillow-11.1.0-cp313-cp313-win32.whl", hash = "sha256:f86d3a7a9af5d826744fabf4afd15b9dfef44fe69a98541f666f66fbb8d3fef9"}, + {file = "pillow-11.1.0-cp313-cp313-win_amd64.whl", hash = "sha256:593c5fd6be85da83656b93ffcccc2312d2d149d251e98588b14fbc288fd8909c"}, + {file = "pillow-11.1.0-cp313-cp313-win_arm64.whl", hash = 
"sha256:11633d58b6ee5733bde153a8dafd25e505ea3d32e261accd388827ee987baf65"}, + {file = "pillow-11.1.0-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:70ca5ef3b3b1c4a0812b5c63c57c23b63e53bc38e758b37a951e5bc466449861"}, + {file = "pillow-11.1.0-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:8000376f139d4d38d6851eb149b321a52bb8893a88dae8ee7d95840431977081"}, + {file = "pillow-11.1.0-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9ee85f0696a17dd28fbcfceb59f9510aa71934b483d1f5601d1030c3c8304f3c"}, + {file = "pillow-11.1.0-cp313-cp313t-manylinux_2_28_x86_64.whl", hash = "sha256:dd0e081319328928531df7a0e63621caf67652c8464303fd102141b785ef9547"}, + {file = "pillow-11.1.0-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:e63e4e5081de46517099dc30abe418122f54531a6ae2ebc8680bcd7096860eab"}, + {file = "pillow-11.1.0-cp313-cp313t-win32.whl", hash = "sha256:dda60aa465b861324e65a78c9f5cf0f4bc713e4309f83bc387be158b077963d9"}, + {file = "pillow-11.1.0-cp313-cp313t-win_amd64.whl", hash = "sha256:ad5db5781c774ab9a9b2c4302bbf0c1014960a0a7be63278d13ae6fdf88126fe"}, + {file = "pillow-11.1.0-cp313-cp313t-win_arm64.whl", hash = "sha256:67cd427c68926108778a9005f2a04adbd5e67c442ed21d95389fe1d595458756"}, + {file = "pillow-11.1.0-cp39-cp39-macosx_10_10_x86_64.whl", hash = "sha256:bf902d7413c82a1bfa08b06a070876132a5ae6b2388e2712aab3a7cbc02205c6"}, + {file = "pillow-11.1.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:c1eec9d950b6fe688edee07138993e54ee4ae634c51443cfb7c1e7613322718e"}, + {file = "pillow-11.1.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8e275ee4cb11c262bd108ab2081f750db2a1c0b8c12c1897f27b160c8bd57bbc"}, + {file = "pillow-11.1.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4db853948ce4e718f2fc775b75c37ba2efb6aaea41a1a5fc57f0af59eee774b2"}, + {file = "pillow-11.1.0-cp39-cp39-manylinux_2_28_aarch64.whl", hash = "sha256:ab8a209b8485d3db694fa97a896d96dd6533d63c22829043fd9de627060beade"}, + {file = "pillow-11.1.0-cp39-cp39-manylinux_2_28_x86_64.whl", hash = "sha256:54251ef02a2309b5eec99d151ebf5c9904b77976c8abdcbce7891ed22df53884"}, + {file = "pillow-11.1.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:5bb94705aea800051a743aa4874bb1397d4695fb0583ba5e425ee0328757f196"}, + {file = "pillow-11.1.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:89dbdb3e6e9594d512780a5a1c42801879628b38e3efc7038094430844e271d8"}, + {file = "pillow-11.1.0-cp39-cp39-win32.whl", hash = "sha256:e5449ca63da169a2e6068dd0e2fcc8d91f9558aba89ff6d02121ca8ab11e79e5"}, + {file = "pillow-11.1.0-cp39-cp39-win_amd64.whl", hash = "sha256:3362c6ca227e65c54bf71a5f88b3d4565ff1bcbc63ae72c34b07bbb1cc59a43f"}, + {file = "pillow-11.1.0-cp39-cp39-win_arm64.whl", hash = "sha256:b20be51b37a75cc54c2c55def3fa2c65bb94ba859dde241cd0a4fd302de5ae0a"}, + {file = "pillow-11.1.0-pp310-pypy310_pp73-macosx_10_15_x86_64.whl", hash = "sha256:8c730dc3a83e5ac137fbc92dfcfe1511ce3b2b5d7578315b63dbbb76f7f51d90"}, + {file = "pillow-11.1.0-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:7d33d2fae0e8b170b6a6c57400e077412240f6f5bb2a342cf1ee512a787942bb"}, + {file = "pillow-11.1.0-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a8d65b38173085f24bc07f8b6c505cbb7418009fa1a1fcb111b1f4961814a442"}, + {file = "pillow-11.1.0-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:015c6e863faa4779251436db398ae75051469f7c903b043a48f078e437656f83"}, + {file = 
"pillow-11.1.0-pp310-pypy310_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:d44ff19eea13ae4acdaaab0179fa68c0c6f2f45d66a4d8ec1eda7d6cecbcc15f"}, + {file = "pillow-11.1.0-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:d3d8da4a631471dfaf94c10c85f5277b1f8e42ac42bade1ac67da4b4a7359b73"}, + {file = "pillow-11.1.0-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:4637b88343166249fe8aa94e7c4a62a180c4b3898283bb5d3d2fd5fe10d8e4e0"}, + {file = "pillow-11.1.0.tar.gz", hash = "sha256:368da70808b36d73b4b390a8ffac11069f8a5c85f29eff1f1b01bcf3ef5b2a20"}, ] [package.extras] -docs = ["furo", "olefile", "sphinx (>=7.3)", "sphinx-copybutton", "sphinx-inline-tabs", "sphinxext-opengraph"] +docs = ["furo", "olefile", "sphinx (>=8.1)", "sphinx-copybutton", "sphinx-inline-tabs", "sphinxext-opengraph"] fpx = ["olefile"] mic = ["olefile"] -tests = ["check-manifest", "coverage", "defusedxml", "markdown2", "olefile", "packaging", "pyroma", "pytest", "pytest-cov", "pytest-timeout"] +tests = ["check-manifest", "coverage (>=7.4.2)", "defusedxml", "markdown2", "olefile", "packaging", "pyroma", "pytest", "pytest-cov", "pytest-timeout", "trove-classifiers (>=2024.10.12)"] typing = ["typing-extensions"] xmp = ["defusedxml"] @@ -2274,6 +2707,7 @@ version = "4.3.6" description = "A small Python package for determining appropriate platform-specific dirs, e.g. a `user data dir`." optional = false python-versions = ">=3.8" +groups = ["dev"] files = [ {file = "platformdirs-4.3.6-py3-none-any.whl", hash = "sha256:73e575e1408ab8103900836b97580d5307456908a03e92031bab39e4554cc3fb"}, {file = "platformdirs-4.3.6.tar.gz", hash = "sha256:357fb2acbc885b0419afd3ce3ed34564c13c9b95c89360cd9563f73aa5e2b907"}, @@ -2290,6 +2724,7 @@ version = "1.5.0" description = "plugin and hook calling mechanisms for python" optional = false python-versions = ">=3.8" +groups = ["dev"] files = [ {file = "pluggy-1.5.0-py3-none-any.whl", hash = "sha256:44e1ad92c8ca002de6377e165f3e0f1be63266ab4d554740532335b9d75ea669"}, {file = "pluggy-1.5.0.tar.gz", hash = "sha256:2cffa88e94fdc978c4c574f15f9e59b7f4201d439195c3715ca9e2486f1d0cf1"}, @@ -2305,6 +2740,7 @@ version = "2.21.0" description = "A framework for managing and maintaining multi-language pre-commit hooks." 
optional = false python-versions = ">=3.7" +groups = ["dev"] files = [ {file = "pre_commit-2.21.0-py2.py3-none-any.whl", hash = "sha256:e2f91727039fc39a92f58a588a25b87f936de6567eed4f0e673e0507edc75bad"}, {file = "pre_commit-2.21.0.tar.gz", hash = "sha256:31ef31af7e474a8d8995027fefdfcf509b5c913ff31f2015b4ec4beb26a6f658"}, @@ -2317,133 +2753,151 @@ nodeenv = ">=0.11.1" pyyaml = ">=5.1" virtualenv = ">=20.10.0" +[[package]] +name = "prettytable" +version = "3.14.0" +description = "A simple Python library for easily displaying tabular data in a visually appealing ASCII table format" +optional = false +python-versions = ">=3.9" +groups = ["main"] +files = [ + {file = "prettytable-3.14.0-py3-none-any.whl", hash = "sha256:61d5c68f04a94acc73c7aac64f0f380f5bed4d2959d59edc6e4cbb7a0e7b55c4"}, + {file = "prettytable-3.14.0.tar.gz", hash = "sha256:b804b8d51db23959b96b329094debdbbdf10c8c3aa75958c5988cfd7f78501dd"}, +] + +[package.dependencies] +wcwidth = "*" + +[package.extras] +tests = ["pytest", "pytest-cov", "pytest-lazy-fixtures"] + [[package]] name = "protobuf" -version = "5.28.2" +version = "5.29.3" description = "" optional = false python-versions = ">=3.8" +groups = ["main"] files = [ - {file = "protobuf-5.28.2-cp310-abi3-win32.whl", hash = "sha256:eeea10f3dc0ac7e6b4933d32db20662902b4ab81bf28df12218aa389e9c2102d"}, - {file = "protobuf-5.28.2-cp310-abi3-win_amd64.whl", hash = "sha256:2c69461a7fcc8e24be697624c09a839976d82ae75062b11a0972e41fd2cd9132"}, - {file = "protobuf-5.28.2-cp38-abi3-macosx_10_9_universal2.whl", hash = "sha256:a8b9403fc70764b08d2f593ce44f1d2920c5077bf7d311fefec999f8c40f78b7"}, - {file = "protobuf-5.28.2-cp38-abi3-manylinux2014_aarch64.whl", hash = "sha256:35cfcb15f213449af7ff6198d6eb5f739c37d7e4f1c09b5d0641babf2cc0c68f"}, - {file = "protobuf-5.28.2-cp38-abi3-manylinux2014_x86_64.whl", hash = "sha256:5e8a95246d581eef20471b5d5ba010d55f66740942b95ba9b872d918c459452f"}, - {file = "protobuf-5.28.2-cp38-cp38-win32.whl", hash = "sha256:87317e9bcda04a32f2ee82089a204d3a2f0d3c8aeed16568c7daf4756e4f1fe0"}, - {file = "protobuf-5.28.2-cp38-cp38-win_amd64.whl", hash = "sha256:c0ea0123dac3399a2eeb1a1443d82b7afc9ff40241433296769f7da42d142ec3"}, - {file = "protobuf-5.28.2-cp39-cp39-win32.whl", hash = "sha256:ca53faf29896c526863366a52a8f4d88e69cd04ec9571ed6082fa117fac3ab36"}, - {file = "protobuf-5.28.2-cp39-cp39-win_amd64.whl", hash = "sha256:8ddc60bf374785fb7cb12510b267f59067fa10087325b8e1855b898a0d81d276"}, - {file = "protobuf-5.28.2-py3-none-any.whl", hash = "sha256:52235802093bd8a2811abbe8bf0ab9c5f54cca0a751fdd3f6ac2a21438bffece"}, - {file = "protobuf-5.28.2.tar.gz", hash = "sha256:59379674ff119717404f7454647913787034f03fe7049cbef1d74a97bb4593f0"}, + {file = "protobuf-5.29.3-cp310-abi3-win32.whl", hash = "sha256:3ea51771449e1035f26069c4c7fd51fba990d07bc55ba80701c78f886bf9c888"}, + {file = "protobuf-5.29.3-cp310-abi3-win_amd64.whl", hash = "sha256:a4fa6f80816a9a0678429e84973f2f98cbc218cca434abe8db2ad0bffc98503a"}, + {file = "protobuf-5.29.3-cp38-abi3-macosx_10_9_universal2.whl", hash = "sha256:a8434404bbf139aa9e1300dbf989667a83d42ddda9153d8ab76e0d5dcaca484e"}, + {file = "protobuf-5.29.3-cp38-abi3-manylinux2014_aarch64.whl", hash = "sha256:daaf63f70f25e8689c072cfad4334ca0ac1d1e05a92fc15c54eb9cf23c3efd84"}, + {file = "protobuf-5.29.3-cp38-abi3-manylinux2014_x86_64.whl", hash = "sha256:c027e08a08be10b67c06bf2370b99c811c466398c357e615ca88c91c07f0910f"}, + {file = "protobuf-5.29.3-cp38-cp38-win32.whl", hash = "sha256:84a57163a0ccef3f96e4b6a20516cedcf5bb3a95a657131c5c3ac62200d23252"}, + {file = 
"protobuf-5.29.3-cp38-cp38-win_amd64.whl", hash = "sha256:b89c115d877892a512f79a8114564fb435943b59067615894c3b13cd3e1fa107"}, + {file = "protobuf-5.29.3-cp39-cp39-win32.whl", hash = "sha256:0eb32bfa5219fc8d4111803e9a690658aa2e6366384fd0851064b963b6d1f2a7"}, + {file = "protobuf-5.29.3-cp39-cp39-win_amd64.whl", hash = "sha256:6ce8cc3389a20693bfde6c6562e03474c40851b44975c9b2bf6df7d8c4f864da"}, + {file = "protobuf-5.29.3-py3-none-any.whl", hash = "sha256:0a18ed4a24198528f2333802eb075e59dea9d679ab7a6c5efb017a59004d849f"}, + {file = "protobuf-5.29.3.tar.gz", hash = "sha256:5da0f41edaf117bde316404bad1a486cb4ededf8e4a54891296f648e8e076620"}, ] [[package]] name = "psutil" -version = "5.9.8" +version = "6.1.1" description = "Cross-platform lib for process and system monitoring in Python." optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*" -files = [ - {file = "psutil-5.9.8-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:26bd09967ae00920df88e0352a91cff1a78f8d69b3ecabbfe733610c0af486c8"}, - {file = "psutil-5.9.8-cp27-cp27m-manylinux2010_i686.whl", hash = "sha256:05806de88103b25903dff19bb6692bd2e714ccf9e668d050d144012055cbca73"}, - {file = "psutil-5.9.8-cp27-cp27m-manylinux2010_x86_64.whl", hash = "sha256:611052c4bc70432ec770d5d54f64206aa7203a101ec273a0cd82418c86503bb7"}, - {file = "psutil-5.9.8-cp27-cp27mu-manylinux2010_i686.whl", hash = "sha256:50187900d73c1381ba1454cf40308c2bf6f34268518b3f36a9b663ca87e65e36"}, - {file = "psutil-5.9.8-cp27-cp27mu-manylinux2010_x86_64.whl", hash = "sha256:02615ed8c5ea222323408ceba16c60e99c3f91639b07da6373fb7e6539abc56d"}, - {file = "psutil-5.9.8-cp27-none-win32.whl", hash = "sha256:36f435891adb138ed3c9e58c6af3e2e6ca9ac2f365efe1f9cfef2794e6c93b4e"}, - {file = "psutil-5.9.8-cp27-none-win_amd64.whl", hash = "sha256:bd1184ceb3f87651a67b2708d4c3338e9b10c5df903f2e3776b62303b26cb631"}, - {file = "psutil-5.9.8-cp36-abi3-macosx_10_9_x86_64.whl", hash = "sha256:aee678c8720623dc456fa20659af736241f575d79429a0e5e9cf88ae0605cc81"}, - {file = "psutil-5.9.8-cp36-abi3-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:8cb6403ce6d8e047495a701dc7c5bd788add903f8986d523e3e20b98b733e421"}, - {file = "psutil-5.9.8-cp36-abi3-manylinux_2_12_x86_64.manylinux2010_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d06016f7f8625a1825ba3732081d77c94589dca78b7a3fc072194851e88461a4"}, - {file = "psutil-5.9.8-cp36-cp36m-win32.whl", hash = "sha256:7d79560ad97af658a0f6adfef8b834b53f64746d45b403f225b85c5c2c140eee"}, - {file = "psutil-5.9.8-cp36-cp36m-win_amd64.whl", hash = "sha256:27cc40c3493bb10de1be4b3f07cae4c010ce715290a5be22b98493509c6299e2"}, - {file = "psutil-5.9.8-cp37-abi3-win32.whl", hash = "sha256:bc56c2a1b0d15aa3eaa5a60c9f3f8e3e565303b465dbf57a1b730e7a2b9844e0"}, - {file = "psutil-5.9.8-cp37-abi3-win_amd64.whl", hash = "sha256:8db4c1b57507eef143a15a6884ca10f7c73876cdf5d51e713151c1236a0e68cf"}, - {file = "psutil-5.9.8-cp38-abi3-macosx_11_0_arm64.whl", hash = "sha256:d16bbddf0693323b8c6123dd804100241da461e41d6e332fb0ba6058f630f8c8"}, - {file = "psutil-5.9.8.tar.gz", hash = "sha256:6be126e3225486dff286a8fb9a06246a5253f4c7c53b475ea5f5ac934e64194c"}, +python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,>=2.7" +groups = ["main"] +files = [ + {file = "psutil-6.1.1-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:9ccc4316f24409159897799b83004cb1e24f9819b0dcf9c0b68bdcb6cefee6a8"}, + {file = "psutil-6.1.1-cp27-cp27m-manylinux2010_i686.whl", hash = 
"sha256:ca9609c77ea3b8481ab005da74ed894035936223422dc591d6772b147421f777"}, + {file = "psutil-6.1.1-cp27-cp27m-manylinux2010_x86_64.whl", hash = "sha256:8df0178ba8a9e5bc84fed9cfa61d54601b371fbec5c8eebad27575f1e105c0d4"}, + {file = "psutil-6.1.1-cp27-cp27mu-manylinux2010_i686.whl", hash = "sha256:1924e659d6c19c647e763e78670a05dbb7feaf44a0e9c94bf9e14dfc6ba50468"}, + {file = "psutil-6.1.1-cp27-cp27mu-manylinux2010_x86_64.whl", hash = "sha256:018aeae2af92d943fdf1da6b58665124897cfc94faa2ca92098838f83e1b1bca"}, + {file = "psutil-6.1.1-cp27-none-win32.whl", hash = "sha256:6d4281f5bbca041e2292be3380ec56a9413b790579b8e593b1784499d0005dac"}, + {file = "psutil-6.1.1-cp27-none-win_amd64.whl", hash = "sha256:c777eb75bb33c47377c9af68f30e9f11bc78e0f07fbf907be4a5d70b2fe5f030"}, + {file = "psutil-6.1.1-cp36-abi3-macosx_10_9_x86_64.whl", hash = "sha256:fc0ed7fe2231a444fc219b9c42d0376e0a9a1a72f16c5cfa0f68d19f1a0663e8"}, + {file = "psutil-6.1.1-cp36-abi3-macosx_11_0_arm64.whl", hash = "sha256:0bdd4eab935276290ad3cb718e9809412895ca6b5b334f5a9111ee6d9aff9377"}, + {file = "psutil-6.1.1-cp36-abi3-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b6e06c20c05fe95a3d7302d74e7097756d4ba1247975ad6905441ae1b5b66003"}, + {file = "psutil-6.1.1-cp36-abi3-manylinux_2_12_x86_64.manylinux2010_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:97f7cb9921fbec4904f522d972f0c0e1f4fabbdd4e0287813b21215074a0f160"}, + {file = "psutil-6.1.1-cp36-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:33431e84fee02bc84ea36d9e2c4a6d395d479c9dd9bba2376c1f6ee8f3a4e0b3"}, + {file = "psutil-6.1.1-cp36-cp36m-win32.whl", hash = "sha256:384636b1a64b47814437d1173be1427a7c83681b17a450bfc309a1953e329603"}, + {file = "psutil-6.1.1-cp36-cp36m-win_amd64.whl", hash = "sha256:8be07491f6ebe1a693f17d4f11e69d0dc1811fa082736500f649f79df7735303"}, + {file = "psutil-6.1.1-cp37-abi3-win32.whl", hash = "sha256:eaa912e0b11848c4d9279a93d7e2783df352b082f40111e078388701fd479e53"}, + {file = "psutil-6.1.1-cp37-abi3-win_amd64.whl", hash = "sha256:f35cfccb065fff93529d2afb4a2e89e363fe63ca1e4a5da22b603a85833c2649"}, + {file = "psutil-6.1.1.tar.gz", hash = "sha256:cf8496728c18f2d0b45198f06895be52f36611711746b7f30c464b422b50e2f5"}, ] [package.extras] -test = ["enum34", "ipaddress", "mock", "pywin32", "wmi"] +dev = ["abi3audit", "black", "check-manifest", "coverage", "packaging", "pylint", "pyperf", "pypinfo", "pytest-cov", "requests", "rstcheck", "ruff", "sphinx", "sphinx_rtd_theme", "toml-sort", "twine", "virtualenv", "vulture", "wheel"] +test = ["pytest", "pytest-xdist", "setuptools"] [[package]] name = "psycopg2-binary" -version = "2.9.9" +version = "2.9.10" description = "psycopg2 - Python-PostgreSQL Database Adapter" optional = false -python-versions = ">=3.7" -files = [ - {file = "psycopg2-binary-2.9.9.tar.gz", hash = "sha256:7f01846810177d829c7692f1f5ada8096762d9172af1b1a28d4ab5b77c923c1c"}, - {file = "psycopg2_binary-2.9.9-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:c2470da5418b76232f02a2fcd2229537bb2d5a7096674ce61859c3229f2eb202"}, - {file = "psycopg2_binary-2.9.9-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:c6af2a6d4b7ee9615cbb162b0738f6e1fd1f5c3eda7e5da17861eacf4c717ea7"}, - {file = "psycopg2_binary-2.9.9-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:75723c3c0fbbf34350b46a3199eb50638ab22a0228f93fb472ef4d9becc2382b"}, - {file = "psycopg2_binary-2.9.9-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = 
"sha256:83791a65b51ad6ee6cf0845634859d69a038ea9b03d7b26e703f94c7e93dbcf9"}, - {file = "psycopg2_binary-2.9.9-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:0ef4854e82c09e84cc63084a9e4ccd6d9b154f1dbdd283efb92ecd0b5e2b8c84"}, - {file = "psycopg2_binary-2.9.9-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ed1184ab8f113e8d660ce49a56390ca181f2981066acc27cf637d5c1e10ce46e"}, - {file = "psycopg2_binary-2.9.9-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:d2997c458c690ec2bc6b0b7ecbafd02b029b7b4283078d3b32a852a7ce3ddd98"}, - {file = "psycopg2_binary-2.9.9-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:b58b4710c7f4161b5e9dcbe73bb7c62d65670a87df7bcce9e1faaad43e715245"}, - {file = "psycopg2_binary-2.9.9-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:0c009475ee389757e6e34611d75f6e4f05f0cf5ebb76c6037508318e1a1e0d7e"}, - {file = "psycopg2_binary-2.9.9-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:8dbf6d1bc73f1d04ec1734bae3b4fb0ee3cb2a493d35ede9badbeb901fb40f6f"}, - {file = "psycopg2_binary-2.9.9-cp310-cp310-win32.whl", hash = "sha256:3f78fd71c4f43a13d342be74ebbc0666fe1f555b8837eb113cb7416856c79682"}, - {file = "psycopg2_binary-2.9.9-cp310-cp310-win_amd64.whl", hash = "sha256:876801744b0dee379e4e3c38b76fc89f88834bb15bf92ee07d94acd06ec890a0"}, - {file = "psycopg2_binary-2.9.9-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:ee825e70b1a209475622f7f7b776785bd68f34af6e7a46e2e42f27b659b5bc26"}, - {file = "psycopg2_binary-2.9.9-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:1ea665f8ce695bcc37a90ee52de7a7980be5161375d42a0b6c6abedbf0d81f0f"}, - {file = "psycopg2_binary-2.9.9-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:143072318f793f53819048fdfe30c321890af0c3ec7cb1dfc9cc87aa88241de2"}, - {file = "psycopg2_binary-2.9.9-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c332c8d69fb64979ebf76613c66b985414927a40f8defa16cf1bc028b7b0a7b0"}, - {file = "psycopg2_binary-2.9.9-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f7fc5a5acafb7d6ccca13bfa8c90f8c51f13d8fb87d95656d3950f0158d3ce53"}, - {file = "psycopg2_binary-2.9.9-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:977646e05232579d2e7b9c59e21dbe5261f403a88417f6a6512e70d3f8a046be"}, - {file = "psycopg2_binary-2.9.9-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:b6356793b84728d9d50ead16ab43c187673831e9d4019013f1402c41b1db9b27"}, - {file = "psycopg2_binary-2.9.9-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:bc7bb56d04601d443f24094e9e31ae6deec9ccb23581f75343feebaf30423359"}, - {file = "psycopg2_binary-2.9.9-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:77853062a2c45be16fd6b8d6de2a99278ee1d985a7bd8b103e97e41c034006d2"}, - {file = "psycopg2_binary-2.9.9-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:78151aa3ec21dccd5cdef6c74c3e73386dcdfaf19bced944169697d7ac7482fc"}, - {file = "psycopg2_binary-2.9.9-cp311-cp311-win32.whl", hash = "sha256:dc4926288b2a3e9fd7b50dc6a1909a13bbdadfc67d93f3374d984e56f885579d"}, - {file = "psycopg2_binary-2.9.9-cp311-cp311-win_amd64.whl", hash = "sha256:b76bedd166805480ab069612119ea636f5ab8f8771e640ae103e05a4aae3e417"}, - {file = "psycopg2_binary-2.9.9-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:8532fd6e6e2dc57bcb3bc90b079c60de896d2128c5d9d6f24a63875a95a088cf"}, - {file = "psycopg2_binary-2.9.9-cp312-cp312-macosx_11_0_arm64.whl", hash = 
"sha256:b0605eaed3eb239e87df0d5e3c6489daae3f7388d455d0c0b4df899519c6a38d"}, - {file = "psycopg2_binary-2.9.9-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8f8544b092a29a6ddd72f3556a9fcf249ec412e10ad28be6a0c0d948924f2212"}, - {file = "psycopg2_binary-2.9.9-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2d423c8d8a3c82d08fe8af900ad5b613ce3632a1249fd6a223941d0735fce493"}, - {file = "psycopg2_binary-2.9.9-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:2e5afae772c00980525f6d6ecf7cbca55676296b580c0e6abb407f15f3706996"}, - {file = "psycopg2_binary-2.9.9-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6e6f98446430fdf41bd36d4faa6cb409f5140c1c2cf58ce0bbdaf16af7d3f119"}, - {file = "psycopg2_binary-2.9.9-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:c77e3d1862452565875eb31bdb45ac62502feabbd53429fdc39a1cc341d681ba"}, - {file = "psycopg2_binary-2.9.9-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:cb16c65dcb648d0a43a2521f2f0a2300f40639f6f8c1ecbc662141e4e3e1ee07"}, - {file = "psycopg2_binary-2.9.9-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:911dda9c487075abd54e644ccdf5e5c16773470a6a5d3826fda76699410066fb"}, - {file = "psycopg2_binary-2.9.9-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:57fede879f08d23c85140a360c6a77709113efd1c993923c59fde17aa27599fe"}, - {file = "psycopg2_binary-2.9.9-cp312-cp312-win32.whl", hash = "sha256:64cf30263844fa208851ebb13b0732ce674d8ec6a0c86a4e160495d299ba3c93"}, - {file = "psycopg2_binary-2.9.9-cp312-cp312-win_amd64.whl", hash = "sha256:81ff62668af011f9a48787564ab7eded4e9fb17a4a6a74af5ffa6a457400d2ab"}, - {file = "psycopg2_binary-2.9.9-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:2293b001e319ab0d869d660a704942c9e2cce19745262a8aba2115ef41a0a42a"}, - {file = "psycopg2_binary-2.9.9-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:03ef7df18daf2c4c07e2695e8cfd5ee7f748a1d54d802330985a78d2a5a6dca9"}, - {file = "psycopg2_binary-2.9.9-cp37-cp37m-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0a602ea5aff39bb9fac6308e9c9d82b9a35c2bf288e184a816002c9fae930b77"}, - {file = "psycopg2_binary-2.9.9-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8359bf4791968c5a78c56103702000105501adb557f3cf772b2c207284273984"}, - {file = "psycopg2_binary-2.9.9-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:275ff571376626195ab95a746e6a04c7df8ea34638b99fc11160de91f2fef503"}, - {file = "psycopg2_binary-2.9.9-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:f9b5571d33660d5009a8b3c25dc1db560206e2d2f89d3df1cb32d72c0d117d52"}, - {file = "psycopg2_binary-2.9.9-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:420f9bbf47a02616e8554e825208cb947969451978dceb77f95ad09c37791dae"}, - {file = "psycopg2_binary-2.9.9-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:4154ad09dac630a0f13f37b583eae260c6aa885d67dfbccb5b02c33f31a6d420"}, - {file = "psycopg2_binary-2.9.9-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:a148c5d507bb9b4f2030a2025c545fccb0e1ef317393eaba42e7eabd28eb6041"}, - {file = "psycopg2_binary-2.9.9-cp37-cp37m-win32.whl", hash = "sha256:68fc1f1ba168724771e38bee37d940d2865cb0f562380a1fb1ffb428b75cb692"}, - {file = "psycopg2_binary-2.9.9-cp37-cp37m-win_amd64.whl", hash = "sha256:281309265596e388ef483250db3640e5f414168c5a67e9c665cafce9492eda2f"}, - {file = "psycopg2_binary-2.9.9-cp38-cp38-macosx_10_9_x86_64.whl", hash = 
"sha256:60989127da422b74a04345096c10d416c2b41bd7bf2a380eb541059e4e999980"}, - {file = "psycopg2_binary-2.9.9-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:246b123cc54bb5361588acc54218c8c9fb73068bf227a4a531d8ed56fa3ca7d6"}, - {file = "psycopg2_binary-2.9.9-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:34eccd14566f8fe14b2b95bb13b11572f7c7d5c36da61caf414d23b91fcc5d94"}, - {file = "psycopg2_binary-2.9.9-cp38-cp38-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:18d0ef97766055fec15b5de2c06dd8e7654705ce3e5e5eed3b6651a1d2a9a152"}, - {file = "psycopg2_binary-2.9.9-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d3f82c171b4ccd83bbaf35aa05e44e690113bd4f3b7b6cc54d2219b132f3ae55"}, - {file = "psycopg2_binary-2.9.9-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ead20f7913a9c1e894aebe47cccf9dc834e1618b7aa96155d2091a626e59c972"}, - {file = "psycopg2_binary-2.9.9-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:ca49a8119c6cbd77375ae303b0cfd8c11f011abbbd64601167ecca18a87e7cdd"}, - {file = "psycopg2_binary-2.9.9-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:323ba25b92454adb36fa425dc5cf6f8f19f78948cbad2e7bc6cdf7b0d7982e59"}, - {file = "psycopg2_binary-2.9.9-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:1236ed0952fbd919c100bc839eaa4a39ebc397ed1c08a97fc45fee2a595aa1b3"}, - {file = "psycopg2_binary-2.9.9-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:729177eaf0aefca0994ce4cffe96ad3c75e377c7b6f4efa59ebf003b6d398716"}, - {file = "psycopg2_binary-2.9.9-cp38-cp38-win32.whl", hash = "sha256:804d99b24ad523a1fe18cc707bf741670332f7c7412e9d49cb5eab67e886b9b5"}, - {file = "psycopg2_binary-2.9.9-cp38-cp38-win_amd64.whl", hash = "sha256:a6cdcc3ede532f4a4b96000b6362099591ab4a3e913d70bcbac2b56c872446f7"}, - {file = "psycopg2_binary-2.9.9-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:72dffbd8b4194858d0941062a9766f8297e8868e1dd07a7b36212aaa90f49472"}, - {file = "psycopg2_binary-2.9.9-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:30dcc86377618a4c8f3b72418df92e77be4254d8f89f14b8e8f57d6d43603c0f"}, - {file = "psycopg2_binary-2.9.9-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:31a34c508c003a4347d389a9e6fcc2307cc2150eb516462a7a17512130de109e"}, - {file = "psycopg2_binary-2.9.9-cp39-cp39-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:15208be1c50b99203fe88d15695f22a5bed95ab3f84354c494bcb1d08557df67"}, - {file = "psycopg2_binary-2.9.9-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1873aade94b74715be2246321c8650cabf5a0d098a95bab81145ffffa4c13876"}, - {file = "psycopg2_binary-2.9.9-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3a58c98a7e9c021f357348867f537017057c2ed7f77337fd914d0bedb35dace7"}, - {file = "psycopg2_binary-2.9.9-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:4686818798f9194d03c9129a4d9a702d9e113a89cb03bffe08c6cf799e053291"}, - {file = "psycopg2_binary-2.9.9-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:ebdc36bea43063116f0486869652cb2ed7032dbc59fbcb4445c4862b5c1ecf7f"}, - {file = "psycopg2_binary-2.9.9-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:ca08decd2697fdea0aea364b370b1249d47336aec935f87b8bbfd7da5b2ee9c1"}, - {file = "psycopg2_binary-2.9.9-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:ac05fb791acf5e1a3e39402641827780fe44d27e72567a000412c648a85ba860"}, - {file = "psycopg2_binary-2.9.9-cp39-cp39-win32.whl", hash = 
"sha256:9dba73be7305b399924709b91682299794887cbbd88e38226ed9f6712eabee90"}, - {file = "psycopg2_binary-2.9.9-cp39-cp39-win_amd64.whl", hash = "sha256:f7ae5d65ccfbebdfa761585228eb4d0df3a8b15cfb53bd953e713e09fbb12957"}, +python-versions = ">=3.8" +groups = ["main"] +files = [ + {file = "psycopg2-binary-2.9.10.tar.gz", hash = "sha256:4b3df0e6990aa98acda57d983942eff13d824135fe2250e6522edaa782a06de2"}, + {file = "psycopg2_binary-2.9.10-cp310-cp310-macosx_12_0_x86_64.whl", hash = "sha256:0ea8e3d0ae83564f2fc554955d327fa081d065c8ca5cc6d2abb643e2c9c1200f"}, + {file = "psycopg2_binary-2.9.10-cp310-cp310-macosx_14_0_arm64.whl", hash = "sha256:3e9c76f0ac6f92ecfc79516a8034a544926430f7b080ec5a0537bca389ee0906"}, + {file = "psycopg2_binary-2.9.10-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2ad26b467a405c798aaa1458ba09d7e2b6e5f96b1ce0ac15d82fd9f95dc38a92"}, + {file = "psycopg2_binary-2.9.10-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:270934a475a0e4b6925b5f804e3809dd5f90f8613621d062848dd82f9cd62007"}, + {file = "psycopg2_binary-2.9.10-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:48b338f08d93e7be4ab2b5f1dbe69dc5e9ef07170fe1f86514422076d9c010d0"}, + {file = "psycopg2_binary-2.9.10-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7f4152f8f76d2023aac16285576a9ecd2b11a9895373a1f10fd9db54b3ff06b4"}, + {file = "psycopg2_binary-2.9.10-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:32581b3020c72d7a421009ee1c6bf4a131ef5f0a968fab2e2de0c9d2bb4577f1"}, + {file = "psycopg2_binary-2.9.10-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:2ce3e21dc3437b1d960521eca599d57408a695a0d3c26797ea0f72e834c7ffe5"}, + {file = "psycopg2_binary-2.9.10-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:e984839e75e0b60cfe75e351db53d6db750b00de45644c5d1f7ee5d1f34a1ce5"}, + {file = "psycopg2_binary-2.9.10-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:3c4745a90b78e51d9ba06e2088a2fe0c693ae19cc8cb051ccda44e8df8a6eb53"}, + {file = "psycopg2_binary-2.9.10-cp310-cp310-win32.whl", hash = "sha256:e5720a5d25e3b99cd0dc5c8a440570469ff82659bb09431c1439b92caf184d3b"}, + {file = "psycopg2_binary-2.9.10-cp310-cp310-win_amd64.whl", hash = "sha256:3c18f74eb4386bf35e92ab2354a12c17e5eb4d9798e4c0ad3a00783eae7cd9f1"}, + {file = "psycopg2_binary-2.9.10-cp311-cp311-macosx_12_0_x86_64.whl", hash = "sha256:04392983d0bb89a8717772a193cfaac58871321e3ec69514e1c4e0d4957b5aff"}, + {file = "psycopg2_binary-2.9.10-cp311-cp311-macosx_14_0_arm64.whl", hash = "sha256:1a6784f0ce3fec4edc64e985865c17778514325074adf5ad8f80636cd029ef7c"}, + {file = "psycopg2_binary-2.9.10-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b5f86c56eeb91dc3135b3fd8a95dc7ae14c538a2f3ad77a19645cf55bab1799c"}, + {file = "psycopg2_binary-2.9.10-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2b3d2491d4d78b6b14f76881905c7a8a8abcf974aad4a8a0b065273a0ed7a2cb"}, + {file = "psycopg2_binary-2.9.10-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:2286791ececda3a723d1910441c793be44625d86d1a4e79942751197f4d30341"}, + {file = "psycopg2_binary-2.9.10-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:512d29bb12608891e349af6a0cccedce51677725a921c07dba6342beaf576f9a"}, + {file = "psycopg2_binary-2.9.10-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:5a507320c58903967ef7384355a4da7ff3f28132d679aeb23572753cbf2ec10b"}, + {file = 
"psycopg2_binary-2.9.10-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:6d4fa1079cab9018f4d0bd2db307beaa612b0d13ba73b5c6304b9fe2fb441ff7"}, + {file = "psycopg2_binary-2.9.10-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:851485a42dbb0bdc1edcdabdb8557c09c9655dfa2ca0460ff210522e073e319e"}, + {file = "psycopg2_binary-2.9.10-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:35958ec9e46432d9076286dda67942ed6d968b9c3a6a2fd62b48939d1d78bf68"}, + {file = "psycopg2_binary-2.9.10-cp311-cp311-win32.whl", hash = "sha256:ecced182e935529727401b24d76634a357c71c9275b356efafd8a2a91ec07392"}, + {file = "psycopg2_binary-2.9.10-cp311-cp311-win_amd64.whl", hash = "sha256:ee0e8c683a7ff25d23b55b11161c2663d4b099770f6085ff0a20d4505778d6b4"}, + {file = "psycopg2_binary-2.9.10-cp312-cp312-macosx_12_0_x86_64.whl", hash = "sha256:880845dfe1f85d9d5f7c412efea7a08946a46894537e4e5d091732eb1d34d9a0"}, + {file = "psycopg2_binary-2.9.10-cp312-cp312-macosx_14_0_arm64.whl", hash = "sha256:9440fa522a79356aaa482aa4ba500b65f28e5d0e63b801abf6aa152a29bd842a"}, + {file = "psycopg2_binary-2.9.10-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e3923c1d9870c49a2d44f795df0c889a22380d36ef92440ff618ec315757e539"}, + {file = "psycopg2_binary-2.9.10-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7b2c956c028ea5de47ff3a8d6b3cc3330ab45cf0b7c3da35a2d6ff8420896526"}, + {file = "psycopg2_binary-2.9.10-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f758ed67cab30b9a8d2833609513ce4d3bd027641673d4ebc9c067e4d208eec1"}, + {file = "psycopg2_binary-2.9.10-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8cd9b4f2cfab88ed4a9106192de509464b75a906462fb846b936eabe45c2063e"}, + {file = "psycopg2_binary-2.9.10-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:6dc08420625b5a20b53551c50deae6e231e6371194fa0651dbe0fb206452ae1f"}, + {file = "psycopg2_binary-2.9.10-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:d7cd730dfa7c36dbe8724426bf5612798734bff2d3c3857f36f2733f5bfc7c00"}, + {file = "psycopg2_binary-2.9.10-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:155e69561d54d02b3c3209545fb08938e27889ff5a10c19de8d23eb5a41be8a5"}, + {file = "psycopg2_binary-2.9.10-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:c3cc28a6fd5a4a26224007712e79b81dbaee2ffb90ff406256158ec4d7b52b47"}, + {file = "psycopg2_binary-2.9.10-cp312-cp312-win32.whl", hash = "sha256:ec8a77f521a17506a24a5f626cb2aee7850f9b69a0afe704586f63a464f3cd64"}, + {file = "psycopg2_binary-2.9.10-cp312-cp312-win_amd64.whl", hash = "sha256:18c5ee682b9c6dd3696dad6e54cc7ff3a1a9020df6a5c0f861ef8bfd338c3ca0"}, + {file = "psycopg2_binary-2.9.10-cp313-cp313-macosx_12_0_x86_64.whl", hash = "sha256:26540d4a9a4e2b096f1ff9cce51253d0504dca5a85872c7f7be23be5a53eb18d"}, + {file = "psycopg2_binary-2.9.10-cp313-cp313-macosx_14_0_arm64.whl", hash = "sha256:e217ce4d37667df0bc1c397fdcd8de5e81018ef305aed9415c3b093faaeb10fb"}, + {file = "psycopg2_binary-2.9.10-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:245159e7ab20a71d989da00f280ca57da7641fa2cdcf71749c193cea540a74f7"}, + {file = "psycopg2_binary-2.9.10-cp313-cp313-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3c4ded1a24b20021ebe677b7b08ad10bf09aac197d6943bfe6fec70ac4e4690d"}, + {file = "psycopg2_binary-2.9.10-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3abb691ff9e57d4a93355f60d4f4c1dd2d68326c968e7db17ea96df3c023ef73"}, + {file = 
"psycopg2_binary-2.9.10-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8608c078134f0b3cbd9f89b34bd60a943b23fd33cc5f065e8d5f840061bd0673"}, + {file = "psycopg2_binary-2.9.10-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:230eeae2d71594103cd5b93fd29d1ace6420d0b86f4778739cb1a5a32f607d1f"}, + {file = "psycopg2_binary-2.9.10-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:bb89f0a835bcfc1d42ccd5f41f04870c1b936d8507c6df12b7737febc40f0909"}, + {file = "psycopg2_binary-2.9.10-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:f0c2d907a1e102526dd2986df638343388b94c33860ff3bbe1384130828714b1"}, + {file = "psycopg2_binary-2.9.10-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:f8157bed2f51db683f31306aa497311b560f2265998122abe1dce6428bd86567"}, + {file = "psycopg2_binary-2.9.10-cp38-cp38-macosx_12_0_x86_64.whl", hash = "sha256:eb09aa7f9cecb45027683bb55aebaaf45a0df8bf6de68801a6afdc7947bb09d4"}, + {file = "psycopg2_binary-2.9.10-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b73d6d7f0ccdad7bc43e6d34273f70d587ef62f824d7261c4ae9b8b1b6af90e8"}, + {file = "psycopg2_binary-2.9.10-cp38-cp38-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ce5ab4bf46a211a8e924d307c1b1fcda82368586a19d0a24f8ae166f5c784864"}, + {file = "psycopg2_binary-2.9.10-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:056470c3dc57904bbf63d6f534988bafc4e970ffd50f6271fc4ee7daad9498a5"}, + {file = "psycopg2_binary-2.9.10-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:73aa0e31fa4bb82578f3a6c74a73c273367727de397a7a0f07bd83cbea696baa"}, + {file = "psycopg2_binary-2.9.10-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:8de718c0e1c4b982a54b41779667242bc630b2197948405b7bd8ce16bcecac92"}, + {file = "psycopg2_binary-2.9.10-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:5c370b1e4975df846b0277b4deba86419ca77dbc25047f535b0bb03d1a544d44"}, + {file = "psycopg2_binary-2.9.10-cp38-cp38-musllinux_1_2_ppc64le.whl", hash = "sha256:ffe8ed017e4ed70f68b7b371d84b7d4a790368db9203dfc2d222febd3a9c8863"}, + {file = "psycopg2_binary-2.9.10-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:8aecc5e80c63f7459a1a2ab2c64df952051df196294d9f739933a9f6687e86b3"}, + {file = "psycopg2_binary-2.9.10-cp39-cp39-macosx_12_0_x86_64.whl", hash = "sha256:7a813c8bdbaaaab1f078014b9b0b13f5de757e2b5d9be6403639b298a04d218b"}, + {file = "psycopg2_binary-2.9.10-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d00924255d7fc916ef66e4bf22f354a940c67179ad3fd7067d7a0a9c84d2fbfc"}, + {file = "psycopg2_binary-2.9.10-cp39-cp39-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7559bce4b505762d737172556a4e6ea8a9998ecac1e39b5233465093e8cee697"}, + {file = "psycopg2_binary-2.9.10-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e8b58f0a96e7a1e341fc894f62c1177a7c83febebb5ff9123b579418fdc8a481"}, + {file = "psycopg2_binary-2.9.10-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6b269105e59ac96aba877c1707c600ae55711d9dcd3fc4b5012e4af68e30c648"}, + {file = "psycopg2_binary-2.9.10-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:79625966e176dc97ddabc142351e0409e28acf4660b88d1cf6adb876d20c490d"}, + {file = "psycopg2_binary-2.9.10-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:8aabf1c1a04584c168984ac678a668094d831f152859d06e055288fa515e4d30"}, + {file = "psycopg2_binary-2.9.10-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = 
"sha256:19721ac03892001ee8fdd11507e6a2e01f4e37014def96379411ca99d78aeb2c"}, + {file = "psycopg2_binary-2.9.10-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:7f5d859928e635fa3ce3477704acee0f667b3a3d3e4bb109f2b18d4005f38287"}, + {file = "psycopg2_binary-2.9.10-cp39-cp39-win32.whl", hash = "sha256:3216ccf953b3f267691c90c6fe742e45d890d8272326b4a8b20850a03d05b7b8"}, + {file = "psycopg2_binary-2.9.10-cp39-cp39-win_amd64.whl", hash = "sha256:30e34c4e97964805f715206c7b789d54a78b70f3ff19fbe590104b71c45600e5"}, ] [[package]] @@ -2452,6 +2906,7 @@ version = "0.5.1" description = "Pure-Python implementation of ASN.1 types and DER/BER/CER codecs (X.208)" optional = false python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,>=2.7" +groups = ["main"] files = [ {file = "pyasn1-0.5.1-py2.py3-none-any.whl", hash = "sha256:4439847c58d40b1d0a573d07e3856e95333f1976294494c325775aeca506eb58"}, {file = "pyasn1-0.5.1.tar.gz", hash = "sha256:6d391a96e59b23130a5cfa74d6fd7f388dbbe26cc8f1edf39fdddf08d9d6676c"}, @@ -2463,6 +2918,7 @@ version = "0.3.0" description = "A collection of ASN.1-based protocols modules" optional = false python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,>=2.7" +groups = ["main"] files = [ {file = "pyasn1_modules-0.3.0-py2.py3-none-any.whl", hash = "sha256:d3ccd6ed470d9ffbc716be08bd90efbd44d0734bc9303818f7336070984a162d"}, {file = "pyasn1_modules-0.3.0.tar.gz", hash = "sha256:5bd01446b736eb9d31512a30d46c1ac3395d676c6f3cafa4c03eb54b9925631c"}, @@ -2477,6 +2933,7 @@ version = "2.22" description = "C parser in Python" optional = false python-versions = ">=3.8" +groups = ["main"] files = [ {file = "pycparser-2.22-py3-none-any.whl", hash = "sha256:c3702b6d3dd8c7abc1afa565d7e63d53a1d0bd86cdc24edd75470f4de499cfcc"}, {file = "pycparser-2.22.tar.gz", hash = "sha256:491c8be9c040f5390f5bf44a5b07752bd07f56edf992381b05c701439eec10f6"}, @@ -2488,6 +2945,8 @@ version = "3.21.0" description = "Cryptographic library for Python" optional = false python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,>=2.7" +groups = ["main"] +markers = "extra == \"maco\"" files = [ {file = "pycryptodome-3.21.0-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:dad9bf36eda068e89059d1f07408e397856be9511d7113ea4b586642a429a4fd"}, {file = "pycryptodome-3.21.0-cp27-cp27m-manylinux2010_i686.whl", hash = "sha256:a1752eca64c60852f38bb29e2c86fca30d7672c024128ef5d70cc15868fa10f4"}, @@ -2525,63 +2984,62 @@ files = [ [[package]] name = "pycryptodomex" -version = "3.20.0" +version = "3.21.0" description = "Cryptographic library for Python" optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" -files = [ - {file = "pycryptodomex-3.20.0-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:645bd4ca6f543685d643dadf6a856cc382b654cc923460e3a10a49c1b3832aeb"}, - {file = "pycryptodomex-3.20.0-cp27-cp27m-manylinux2010_i686.whl", hash = "sha256:ff5c9a67f8a4fba4aed887216e32cbc48f2a6fb2673bb10a99e43be463e15913"}, - {file = "pycryptodomex-3.20.0-cp27-cp27m-manylinux2010_x86_64.whl", hash = "sha256:8ee606964553c1a0bc74057dd8782a37d1c2bc0f01b83193b6f8bb14523b877b"}, - {file = "pycryptodomex-3.20.0-cp27-cp27m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7805830e0c56d88f4d491fa5ac640dfc894c5ec570d1ece6ed1546e9df2e98d6"}, - {file = "pycryptodomex-3.20.0-cp27-cp27m-musllinux_1_1_aarch64.whl", hash = "sha256:bc3ee1b4d97081260d92ae813a83de4d2653206967c4a0a017580f8b9548ddbc"}, - {file = "pycryptodomex-3.20.0-cp27-cp27m-win32.whl", hash = 
"sha256:8af1a451ff9e123d0d8bd5d5e60f8e3315c3a64f3cdd6bc853e26090e195cdc8"}, - {file = "pycryptodomex-3.20.0-cp27-cp27m-win_amd64.whl", hash = "sha256:cbe71b6712429650e3883dc81286edb94c328ffcd24849accac0a4dbcc76958a"}, - {file = "pycryptodomex-3.20.0-cp27-cp27mu-manylinux2010_i686.whl", hash = "sha256:76bd15bb65c14900d98835fcd10f59e5e0435077431d3a394b60b15864fddd64"}, - {file = "pycryptodomex-3.20.0-cp27-cp27mu-manylinux2010_x86_64.whl", hash = "sha256:653b29b0819605fe0898829c8ad6400a6ccde096146730c2da54eede9b7b8baa"}, - {file = "pycryptodomex-3.20.0-cp27-cp27mu-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:62a5ec91388984909bb5398ea49ee61b68ecb579123694bffa172c3b0a107079"}, - {file = "pycryptodomex-3.20.0-cp27-cp27mu-musllinux_1_1_aarch64.whl", hash = "sha256:108e5f1c1cd70ffce0b68739c75734437c919d2eaec8e85bffc2c8b4d2794305"}, - {file = "pycryptodomex-3.20.0-cp35-abi3-macosx_10_9_universal2.whl", hash = "sha256:59af01efb011b0e8b686ba7758d59cf4a8263f9ad35911bfe3f416cee4f5c08c"}, - {file = "pycryptodomex-3.20.0-cp35-abi3-macosx_10_9_x86_64.whl", hash = "sha256:82ee7696ed8eb9a82c7037f32ba9b7c59e51dda6f105b39f043b6ef293989cb3"}, - {file = "pycryptodomex-3.20.0-cp35-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:91852d4480a4537d169c29a9d104dda44094c78f1f5b67bca76c29a91042b623"}, - {file = "pycryptodomex-3.20.0-cp35-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bca649483d5ed251d06daf25957f802e44e6bb6df2e8f218ae71968ff8f8edc4"}, - {file = "pycryptodomex-3.20.0-cp35-abi3-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6e186342cfcc3aafaad565cbd496060e5a614b441cacc3995ef0091115c1f6c5"}, - {file = "pycryptodomex-3.20.0-cp35-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:25cd61e846aaab76d5791d006497134602a9e451e954833018161befc3b5b9ed"}, - {file = "pycryptodomex-3.20.0-cp35-abi3-musllinux_1_1_i686.whl", hash = "sha256:9c682436c359b5ada67e882fec34689726a09c461efd75b6ea77b2403d5665b7"}, - {file = "pycryptodomex-3.20.0-cp35-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:7a7a8f33a1f1fb762ede6cc9cbab8f2a9ba13b196bfaf7bc6f0b39d2ba315a43"}, - {file = "pycryptodomex-3.20.0-cp35-abi3-win32.whl", hash = "sha256:c39778fd0548d78917b61f03c1fa8bfda6cfcf98c767decf360945fe6f97461e"}, - {file = "pycryptodomex-3.20.0-cp35-abi3-win_amd64.whl", hash = "sha256:2a47bcc478741b71273b917232f521fd5704ab4b25d301669879e7273d3586cc"}, - {file = "pycryptodomex-3.20.0-pp27-pypy_73-manylinux2010_x86_64.whl", hash = "sha256:1be97461c439a6af4fe1cf8bf6ca5936d3db252737d2f379cc6b2e394e12a458"}, - {file = "pycryptodomex-3.20.0-pp27-pypy_73-win32.whl", hash = "sha256:19764605feea0df966445d46533729b645033f134baeb3ea26ad518c9fdf212c"}, - {file = "pycryptodomex-3.20.0-pp310-pypy310_pp73-macosx_10_9_x86_64.whl", hash = "sha256:f2e497413560e03421484189a6b65e33fe800d3bd75590e6d78d4dfdb7accf3b"}, - {file = "pycryptodomex-3.20.0-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e48217c7901edd95f9f097feaa0388da215ed14ce2ece803d3f300b4e694abea"}, - {file = "pycryptodomex-3.20.0-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d00fe8596e1cc46b44bf3907354e9377aa030ec4cd04afbbf6e899fc1e2a7781"}, - {file = "pycryptodomex-3.20.0-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:88afd7a3af7ddddd42c2deda43d53d3dfc016c11327d0915f90ca34ebda91499"}, - {file = "pycryptodomex-3.20.0-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = 
"sha256:d3584623e68a5064a04748fb6d76117a21a7cb5eaba20608a41c7d0c61721794"}, - {file = "pycryptodomex-3.20.0-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0daad007b685db36d977f9de73f61f8da2a7104e20aca3effd30752fd56f73e1"}, - {file = "pycryptodomex-3.20.0-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5dcac11031a71348faaed1f403a0debd56bf5404232284cf8c761ff918886ebc"}, - {file = "pycryptodomex-3.20.0-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:69138068268127cd605e03438312d8f271135a33140e2742b417d027a0539427"}, - {file = "pycryptodomex-3.20.0.tar.gz", hash = "sha256:7a710b79baddd65b806402e14766c721aee8fb83381769c27920f26476276c1e"}, +python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,>=2.7" +groups = ["main"] +files = [ + {file = "pycryptodomex-3.21.0-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:dbeb84a399373df84a69e0919c1d733b89e049752426041deeb30d68e9867822"}, + {file = "pycryptodomex-3.21.0-cp27-cp27m-manylinux2010_i686.whl", hash = "sha256:a192fb46c95489beba9c3f002ed7d93979423d1b2a53eab8771dbb1339eb3ddd"}, + {file = "pycryptodomex-3.21.0-cp27-cp27m-manylinux2010_x86_64.whl", hash = "sha256:1233443f19d278c72c4daae749872a4af3787a813e05c3561c73ab0c153c7b0f"}, + {file = "pycryptodomex-3.21.0-cp27-cp27m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bbb07f88e277162b8bfca7134b34f18b400d84eac7375ce73117f865e3c80d4c"}, + {file = "pycryptodomex-3.21.0-cp27-cp27m-musllinux_1_1_aarch64.whl", hash = "sha256:e859e53d983b7fe18cb8f1b0e29d991a5c93be2c8dd25db7db1fe3bd3617f6f9"}, + {file = "pycryptodomex-3.21.0-cp27-cp27m-win32.whl", hash = "sha256:ef046b2e6c425647971b51424f0f88d8a2e0a2a63d3531817968c42078895c00"}, + {file = "pycryptodomex-3.21.0-cp27-cp27m-win_amd64.whl", hash = "sha256:da76ebf6650323eae7236b54b1b1f0e57c16483be6e3c1ebf901d4ada47563b6"}, + {file = "pycryptodomex-3.21.0-cp27-cp27mu-manylinux2010_i686.whl", hash = "sha256:c07e64867a54f7e93186a55bec08a18b7302e7bee1b02fd84c6089ec215e723a"}, + {file = "pycryptodomex-3.21.0-cp27-cp27mu-manylinux2010_x86_64.whl", hash = "sha256:56435c7124dd0ce0c8bdd99c52e5d183a0ca7fdcd06c5d5509423843f487dd0b"}, + {file = "pycryptodomex-3.21.0-cp27-cp27mu-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:65d275e3f866cf6fe891411be9c1454fb58809ccc5de6d3770654c47197acd65"}, + {file = "pycryptodomex-3.21.0-cp27-cp27mu-musllinux_1_1_aarch64.whl", hash = "sha256:5241bdb53bcf32a9568770a6584774b1b8109342bd033398e4ff2da052123832"}, + {file = "pycryptodomex-3.21.0-cp36-abi3-macosx_10_9_universal2.whl", hash = "sha256:34325b84c8b380675fd2320d0649cdcbc9cf1e0d1526edbe8fce43ed858cdc7e"}, + {file = "pycryptodomex-3.21.0-cp36-abi3-macosx_10_9_x86_64.whl", hash = "sha256:103c133d6cd832ae7266feb0a65b69e3a5e4dbbd6f3a3ae3211a557fd653f516"}, + {file = "pycryptodomex-3.21.0-cp36-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:77ac2ea80bcb4b4e1c6a596734c775a1615d23e31794967416afc14852a639d3"}, + {file = "pycryptodomex-3.21.0-cp36-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9aa0cf13a1a1128b3e964dc667e5fe5c6235f7d7cfb0277213f0e2a783837cc2"}, + {file = "pycryptodomex-3.21.0-cp36-abi3-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:46eb1f0c8d309da63a2064c28de54e5e614ad17b7e2f88df0faef58ce192fc7b"}, + {file = "pycryptodomex-3.21.0-cp36-abi3-musllinux_1_1_aarch64.whl", hash = 
"sha256:cc7e111e66c274b0df5f4efa679eb31e23c7545d702333dfd2df10ab02c2a2ce"}, + {file = "pycryptodomex-3.21.0-cp36-abi3-musllinux_1_2_i686.whl", hash = "sha256:770d630a5c46605ec83393feaa73a9635a60e55b112e1fb0c3cea84c2897aa0a"}, + {file = "pycryptodomex-3.21.0-cp36-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:52e23a0a6e61691134aa8c8beba89de420602541afaae70f66e16060fdcd677e"}, + {file = "pycryptodomex-3.21.0-cp36-abi3-win32.whl", hash = "sha256:a3d77919e6ff56d89aada1bd009b727b874d464cb0e2e3f00a49f7d2e709d76e"}, + {file = "pycryptodomex-3.21.0-cp36-abi3-win_amd64.whl", hash = "sha256:b0e9765f93fe4890f39875e6c90c96cb341767833cfa767f41b490b506fa9ec0"}, + {file = "pycryptodomex-3.21.0-pp27-pypy_73-manylinux2010_x86_64.whl", hash = "sha256:feaecdce4e5c0045e7a287de0c4351284391fe170729aa9182f6bd967631b3a8"}, + {file = "pycryptodomex-3.21.0-pp27-pypy_73-win32.whl", hash = "sha256:365aa5a66d52fd1f9e0530ea97f392c48c409c2f01ff8b9a39c73ed6f527d36c"}, + {file = "pycryptodomex-3.21.0-pp310-pypy310_pp73-macosx_10_15_x86_64.whl", hash = "sha256:3efddfc50ac0ca143364042324046800c126a1d63816d532f2e19e6f2d8c0c31"}, + {file = "pycryptodomex-3.21.0-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0df2608682db8279a9ebbaf05a72f62a321433522ed0e499bc486a6889b96bf3"}, + {file = "pycryptodomex-3.21.0-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5823d03e904ea3e53aebd6799d6b8ec63b7675b5d2f4a4bd5e3adcb512d03b37"}, + {file = "pycryptodomex-3.21.0-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:27e84eeff24250ffec32722334749ac2a57a5fd60332cd6a0680090e7c42877e"}, + {file = "pycryptodomex-3.21.0-pp39-pypy39_pp73-macosx_10_15_x86_64.whl", hash = "sha256:8ef436cdeea794015263853311f84c1ff0341b98fc7908e8a70595a68cefd971"}, + {file = "pycryptodomex-3.21.0-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7a1058e6dfe827f4209c5cae466e67610bcd0d66f2f037465daa2a29d92d952b"}, + {file = "pycryptodomex-3.21.0-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9ba09a5b407cbb3bcb325221e346a140605714b5e880741dc9a1e9ecf1688d42"}, + {file = "pycryptodomex-3.21.0-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:8a9d8342cf22b74a746e3c6c9453cb0cfbb55943410e3a2619bd9164b48dc9d9"}, + {file = "pycryptodomex-3.21.0.tar.gz", hash = "sha256:222d0bd05381dd25c32dd6065c071ebf084212ab79bab4599ba9e6a3e0009e6c"}, ] [[package]] name = "pydantic" -version = "2.9.2" +version = "2.10.6" description = "Data validation using Python type hints" optional = false python-versions = ">=3.8" +groups = ["main"] files = [ - {file = "pydantic-2.9.2-py3-none-any.whl", hash = "sha256:f048cec7b26778210e28a0459867920654d48e5e62db0958433636cde4254f12"}, - {file = "pydantic-2.9.2.tar.gz", hash = "sha256:d155cef71265d1e9807ed1c32b4c8deec042a44a50a4188b25ac67ecd81a9c0f"}, + {file = "pydantic-2.10.6-py3-none-any.whl", hash = "sha256:427d664bf0b8a2b34ff5dd0f5a18df00591adcee7198fbd71981054cef37b584"}, + {file = "pydantic-2.10.6.tar.gz", hash = "sha256:ca5daa827cce33de7a42be142548b0096bf05a7e7b365aebfa5f8eeec7128236"}, ] [package.dependencies] annotated-types = ">=0.6.0" -pydantic-core = "2.23.4" -typing-extensions = [ - {version = ">=4.6.1", markers = "python_version < \"3.13\""}, - {version = ">=4.12.2", markers = "python_version >= \"3.13\""}, -] +pydantic-core = "2.27.2" +typing-extensions = ">=4.12.2" [package.extras] email = ["email-validator (>=2.0.0)"] @@ -2589,100 +3047,112 @@ 
timezone = ["tzdata"] [[package]] name = "pydantic-core" -version = "2.23.4" +version = "2.27.2" description = "Core functionality for Pydantic validation and serialization" optional = false python-versions = ">=3.8" -files = [ - {file = "pydantic_core-2.23.4-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:b10bd51f823d891193d4717448fab065733958bdb6a6b351967bd349d48d5c9b"}, - {file = "pydantic_core-2.23.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:4fc714bdbfb534f94034efaa6eadd74e5b93c8fa6315565a222f7b6f42ca1166"}, - {file = "pydantic_core-2.23.4-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:63e46b3169866bd62849936de036f901a9356e36376079b05efa83caeaa02ceb"}, - {file = "pydantic_core-2.23.4-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:ed1a53de42fbe34853ba90513cea21673481cd81ed1be739f7f2efb931b24916"}, - {file = "pydantic_core-2.23.4-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:cfdd16ab5e59fc31b5e906d1a3f666571abc367598e3e02c83403acabc092e07"}, - {file = "pydantic_core-2.23.4-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:255a8ef062cbf6674450e668482456abac99a5583bbafb73f9ad469540a3a232"}, - {file = "pydantic_core-2.23.4-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4a7cd62e831afe623fbb7aabbb4fe583212115b3ef38a9f6b71869ba644624a2"}, - {file = "pydantic_core-2.23.4-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:f09e2ff1f17c2b51f2bc76d1cc33da96298f0a036a137f5440ab3ec5360b624f"}, - {file = "pydantic_core-2.23.4-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:e38e63e6f3d1cec5a27e0afe90a085af8b6806ee208b33030e65b6516353f1a3"}, - {file = "pydantic_core-2.23.4-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:0dbd8dbed2085ed23b5c04afa29d8fd2771674223135dc9bc937f3c09284d071"}, - {file = "pydantic_core-2.23.4-cp310-none-win32.whl", hash = "sha256:6531b7ca5f951d663c339002e91aaebda765ec7d61b7d1e3991051906ddde119"}, - {file = "pydantic_core-2.23.4-cp310-none-win_amd64.whl", hash = "sha256:7c9129eb40958b3d4500fa2467e6a83356b3b61bfff1b414c7361d9220f9ae8f"}, - {file = "pydantic_core-2.23.4-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:77733e3892bb0a7fa797826361ce8a9184d25c8dffaec60b7ffe928153680ba8"}, - {file = "pydantic_core-2.23.4-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:1b84d168f6c48fabd1f2027a3d1bdfe62f92cade1fb273a5d68e621da0e44e6d"}, - {file = "pydantic_core-2.23.4-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:df49e7a0861a8c36d089c1ed57d308623d60416dab2647a4a17fe050ba85de0e"}, - {file = "pydantic_core-2.23.4-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:ff02b6d461a6de369f07ec15e465a88895f3223eb75073ffea56b84d9331f607"}, - {file = "pydantic_core-2.23.4-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:996a38a83508c54c78a5f41456b0103c30508fed9abcad0a59b876d7398f25fd"}, - {file = "pydantic_core-2.23.4-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d97683ddee4723ae8c95d1eddac7c192e8c552da0c73a925a89fa8649bf13eea"}, - {file = "pydantic_core-2.23.4-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:216f9b2d7713eb98cb83c80b9c794de1f6b7e3145eef40400c62e86cee5f4e1e"}, - {file = "pydantic_core-2.23.4-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:6f783e0ec4803c787bcea93e13e9932edab72068f68ecffdf86a99fd5918878b"}, - {file = 
"pydantic_core-2.23.4-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:d0776dea117cf5272382634bd2a5c1b6eb16767c223c6a5317cd3e2a757c61a0"}, - {file = "pydantic_core-2.23.4-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:d5f7a395a8cf1621939692dba2a6b6a830efa6b3cee787d82c7de1ad2930de64"}, - {file = "pydantic_core-2.23.4-cp311-none-win32.whl", hash = "sha256:74b9127ffea03643e998e0c5ad9bd3811d3dac8c676e47db17b0ee7c3c3bf35f"}, - {file = "pydantic_core-2.23.4-cp311-none-win_amd64.whl", hash = "sha256:98d134c954828488b153d88ba1f34e14259284f256180ce659e8d83e9c05eaa3"}, - {file = "pydantic_core-2.23.4-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:f3e0da4ebaef65158d4dfd7d3678aad692f7666877df0002b8a522cdf088f231"}, - {file = "pydantic_core-2.23.4-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:f69a8e0b033b747bb3e36a44e7732f0c99f7edd5cea723d45bc0d6e95377ffee"}, - {file = "pydantic_core-2.23.4-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:723314c1d51722ab28bfcd5240d858512ffd3116449c557a1336cbe3919beb87"}, - {file = "pydantic_core-2.23.4-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:bb2802e667b7051a1bebbfe93684841cc9351004e2badbd6411bf357ab8d5ac8"}, - {file = "pydantic_core-2.23.4-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d18ca8148bebe1b0a382a27a8ee60350091a6ddaf475fa05ef50dc35b5df6327"}, - {file = "pydantic_core-2.23.4-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:33e3d65a85a2a4a0dc3b092b938a4062b1a05f3a9abde65ea93b233bca0e03f2"}, - {file = "pydantic_core-2.23.4-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:128585782e5bfa515c590ccee4b727fb76925dd04a98864182b22e89a4e6ed36"}, - {file = "pydantic_core-2.23.4-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:68665f4c17edcceecc112dfed5dbe6f92261fb9d6054b47d01bf6371a6196126"}, - {file = "pydantic_core-2.23.4-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:20152074317d9bed6b7a95ade3b7d6054845d70584216160860425f4fbd5ee9e"}, - {file = "pydantic_core-2.23.4-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:9261d3ce84fa1d38ed649c3638feefeae23d32ba9182963e465d58d62203bd24"}, - {file = "pydantic_core-2.23.4-cp312-none-win32.whl", hash = "sha256:4ba762ed58e8d68657fc1281e9bb72e1c3e79cc5d464be146e260c541ec12d84"}, - {file = "pydantic_core-2.23.4-cp312-none-win_amd64.whl", hash = "sha256:97df63000f4fea395b2824da80e169731088656d1818a11b95f3b173747b6cd9"}, - {file = "pydantic_core-2.23.4-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:7530e201d10d7d14abce4fb54cfe5b94a0aefc87da539d0346a484ead376c3cc"}, - {file = "pydantic_core-2.23.4-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:df933278128ea1cd77772673c73954e53a1c95a4fdf41eef97c2b779271bd0bd"}, - {file = "pydantic_core-2.23.4-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0cb3da3fd1b6a5d0279a01877713dbda118a2a4fc6f0d821a57da2e464793f05"}, - {file = "pydantic_core-2.23.4-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:42c6dcb030aefb668a2b7009c85b27f90e51e6a3b4d5c9bc4c57631292015b0d"}, - {file = "pydantic_core-2.23.4-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:696dd8d674d6ce621ab9d45b205df149399e4bb9aa34102c970b721554828510"}, - {file = "pydantic_core-2.23.4-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2971bb5ffe72cc0f555c13e19b23c85b654dd2a8f7ab493c262071377bfce9f6"}, - {file = 
"pydantic_core-2.23.4-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8394d940e5d400d04cad4f75c0598665cbb81aecefaca82ca85bd28264af7f9b"}, - {file = "pydantic_core-2.23.4-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:0dff76e0602ca7d4cdaacc1ac4c005e0ce0dcfe095d5b5259163a80d3a10d327"}, - {file = "pydantic_core-2.23.4-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:7d32706badfe136888bdea71c0def994644e09fff0bfe47441deaed8e96fdbc6"}, - {file = "pydantic_core-2.23.4-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:ed541d70698978a20eb63d8c5d72f2cc6d7079d9d90f6b50bad07826f1320f5f"}, - {file = "pydantic_core-2.23.4-cp313-none-win32.whl", hash = "sha256:3d5639516376dce1940ea36edf408c554475369f5da2abd45d44621cb616f769"}, - {file = "pydantic_core-2.23.4-cp313-none-win_amd64.whl", hash = "sha256:5a1504ad17ba4210df3a045132a7baeeba5a200e930f57512ee02909fc5c4cb5"}, - {file = "pydantic_core-2.23.4-cp38-cp38-macosx_10_12_x86_64.whl", hash = "sha256:d4488a93b071c04dc20f5cecc3631fc78b9789dd72483ba15d423b5b3689b555"}, - {file = "pydantic_core-2.23.4-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:81965a16b675b35e1d09dd14df53f190f9129c0202356ed44ab2728b1c905658"}, - {file = "pydantic_core-2.23.4-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4ffa2ebd4c8530079140dd2d7f794a9d9a73cbb8e9d59ffe24c63436efa8f271"}, - {file = "pydantic_core-2.23.4-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:61817945f2fe7d166e75fbfb28004034b48e44878177fc54d81688e7b85a3665"}, - {file = "pydantic_core-2.23.4-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:29d2c342c4bc01b88402d60189f3df065fb0dda3654744d5a165a5288a657368"}, - {file = "pydantic_core-2.23.4-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5e11661ce0fd30a6790e8bcdf263b9ec5988e95e63cf901972107efc49218b13"}, - {file = "pydantic_core-2.23.4-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9d18368b137c6295db49ce7218b1a9ba15c5bc254c96d7c9f9e924a9bc7825ad"}, - {file = "pydantic_core-2.23.4-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:ec4e55f79b1c4ffb2eecd8a0cfba9955a2588497d96851f4c8f99aa4a1d39b12"}, - {file = "pydantic_core-2.23.4-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:374a5e5049eda9e0a44c696c7ade3ff355f06b1fe0bb945ea3cac2bc336478a2"}, - {file = "pydantic_core-2.23.4-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:5c364564d17da23db1106787675fc7af45f2f7b58b4173bfdd105564e132e6fb"}, - {file = "pydantic_core-2.23.4-cp38-none-win32.whl", hash = "sha256:d7a80d21d613eec45e3d41eb22f8f94ddc758a6c4720842dc74c0581f54993d6"}, - {file = "pydantic_core-2.23.4-cp38-none-win_amd64.whl", hash = "sha256:5f5ff8d839f4566a474a969508fe1c5e59c31c80d9e140566f9a37bba7b8d556"}, - {file = "pydantic_core-2.23.4-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:a4fa4fc04dff799089689f4fd502ce7d59de529fc2f40a2c8836886c03e0175a"}, - {file = "pydantic_core-2.23.4-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:0a7df63886be5e270da67e0966cf4afbae86069501d35c8c1b3b6c168f42cb36"}, - {file = "pydantic_core-2.23.4-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dcedcd19a557e182628afa1d553c3895a9f825b936415d0dbd3cd0bbcfd29b4b"}, - {file = "pydantic_core-2.23.4-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:5f54b118ce5de9ac21c363d9b3caa6c800341e8c47a508787e5868c6b79c9323"}, - {file = 
"pydantic_core-2.23.4-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:86d2f57d3e1379a9525c5ab067b27dbb8a0642fb5d454e17a9ac434f9ce523e3"}, - {file = "pydantic_core-2.23.4-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:de6d1d1b9e5101508cb37ab0d972357cac5235f5c6533d1071964c47139257df"}, - {file = "pydantic_core-2.23.4-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1278e0d324f6908e872730c9102b0112477a7f7cf88b308e4fc36ce1bdb6d58c"}, - {file = "pydantic_core-2.23.4-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:9a6b5099eeec78827553827f4c6b8615978bb4b6a88e5d9b93eddf8bb6790f55"}, - {file = "pydantic_core-2.23.4-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:e55541f756f9b3ee346b840103f32779c695a19826a4c442b7954550a0972040"}, - {file = "pydantic_core-2.23.4-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:a5c7ba8ffb6d6f8f2ab08743be203654bb1aaa8c9dcb09f82ddd34eadb695605"}, - {file = "pydantic_core-2.23.4-cp39-none-win32.whl", hash = "sha256:37b0fe330e4a58d3c58b24d91d1eb102aeec675a3db4c292ec3928ecd892a9a6"}, - {file = "pydantic_core-2.23.4-cp39-none-win_amd64.whl", hash = "sha256:1498bec4c05c9c787bde9125cfdcc63a41004ff167f495063191b863399b1a29"}, - {file = "pydantic_core-2.23.4-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:f455ee30a9d61d3e1a15abd5068827773d6e4dc513e795f380cdd59932c782d5"}, - {file = "pydantic_core-2.23.4-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:1e90d2e3bd2c3863d48525d297cd143fe541be8bbf6f579504b9712cb6b643ec"}, - {file = "pydantic_core-2.23.4-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2e203fdf807ac7e12ab59ca2bfcabb38c7cf0b33c41efeb00f8e5da1d86af480"}, - {file = "pydantic_core-2.23.4-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e08277a400de01bc72436a0ccd02bdf596631411f592ad985dcee21445bd0068"}, - {file = "pydantic_core-2.23.4-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:f220b0eea5965dec25480b6333c788fb72ce5f9129e8759ef876a1d805d00801"}, - {file = "pydantic_core-2.23.4-pp310-pypy310_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:d06b0c8da4f16d1d1e352134427cb194a0a6e19ad5db9161bf32b2113409e728"}, - {file = "pydantic_core-2.23.4-pp310-pypy310_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:ba1a0996f6c2773bd83e63f18914c1de3c9dd26d55f4ac302a7efe93fb8e7433"}, - {file = "pydantic_core-2.23.4-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:9a5bce9d23aac8f0cf0836ecfc033896aa8443b501c58d0602dbfd5bd5b37753"}, - {file = "pydantic_core-2.23.4-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:78ddaaa81421a29574a682b3179d4cf9e6d405a09b99d93ddcf7e5239c742e21"}, - {file = "pydantic_core-2.23.4-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:883a91b5dd7d26492ff2f04f40fbb652de40fcc0afe07e8129e8ae779c2110eb"}, - {file = "pydantic_core-2.23.4-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:88ad334a15b32a791ea935af224b9de1bf99bcd62fabf745d5f3442199d86d59"}, - {file = "pydantic_core-2.23.4-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:233710f069d251feb12a56da21e14cca67994eab08362207785cf8c598e74577"}, - {file = "pydantic_core-2.23.4-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:19442362866a753485ba5e4be408964644dd6a09123d9416c54cd49171f50744"}, - {file = "pydantic_core-2.23.4-pp39-pypy39_pp73-musllinux_1_1_aarch64.whl", hash = 
"sha256:624e278a7d29b6445e4e813af92af37820fafb6dcc55c012c834f9e26f9aaaef"}, - {file = "pydantic_core-2.23.4-pp39-pypy39_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:f5ef8f42bec47f21d07668a043f077d507e5bf4e668d5c6dfe6aaba89de1a5b8"}, - {file = "pydantic_core-2.23.4-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:aea443fffa9fbe3af1a9ba721a87f926fe548d32cab71d188a6ede77d0ff244e"}, - {file = "pydantic_core-2.23.4.tar.gz", hash = "sha256:2584f7cf844ac4d970fba483a717dbe10c1c1c96a969bf65d61ffe94df1b2863"}, +groups = ["main"] +files = [ + {file = "pydantic_core-2.27.2-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:2d367ca20b2f14095a8f4fa1210f5a7b78b8a20009ecced6b12818f455b1e9fa"}, + {file = "pydantic_core-2.27.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:491a2b73db93fab69731eaee494f320faa4e093dbed776be1a829c2eb222c34c"}, + {file = "pydantic_core-2.27.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7969e133a6f183be60e9f6f56bfae753585680f3b7307a8e555a948d443cc05a"}, + {file = "pydantic_core-2.27.2-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:3de9961f2a346257caf0aa508a4da705467f53778e9ef6fe744c038119737ef5"}, + {file = "pydantic_core-2.27.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e2bb4d3e5873c37bb3dd58714d4cd0b0e6238cebc4177ac8fe878f8b3aa8e74c"}, + {file = "pydantic_core-2.27.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:280d219beebb0752699480fe8f1dc61ab6615c2046d76b7ab7ee38858de0a4e7"}, + {file = "pydantic_core-2.27.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:47956ae78b6422cbd46f772f1746799cbb862de838fd8d1fbd34a82e05b0983a"}, + {file = "pydantic_core-2.27.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:14d4a5c49d2f009d62a2a7140d3064f686d17a5d1a268bc641954ba181880236"}, + {file = "pydantic_core-2.27.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:337b443af21d488716f8d0b6164de833e788aa6bd7e3a39c005febc1284f4962"}, + {file = "pydantic_core-2.27.2-cp310-cp310-musllinux_1_1_armv7l.whl", hash = "sha256:03d0f86ea3184a12f41a2d23f7ccb79cdb5a18e06993f8a45baa8dfec746f0e9"}, + {file = "pydantic_core-2.27.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:7041c36f5680c6e0f08d922aed302e98b3745d97fe1589db0a3eebf6624523af"}, + {file = "pydantic_core-2.27.2-cp310-cp310-win32.whl", hash = "sha256:50a68f3e3819077be2c98110c1f9dcb3817e93f267ba80a2c05bb4f8799e2ff4"}, + {file = "pydantic_core-2.27.2-cp310-cp310-win_amd64.whl", hash = "sha256:e0fd26b16394ead34a424eecf8a31a1f5137094cabe84a1bcb10fa6ba39d3d31"}, + {file = "pydantic_core-2.27.2-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:8e10c99ef58cfdf2a66fc15d66b16c4a04f62bca39db589ae8cba08bc55331bc"}, + {file = "pydantic_core-2.27.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:26f32e0adf166a84d0cb63be85c562ca8a6fa8de28e5f0d92250c6b7e9e2aff7"}, + {file = "pydantic_core-2.27.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8c19d1ea0673cd13cc2f872f6c9ab42acc4e4f492a7ca9d3795ce2b112dd7e15"}, + {file = "pydantic_core-2.27.2-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:5e68c4446fe0810e959cdff46ab0a41ce2f2c86d227d96dc3847af0ba7def306"}, + {file = "pydantic_core-2.27.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d9640b0059ff4f14d1f37321b94061c6db164fbe49b334b31643e0528d100d99"}, + {file = 
"pydantic_core-2.27.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:40d02e7d45c9f8af700f3452f329ead92da4c5f4317ca9b896de7ce7199ea459"}, + {file = "pydantic_core-2.27.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1c1fd185014191700554795c99b347d64f2bb637966c4cfc16998a0ca700d048"}, + {file = "pydantic_core-2.27.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:d81d2068e1c1228a565af076598f9e7451712700b673de8f502f0334f281387d"}, + {file = "pydantic_core-2.27.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:1a4207639fb02ec2dbb76227d7c751a20b1a6b4bc52850568e52260cae64ca3b"}, + {file = "pydantic_core-2.27.2-cp311-cp311-musllinux_1_1_armv7l.whl", hash = "sha256:3de3ce3c9ddc8bbd88f6e0e304dea0e66d843ec9de1b0042b0911c1663ffd474"}, + {file = "pydantic_core-2.27.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:30c5f68ded0c36466acede341551106821043e9afaad516adfb6e8fa80a4e6a6"}, + {file = "pydantic_core-2.27.2-cp311-cp311-win32.whl", hash = "sha256:c70c26d2c99f78b125a3459f8afe1aed4d9687c24fd677c6a4436bc042e50d6c"}, + {file = "pydantic_core-2.27.2-cp311-cp311-win_amd64.whl", hash = "sha256:08e125dbdc505fa69ca7d9c499639ab6407cfa909214d500897d02afb816e7cc"}, + {file = "pydantic_core-2.27.2-cp311-cp311-win_arm64.whl", hash = "sha256:26f0d68d4b235a2bae0c3fc585c585b4ecc51382db0e3ba402a22cbc440915e4"}, + {file = "pydantic_core-2.27.2-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:9e0c8cfefa0ef83b4da9588448b6d8d2a2bf1a53c3f1ae5fca39eb3061e2f0b0"}, + {file = "pydantic_core-2.27.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:83097677b8e3bd7eaa6775720ec8e0405f1575015a463285a92bfdfe254529ef"}, + {file = "pydantic_core-2.27.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:172fce187655fece0c90d90a678424b013f8fbb0ca8b036ac266749c09438cb7"}, + {file = "pydantic_core-2.27.2-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:519f29f5213271eeeeb3093f662ba2fd512b91c5f188f3bb7b27bc5973816934"}, + {file = "pydantic_core-2.27.2-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:05e3a55d124407fffba0dd6b0c0cd056d10e983ceb4e5dbd10dda135c31071d6"}, + {file = "pydantic_core-2.27.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9c3ed807c7b91de05e63930188f19e921d1fe90de6b4f5cd43ee7fcc3525cb8c"}, + {file = "pydantic_core-2.27.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6fb4aadc0b9a0c063206846d603b92030eb6f03069151a625667f982887153e2"}, + {file = "pydantic_core-2.27.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:28ccb213807e037460326424ceb8b5245acb88f32f3d2777427476e1b32c48c4"}, + {file = "pydantic_core-2.27.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:de3cd1899e2c279b140adde9357c4495ed9d47131b4a4eaff9052f23398076b3"}, + {file = "pydantic_core-2.27.2-cp312-cp312-musllinux_1_1_armv7l.whl", hash = "sha256:220f892729375e2d736b97d0e51466252ad84c51857d4d15f5e9692f9ef12be4"}, + {file = "pydantic_core-2.27.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:a0fcd29cd6b4e74fe8ddd2c90330fd8edf2e30cb52acda47f06dd615ae72da57"}, + {file = "pydantic_core-2.27.2-cp312-cp312-win32.whl", hash = "sha256:1e2cb691ed9834cd6a8be61228471d0a503731abfb42f82458ff27be7b2186fc"}, + {file = "pydantic_core-2.27.2-cp312-cp312-win_amd64.whl", hash = "sha256:cc3f1a99a4f4f9dd1de4fe0312c114e740b5ddead65bb4102884b384c15d8bc9"}, + {file = "pydantic_core-2.27.2-cp312-cp312-win_arm64.whl", 
hash = "sha256:3911ac9284cd8a1792d3cb26a2da18f3ca26c6908cc434a18f730dc0db7bfa3b"}, + {file = "pydantic_core-2.27.2-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:7d14bd329640e63852364c306f4d23eb744e0f8193148d4044dd3dacdaacbd8b"}, + {file = "pydantic_core-2.27.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:82f91663004eb8ed30ff478d77c4d1179b3563df6cdb15c0817cd1cdaf34d154"}, + {file = "pydantic_core-2.27.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:71b24c7d61131bb83df10cc7e687433609963a944ccf45190cfc21e0887b08c9"}, + {file = "pydantic_core-2.27.2-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:fa8e459d4954f608fa26116118bb67f56b93b209c39b008277ace29937453dc9"}, + {file = "pydantic_core-2.27.2-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ce8918cbebc8da707ba805b7fd0b382816858728ae7fe19a942080c24e5b7cd1"}, + {file = "pydantic_core-2.27.2-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:eda3f5c2a021bbc5d976107bb302e0131351c2ba54343f8a496dc8783d3d3a6a"}, + {file = "pydantic_core-2.27.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bd8086fa684c4775c27f03f062cbb9eaa6e17f064307e86b21b9e0abc9c0f02e"}, + {file = "pydantic_core-2.27.2-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:8d9b3388db186ba0c099a6d20f0604a44eabdeef1777ddd94786cdae158729e4"}, + {file = "pydantic_core-2.27.2-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:7a66efda2387de898c8f38c0cf7f14fca0b51a8ef0b24bfea5849f1b3c95af27"}, + {file = "pydantic_core-2.27.2-cp313-cp313-musllinux_1_1_armv7l.whl", hash = "sha256:18a101c168e4e092ab40dbc2503bdc0f62010e95d292b27827871dc85450d7ee"}, + {file = "pydantic_core-2.27.2-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:ba5dd002f88b78a4215ed2f8ddbdf85e8513382820ba15ad5ad8955ce0ca19a1"}, + {file = "pydantic_core-2.27.2-cp313-cp313-win32.whl", hash = "sha256:1ebaf1d0481914d004a573394f4be3a7616334be70261007e47c2a6fe7e50130"}, + {file = "pydantic_core-2.27.2-cp313-cp313-win_amd64.whl", hash = "sha256:953101387ecf2f5652883208769a79e48db18c6df442568a0b5ccd8c2723abee"}, + {file = "pydantic_core-2.27.2-cp313-cp313-win_arm64.whl", hash = "sha256:ac4dbfd1691affb8f48c2c13241a2e3b60ff23247cbcf981759c768b6633cf8b"}, + {file = "pydantic_core-2.27.2-cp38-cp38-macosx_10_12_x86_64.whl", hash = "sha256:d3e8d504bdd3f10835468f29008d72fc8359d95c9c415ce6e767203db6127506"}, + {file = "pydantic_core-2.27.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:521eb9b7f036c9b6187f0b47318ab0d7ca14bd87f776240b90b21c1f4f149320"}, + {file = "pydantic_core-2.27.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:85210c4d99a0114f5a9481b44560d7d1e35e32cc5634c656bc48e590b669b145"}, + {file = "pydantic_core-2.27.2-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:d716e2e30c6f140d7560ef1538953a5cd1a87264c737643d481f2779fc247fe1"}, + {file = "pydantic_core-2.27.2-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f66d89ba397d92f840f8654756196d93804278457b5fbede59598a1f9f90b228"}, + {file = "pydantic_core-2.27.2-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:669e193c1c576a58f132e3158f9dfa9662969edb1a250c54d8fa52590045f046"}, + {file = "pydantic_core-2.27.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9fdbe7629b996647b99c01b37f11170a57ae675375b14b8c13b8518b8320ced5"}, + {file = 
"pydantic_core-2.27.2-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:d262606bf386a5ba0b0af3b97f37c83d7011439e3dc1a9298f21efb292e42f1a"}, + {file = "pydantic_core-2.27.2-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:cabb9bcb7e0d97f74df8646f34fc76fbf793b7f6dc2438517d7a9e50eee4f14d"}, + {file = "pydantic_core-2.27.2-cp38-cp38-musllinux_1_1_armv7l.whl", hash = "sha256:d2d63f1215638d28221f664596b1ccb3944f6e25dd18cd3b86b0a4c408d5ebb9"}, + {file = "pydantic_core-2.27.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:bca101c00bff0adb45a833f8451b9105d9df18accb8743b08107d7ada14bd7da"}, + {file = "pydantic_core-2.27.2-cp38-cp38-win32.whl", hash = "sha256:f6f8e111843bbb0dee4cb6594cdc73e79b3329b526037ec242a3e49012495b3b"}, + {file = "pydantic_core-2.27.2-cp38-cp38-win_amd64.whl", hash = "sha256:fd1aea04935a508f62e0d0ef1f5ae968774a32afc306fb8545e06f5ff5cdf3ad"}, + {file = "pydantic_core-2.27.2-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:c10eb4f1659290b523af58fa7cffb452a61ad6ae5613404519aee4bfbf1df993"}, + {file = "pydantic_core-2.27.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:ef592d4bad47296fb11f96cd7dc898b92e795032b4894dfb4076cfccd43a9308"}, + {file = "pydantic_core-2.27.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c61709a844acc6bf0b7dce7daae75195a10aac96a596ea1b776996414791ede4"}, + {file = "pydantic_core-2.27.2-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:42c5f762659e47fdb7b16956c71598292f60a03aa92f8b6351504359dbdba6cf"}, + {file = "pydantic_core-2.27.2-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4c9775e339e42e79ec99c441d9730fccf07414af63eac2f0e48e08fd38a64d76"}, + {file = "pydantic_core-2.27.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:57762139821c31847cfb2df63c12f725788bd9f04bc2fb392790959b8f70f118"}, + {file = "pydantic_core-2.27.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0d1e85068e818c73e048fe28cfc769040bb1f475524f4745a5dc621f75ac7630"}, + {file = "pydantic_core-2.27.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:097830ed52fd9e427942ff3b9bc17fab52913b2f50f2880dc4a5611446606a54"}, + {file = "pydantic_core-2.27.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:044a50963a614ecfae59bb1eaf7ea7efc4bc62f49ed594e18fa1e5d953c40e9f"}, + {file = "pydantic_core-2.27.2-cp39-cp39-musllinux_1_1_armv7l.whl", hash = "sha256:4e0b4220ba5b40d727c7f879eac379b822eee5d8fff418e9d3381ee45b3b0362"}, + {file = "pydantic_core-2.27.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:5e4f4bb20d75e9325cc9696c6802657b58bc1dbbe3022f32cc2b2b632c3fbb96"}, + {file = "pydantic_core-2.27.2-cp39-cp39-win32.whl", hash = "sha256:cca63613e90d001b9f2f9a9ceb276c308bfa2a43fafb75c8031c4f66039e8c6e"}, + {file = "pydantic_core-2.27.2-cp39-cp39-win_amd64.whl", hash = "sha256:77d1bca19b0f7021b3a982e6f903dcd5b2b06076def36a652e3907f596e29f67"}, + {file = "pydantic_core-2.27.2-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:2bf14caea37e91198329b828eae1618c068dfb8ef17bb33287a7ad4b61ac314e"}, + {file = "pydantic_core-2.27.2-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:b0cb791f5b45307caae8810c2023a184c74605ec3bcbb67d13846c28ff731ff8"}, + {file = "pydantic_core-2.27.2-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:688d3fd9fcb71f41c4c015c023d12a79d1c4c0732ec9eb35d96e3388a120dcf3"}, + {file = 
"pydantic_core-2.27.2-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3d591580c34f4d731592f0e9fe40f9cc1b430d297eecc70b962e93c5c668f15f"}, + {file = "pydantic_core-2.27.2-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:82f986faf4e644ffc189a7f1aafc86e46ef70372bb153e7001e8afccc6e54133"}, + {file = "pydantic_core-2.27.2-pp310-pypy310_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:bec317a27290e2537f922639cafd54990551725fc844249e64c523301d0822fc"}, + {file = "pydantic_core-2.27.2-pp310-pypy310_pp73-musllinux_1_1_armv7l.whl", hash = "sha256:0296abcb83a797db256b773f45773da397da75a08f5fcaef41f2044adec05f50"}, + {file = "pydantic_core-2.27.2-pp310-pypy310_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:0d75070718e369e452075a6017fbf187f788e17ed67a3abd47fa934d001863d9"}, + {file = "pydantic_core-2.27.2-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:7e17b560be3c98a8e3aa66ce828bdebb9e9ac6ad5466fba92eb74c4c95cb1151"}, + {file = "pydantic_core-2.27.2-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:c33939a82924da9ed65dab5a65d427205a73181d8098e79b6b426bdf8ad4e656"}, + {file = "pydantic_core-2.27.2-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:00bad2484fa6bda1e216e7345a798bd37c68fb2d97558edd584942aa41b7d278"}, + {file = "pydantic_core-2.27.2-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c817e2b40aba42bac6f457498dacabc568c3b7a986fc9ba7c8d9d260b71485fb"}, + {file = "pydantic_core-2.27.2-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:251136cdad0cb722e93732cb45ca5299fb56e1344a833640bf93b2803f8d1bfd"}, + {file = "pydantic_core-2.27.2-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:d2088237af596f0a524d3afc39ab3b036e8adb054ee57cbb1dcf8e09da5b29cc"}, + {file = "pydantic_core-2.27.2-pp39-pypy39_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:d4041c0b966a84b4ae7a09832eb691a35aec90910cd2dbe7a208de59be77965b"}, + {file = "pydantic_core-2.27.2-pp39-pypy39_pp73-musllinux_1_1_armv7l.whl", hash = "sha256:8083d4e875ebe0b864ffef72a4304827015cff328a1be6e22cc850753bfb122b"}, + {file = "pydantic_core-2.27.2-pp39-pypy39_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:f141ee28a0ad2123b6611b6ceff018039df17f32ada8b534e6aa039545a3efb2"}, + {file = "pydantic_core-2.27.2-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:7d0c8399fcc1848491f00e0314bd59fb34a9c008761bcb422a057670c3f65e35"}, + {file = "pydantic_core-2.27.2.tar.gz", hash = "sha256:eb026e5a4c1fee05726072337ff51d1efb6f59090b7da90d30ea58625b1ffb39"}, ] [package.dependencies] @@ -2694,6 +3164,7 @@ version = "0.5.1" description = "Python bindings for ssdeep" optional = false python-versions = "*" +groups = ["main"] files = [ {file = "pydeep2-0.5.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e14b310b820d895a7354be7fd025de874892df249cbfb3ad8a524459e1511fd8"}, {file = "pydeep2-0.5.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2283893e25826b547dd1e5c71a010e86ddfd7270e2f2b8c90973c1d7984c7eb7"}, @@ -2718,6 +3189,7 @@ version = "0.31" description = "Library for analyzing ELF files and DWARF debugging information" optional = false python-versions = "*" +groups = ["main"] files = [ {file = "pyelftools-0.31-py3-none-any.whl", hash = "sha256:f52de7b3c7e8c64c8abc04a79a1cf37ac5fb0b8a49809827130b858944840607"}, {file = "pyelftools-0.31.tar.gz", hash = "sha256:c774416b10310156879443b81187d182d8d9ee499660380e645918b50bc88f99"}, @@ -2729,6 +3201,7 @@ 
version = "2.4.0" description = "A python svg graph plotting library" optional = false python-versions = "*" +groups = ["main"] files = [ {file = "pygal-2.4.0-py2.py3-none-any.whl", hash = "sha256:27abab93cbc31e21f3c6bdecc05bda6cd3570cbdbd8297b7caa6904051b50d72"}, {file = "pygal-2.4.0.tar.gz", hash = "sha256:9204f05380b02a8a32f9bf99d310b51aa2a932cba5b369f7a4dc3705f0a4ce83"}, @@ -2742,13 +3215,14 @@ test = ["cairosvg", "coveralls", "flask", "lxml", "pygal-maps-ch", "pygal-maps-f [[package]] name = "pygments" -version = "2.18.0" +version = "2.19.1" description = "Pygments is a syntax highlighting package written in Python." optional = false python-versions = ">=3.8" +groups = ["main", "dev"] files = [ - {file = "pygments-2.18.0-py3-none-any.whl", hash = "sha256:b8e6aca0523f3ab76fee51799c488e38782ac06eafcf95e7ba832985c8e7b13a"}, - {file = "pygments-2.18.0.tar.gz", hash = "sha256:786ff802f32e91311bff3889f6e9a86e81505fe99f2735bb6d60ae0c5004f199"}, + {file = "pygments-2.19.1-py3-none-any.whl", hash = "sha256:9ea1544ad55cecf4b8242fab6dd35a93bbce657034b0611ee383099054ab6d8c"}, + {file = "pygments-2.19.1.tar.gz", hash = "sha256:61c16d2a8576dc0649d9f39e089b5f02bcd27fba10d8fb4dcc28173f7a45151f"}, ] [package.extras] @@ -2760,6 +3234,7 @@ version = "0.11" description = "A Guacamole python client library." optional = false python-versions = "*" +groups = ["main"] files = [ {file = "pyguacamole-0.11-py3-none-any.whl", hash = "sha256:7f8d8652ce2e86473d72a50e0c9d8a8e0c3c74e373c6b926ca4c851774cae608"}, {file = "pyguacamole-0.11.tar.gz", hash = "sha256:d6facde097a1b1a3048b20fb2ff88b024744ceb2865fb912525da7ebb7779695"}, @@ -2769,92 +3244,59 @@ files = [ future = ">=0.15.2" six = ">=1.10.0" -[[package]] -name = "pyjwt" -version = "2.9.0" -description = "JSON Web Token implementation in Python" -optional = false -python-versions = ">=3.8" -files = [ - {file = "PyJWT-2.9.0-py3-none-any.whl", hash = "sha256:3b02fb0f44517787776cf48f2ae25d8e14f300e6d7545a4315cee571a415e850"}, - {file = "pyjwt-2.9.0.tar.gz", hash = "sha256:7e1e5b56cc735432a7369cbfa0efe50fa113ebecdc04ae6922deba8b84582d0c"}, -] - -[package.dependencies] -cryptography = {version = ">=3.4.0", optional = true, markers = "extra == \"crypto\""} - -[package.extras] -crypto = ["cryptography (>=3.4.0)"] -dev = ["coverage[toml] (==5.0.4)", "cryptography (>=3.4.0)", "pre-commit", "pytest (>=6.0.0,<7.0.0)", "sphinx", "sphinx-rtd-theme", "zope.interface"] -docs = ["sphinx", "sphinx-rtd-theme", "zope.interface"] -tests = ["coverage[toml] (==5.0.4)", "pytest (>=6.0.0,<7.0.0)"] - [[package]] name = "pymongo" -version = "4.9.1" +version = "4.11" description = "Python driver for MongoDB " optional = false -python-versions = ">=3.8" -files = [ - {file = "pymongo-4.9.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:dc3d070d746ab79e9b393a5c236df20e56607389af2b79bf1bfe9a841117558e"}, - {file = "pymongo-4.9.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:fe709d05654c12fc513617c8d5c8d05b7e9cf1d5d94ada68add4e89530c867d2"}, - {file = "pymongo-4.9.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:aa4493f304b33c5d2ecee3055c98889ac6724d56f5f922d47420a45d0d4099c9"}, - {file = "pymongo-4.9.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f8e8b8deba6a4bff3dd5421071083219521c74d2acae0322de5c06f1a66c56af"}, - {file = "pymongo-4.9.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e3645aff8419ca60f9ccd08966b2f6b0d78053f9f98a814d025426f1d874c19a"}, - {file = 
"pymongo-4.9.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:51dbc6251c6783dfcc7d657c346986d8bad7210989b2fe15de16db5204a8e7ae"}, - {file = "pymongo-4.9.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1d7aa9cc2d92e73bdb036c578ba019da94ea165eb147e691cd910a6fab7ce3b7"}, - {file = "pymongo-4.9.1-cp310-cp310-win32.whl", hash = "sha256:8b632e01617f2608880f7b9926f54a5f5ebb51631996e0540fff7fc7980663c9"}, - {file = "pymongo-4.9.1-cp310-cp310-win_amd64.whl", hash = "sha256:f05e34d401be871d7c87cb10727d49315444e4ded07ff876a595e4c23b7436da"}, - {file = "pymongo-4.9.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:6bb3d5282278594753089dc7da48bfae4a7f337a2dd4d397eabb591c649e58d0"}, - {file = "pymongo-4.9.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:8f0d5258bc85a4e6b5bcae8160628168e71ec4625a58ceb53327c3280a0b6914"}, - {file = "pymongo-4.9.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:96462fb2175f740701d229f52018ea6e4adc4148c4112e6628bb359dd534a3df"}, - {file = "pymongo-4.9.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:286fb275267f0293364ba579f6354452599161f1902ad411061c7f744ab88328"}, - {file = "pymongo-4.9.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:4cddb51cead9700c4dccc916952bc0321b8d766bf782d374bfa0e93ef47c1d20"}, - {file = "pymongo-4.9.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1d79f20f9c7cbc1c708fb80b648b6fbd3220fd3437a9bd6017c1eb592e03b361"}, - {file = "pymongo-4.9.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:dd3352eaf578f8e9bdea7a5692910eedad1e8680f60726fc70e99c8af51a5449"}, - {file = "pymongo-4.9.1-cp311-cp311-win32.whl", hash = "sha256:ea3f0196e7c311b9944a609ac175bd91ab97952164a1246716fdd38d53ca3bcc"}, - {file = "pymongo-4.9.1-cp311-cp311-win_amd64.whl", hash = "sha256:b4c793db8457c856f333f396798470b9bfe405e17c307d581532c74cec70150c"}, - {file = "pymongo-4.9.1-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:47b4896544095d172c366dd4d4ea1da6b0ab1a77d8416897cc1801e2421b1e67"}, - {file = "pymongo-4.9.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:fbb1c7dfcf6c44e9e1928290631c7603817991cdf570691c9e15fca594918435"}, - {file = "pymongo-4.9.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a7689da1d1b444284e4ea9ab2eb64a15307b6b795918c0f3cd7774dd1d8a7556"}, - {file = "pymongo-4.9.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:7f962d74201c772555f7a78792fed820a5ea76db5c7ee6cf43748e411b44e430"}, - {file = "pymongo-4.9.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:08fbab69f3fb6f8088c81f4c4a8abd84a99c132034f5e27e47f894bbcb6bf439"}, - {file = "pymongo-4.9.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4327c0d9bd616b8289691360f2d4a09a72fe35479795832eae0d4ff78af53923"}, - {file = "pymongo-4.9.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:34e4993ae78be56f9e27a141168a1ab78253576fa3e893fa335a719ce204c3ef"}, - {file = "pymongo-4.9.1-cp312-cp312-win32.whl", hash = "sha256:e1f346811d4a2369f88ab7a6f886fa9c3bbc9ed4e4f4a3becca8717a73d465cb"}, - {file = "pymongo-4.9.1-cp312-cp312-win_amd64.whl", hash = "sha256:a2b12c74cfd90147babb77f9728646bcedfdbd2bd2a5b4130a00e3a0af1a3d34"}, - {file = 
"pymongo-4.9.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:a40ea8bc9cffb61c5c9c426c430d22235e085e610ee81ae075ddf51f12f76236"}, - {file = "pymongo-4.9.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:75d5974f874acdb2f125bdbe785045b23a39ecce1d3143dd5712800c7b6d25eb"}, - {file = "pymongo-4.9.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f23a046531030318622414f21198e232cf93c5640da9a80b45596a059c8cc090"}, - {file = "pymongo-4.9.1-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:91b1a92214c3912af5467f77c2f6435cd76f6de64c70cba7bb4ee43eba7f459e"}, - {file = "pymongo-4.9.1-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3a846423c4535428f69a90a1451df3718bc59f0c4ab685b9e96d3071951e0be4"}, - {file = "pymongo-4.9.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d476d91a5c9e6c37bc8ec3fb294e1c01d95736ccf01a59bb1540fe2f710f826e"}, - {file = "pymongo-4.9.1-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:172d8ba0f567e351a18765db23dab7dbcfdffd91a8788d90d46b350f80a40781"}, - {file = "pymongo-4.9.1-cp313-cp313-win32.whl", hash = "sha256:95418e334629440f70fe5ceeefc6cbbd50defb566901c8d68179ffbaec8d5f01"}, - {file = "pymongo-4.9.1-cp313-cp313-win_amd64.whl", hash = "sha256:1dfd2aa30174d36a3ef1dae4ee4c89710c2d65cac52ce6e13f17c710edbd61cf"}, - {file = "pymongo-4.9.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:c4204fad54830a3173a5c939cd052d0561fba03dba7e0ff6852fd631f3314aa4"}, - {file = "pymongo-4.9.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:375765ec81b1f0a26d08928afea0c3dff897c36080a090be53fc7b70cc51d497"}, - {file = "pymongo-4.9.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4d1b959a3dda0775d9111622ee47ad47772aed3a9da2e7d5f2f513fa68175dea"}, - {file = "pymongo-4.9.1-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:42c19d2b094cdd0ead7dbb38860bbe8268c140334ce55d8b39204ddb4ebd4904"}, - {file = "pymongo-4.9.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1fac1def9e9073f1c80198c99f0ec39c2528236c8912d96d7fd3b0237f4c523a"}, - {file = "pymongo-4.9.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b347052d510989d1f52b8553b31297f21cf74bd9f6aed71ee84e563492f4ff17"}, - {file = "pymongo-4.9.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1b4b961fce213f2bcdc92268f85111a3668c61b9b4d4e7ece27dce3a137cfcbd"}, - {file = "pymongo-4.9.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:a0b10cf51ec14a487c94709d294c00e1fb6a0a4c38cdc3acfb2ced5ef60972a0"}, - {file = "pymongo-4.9.1-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:679b8d55854da7c7fdb82aa5e092ab4de0144daf6758defed8ab00ff9ce05360"}, - {file = "pymongo-4.9.1-cp38-cp38-win32.whl", hash = "sha256:432ad395d2233056b042ccc73234e7136aa65d944d6bd8b5138394bd38aaff79"}, - {file = "pymongo-4.9.1-cp38-cp38-win_amd64.whl", hash = "sha256:9fbe9fad27619ac4cfda5df0ade26a99906da7dfe7b01deddc25997eb1804e4c"}, - {file = "pymongo-4.9.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:99b611ff75b5d9e17183dcf9584a7b04f9db07e51a162f23ea05e485e0735c0a"}, - {file = "pymongo-4.9.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:8089003a99127f917bdbeec177d41cef019cda8ec70534c1018cb60aacd23c2a"}, - {file = "pymongo-4.9.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:9d78adf25967c06298c7e488f4cfab79a390fc32c2b1d428613976f99031603d"}, - {file = "pymongo-4.9.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:56877cfcdf7dfc5c6408e4551ec0d6d65ebbca4d744a0bc90400f09ef6bbcc8a"}, - {file = "pymongo-4.9.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:16d2efe559d0d96bc0b74b3ff76701ad6f6e1a65f6581b573dcacc29158131c8"}, - {file = "pymongo-4.9.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f838f613e74b4dad8ace0d90f42346005bece4eda5bf6d389cfadb8322d39316"}, - {file = "pymongo-4.9.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:db5b299e11284f8d82ce2983d8e19fcc28f98f902a179709ef1982b4cca6f8b8"}, - {file = "pymongo-4.9.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:b23211c031b45d0f32de83ab7d77f9c26f1025c2d2c91463a5d8594a16103655"}, - {file = "pymongo-4.9.1-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:687cf70e096381bc65b4273a6a9319617618f7ace65caffc356e1099c4a68511"}, - {file = "pymongo-4.9.1-cp39-cp39-win32.whl", hash = "sha256:e02b03e3815b80a63e773e4c32aed3cf5633d406f376477be74550295c211256"}, - {file = "pymongo-4.9.1-cp39-cp39-win_amd64.whl", hash = "sha256:0492ef43f3342354cf581712e431621c221f60c877ebded84e3f3e53b71bbbe0"}, - {file = "pymongo-4.9.1.tar.gz", hash = "sha256:b7f2d34390acf60e229c30037d1473fcf69f4536cd7f48f6f78c0c931c61c505"}, +python-versions = ">=3.9" +groups = ["main"] +files = [ + {file = "pymongo-4.11-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:1971039a8e3aab139e0382b26a9670cd34f43c5301da267360b9a640b637d09b"}, + {file = "pymongo-4.11-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:b4878e92ae255a05756399a4e2b428f0fd3529561eacd9f4781a70ad5311397e"}, + {file = "pymongo-4.11-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e43b0533f291a8883e52fd7a8919353ae7038d50ef17873983877c2f6b76330e"}, + {file = "pymongo-4.11-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cf0a36fae1187fc2bdf3c527dc68c6bfd7c89726fbbf3215460b82e28bd8b81e"}, + {file = "pymongo-4.11-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ff5176a083252a769e5bfb1a950d0e37da585c313e1a4af98903e22e7cf4c475"}, + {file = "pymongo-4.11-cp310-cp310-win32.whl", hash = "sha256:ee3cc9bce848a1024d1c96717540f3f30cba885df9610be70c0653764e30ae6e"}, + {file = "pymongo-4.11-cp310-cp310-win_amd64.whl", hash = "sha256:06e82968ea031aebc18820898b102efed1ea8dc21b51eff2a81dc9ba4191fa6b"}, + {file = "pymongo-4.11-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:0197fe47e31bee488e82e7ab73e6a351a191bbd6e25cf4a380622e4b1ffcd143"}, + {file = "pymongo-4.11-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:f300d1811bd33940b2dd1907dbe2b4ae473003a384d6a764babb3ea5a4edede4"}, + {file = "pymongo-4.11-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:212f14a882659a45db472f0bc0fdf83c16fad4e7e4d4257187797779c461320e"}, + {file = "pymongo-4.11-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c72165e4ee884c3c9ac85a443f20dd01849c9df26b38ff25b08260330ed78202"}, + {file = "pymongo-4.11-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:bedcf4a602042cb2c19ae3ee450fbdd5832095ae5b563e004f7c0d307a22c9e1"}, + {file = "pymongo-4.11-cp311-cp311-win32.whl", hash = 
"sha256:64ad12ae8d79f18ec30d807b9b9b9802c30427c39599d8b1833bc00e63f0e4bb"}, + {file = "pymongo-4.11-cp311-cp311-win_amd64.whl", hash = "sha256:a308ad2eeaee370b3b4154a82840c8f8f9b18ccc76b71812323d243a7bda98a2"}, + {file = "pymongo-4.11-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:254aa90fafead13aca8dbcfbc407e2f6f7c125fce726925bd96adc74b6226f9e"}, + {file = "pymongo-4.11-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:e84e27d6fa4b3e532043daf89d52d2cfbd7b4697b44b86a7b3db8cacdcfcc58c"}, + {file = "pymongo-4.11-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7ac81987985275abeb53f5faa5c4a7f7890df9368c0f730fe37460a6301d94d7"}, + {file = "pymongo-4.11-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:85cfc99af2ae0fb8699e15b1f3479018216eb75d80f0082973a876ecfdf1022e"}, + {file = "pymongo-4.11-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b885210269455e05f5704887a4c7b3b99d03c9ab37f320005acc21d1761f1656"}, + {file = "pymongo-4.11-cp312-cp312-win32.whl", hash = "sha256:b7af60e034e04c3206dfbe2a7bbcc46057ca4f9ad8ed66869bf669c02a1e6e99"}, + {file = "pymongo-4.11-cp312-cp312-win_amd64.whl", hash = "sha256:74f02b7dbb3b15c2e48c7bb28941f6198dc73ced09683738180a9fdbfc0983b2"}, + {file = "pymongo-4.11-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:d33c80afcbfdd2934917fedfa005251484ca8d98767c74793c35d5f24b8e48f1"}, + {file = "pymongo-4.11-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:d4db6f7515f5087ae9deecc3b60d3d8a512b1a0a9bff93113bc9edf20ed29ab9"}, + {file = "pymongo-4.11-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c3a7ead31317d3a3eabc17486c75ad109f3d514a702002e52da474b59dd8acce"}, + {file = "pymongo-4.11-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4c0ab91c06bf79dd3d3fd5f2a53fe22db8036084ddcfaacd80e83828b6eb4f25"}, + {file = "pymongo-4.11-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9a04f49d6f1fa4f4e6a98ef76b4881b4a9c7decd8a90ee5c9aeff087662e2d8a"}, + {file = "pymongo-4.11-cp313-cp313-win32.whl", hash = "sha256:981ae0caa245bedf9af3af9159ae71b05444e35dd61a0250235b5c58dcd69178"}, + {file = "pymongo-4.11-cp313-cp313-win_amd64.whl", hash = "sha256:8e5a56a508612c64906d5375ebee413cee2fc432c09abd9cb4d9e23ac8bcdd3a"}, + {file = "pymongo-4.11-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:c3b67a2949240d63a0b8b1094e1d2d4b94dedf0317ac6e14164adc9aaf260963"}, + {file = "pymongo-4.11-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:17d4b787afed3163e5faa147b028d8ec7996a5388328499588c5c5858f26ab8a"}, + {file = "pymongo-4.11-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ae7f636010ce54885f1ea19c37ea2cb744d976ce689361a776fd0e991d020b81"}, + {file = "pymongo-4.11-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c7f2309384de675e7e25104fc5947acb65ad3ba69dc8246095d57ff1220dfcbb"}, + {file = "pymongo-4.11-cp313-cp313t-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:17966e98d0350b578a337f053cc541458c296236d4b5b4b8adb76f5e82644fb8"}, + {file = "pymongo-4.11-cp313-cp313t-win32.whl", hash = "sha256:b7246f2060ba03dcc25ade803bc6a3c39f1cc7db93297875edd251ca8cf5cac7"}, + {file = "pymongo-4.11-cp313-cp313t-win_amd64.whl", hash = "sha256:488e3440f5bedcbf494fd02c0a433cb5be7e55ba44dc72202813e1007a865e6a"}, + {file = "pymongo-4.11-cp39-cp39-macosx_10_9_x86_64.whl", hash 
= "sha256:8436f90f26379c167d47b61ac78c48224af98f3136ece380b74158cbef2ad6a7"}, + {file = "pymongo-4.11-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:5837ee73ffa4a943360320d7b9b917da2752a4cb536e23a6eaca418c1c64f60a"}, + {file = "pymongo-4.11-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5c7e5e500cc408f7f2aa38a4a6ed96d78a4c4cce3bcc6f1a55113f0b9d7823dd"}, + {file = "pymongo-4.11-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7cf9d18444259d86cd7028ac428f4fe7fbdefa3450575126ad2ee4447db0f52f"}, + {file = "pymongo-4.11-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ec86da9ce746de890b8d470a1f12f3d2634786fee002a4c56bdbbfd59e23f1f7"}, + {file = "pymongo-4.11-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:38f777f5e43f3acd651a79aaf95abf29e8e7f85429a12ce32c4b7f2f877b2200"}, + {file = "pymongo-4.11-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:314648ec4d5eaa755233343d6d110348371988257de35f5a84667b709262deab"}, + {file = "pymongo-4.11-cp39-cp39-win32.whl", hash = "sha256:d4a275ea70f597d3fd77bdc83054307a65749b2c669bc5045cb4620930eed596"}, + {file = "pymongo-4.11-cp39-cp39-win_amd64.whl", hash = "sha256:bb3ce0ce49cfe5c31db046d567403adb2d158459cf10d7c71f6d72f2324c900a"}, + {file = "pymongo-4.11.tar.gz", hash = "sha256:21b9969e155c4b16a160fbe90c390a07ca7514479af6c3811b1d15ead26e10ba"}, ] [package.dependencies] @@ -2862,8 +3304,8 @@ dnspython = ">=1.16.0,<3.0.0" [package.extras] aws = ["pymongo-auth-aws (>=1.1.0,<2.0.0)"] -docs = ["furo (==2023.9.10)", "readthedocs-sphinx-search (>=0.3,<1.0)", "sphinx (>=5.3,<8)", "sphinx-autobuild (>=2020.9.1)", "sphinx-rtd-theme (>=2,<3)", "sphinxcontrib-shellcheck (>=1,<2)"] -encryption = ["certifi", "pymongo-auth-aws (>=1.1.0,<2.0.0)", "pymongocrypt (>=1.10.0,<2.0.0)"] +docs = ["furo (==2024.8.6)", "readthedocs-sphinx-search (>=0.3,<1.0)", "sphinx (>=5.3,<9)", "sphinx-autobuild (>=2020.9.1)", "sphinx-rtd-theme (>=2,<4)", "sphinxcontrib-shellcheck (>=1,<2)"] +encryption = ["certifi", "pymongo-auth-aws (>=1.1.0,<2.0.0)", "pymongocrypt (>=1.12.0,<2.0.0)"] gssapi = ["pykerberos", "winkerberos (>=0.5.0)"] ocsp = ["certifi", "cryptography (>=2.5)", "pyopenssl (>=17.2.0)", "requests (<3.0.0)", "service-identity (>=18.1.0)"] snappy = ["python-snappy"] @@ -2876,6 +3318,7 @@ version = "1.5.0" description = "Python binding to the Networking and Cryptography (NaCl) library" optional = false python-versions = ">=3.6" +groups = ["main"] files = [ {file = "PyNaCl-1.5.0-cp36-abi3-macosx_10_10_universal2.whl", hash = "sha256:401002a4aaa07c9414132aaed7f6836ff98f59277a234704ff66878c2ee4a0d1"}, {file = "PyNaCl-1.5.0-cp36-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_24_aarch64.whl", hash = "sha256:52cb72a79269189d4e0dc537556f4740f7f0a9ec41c1322598799b0bdad4ef92"}, @@ -2898,39 +3341,101 @@ tests = ["hypothesis (>=3.27.0)", "pytest (>=3.2.1,!=3.3.0)"] [[package]] name = "pyopenssl" -version = "24.2.1" +version = "25.0.0" description = "Python wrapper module around the OpenSSL library" optional = false python-versions = ">=3.7" +groups = ["main"] files = [ - {file = "pyOpenSSL-24.2.1-py3-none-any.whl", hash = "sha256:967d5719b12b243588573f39b0c677637145c7a1ffedcd495a487e58177fbb8d"}, - {file = "pyopenssl-24.2.1.tar.gz", hash = "sha256:4247f0dbe3748d560dcbb2ff3ea01af0f9a1a001ef5f7c4c647956ed8cbf0e95"}, + {file = "pyOpenSSL-25.0.0-py3-none-any.whl", hash = 
"sha256:424c247065e46e76a37411b9ab1782541c23bb658bf003772c3405fbaa128e90"}, + {file = "pyopenssl-25.0.0.tar.gz", hash = "sha256:cd2cef799efa3936bb08e8ccb9433a575722b9dd986023f1cabc4ae64e9dac16"}, ] [package.dependencies] -cryptography = ">=41.0.5,<44" +cryptography = ">=41.0.5,<45" +typing-extensions = {version = ">=4.9", markers = "python_version < \"3.13\" and python_version >= \"3.8\""} [package.extras] -docs = ["sphinx (!=5.2.0,!=5.2.0.post0,!=7.2.5)", "sphinx-rtd-theme"] +docs = ["sphinx (!=5.2.0,!=5.2.0.post0,!=7.2.5)", "sphinx_rtd_theme"] test = ["pretend", "pytest (>=3.0.1)", "pytest-rerunfailures"] [[package]] name = "pyparsing" -version = "2.4.7" -description = "Python parsing module" +version = "3.2.1" +description = "pyparsing module - Classes and methods to define and execute parsing grammars" optional = false -python-versions = ">=2.6, !=3.0.*, !=3.1.*, !=3.2.*" +python-versions = ">=3.9" +groups = ["main"] +files = [ + {file = "pyparsing-3.2.1-py3-none-any.whl", hash = "sha256:506ff4f4386c4cec0590ec19e6302d3aedb992fdc02c761e90416f158dacf8e1"}, + {file = "pyparsing-3.2.1.tar.gz", hash = "sha256:61980854fd66de3a90028d679a954d5f2623e83144b5afe5ee86f43d762e5f0a"}, +] + +[package.extras] +diagrams = ["jinja2", "railroad-diagrams"] + +[[package]] +name = "pypdf" +version = "5.2.0" +description = "A pure-python PDF library capable of splitting, merging, cropping, and transforming PDF files" +optional = false +python-versions = ">=3.8" +groups = ["main"] files = [ - {file = "pyparsing-2.4.7-py2.py3-none-any.whl", hash = "sha256:ef9d7589ef3c200abe66653d3f1ab1033c3c419ae9b9bdb1240a85b024efc88b"}, - {file = "pyparsing-2.4.7.tar.gz", hash = "sha256:c203ec8783bf771a155b207279b9bccb8dea02d8f0c9e5f8ead507bc3246ecc1"}, + {file = "pypdf-5.2.0-py3-none-any.whl", hash = "sha256:d107962ec45e65e3bd10c1d9242bdbbedaa38193c9e3a6617bd6d996e5747b19"}, + {file = "pypdf-5.2.0.tar.gz", hash = "sha256:7c38e68420f038f2c4998fd9d6717b6db4f6cef1642e9cf384d519c9cf094663"}, ] +[package.dependencies] +typing_extensions = {version = ">=4.0", markers = "python_version < \"3.11\""} + +[package.extras] +crypto = ["cryptography"] +cryptodome = ["PyCryptodome"] +dev = ["black", "flit", "pip-tools", "pre-commit (<2.18.0)", "pytest-cov", "pytest-socket", "pytest-timeout", "pytest-xdist", "wheel"] +docs = ["myst_parser", "sphinx", "sphinx_rtd_theme"] +full = ["Pillow (>=8.0.0)", "cryptography"] +image = ["Pillow (>=8.0.0)"] + +[[package]] +name = "pyre2-updated" +version = "0.3.8" +description = "Python wrapper for Google's RE2 using Cython" +optional = false +python-versions = ">=3.7" +groups = ["main"] +files = [ + {file = "pyre2-updated-0.3.8.tar.gz", hash = "sha256:6d6aaa2f41a085095993b2d09562511cf40d4aedfc5bd00d78f53112be051e19"}, + {file = "pyre2_updated-0.3.8-cp310-cp310-macosx_10_15_universal2.whl", hash = "sha256:65e67527eb472cd5045966df42414205cbfc187633a844ba7d9f59480f46e748"}, + {file = "pyre2_updated-0.3.8-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ab79cd5c663d20eca146361f2b2b7cf23c631109e43064d4bb35a43cb0607ffd"}, + {file = "pyre2_updated-0.3.8-cp310-cp310-win_amd64.whl", hash = "sha256:b0820d8420caca762184f5eebe57631be5b985e405c53e807aac24897daa892a"}, + {file = "pyre2_updated-0.3.8-cp311-cp311-macosx_10_15_universal2.whl", hash = "sha256:2bda9bf4d59568152e085450ffc1c08fcf659000d06766861f7ff340ba601c3e"}, + {file = "pyre2_updated-0.3.8-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:54de291bb8b3aa2223f864293e0a9e62ea2877fea72a632fe5cd36a60012f7bc"}, + {file = "pyre2_updated-0.3.8-cp311-cp311-win_amd64.whl", hash = "sha256:45c0940dbda4a2c45652e69ab52946c2395171f3e3a96bce456a8ddb45b337e1"}, + {file = "pyre2_updated-0.3.8-cp312-cp312-macosx_10_15_universal2.whl", hash = "sha256:4db83d0f148d91f9b67b71eb3fd04a7e1d09397e7ecea75972632cd46c27ba6e"}, + {file = "pyre2_updated-0.3.8-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:53d27f552fac2149c9dbe45faf1f6236c55fba42ea579799a07cdc853e3732d6"}, + {file = "pyre2_updated-0.3.8-cp312-cp312-win_amd64.whl", hash = "sha256:648196c6fe7b115431f2bedc48660333a61f6628bde7efefc14098d67f86b7b4"}, + {file = "pyre2_updated-0.3.8-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:802dec5801912c76b21dcc3d91810ae9ff0cc308e78fb0aa32d93e921783f5d8"}, + {file = "pyre2_updated-0.3.8-cp37-cp37m-win_amd64.whl", hash = "sha256:daf366a83b70b08cc4c59477455a28b2a108484d282824d350217a8a7378d229"}, + {file = "pyre2_updated-0.3.8-cp38-cp38-macosx_10_15_universal2.whl", hash = "sha256:f1c406e30aed02699888ae6938b83058f0845650991cd97db09e1686ce8a181a"}, + {file = "pyre2_updated-0.3.8-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8331fc7bb1fa57d2654046ed189631d70db1170d935264ce82a7291413ce60e4"}, + {file = "pyre2_updated-0.3.8-cp38-cp38-win_amd64.whl", hash = "sha256:407ca7082e2049aeae0b2c716cc53cd92217d17886694fe0047ce5d636161155"}, + {file = "pyre2_updated-0.3.8-cp39-cp39-macosx_10_15_universal2.whl", hash = "sha256:99942f75a252691117880fc60941f20170d35bcb3ccb72aff9a1bbce951d4db0"}, + {file = "pyre2_updated-0.3.8-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ff764613bd436689cf5e4d3c14a25cf8465474e1db9f2a39738bbf481dd07300"}, + {file = "pyre2_updated-0.3.8-cp39-cp39-win_amd64.whl", hash = "sha256:350be9580700b67af87f5227453d1123bc9f4513f0bcc60450574f1bc46cb24f"}, +] + +[package.extras] +perf = ["regex"] +test = ["pytest"] + [[package]] name = "pysocks" version = "1.7.1" description = "A Python SOCKS client module. See https://github.com/Anorov/PySocks for more information." optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" +groups = ["main"] files = [ {file = "PySocks-1.7.1-py27-none-any.whl", hash = "sha256:08e69f092cc6dbe92a0fdd16eeb9b9ffbc13cadfe5ca4c7bd92ffb078b293299"}, {file = "PySocks-1.7.1-py3-none-any.whl", hash = "sha256:2725bd0a9925919b9b51739eea5f9e2bae91e83288108a9ad338b2e3a4435ee5"}, @@ -2943,6 +3448,7 @@ version = "7.2.2" description = "pytest: simple powerful testing with Python" optional = false python-versions = ">=3.7" +groups = ["dev"] files = [ {file = "pytest-7.2.2-py3-none-any.whl", hash = "sha256:130328f552dcfac0b1cec75c12e3f005619dc5f874f0a06e8ff7263f0ee6225e"}, {file = "pytest-7.2.2.tar.gz", hash = "sha256:c99ab0c73aceb050f68929bc93af19ab6db0558791c6a0715723abe9d0ade9d4"}, @@ -2966,6 +3472,7 @@ version = "0.18.3" description = "Pytest support for asyncio" optional = false python-versions = ">=3.7" +groups = ["dev"] files = [ {file = "pytest-asyncio-0.18.3.tar.gz", hash = "sha256:7659bdb0a9eb9c6e3ef992eef11a2b3e69697800ad02fb06374a210d85b29f91"}, {file = "pytest_asyncio-0.18.3-1-py3-none-any.whl", hash = "sha256:16cf40bdf2b4fb7fc8e4b82bd05ce3fbcd454cbf7b92afc445fe299dabb88213"}, @@ -2984,6 +3491,7 @@ version = "3.0.0" description = "Pytest plugin for measuring coverage." 
optional = false python-versions = ">=3.6" +groups = ["dev"] files = [ {file = "pytest-cov-3.0.0.tar.gz", hash = "sha256:e7f0f5b1617d2210a2cabc266dfe2f4c75a8d32fb89eafb7ad9d06f6d076d470"}, {file = "pytest_cov-3.0.0-py3-none-any.whl", hash = "sha256:578d5d15ac4a25e5f961c938b85a05b09fdaae9deef3bb6de9a6e766622ca7a6"}, @@ -3002,6 +3510,7 @@ version = "4.5.2" description = "A Django plugin for pytest." optional = false python-versions = ">=3.5" +groups = ["dev"] files = [ {file = "pytest-django-4.5.2.tar.gz", hash = "sha256:d9076f759bb7c36939dbdd5ae6633c18edfc2902d1a69fdbefd2426b970ce6c2"}, {file = "pytest_django-4.5.2-py3-none-any.whl", hash = "sha256:c60834861933773109334fe5a53e83d1ef4828f2203a1d6a0fa9972f4f75ab3e"}, @@ -3020,6 +3529,7 @@ version = "0.4.8" description = "Pytest plugin providing a fixture interface for spulec/freezegun" optional = false python-versions = ">= 3.6" +groups = ["dev"] files = [ {file = "pytest_freezer-0.4.8-py3-none-any.whl", hash = "sha256:644ce7ddb8ba52b92a1df0a80a699bad2b93514c55cf92e9f2517b68ebe74814"}, {file = "pytest_freezer-0.4.8.tar.gz", hash = "sha256:8ee2f724b3ff3540523fa355958a22e6f4c1c819928b78a7a183ae4248ce6ee6"}, @@ -3035,6 +3545,7 @@ version = "3.7.0" description = "Thin-wrapper around the mock package for easier use with pytest" optional = false python-versions = ">=3.7" +groups = ["dev"] files = [ {file = "pytest-mock-3.7.0.tar.gz", hash = "sha256:5112bd92cc9f186ee96e1a92efc84969ea494939c3aead39c50f421c4cc69534"}, {file = "pytest_mock-3.7.0-py3-none-any.whl", hash = "sha256:6cff27cec936bf81dc5ee87f07132b807bcda51106b5ec4b90a04331cba76231"}, @@ -3052,6 +3563,7 @@ version = "1.1.0" description = "pytest plugin for printing summary data as I want it" optional = false python-versions = ">=3.7" +groups = ["dev"] files = [ {file = "pytest_pretty-1.1.0-py3-none-any.whl", hash = "sha256:e7e63e0437694e5a7c557d99c81d6e29e0dcaab3b6b9dcb3ba23ddf3eebb5b42"}, {file = "pytest_pretty-1.1.0.tar.gz", hash = "sha256:425e116c1ed10ce67eeb688f22d1bcb91ca58a97986026bf42c0717da80740b3"}, @@ -3067,6 +3579,7 @@ version = "3.6.1" description = "pytest xdist plugin for distributed testing, most importantly across multiple CPUs" optional = false python-versions = ">=3.8" +groups = ["dev"] files = [ {file = "pytest_xdist-3.6.1-py3-none-any.whl", hash = "sha256:9ed4adfb68a016610848639bb7e02c9352d5d9f03d04809919e2dafc3be4cca7"}, {file = "pytest_xdist-3.6.1.tar.gz", hash = "sha256:ead156a4db231eec769737f57668ef58a2084a34b2e55c4a8fa20d861107300d"}, @@ -3083,13 +3596,14 @@ testing = ["filelock"] [[package]] name = "python-dateutil" -version = "2.8.2" +version = "2.9.0.post0" description = "Extensions to the standard Python datetime module" optional = false python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,>=2.7" +groups = ["main", "dev"] files = [ - {file = "python-dateutil-2.8.2.tar.gz", hash = "sha256:0123cacc1627ae19ddf3c27a5de5bd67ee4586fbdd6440d9748f8abb483d3e86"}, - {file = "python_dateutil-2.8.2-py2.py3-none-any.whl", hash = "sha256:961d03dc3453ebbc59dbdea9e4e11c5651520a876d0f4db161e8674aae935da9"}, + {file = "python-dateutil-2.9.0.post0.tar.gz", hash = "sha256:37dd54208da7e1cd875388217d5e00ebd4179249f90fb72437e91a35459a0ad3"}, + {file = "python_dateutil-2.9.0.post0-py2.py3-none-any.whl", hash = "sha256:a8b2bc7bffae282281c8140a97d3aa9c14da0b136dfe83f850eea9a5f7470427"}, ] [package.dependencies] @@ -3101,6 +3615,7 @@ version = "1.0.1" description = "Read key-value pairs from a .env file and set them as environment variables" optional = false python-versions = ">=3.8" +groups = 
["main"] files = [ {file = "python-dotenv-1.0.1.tar.gz", hash = "sha256:e324ee90a023d808f1959c46bcbc04446a10ced277783dc6ee09987c37ec10ca"}, {file = "python_dotenv-1.0.1-py3-none-any.whl", hash = "sha256:f7b63ef50f1b690dddf550d03497b66d609393b40b564ed0d674909a68ebf16a"}, @@ -3111,55 +3626,66 @@ cli = ["click (>=5.0)"] [[package]] name = "python-flirt" -version = "0.8.10" +version = "0.9.2" description = "A Python library for parsing, compiling, and matching Fast Library Identification and Recognition Technology (FLIRT) signatures." optional = false -python-versions = ">=3.8" -files = [ - {file = "python_flirt-0.8.10-cp310-cp310-macosx_10_12_x86_64.macosx_11_0_arm64.macosx_10_12_universal2.whl", hash = "sha256:6c3185790d3f3749ea2cb984f10259d533ad54036b5878cedcd4149ce3122d5e"}, - {file = "python_flirt-0.8.10-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:0db5c5d9a8920cdb32fd1dbff8fe00e99d32fd9d478b15317093e23942c0aef6"}, - {file = "python_flirt-0.8.10-cp310-cp310-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:9b5a285ea80a63d68af56fb406de5ab263ab1b6c988def94c76ebe91d1e9eeec"}, - {file = "python_flirt-0.8.10-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5d3172c9ac87d36add8eb15d42f33636d408516c7e4834dd572836ef5f4e9e26"}, - {file = "python_flirt-0.8.10-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:cc1b020e54f119916f7cdac4a24f692643915af83d51e2f73f35472da16213ac"}, - {file = "python_flirt-0.8.10-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b17592ac73c49515f4c7ae4945060dea18685bdfb320f0bbcd0f613d11492dbb"}, - {file = "python_flirt-0.8.10-cp310-none-win32.whl", hash = "sha256:b927124c042863d7d488c3b64796c6c690252f1b4f9cbe1135681c6918e53d99"}, - {file = "python_flirt-0.8.10-cp310-none-win_amd64.whl", hash = "sha256:2d8ab2d51b26502415a7a3cd82eb825252be508d7f32d870440fd1096b444232"}, - {file = "python_flirt-0.8.10-cp311-cp311-macosx_10_12_x86_64.macosx_11_0_arm64.macosx_10_12_universal2.whl", hash = "sha256:166d27e034118562aa95d7b403a5b111d5ba8469fe14dd4077b081bee7266d00"}, - {file = "python_flirt-0.8.10-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:354814081fcba58f5448ef8bd3b3a7814cce2cb1ed7ad9c1d572447bc85f9e7a"}, - {file = "python_flirt-0.8.10-cp311-cp311-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:055412a990b471acc2ce16d9372e4a1af379a81bc54bedd864d35f5f63c8b010"}, - {file = "python_flirt-0.8.10-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8da95d051a6bbcebb3043541adfb3dd635b792a837e8ac829c017549ba00d977"}, - {file = "python_flirt-0.8.10-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:c14ff6b3a9dd1d0629d247688e8e69d348dab9df2f654b0f2424343f66446ce5"}, - {file = "python_flirt-0.8.10-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2547d34b146ab18bf64c1eceb46be0643d903bbf396aabe79d07ed8475893cea"}, - {file = "python_flirt-0.8.10-cp311-none-win32.whl", hash = "sha256:a59b5c26a12038484ece6bb66eb2e45c286fe476f5a4bed5e8f4e5dabba63991"}, - {file = "python_flirt-0.8.10-cp311-none-win_amd64.whl", hash = "sha256:4fb8d48f304ef6394dee74a5d6b1e31f9b91bc046305ea6ee1f26d8e28229cb4"}, - {file = "python_flirt-0.8.10-cp312-cp312-macosx_10_12_x86_64.macosx_11_0_arm64.macosx_10_12_universal2.whl", hash = "sha256:a19cbf0a2c87decb8d861b74a499eb4b3fd6fc65ac73227d6330641a5d070c6c"}, - {file = "python_flirt-0.8.10-cp312-cp312-macosx_10_12_x86_64.whl", hash = 
"sha256:2b0e285a582576c2ddba9af17a97fbfdd52187fbf098a797df9e00034000796d"}, - {file = "python_flirt-0.8.10-cp312-cp312-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:b9bb40cb897458ac0d9af121e35aa28ea33ea1a9eadf33bbf79ff84a81f2eaf4"}, - {file = "python_flirt-0.8.10-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:30960a682673bb25ae2bd8ba7c754e9e18faa4427fe75fe29d5fc341fa974346"}, - {file = "python_flirt-0.8.10-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:4bfe6e2b8eb0e587476fab7db21a15fb40e96e944f404e872c906d89450a52bc"}, - {file = "python_flirt-0.8.10-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:02792127c579d624a98c8f65a1b6f2b27c2d8be673a4ae3e26d9b42543051cf3"}, - {file = "python_flirt-0.8.10-cp312-none-win32.whl", hash = "sha256:fc7e6041c7e146328a6daf05303590aa18251c0e5cf92f8b93e8f1eafaadb7a7"}, - {file = "python_flirt-0.8.10-cp312-none-win_amd64.whl", hash = "sha256:f21d7e23a82ba6cfcde4fc252cd998af8848b7e5bbd7b8c670384073d3b7ad66"}, - {file = "python_flirt-0.8.10-cp38-cp38-macosx_10_12_x86_64.macosx_11_0_arm64.macosx_10_12_universal2.whl", hash = "sha256:38e96fdf79b6e4b6b86b37d33278c8522eb41972a17d98d8924d3772c527ecd8"}, - {file = "python_flirt-0.8.10-cp38-cp38-macosx_10_12_x86_64.whl", hash = "sha256:ce6f443bec1ac970e41c8238e945b6f08af402bab32256c6b818143acd0a86d6"}, - {file = "python_flirt-0.8.10-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:19e38ea51af68113d88e180c339873d202bef8637532a2c9173dbac38ff4aece"}, - {file = "python_flirt-0.8.10-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d0102bfdda68ce2050cfb61cd2e8bbc8225bc2b5a0535fb492e12c42d9dbf23a"}, - {file = "python_flirt-0.8.10-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:547fc7d8567db0332b229be0551c3aa65c839d439fdd317b7184edb491dde322"}, - {file = "python_flirt-0.8.10-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:877a99236e7e37f4e94f223297e9029ab14426a4d4d5f7a621a1719b0375dda1"}, - {file = "python_flirt-0.8.10-cp38-none-win32.whl", hash = "sha256:cb173f1030cd05f586f20b68c594bdc54f745d4fb12dc8db100dc70127a771d7"}, - {file = "python_flirt-0.8.10-cp38-none-win_amd64.whl", hash = "sha256:2f4cebbf3cba105f5f0db64a030109c966f4aecb29f94c7ac6e4594f3d4b3c63"}, - {file = "python_flirt-0.8.10-cp39-cp39-macosx_10_12_x86_64.macosx_11_0_arm64.macosx_10_12_universal2.whl", hash = "sha256:c1f8e406d15f31049ff558534b4be952fb073aadc2e7174e650f4a71256282c6"}, - {file = "python_flirt-0.8.10-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:82b820c27a4f8f94804aa42617e8d5cb80e76ca55bd89b656ff3924dae8af293"}, - {file = "python_flirt-0.8.10-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:acedddf5acd384626c55b7ed2bda71a5651eb7aed3be1d7098507ea0d3adaabb"}, - {file = "python_flirt-0.8.10-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6745709e936c19100190c52220adda8290dd6bad31d8f708cf3c52deb87546d9"}, - {file = "python_flirt-0.8.10-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:6897be2d12fed520524259640648f5022c3c1bff2bf2da10f018315823cc4134"}, - {file = "python_flirt-0.8.10-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0d2f444799bc2b73271fb6e0715f9ccb831308999051ca58f121282499a955b1"}, - {file = "python_flirt-0.8.10-cp39-none-win32.whl", hash = "sha256:729002c7a6a15905a8dd60b646d5a28605ca73ed8a62886caaf3a49cfeb5a7bf"}, - {file = 
"python_flirt-0.8.10-cp39-none-win_amd64.whl", hash = "sha256:0b79ee2a6b0a098225510ab5cc0d54f1534ecfd4c647b3a614ce9dca9615d13a"}, +python-versions = ">=3.9" +groups = ["main"] +files = [ + {file = "python_flirt-0.9.2-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:a853b217bc930738fafddd1b6c2d4e4387ffc33812f5695368b976f37f5a59ba"}, + {file = "python_flirt-0.9.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:113c0d865380117bbb5f52870b1459f9ee8fe8f6c5317d2206357c0834a2e9f3"}, + {file = "python_flirt-0.9.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:87c17e413d675dcb34bb3af0e40296d20b971ff53f09f105b0135f30d96ae965"}, + {file = "python_flirt-0.9.2-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:b029cb6d1db4b422f3547aa6f153894f100e400182a05f2d87aa9270b643b2c4"}, + {file = "python_flirt-0.9.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5840aa59c218b91e70b463e0d5fe0178539bc714e001d3f3130379cc505e6344"}, + {file = "python_flirt-0.9.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:b2a3ddc1fb778f99ee254ab534aaa94387536cd0a38f5daa93921b19232fae69"}, + {file = "python_flirt-0.9.2-cp310-none-win32.whl", hash = "sha256:49993fed8fec8fc497f178ef75630870a910cd454d5169185282c1a92ccfe4cc"}, + {file = "python_flirt-0.9.2-cp310-none-win_amd64.whl", hash = "sha256:e604dd2b47362cc8379afd6c160a04ee5911c21985da5781ff9df96357b8553d"}, + {file = "python_flirt-0.9.2-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:3df5da165ab577fb056918342bbb45fd5952ae926609527a8fb172174d67cf8d"}, + {file = "python_flirt-0.9.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:8936b389261b7ddbf738900a114a37b1af23e428d31bbd39f1cea40c0bc82a6c"}, + {file = "python_flirt-0.9.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c369efcb1774a91f810484e4907f938ea69f18b811162c95f04907b6670937c0"}, + {file = "python_flirt-0.9.2-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:cd1e90da5de9ce4b77928f4ffaddac9b0d051e094cb82158d3ec2d4d0dd9b5b9"}, + {file = "python_flirt-0.9.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ac0662be73a482fdb09ff95cf9a408b2f8e3110f63d99f6fdff00c845e25543e"}, + {file = "python_flirt-0.9.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:83e41452dedc730713058bb8dbbd01664c1f2cd89594cf7892f4243079cf72fa"}, + {file = "python_flirt-0.9.2-cp311-none-win32.whl", hash = "sha256:a511b947ae64c840f136a00adca38850cadfde7dd969420f8693c2112260c7e5"}, + {file = "python_flirt-0.9.2-cp311-none-win_amd64.whl", hash = "sha256:3e533bd2fbd5c6bab4ab1f46b4e7dd00f550f70ab6e811e5df96328aef124538"}, + {file = "python_flirt-0.9.2-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:4107b1bc92a5b5fa772828d692e319e6d6bbf595feceb89436ed6f0e0ebcafec"}, + {file = "python_flirt-0.9.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:2d9ae9332fa14c88a36b731bd178336989f27284bd2ec1e07215cd42389723bd"}, + {file = "python_flirt-0.9.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:191f325a137339db07b83112a3a1117d4a8db2c1d17c37bbc7b9078c3fc032c7"}, + {file = "python_flirt-0.9.2-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:3ede761720ac608b2a46482cbc04b130f89ac2d3e53575b303373e1b281c5ff2"}, + {file = "python_flirt-0.9.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:79b0598076926b6912596e4a79f9fa910aaf63d741ea091bb1a228df20b3895f"}, + {file = 
"python_flirt-0.9.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:8f5461360d5723368733cb214c12f92a85340ea64c522ebe34d780422b26aaf8"}, + {file = "python_flirt-0.9.2-cp312-none-win32.whl", hash = "sha256:626de70de4a49f5f252c07e6d5db1c7a3aa624922107564554da9e864e848253"}, + {file = "python_flirt-0.9.2-cp312-none-win_amd64.whl", hash = "sha256:ecb08316ead1567c53873725106f9925c766e9dbf35af11aa650f78ba589165b"}, + {file = "python_flirt-0.9.2-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:92fe65eab1587573b5b13ffb755dbb67d1fdec411425850c984566d44864c104"}, + {file = "python_flirt-0.9.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:23c1a513ab31760ffcd21200c96f7027e97dd37bdfc35a09948e291c8ebe53af"}, + {file = "python_flirt-0.9.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:98e9da4492303ef21d2224b7380fc22d510f7eccddc13f7a9476ce079cc30ce8"}, + {file = "python_flirt-0.9.2-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:dd3edb4cceb818558969481e60917ee607401bc86f2c079e053f6db758e2644e"}, + {file = "python_flirt-0.9.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b5fad3e4814c39799198d4f01fad89b28d45dc2ee162393936b29ba5ba65964b"}, + {file = "python_flirt-0.9.2-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:2affcc3cf23f102ee6b2143801b956869307e52d6588896e2b6ee1a8dc8595f5"}, + {file = "python_flirt-0.9.2-cp313-none-win32.whl", hash = "sha256:f75ade2755a4de967296e06105f8250c364ed0072e09107a6794cabc3b7de80d"}, + {file = "python_flirt-0.9.2-cp313-none-win_amd64.whl", hash = "sha256:d48712f72e5769cb5ab096218924d68146b2341291373df70ec9b543124665ad"}, + {file = "python_flirt-0.9.2-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:5c82c2b7a1f54c9dc020e222b4d60cb02876ed7b9ff08e2cf34c27a7d7bf86cc"}, + {file = "python_flirt-0.9.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:e46800fe6fcaaf0af1f59b7b64fe23f65cce7946c372687bc024d163390cb283"}, + {file = "python_flirt-0.9.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c3e16582059cab76c5f23c103028ea09429f2af7f365086ddcbad293e1c65317"}, + {file = "python_flirt-0.9.2-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:a98ecc2d1c0e4c77bd0878c515a295332d972034b44c5e829ef694baddd6fe16"}, + {file = "python_flirt-0.9.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2a90f41c172afe99b149d1468514ac233534078fcd78864a6964382d24fd2c5f"}, + {file = "python_flirt-0.9.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:db7aac6bcce62d5e9aa6f3490a0df2689cda6674ad9a3f9ebe193bf6d09b4464"}, + {file = "python_flirt-0.9.2-cp39-none-win32.whl", hash = "sha256:9fac75e70aebaf282c5af2d3f9457c12c843992027079a6ba923875d169c09ea"}, + {file = "python_flirt-0.9.2-cp39-none-win_amd64.whl", hash = "sha256:b81b4661a907c3e9b9c60d943e404af9f198a8358c0194714da83ab6b4599d36"}, + {file = "python_flirt-0.9.2-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:aafb9802eb337b7bf63cd5bfadcc6d2686772a3a5213a43d82d8a256a9063b72"}, + {file = "python_flirt-0.9.2-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:b411dbd17f8b411113f109203b3cff608d9e6cc13f516694e34436e35f6c0196"}, + {file = "python_flirt-0.9.2-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8376882edb2fa931aebc89aedb9c27be0d1d5cf5ab85e266ba2f7c5dfe4cb637"}, + {file = "python_flirt-0.9.2-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:75d6586af3791705c1d8f20795ea8dee696c47b69632ce55d8680e063802b802"}, + {file = "python_flirt-0.9.2-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:d6c7c7f7ed7c0cdcec67cb06552d098f482a73683a23c319fa2f9fcbb787a1cc"}, + {file = "python_flirt-0.9.2-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:45f498b5021ed799f8188de04b77cf091d9386f6b26274e57b2bf661d4209a06"}, + {file = "python_flirt-0.9.2-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:1c41c4990971e6d7ea343726762a59d5731b1e8df11e72612409ba34083d888c"}, + {file = "python_flirt-0.9.2-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:757224a7c3ab0483086b12101ab42bbe73038b1cab4612e9dc120ff8384d36e3"}, + {file = "python_flirt-0.9.2-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:66a6b73afec2d33923b652071d2c0c3006ad2186fff4a54c7733acb32a566473"}, + {file = "python_flirt-0.9.2-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:fe0a8dcc0bda98d235359cb75fc3960b04721ae50ec8b518e68dabdebadcd21a"}, ] [package.extras] -dev = ["black (==22.10.0)", "isort (==5.10.1)", "maturin", "mypy (==0.982)", "pycodestyle (==2.9.1)", "pytest (==7.1.3)", "pytest-cov (==4.0.0)", "pytest-instafail (==0.4.2)", "pytest-sugar (==0.9.4)"] +dev = ["black (>=22.10.0)", "isort (>=5.10.1)", "maturin (>=1.7.4)", "mypy (>=0.982)", "pycodestyle (>=2.9.1)", "pytest (>=7.1.3)", "pytest-cov (>=4.0.0)", "pytest-instafail (>=0.4.2)", "pytest-sugar (>=0.9.4)"] [[package]] name = "python-magic" @@ -3167,6 +3693,8 @@ version = "0.4.27" description = "File type identification using libmagic" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" +groups = ["main"] +markers = "sys_platform == \"linux\"" files = [ {file = "python-magic-0.4.27.tar.gz", hash = "sha256:c1ba14b08e4a5f5c31a302b7721239695b2f0f058d125bd5ce1ee36b9d9d3c3b"}, {file = "python_magic-0.4.27-py2.py3-none-any.whl", hash = "sha256:c212960ad306f700aa0d01e5d7a325d20548ff97eb9920dcd29513174f0294d3"}, @@ -3178,50 +3706,44 @@ version = "4.5.0" description = "TLSH (C++ Python extension)" optional = false python-versions = ">=3.6" +groups = ["main"] files = [ {file = "python-tlsh-4.5.0.tar.gz", hash = "sha256:55e3df72cabdf48bf576b4580d7fa2feba9c14aacde0a8bd7b1ef40b03cb7969"}, ] [[package]] name = "python-whois" -version = "0.7.3" +version = "0.9.5" description = "Whois querying and parsing of domain registration information." optional = false python-versions = "*" +groups = ["main"] files = [ - {file = "python-whois-0.7.3.tar.gz", hash = "sha256:656a1100b8757f29daf010ec5a893a3d6349ccf097884021988c174eedea4a16"}, + {file = "python_whois-0.9.5-py3-none-any.whl", hash = "sha256:d435cf8012659745ce3b4e7c15428df9405648696727812d9eaacc36782d14e3"}, + {file = "python_whois-0.9.5.tar.gz", hash = "sha256:18968c21484752fcc4b9a5f0af477ef6b8dc2e8bb7f1bd5c33831499c0dd41ca"}, ] [package.dependencies] -future = "*" - -[package.extras] -"better date conversion" = ["python-dateutil"] +python-dateutil = "*" [[package]] -name = "python3-openid" -version = "3.2.0" -description = "OpenID support for modern servers and consumers." 
+name = "pythonaes" +version = "1.0" +description = "UNKNOWN" optional = false python-versions = "*" +groups = ["main"] files = [ - {file = "python3-openid-3.2.0.tar.gz", hash = "sha256:33fbf6928f401e0b790151ed2b5290b02545e8775f982485205a066f874aaeaf"}, - {file = "python3_openid-3.2.0-py3-none-any.whl", hash = "sha256:6626f771e0417486701e0b4daff762e7212e820ca5b29fcc0d05f6f8736dfa6b"}, + {file = "pythonaes-1.0.tar.gz", hash = "sha256:71dd31c03500b8cba06f83f17603dcca1dd1c1308fbdaef752f353ed1aaf9f67"}, ] -[package.dependencies] -defusedxml = "*" - -[package.extras] -mysql = ["mysql-connector-python"] -postgresql = ["psycopg2"] - [[package]] name = "pytz" version = "2021.1" description = "World timezone definitions, modern and historical" optional = false python-versions = "*" +groups = ["main"] files = [ {file = "pytz-2021.1-py2.py3-none-any.whl", hash = "sha256:eb10ce3e7736052ed3623d49975ce333bcd712c7bb19a58b9e2089d4057d0798"}, {file = "pytz-2021.1.tar.gz", hash = "sha256:83a4a90894bf38e243cf052c8b58f381bfe9a7a483f6a9cab140bc7f702ac4da"}, @@ -3233,6 +3755,7 @@ version = "6.0.2" description = "YAML parser and emitter for Python" optional = false python-versions = ">=3.8" +groups = ["main", "dev"] files = [ {file = "PyYAML-6.0.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:0a9a2848a5b7feac301353437eb7d5957887edbf81d56e903999a75a3d743086"}, {file = "PyYAML-6.0.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:29717114e51c84ddfba879543fb232a6ed60086602313ca38cce623c1d62cfbf"}, @@ -3291,13 +3814,14 @@ files = [ [[package]] name = "pyzipper" -version = "0.3.5" +version = "0.3.6" description = "AES encryption for zipfile." optional = false python-versions = ">=3.4" +groups = ["main"] files = [ - {file = "pyzipper-0.3.5-py2.py3-none-any.whl", hash = "sha256:e696e9d306427400e23e13a766c7614b64d9fc3316bdc71bbcc8f0070a14f150"}, - {file = "pyzipper-0.3.5.tar.gz", hash = "sha256:6040069654dad040cf8708d4db78ce5829238e2091ad8006a47d97d6ffe275d6"}, + {file = "pyzipper-0.3.6-py2.py3-none-any.whl", hash = "sha256:6d097f465bfa47796b1494e12ea65d1478107d38e13bc56f6e58eedc4f6c1a87"}, + {file = "pyzipper-0.3.6.tar.gz", hash = "sha256:0adca90a00c36a93fbe49bfa8c5add452bfe4ef85a1b8e3638739dd1c7b26bfc"}, ] [package.dependencies] @@ -3305,107 +3829,40 @@ pycryptodomex = "*" [[package]] name = "rat-king-parser" -version = "3.0.0" +version = "4.1.6" description = "A robust, multiprocessing-capable, multi-family RAT config parser/config extractor for AsyncRAT, DcRAT, VenomRAT, QuasarRAT, XWorm, Xeno RAT, and cloned/derivative RAT families." optional = false python-versions = ">=3.10" -files = [] -develop = false +groups = ["main"] +files = [ + {file = "rat_king_parser-4.1.6-py3-none-any.whl", hash = "sha256:b5249842b8ea77b3d647cd6a559f39b51c4a15719a2d9cb0285deeb709c5cca9"}, + {file = "rat_king_parser-4.1.6.tar.gz", hash = "sha256:ea3db0274cb158e797953561624b9c9d513fcf34c5a6be53a41971b8008a079a"}, +] [package.dependencies] -cryptography = "*" dnfile = "*" +pycryptodomex = "*" yara-python = "*" [package.extras] +dev = ["pre-commit"] maco = ["maco", "validators"] -[package.source] -type = "git" -url = "https://github.com/jeFF0Falltrades/rat_king_parser" -reference = "ab849ec8face38c8dac3f803ae5fe7cf8be26583" -resolved_reference = "ab849ec8face38c8dac3f803ae5fe7cf8be26583" - -[[package]] -name = "regex" -version = "2021.7.6" -description = "Alternative regular expression module, to replace re." 
-optional = false -python-versions = "*" -files = [ - {file = "regex-2021.7.6-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:e6a1e5ca97d411a461041d057348e578dc344ecd2add3555aedba3b408c9f874"}, - {file = "regex-2021.7.6-cp36-cp36m-manylinux1_i686.whl", hash = "sha256:6afe6a627888c9a6cfbb603d1d017ce204cebd589d66e0703309b8048c3b0854"}, - {file = "regex-2021.7.6-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:ccb3d2190476d00414aab36cca453e4596e8f70a206e2aa8db3d495a109153d2"}, - {file = "regex-2021.7.6-cp36-cp36m-manylinux2010_i686.whl", hash = "sha256:ed693137a9187052fc46eedfafdcb74e09917166362af4cc4fddc3b31560e93d"}, - {file = "regex-2021.7.6-cp36-cp36m-manylinux2010_x86_64.whl", hash = "sha256:99d8ab206a5270c1002bfcf25c51bf329ca951e5a169f3b43214fdda1f0b5f0d"}, - {file = "regex-2021.7.6-cp36-cp36m-manylinux2014_i686.whl", hash = "sha256:b85ac458354165405c8a84725de7bbd07b00d9f72c31a60ffbf96bb38d3e25fa"}, - {file = "regex-2021.7.6-cp36-cp36m-manylinux2014_x86_64.whl", hash = "sha256:3f5716923d3d0bfb27048242a6e0f14eecdb2e2a7fac47eda1d055288595f222"}, - {file = "regex-2021.7.6-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e5983c19d0beb6af88cb4d47afb92d96751fb3fa1784d8785b1cdf14c6519407"}, - {file = "regex-2021.7.6-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bf1d2d183abc7faa101ebe0b8d04fd19cb9138820abc8589083035c9440b8ca6"}, - {file = "regex-2021.7.6-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:1947e7de155063e1c495c50590229fb98720d4c383af5031bbcb413db33fa1be"}, - {file = "regex-2021.7.6-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:17d8a3f99b18d87ac54a449b836d485cc8c195bb6f5e4379c84c8519045facc9"}, - {file = "regex-2021.7.6-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:d30895ec80cc80358392841add9dde81ea1d54a4949049269115e6b0555d0498"}, - {file = "regex-2021.7.6-cp36-cp36m-win32.whl", hash = "sha256:c92831dac113a6e0ab28bc98f33781383fe294df1a2c3dfd1e850114da35fd5b"}, - {file = "regex-2021.7.6-cp36-cp36m-win_amd64.whl", hash = "sha256:791aa1b300e5b6e5d597c37c346fb4d66422178566bbb426dd87eaae475053fb"}, - {file = "regex-2021.7.6-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:59506c6e8bd9306cd8a41511e32d16d5d1194110b8cfe5a11d102d8b63cf945d"}, - {file = "regex-2021.7.6-cp37-cp37m-manylinux1_i686.whl", hash = "sha256:564a4c8a29435d1f2256ba247a0315325ea63335508ad8ed938a4f14c4116a5d"}, - {file = "regex-2021.7.6-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:59c00bb8dd8775473cbfb967925ad2c3ecc8886b3b2d0c90a8e2707e06c743f0"}, - {file = "regex-2021.7.6-cp37-cp37m-manylinux2010_i686.whl", hash = "sha256:9a854b916806c7e3b40e6616ac9e85d3cdb7649d9e6590653deb5b341a736cec"}, - {file = "regex-2021.7.6-cp37-cp37m-manylinux2010_x86_64.whl", hash = "sha256:db2b7df831c3187a37f3bb80ec095f249fa276dbe09abd3d35297fc250385694"}, - {file = "regex-2021.7.6-cp37-cp37m-manylinux2014_i686.whl", hash = "sha256:173bc44ff95bc1e96398c38f3629d86fa72e539c79900283afa895694229fe6a"}, - {file = "regex-2021.7.6-cp37-cp37m-manylinux2014_x86_64.whl", hash = "sha256:15dddb19823f5147e7517bb12635b3c82e6f2a3a6b696cc3e321522e8b9308ad"}, - {file = "regex-2021.7.6-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2ddeabc7652024803666ea09f32dd1ed40a0579b6fbb2a213eba590683025895"}, - {file = "regex-2021.7.6-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", 
hash = "sha256:8244c681018423a0d1784bc6b9af33bdf55f2ab8acb1f3cd9dd83d90e0813253"}, - {file = "regex-2021.7.6-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:8a4c742089faf0e51469c6a1ad7e3d3d21afae54a16a6cead85209dfe0a1ce65"}, - {file = "regex-2021.7.6-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:914e626dc8e75fe4fc9b7214763f141d9f40165d00dfe680b104fa1b24063bbf"}, - {file = "regex-2021.7.6-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:3fabb19c82ecf39832a3f5060dfea9a7ab270ef156039a1143a29a83a09a62de"}, - {file = "regex-2021.7.6-cp37-cp37m-win32.whl", hash = "sha256:f080248b3e029d052bf74a897b9d74cfb7643537fbde97fe8225a6467fb559b5"}, - {file = "regex-2021.7.6-cp37-cp37m-win_amd64.whl", hash = "sha256:d8bbce0c96462dbceaa7ac4a7dfbbee92745b801b24bce10a98d2f2b1ea9432f"}, - {file = "regex-2021.7.6-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:edd1a68f79b89b0c57339bce297ad5d5ffcc6ae7e1afdb10f1947706ed066c9c"}, - {file = "regex-2021.7.6-cp38-cp38-manylinux1_i686.whl", hash = "sha256:422dec1e7cbb2efbbe50e3f1de36b82906def93ed48da12d1714cabcd993d7f0"}, - {file = "regex-2021.7.6-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:cbe23b323988a04c3e5b0c387fe3f8f363bf06c0680daf775875d979e376bd26"}, - {file = "regex-2021.7.6-cp38-cp38-manylinux2010_i686.whl", hash = "sha256:0eb2c6e0fcec5e0f1d3bcc1133556563222a2ffd2211945d7b1480c1b1a42a6f"}, - {file = "regex-2021.7.6-cp38-cp38-manylinux2010_x86_64.whl", hash = "sha256:1c78780bf46d620ff4fff40728f98b8afd8b8e35c3efd638c7df67be2d5cddbf"}, - {file = "regex-2021.7.6-cp38-cp38-manylinux2014_i686.whl", hash = "sha256:bc84fb254a875a9f66616ed4538542fb7965db6356f3df571d783f7c8d256edd"}, - {file = "regex-2021.7.6-cp38-cp38-manylinux2014_x86_64.whl", hash = "sha256:598c0a79b4b851b922f504f9f39a863d83ebdfff787261a5ed061c21e67dd761"}, - {file = "regex-2021.7.6-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:875c355360d0f8d3d827e462b29ea7682bf52327d500a4f837e934e9e4656068"}, - {file = "regex-2021.7.6-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dfc0957c4a4b91eff5ad036088769e600a25774256cd0e1154378591ce573f08"}, - {file = "regex-2021.7.6-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:efb4af05fa4d2fc29766bf516f1f5098d6b5c3ed846fde980c18bf8646ad3979"}, - {file = "regex-2021.7.6-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7423aca7cc30a6228ccdcf2ea76f12923d652c5c7c6dc1959a0b004e308f39fb"}, - {file = "regex-2021.7.6-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:bb9834c1e77493efd7343b8e38950dee9797d2d6f2d5fd91c008dfaef64684b9"}, - {file = "regex-2021.7.6-cp38-cp38-win32.whl", hash = "sha256:e586f448df2bbc37dfadccdb7ccd125c62b4348cb90c10840d695592aa1b29e0"}, - {file = "regex-2021.7.6-cp38-cp38-win_amd64.whl", hash = "sha256:2fe5e71e11a54e3355fa272137d521a40aace5d937d08b494bed4529964c19c4"}, - {file = "regex-2021.7.6-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:6110bab7eab6566492618540c70edd4d2a18f40ca1d51d704f1d81c52d245026"}, - {file = "regex-2021.7.6-cp39-cp39-manylinux1_i686.whl", hash = "sha256:4f64fc59fd5b10557f6cd0937e1597af022ad9b27d454e182485f1db3008f417"}, - {file = "regex-2021.7.6-cp39-cp39-manylinux1_x86_64.whl", hash = 
"sha256:89e5528803566af4df368df2d6f503c84fbfb8249e6631c7b025fe23e6bd0cde"}, - {file = "regex-2021.7.6-cp39-cp39-manylinux2010_i686.whl", hash = "sha256:2366fe0479ca0e9afa534174faa2beae87847d208d457d200183f28c74eaea59"}, - {file = "regex-2021.7.6-cp39-cp39-manylinux2010_x86_64.whl", hash = "sha256:f9392a4555f3e4cb45310a65b403d86b589adc773898c25a39184b1ba4db8985"}, - {file = "regex-2021.7.6-cp39-cp39-manylinux2014_i686.whl", hash = "sha256:2bceeb491b38225b1fee4517107b8491ba54fba77cf22a12e996d96a3c55613d"}, - {file = "regex-2021.7.6-cp39-cp39-manylinux2014_x86_64.whl", hash = "sha256:f98dc35ab9a749276f1a4a38ab3e0e2ba1662ce710f6530f5b0a6656f1c32b58"}, - {file = "regex-2021.7.6-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:319eb2a8d0888fa6f1d9177705f341bc9455a2c8aca130016e52c7fe8d6c37a3"}, - {file = "regex-2021.7.6-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:598ee917dbe961dcf827217bf2466bb86e4ee5a8559705af57cbabb3489dd37e"}, - {file = "regex-2021.7.6-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:56fc7045a1999a8d9dd1896715bc5c802dfec5b9b60e883d2cbdecb42adedea4"}, - {file = "regex-2021.7.6-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e8363ac90ea63c3dd0872dfdb695f38aff3334bfa5712cffb238bd3ffef300e3"}, - {file = "regex-2021.7.6-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:716a6db91b3641f566531ffcc03ceec00b2447f0db9942b3c6ea5d2827ad6be3"}, - {file = "regex-2021.7.6-cp39-cp39-win32.whl", hash = "sha256:eaf58b9e30e0e546cdc3ac06cf9165a1ca5b3de8221e9df679416ca667972035"}, - {file = "regex-2021.7.6-cp39-cp39-win_amd64.whl", hash = "sha256:4c9c3155fe74269f61e27617529b7f09552fbb12e44b1189cebbdb24294e6e1c"}, - {file = "regex-2021.7.6.tar.gz", hash = "sha256:8394e266005f2d8c6f0bc6780001f7afa3ef81a7a2111fa35058ded6fce79e4d"}, -] - [[package]] name = "requests" -version = "2.32.2" +version = "2.32.4" description = "Python HTTP for Humans." 
optional = false python-versions = ">=3.8" +groups = ["main"] files = [ - {file = "requests-2.32.2-py3-none-any.whl", hash = "sha256:fc06670dd0ed212426dfeb94fc1b983d917c4f9847c863f313c9dfaaffb7c23c"}, - {file = "requests-2.32.2.tar.gz", hash = "sha256:dd951ff5ecf3e3b3aa26b40703ba77495dab41da839ae72ef3c8e5d8e2433289"}, + {file = "requests-2.32.4-py3-none-any.whl", hash = "sha256:27babd3cda2a6d50b30443204ee89830707d396671944c998b5975b031ac2b2c"}, + {file = "requests-2.32.4.tar.gz", hash = "sha256:27d0316682c8a29834d3264820024b62a36942083d52caf2f14c0591336d3422"}, ] [package.dependencies] certifi = ">=2017.4.17" -charset-normalizer = ">=2,<4" +charset_normalizer = ">=2,<4" idna = ">=2.5,<4" PySocks = {version = ">=1.5.6,<1.5.7 || >1.5.7", optional = true, markers = "extra == \"socks\""} urllib3 = ">=1.21.1,<3" @@ -3416,64 +3873,49 @@ use-chardet-on-py3 = ["chardet (>=3.0.2,<6)"] [[package]] name = "requests-file" -version = "1.5.1" +version = "2.1.0" description = "File transport adapter for Requests" optional = false python-versions = "*" +groups = ["main"] files = [ - {file = "requests-file-1.5.1.tar.gz", hash = "sha256:07d74208d3389d01c38ab89ef403af0cfec63957d53a0081d8eca738d0247d8e"}, - {file = "requests_file-1.5.1-py2.py3-none-any.whl", hash = "sha256:dfe5dae75c12481f68ba353183c53a65e6044c923e64c24b2209f6c7570ca953"}, + {file = "requests_file-2.1.0-py2.py3-none-any.whl", hash = "sha256:cf270de5a4c5874e84599fc5778303d496c10ae5e870bfa378818f35d21bda5c"}, + {file = "requests_file-2.1.0.tar.gz", hash = "sha256:0f549a3f3b0699415ac04d167e9cb39bccfb730cb832b4d20be3d9867356e658"}, ] [package.dependencies] requests = ">=1.0.0" -six = "*" - -[[package]] -name = "requests-oauthlib" -version = "2.0.0" -description = "OAuthlib authentication support for Requests." 
-optional = false -python-versions = ">=3.4" -files = [ - {file = "requests-oauthlib-2.0.0.tar.gz", hash = "sha256:b3dffaebd884d8cd778494369603a9e7b58d29111bf6b41bdc2dcd87203af4e9"}, - {file = "requests_oauthlib-2.0.0-py2.py3-none-any.whl", hash = "sha256:7dd8a5c40426b779b0868c404bdef9768deccf22749cde15852df527e6269b36"}, -] - -[package.dependencies] -oauthlib = ">=3.0.0" -requests = ">=2.0.0" - -[package.extras] -rsa = ["oauthlib[signedtoken] (>=3.0.0)"] [[package]] name = "rich" -version = "13.8.1" +version = "13.9.4" description = "Render rich text, tables, progress bars, syntax highlighting, markdown and more to the terminal" optional = false -python-versions = ">=3.7.0" +python-versions = ">=3.8.0" +groups = ["main", "dev"] files = [ - {file = "rich-13.8.1-py3-none-any.whl", hash = "sha256:1760a3c0848469b97b558fc61c85233e3dafb69c7a071b4d60c38099d3cd4c06"}, - {file = "rich-13.8.1.tar.gz", hash = "sha256:8260cda28e3db6bf04d2d1ef4dbc03ba80a824c88b0e7668a0f23126a424844a"}, + {file = "rich-13.9.4-py3-none-any.whl", hash = "sha256:6049d5e6ec054bf2779ab3358186963bac2ea89175919d699e378b99738c2a90"}, + {file = "rich-13.9.4.tar.gz", hash = "sha256:439594978a49a09530cff7ebc4b5c7103ef57baf48d5ea3184f21d9a2befa098"}, ] [package.dependencies] markdown-it-py = ">=2.2.0" pygments = ">=2.13.0,<3.0.0" +typing-extensions = {version = ">=4.0.0,<5.0", markers = "python_version < \"3.11\""} [package.extras] jupyter = ["ipywidgets (>=7.5.1,<9)"] [[package]] name = "ruamel-yaml" -version = "0.18.6" +version = "0.18.10" description = "ruamel.yaml is a YAML parser/emitter that supports roundtrip preservation of comments, seq/map flow style, and map key order" optional = false python-versions = ">=3.7" +groups = ["main"] files = [ - {file = "ruamel.yaml-0.18.6-py3-none-any.whl", hash = "sha256:57b53ba33def16c4f3d807c0ccbc00f8a6081827e81ba2491691b76882d0c636"}, - {file = "ruamel.yaml-0.18.6.tar.gz", hash = "sha256:8b27e6a217e786c6fbe5634d8f3f11bc63e0f80f6a5890f28863d9c45aac311b"}, + {file = "ruamel.yaml-0.18.10-py3-none-any.whl", hash = "sha256:30f22513ab2301b3d2b577adc121c6471f28734d3d9728581245f1e76468b4f1"}, + {file = "ruamel.yaml-0.18.10.tar.gz", hash = "sha256:20c86ab29ac2153f80a428e1254a8adf686d3383df04490514ca3b79a362db58"}, ] [package.dependencies] @@ -3485,98 +3927,94 @@ jinja2 = ["ruamel.yaml.jinja2 (>=0.2)"] [[package]] name = "ruamel-yaml-clib" -version = "0.2.8" +version = "0.2.12" description = "C version of reader, parser and emitter for ruamel.yaml derived from libyaml" optional = false -python-versions = ">=3.6" -files = [ - {file = "ruamel.yaml.clib-0.2.8-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:b42169467c42b692c19cf539c38d4602069d8c1505e97b86387fcf7afb766e1d"}, - {file = "ruamel.yaml.clib-0.2.8-cp310-cp310-macosx_13_0_arm64.whl", hash = "sha256:07238db9cbdf8fc1e9de2489a4f68474e70dffcb32232db7c08fa61ca0c7c462"}, - {file = "ruamel.yaml.clib-0.2.8-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:fff3573c2db359f091e1589c3d7c5fc2f86f5bdb6f24252c2d8e539d4e45f412"}, - {file = "ruamel.yaml.clib-0.2.8-cp310-cp310-manylinux_2_24_aarch64.whl", hash = "sha256:aa2267c6a303eb483de8d02db2871afb5c5fc15618d894300b88958f729ad74f"}, - {file = "ruamel.yaml.clib-0.2.8-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:840f0c7f194986a63d2c2465ca63af8ccbbc90ab1c6001b1978f05119b5e7334"}, - {file = "ruamel.yaml.clib-0.2.8-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:024cfe1fc7c7f4e1aff4a81e718109e13409767e4f871443cbff3dba3578203d"}, - {file = 
"ruamel.yaml.clib-0.2.8-cp310-cp310-win32.whl", hash = "sha256:c69212f63169ec1cfc9bb44723bf2917cbbd8f6191a00ef3410f5a7fe300722d"}, - {file = "ruamel.yaml.clib-0.2.8-cp310-cp310-win_amd64.whl", hash = "sha256:cabddb8d8ead485e255fe80429f833172b4cadf99274db39abc080e068cbcc31"}, - {file = "ruamel.yaml.clib-0.2.8-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:bef08cd86169d9eafb3ccb0a39edb11d8e25f3dae2b28f5c52fd997521133069"}, - {file = "ruamel.yaml.clib-0.2.8-cp311-cp311-macosx_13_0_arm64.whl", hash = "sha256:b16420e621d26fdfa949a8b4b47ade8810c56002f5389970db4ddda51dbff248"}, - {file = "ruamel.yaml.clib-0.2.8-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:25c515e350e5b739842fc3228d662413ef28f295791af5e5110b543cf0b57d9b"}, - {file = "ruamel.yaml.clib-0.2.8-cp311-cp311-manylinux_2_24_aarch64.whl", hash = "sha256:1707814f0d9791df063f8c19bb51b0d1278b8e9a2353abbb676c2f685dee6afe"}, - {file = "ruamel.yaml.clib-0.2.8-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:46d378daaac94f454b3a0e3d8d78cafd78a026b1d71443f4966c696b48a6d899"}, - {file = "ruamel.yaml.clib-0.2.8-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:09b055c05697b38ecacb7ac50bdab2240bfca1a0c4872b0fd309bb07dc9aa3a9"}, - {file = "ruamel.yaml.clib-0.2.8-cp311-cp311-win32.whl", hash = "sha256:53a300ed9cea38cf5a2a9b069058137c2ca1ce658a874b79baceb8f892f915a7"}, - {file = "ruamel.yaml.clib-0.2.8-cp311-cp311-win_amd64.whl", hash = "sha256:c2a72e9109ea74e511e29032f3b670835f8a59bbdc9ce692c5b4ed91ccf1eedb"}, - {file = "ruamel.yaml.clib-0.2.8-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:ebc06178e8821efc9692ea7544aa5644217358490145629914d8020042c24aa1"}, - {file = "ruamel.yaml.clib-0.2.8-cp312-cp312-macosx_13_0_arm64.whl", hash = "sha256:edaef1c1200c4b4cb914583150dcaa3bc30e592e907c01117c08b13a07255ec2"}, - {file = "ruamel.yaml.clib-0.2.8-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:d176b57452ab5b7028ac47e7b3cf644bcfdc8cacfecf7e71759f7f51a59e5c92"}, - {file = "ruamel.yaml.clib-0.2.8-cp312-cp312-manylinux_2_24_aarch64.whl", hash = "sha256:1dc67314e7e1086c9fdf2680b7b6c2be1c0d8e3a8279f2e993ca2a7545fecf62"}, - {file = "ruamel.yaml.clib-0.2.8-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:3213ece08ea033eb159ac52ae052a4899b56ecc124bb80020d9bbceeb50258e9"}, - {file = "ruamel.yaml.clib-0.2.8-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:aab7fd643f71d7946f2ee58cc88c9b7bfc97debd71dcc93e03e2d174628e7e2d"}, - {file = "ruamel.yaml.clib-0.2.8-cp312-cp312-win32.whl", hash = "sha256:5c365d91c88390c8d0a8545df0b5857172824b1c604e867161e6b3d59a827eaa"}, - {file = "ruamel.yaml.clib-0.2.8-cp312-cp312-win_amd64.whl", hash = "sha256:1758ce7d8e1a29d23de54a16ae867abd370f01b5a69e1a3ba75223eaa3ca1a1b"}, - {file = "ruamel.yaml.clib-0.2.8-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:a5aa27bad2bb83670b71683aae140a1f52b0857a2deff56ad3f6c13a017a26ed"}, - {file = "ruamel.yaml.clib-0.2.8-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:c58ecd827313af6864893e7af0a3bb85fd529f862b6adbefe14643947cfe2942"}, - {file = "ruamel.yaml.clib-0.2.8-cp37-cp37m-macosx_12_0_arm64.whl", hash = "sha256:f481f16baec5290e45aebdc2a5168ebc6d35189ae6fea7a58787613a25f6e875"}, - {file = "ruamel.yaml.clib-0.2.8-cp37-cp37m-manylinux_2_24_aarch64.whl", hash = "sha256:77159f5d5b5c14f7c34073862a6b7d34944075d9f93e681638f6d753606c6ce6"}, - {file = "ruamel.yaml.clib-0.2.8-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = 
"sha256:7f67a1ee819dc4562d444bbafb135832b0b909f81cc90f7aa00260968c9ca1b3"}, - {file = "ruamel.yaml.clib-0.2.8-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:4ecbf9c3e19f9562c7fdd462e8d18dd902a47ca046a2e64dba80699f0b6c09b7"}, - {file = "ruamel.yaml.clib-0.2.8-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:87ea5ff66d8064301a154b3933ae406b0863402a799b16e4a1d24d9fbbcbe0d3"}, - {file = "ruamel.yaml.clib-0.2.8-cp37-cp37m-win32.whl", hash = "sha256:75e1ed13e1f9de23c5607fe6bd1aeaae21e523b32d83bb33918245361e9cc51b"}, - {file = "ruamel.yaml.clib-0.2.8-cp37-cp37m-win_amd64.whl", hash = "sha256:3f215c5daf6a9d7bbed4a0a4f760f3113b10e82ff4c5c44bec20a68c8014f675"}, - {file = "ruamel.yaml.clib-0.2.8-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:1b617618914cb00bf5c34d4357c37aa15183fa229b24767259657746c9077615"}, - {file = "ruamel.yaml.clib-0.2.8-cp38-cp38-macosx_12_0_arm64.whl", hash = "sha256:a6a9ffd280b71ad062eae53ac1659ad86a17f59a0fdc7699fd9be40525153337"}, - {file = "ruamel.yaml.clib-0.2.8-cp38-cp38-manylinux_2_24_aarch64.whl", hash = "sha256:305889baa4043a09e5b76f8e2a51d4ffba44259f6b4c72dec8ca56207d9c6fe1"}, - {file = "ruamel.yaml.clib-0.2.8-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:700e4ebb569e59e16a976857c8798aee258dceac7c7d6b50cab63e080058df91"}, - {file = "ruamel.yaml.clib-0.2.8-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:e2b4c44b60eadec492926a7270abb100ef9f72798e18743939bdbf037aab8c28"}, - {file = "ruamel.yaml.clib-0.2.8-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:e79e5db08739731b0ce4850bed599235d601701d5694c36570a99a0c5ca41a9d"}, - {file = "ruamel.yaml.clib-0.2.8-cp38-cp38-win32.whl", hash = "sha256:955eae71ac26c1ab35924203fda6220f84dce57d6d7884f189743e2abe3a9fbe"}, - {file = "ruamel.yaml.clib-0.2.8-cp38-cp38-win_amd64.whl", hash = "sha256:56f4252222c067b4ce51ae12cbac231bce32aee1d33fbfc9d17e5b8d6966c312"}, - {file = "ruamel.yaml.clib-0.2.8-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:03d1162b6d1df1caa3a4bd27aa51ce17c9afc2046c31b0ad60a0a96ec22f8001"}, - {file = "ruamel.yaml.clib-0.2.8-cp39-cp39-macosx_12_0_arm64.whl", hash = "sha256:bba64af9fa9cebe325a62fa398760f5c7206b215201b0ec825005f1b18b9bccf"}, - {file = "ruamel.yaml.clib-0.2.8-cp39-cp39-manylinux_2_24_aarch64.whl", hash = "sha256:a1a45e0bb052edf6a1d3a93baef85319733a888363938e1fc9924cb00c8df24c"}, - {file = "ruamel.yaml.clib-0.2.8-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:da09ad1c359a728e112d60116f626cc9f29730ff3e0e7db72b9a2dbc2e4beed5"}, - {file = "ruamel.yaml.clib-0.2.8-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:184565012b60405d93838167f425713180b949e9d8dd0bbc7b49f074407c5a8b"}, - {file = "ruamel.yaml.clib-0.2.8-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:a75879bacf2c987c003368cf14bed0ffe99e8e85acfa6c0bfffc21a090f16880"}, - {file = "ruamel.yaml.clib-0.2.8-cp39-cp39-win32.whl", hash = "sha256:84b554931e932c46f94ab306913ad7e11bba988104c5cff26d90d03f68258cd5"}, - {file = "ruamel.yaml.clib-0.2.8-cp39-cp39-win_amd64.whl", hash = "sha256:25ac8c08322002b06fa1d49d1646181f0b2c72f5cbc15a85e80b4c30a544bb15"}, - {file = "ruamel.yaml.clib-0.2.8.tar.gz", hash = "sha256:beb2e0404003de9a4cab9753a8805a8fe9320ee6673136ed7f04255fe60bb512"}, +python-versions = ">=3.9" +groups = ["main"] +markers = "platform_python_implementation == \"CPython\" and python_version < \"3.13\"" +files = [ + {file = "ruamel.yaml.clib-0.2.12-cp310-cp310-macosx_13_0_arm64.whl", hash = "sha256:11f891336688faf5156a36293a9c362bdc7c88f03a8a027c2c1d8e0bcde998e5"}, + {file = 
"ruamel.yaml.clib-0.2.12-cp310-cp310-manylinux2014_aarch64.whl", hash = "sha256:a606ef75a60ecf3d924613892cc603b154178ee25abb3055db5062da811fd969"}, + {file = "ruamel.yaml.clib-0.2.12-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fd5415dded15c3822597455bc02bcd66e81ef8b7a48cb71a33628fc9fdde39df"}, + {file = "ruamel.yaml.clib-0.2.12-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f66efbc1caa63c088dead1c4170d148eabc9b80d95fb75b6c92ac0aad2437d76"}, + {file = "ruamel.yaml.clib-0.2.12-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:22353049ba4181685023b25b5b51a574bce33e7f51c759371a7422dcae5402a6"}, + {file = "ruamel.yaml.clib-0.2.12-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:932205970b9f9991b34f55136be327501903f7c66830e9760a8ffb15b07f05cd"}, + {file = "ruamel.yaml.clib-0.2.12-cp310-cp310-win32.whl", hash = "sha256:3eac5a91891ceb88138c113f9db04f3cebdae277f5d44eaa3651a4f573e6a5da"}, + {file = "ruamel.yaml.clib-0.2.12-cp310-cp310-win_amd64.whl", hash = "sha256:ab007f2f5a87bd08ab1499bdf96f3d5c6ad4dcfa364884cb4549aa0154b13a28"}, + {file = "ruamel.yaml.clib-0.2.12-cp311-cp311-macosx_13_0_arm64.whl", hash = "sha256:4a6679521a58256a90b0d89e03992c15144c5f3858f40d7c18886023d7943db6"}, + {file = "ruamel.yaml.clib-0.2.12-cp311-cp311-manylinux2014_aarch64.whl", hash = "sha256:d84318609196d6bd6da0edfa25cedfbabd8dbde5140a0a23af29ad4b8f91fb1e"}, + {file = "ruamel.yaml.clib-0.2.12-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bb43a269eb827806502c7c8efb7ae7e9e9d0573257a46e8e952f4d4caba4f31e"}, + {file = "ruamel.yaml.clib-0.2.12-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:811ea1594b8a0fb466172c384267a4e5e367298af6b228931f273b111f17ef52"}, + {file = "ruamel.yaml.clib-0.2.12-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:cf12567a7b565cbf65d438dec6cfbe2917d3c1bdddfce84a9930b7d35ea59642"}, + {file = "ruamel.yaml.clib-0.2.12-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:7dd5adc8b930b12c8fc5b99e2d535a09889941aa0d0bd06f4749e9a9397c71d2"}, + {file = "ruamel.yaml.clib-0.2.12-cp311-cp311-win32.whl", hash = "sha256:bd0a08f0bab19093c54e18a14a10b4322e1eacc5217056f3c063bd2f59853ce4"}, + {file = "ruamel.yaml.clib-0.2.12-cp311-cp311-win_amd64.whl", hash = "sha256:a274fb2cb086c7a3dea4322ec27f4cb5cc4b6298adb583ab0e211a4682f241eb"}, + {file = "ruamel.yaml.clib-0.2.12-cp312-cp312-macosx_14_0_arm64.whl", hash = "sha256:20b0f8dc160ba83b6dcc0e256846e1a02d044e13f7ea74a3d1d56ede4e48c632"}, + {file = "ruamel.yaml.clib-0.2.12-cp312-cp312-manylinux2014_aarch64.whl", hash = "sha256:943f32bc9dedb3abff9879edc134901df92cfce2c3d5c9348f172f62eb2d771d"}, + {file = "ruamel.yaml.clib-0.2.12-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:95c3829bb364fdb8e0332c9931ecf57d9be3519241323c5274bd82f709cebc0c"}, + {file = "ruamel.yaml.clib-0.2.12-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:749c16fcc4a2b09f28843cda5a193e0283e47454b63ec4b81eaa2242f50e4ccd"}, + {file = "ruamel.yaml.clib-0.2.12-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:bf165fef1f223beae7333275156ab2022cffe255dcc51c27f066b4370da81e31"}, + {file = "ruamel.yaml.clib-0.2.12-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:32621c177bbf782ca5a18ba4d7af0f1082a3f6e517ac2a18b3974d4edf349680"}, + {file = "ruamel.yaml.clib-0.2.12-cp312-cp312-win32.whl", hash = 
"sha256:e8c4ebfcfd57177b572e2040777b8abc537cdef58a2120e830124946aa9b42c5"}, + {file = "ruamel.yaml.clib-0.2.12-cp312-cp312-win_amd64.whl", hash = "sha256:0467c5965282c62203273b838ae77c0d29d7638c8a4e3a1c8bdd3602c10904e4"}, + {file = "ruamel.yaml.clib-0.2.12-cp313-cp313-macosx_14_0_arm64.whl", hash = "sha256:4c8c5d82f50bb53986a5e02d1b3092b03622c02c2eb78e29bec33fd9593bae1a"}, + {file = "ruamel.yaml.clib-0.2.12-cp313-cp313-manylinux2014_aarch64.whl", hash = "sha256:e7e3736715fbf53e9be2a79eb4db68e4ed857017344d697e8b9749444ae57475"}, + {file = "ruamel.yaml.clib-0.2.12-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0b7e75b4965e1d4690e93021adfcecccbca7d61c7bddd8e22406ef2ff20d74ef"}, + {file = "ruamel.yaml.clib-0.2.12-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:96777d473c05ee3e5e3c3e999f5d23c6f4ec5b0c38c098b3a5229085f74236c6"}, + {file = "ruamel.yaml.clib-0.2.12-cp313-cp313-musllinux_1_1_i686.whl", hash = "sha256:3bc2a80e6420ca8b7d3590791e2dfc709c88ab9152c00eeb511c9875ce5778bf"}, + {file = "ruamel.yaml.clib-0.2.12-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:e188d2699864c11c36cdfdada94d781fd5d6b0071cd9c427bceb08ad3d7c70e1"}, + {file = "ruamel.yaml.clib-0.2.12-cp313-cp313-win32.whl", hash = "sha256:6442cb36270b3afb1b4951f060eccca1ce49f3d087ca1ca4563a6eb479cb3de6"}, + {file = "ruamel.yaml.clib-0.2.12-cp313-cp313-win_amd64.whl", hash = "sha256:e5b8daf27af0b90da7bb903a876477a9e6d7270be6146906b276605997c7e9a3"}, + {file = "ruamel.yaml.clib-0.2.12-cp39-cp39-macosx_12_0_arm64.whl", hash = "sha256:fc4b630cd3fa2cf7fce38afa91d7cfe844a9f75d7f0f36393fa98815e911d987"}, + {file = "ruamel.yaml.clib-0.2.12-cp39-cp39-manylinux2014_aarch64.whl", hash = "sha256:bc5f1e1c28e966d61d2519f2a3d451ba989f9ea0f2307de7bc45baa526de9e45"}, + {file = "ruamel.yaml.clib-0.2.12-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5a0e060aace4c24dcaf71023bbd7d42674e3b230f7e7b97317baf1e953e5b519"}, + {file = "ruamel.yaml.clib-0.2.12-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e2f1c3765db32be59d18ab3953f43ab62a761327aafc1594a2a1fbe038b8b8a7"}, + {file = "ruamel.yaml.clib-0.2.12-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:d85252669dc32f98ebcd5d36768f5d4faeaeaa2d655ac0473be490ecdae3c285"}, + {file = "ruamel.yaml.clib-0.2.12-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:e143ada795c341b56de9418c58d028989093ee611aa27ffb9b7f609c00d813ed"}, + {file = "ruamel.yaml.clib-0.2.12-cp39-cp39-win32.whl", hash = "sha256:beffaed67936fbbeffd10966a4eb53c402fafd3d6833770516bf7314bc6ffa12"}, + {file = "ruamel.yaml.clib-0.2.12-cp39-cp39-win_amd64.whl", hash = "sha256:040ae85536960525ea62868b642bdb0c2cc6021c9f9d507810c0c604e66f5a7b"}, + {file = "ruamel.yaml.clib-0.2.12.tar.gz", hash = "sha256:6c8fbb13ec503f99a91901ab46e0b07ae7941cd527393187039aec586fdfd36f"}, ] [[package]] name = "ruff" -version = "0.0.290" -description = "An extremely fast Python linter, written in Rust." +version = "0.9.4" +description = "An extremely fast Python linter and code formatter, written in Rust." 
optional = false python-versions = ">=3.7" -files = [ - {file = "ruff-0.0.290-py3-none-macosx_10_7_x86_64.whl", hash = "sha256:0e2b09ac4213b11a3520221083866a5816616f3ae9da123037b8ab275066fbac"}, - {file = "ruff-0.0.290-py3-none-macosx_10_9_x86_64.macosx_11_0_arm64.macosx_10_9_universal2.whl", hash = "sha256:4ca6285aa77b3d966be32c9a3cd531655b3d4a0171e1f9bf26d66d0372186767"}, - {file = "ruff-0.0.290-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:35e3550d1d9f2157b0fcc77670f7bb59154f223bff281766e61bdd1dd854e0c5"}, - {file = "ruff-0.0.290-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:d748c8bd97874f5751aed73e8dde379ce32d16338123d07c18b25c9a2796574a"}, - {file = "ruff-0.0.290-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:982af5ec67cecd099e2ef5e238650407fb40d56304910102d054c109f390bf3c"}, - {file = "ruff-0.0.290-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:bbd37352cea4ee007c48a44c9bc45a21f7ba70a57edfe46842e346651e2b995a"}, - {file = "ruff-0.0.290-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1d9be6351b7889462912e0b8185a260c0219c35dfd920fb490c7f256f1d8313e"}, - {file = "ruff-0.0.290-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:75cdc7fe32dcf33b7cec306707552dda54632ac29402775b9e212a3c16aad5e6"}, - {file = "ruff-0.0.290-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:eb07f37f7aecdbbc91d759c0c09870ce0fb3eed4025eebedf9c4b98c69abd527"}, - {file = "ruff-0.0.290-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:2ab41bc0ba359d3f715fc7b705bdeef19c0461351306b70a4e247f836b9350ed"}, - {file = "ruff-0.0.290-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:150bf8050214cea5b990945b66433bf9a5e0cef395c9bc0f50569e7de7540c86"}, - {file = "ruff-0.0.290-py3-none-musllinux_1_2_i686.whl", hash = "sha256:75386ebc15fe5467248c039f5bf6a0cfe7bfc619ffbb8cd62406cd8811815fca"}, - {file = "ruff-0.0.290-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:ac93eadf07bc4ab4c48d8bb4e427bf0f58f3a9c578862eb85d99d704669f5da0"}, - {file = "ruff-0.0.290-py3-none-win32.whl", hash = "sha256:461fbd1fb9ca806d4e3d5c745a30e185f7cf3ca77293cdc17abb2f2a990ad3f7"}, - {file = "ruff-0.0.290-py3-none-win_amd64.whl", hash = "sha256:f1f49f5ec967fd5778813780b12a5650ab0ebcb9ddcca28d642c689b36920796"}, - {file = "ruff-0.0.290-py3-none-win_arm64.whl", hash = "sha256:ae5a92dfbdf1f0c689433c223f8dac0782c2b2584bd502dfdbc76475669f1ba1"}, - {file = "ruff-0.0.290.tar.gz", hash = "sha256:949fecbc5467bb11b8db810a7fa53c7e02633856ee6bd1302b2f43adcd71b88d"}, +groups = ["main"] +files = [ + {file = "ruff-0.9.4-py3-none-linux_armv6l.whl", hash = "sha256:64e73d25b954f71ff100bb70f39f1ee09e880728efb4250c632ceed4e4cdf706"}, + {file = "ruff-0.9.4-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:6ce6743ed64d9afab4fafeaea70d3631b4d4b28b592db21a5c2d1f0ef52934bf"}, + {file = "ruff-0.9.4-py3-none-macosx_11_0_arm64.whl", hash = "sha256:54499fb08408e32b57360f6f9de7157a5fec24ad79cb3f42ef2c3f3f728dfe2b"}, + {file = "ruff-0.9.4-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:37c892540108314a6f01f105040b5106aeb829fa5fb0561d2dcaf71485021137"}, + {file = "ruff-0.9.4-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:de9edf2ce4b9ddf43fd93e20ef635a900e25f622f87ed6e3047a664d0e8f810e"}, + {file = "ruff-0.9.4-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:87c90c32357c74f11deb7fbb065126d91771b207bf9bfaaee01277ca59b574ec"}, + {file = 
"ruff-0.9.4-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:56acd6c694da3695a7461cc55775f3a409c3815ac467279dfa126061d84b314b"}, + {file = "ruff-0.9.4-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e0c93e7d47ed951b9394cf352d6695b31498e68fd5782d6cbc282425655f687a"}, + {file = "ruff-0.9.4-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1d4c8772670aecf037d1bf7a07c39106574d143b26cfe5ed1787d2f31e800214"}, + {file = "ruff-0.9.4-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bfc5f1d7afeda8d5d37660eeca6d389b142d7f2b5a1ab659d9214ebd0e025231"}, + {file = "ruff-0.9.4-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:faa935fc00ae854d8b638c16a5f1ce881bc3f67446957dd6f2af440a5fc8526b"}, + {file = "ruff-0.9.4-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:a6c634fc6f5a0ceae1ab3e13c58183978185d131a29c425e4eaa9f40afe1e6d6"}, + {file = "ruff-0.9.4-py3-none-musllinux_1_2_i686.whl", hash = "sha256:433dedf6ddfdec7f1ac7575ec1eb9844fa60c4c8c2f8887a070672b8d353d34c"}, + {file = "ruff-0.9.4-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:d612dbd0f3a919a8cc1d12037168bfa536862066808960e0cc901404b77968f0"}, + {file = "ruff-0.9.4-py3-none-win32.whl", hash = "sha256:db1192ddda2200671f9ef61d9597fcef89d934f5d1705e571a93a67fb13a4402"}, + {file = "ruff-0.9.4-py3-none-win_amd64.whl", hash = "sha256:05bebf4cdbe3ef75430d26c375773978950bbf4ee3c95ccb5448940dc092408e"}, + {file = "ruff-0.9.4-py3-none-win_arm64.whl", hash = "sha256:585792f1e81509e38ac5123492f8875fbc36f3ede8185af0a26df348e5154f41"}, + {file = "ruff-0.9.4.tar.gz", hash = "sha256:6907ee3529244bb0ed066683e075f09285b38dd5b4039370df6ff06041ca19e7"}, ] [[package]] name = "service-identity" -version = "24.1.0" +version = "24.2.0" description = "Service identity verification for pyOpenSSL & cryptography." 
optional = false python-versions = ">=3.8" +groups = ["main"] files = [ - {file = "service_identity-24.1.0-py3-none-any.whl", hash = "sha256:a28caf8130c8a5c1c7a6f5293faaf239bbfb7751e4862436920ee6f2616f568a"}, - {file = "service_identity-24.1.0.tar.gz", hash = "sha256:6829c9d62fb832c2e1c435629b0a8c476e1929881f28bee4d20bc24161009221"}, + {file = "service_identity-24.2.0-py3-none-any.whl", hash = "sha256:6b047fbd8a84fd0bb0d55ebce4031e400562b9196e1e0d3e0fe2b8a59f6d4a85"}, + {file = "service_identity-24.2.0.tar.gz", hash = "sha256:b8683ba13f0d39c6cd5d625d2c5f65421d6d707b013b375c355751557cbe8e09"}, ] [package.dependencies] @@ -3586,7 +4024,7 @@ pyasn1 = "*" pyasn1-modules = "*" [package.extras] -dev = ["pyopenssl", "service-identity[idna,mypy,tests]"] +dev = ["coverage[toml] (>=5.0.2)", "idna", "mypy", "pyopenssl", "pytest", "types-pyopenssl"] docs = ["furo", "myst-parser", "pyopenssl", "sphinx", "sphinx-notfound-page"] idna = ["idna"] mypy = ["idna", "mypy", "types-pyopenssl"] @@ -3598,6 +4036,7 @@ version = "1.3.2" description = "A Python module to customize the process title" optional = false python-versions = ">=3.7" +groups = ["main"] files = [ {file = "setproctitle-1.3.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:288943dec88e178bb2fd868adf491197cc0fc8b6810416b1c6775e686bab87fe"}, {file = "setproctitle-1.3.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:630f6fe5e24a619ccf970c78e084319ee8be5be253ecc9b5b216b0f474f5ef18"}, @@ -3678,33 +4117,40 @@ test = ["pytest"] [[package]] name = "setuptools" -version = "70.0.0" +version = "78.1.1" description = "Easily download, build, install, upgrade, and uninstall Python packages" optional = false -python-versions = ">=3.8" +python-versions = ">=3.9" +groups = ["main"] files = [ - {file = "setuptools-70.0.0-py3-none-any.whl", hash = "sha256:54faa7f2e8d2d11bcd2c07bed282eef1046b5c080d1c32add737d7b5817b1ad4"}, - {file = "setuptools-70.0.0.tar.gz", hash = "sha256:f211a66637b8fa059bb28183da127d4e86396c991a942b028c6650d4319c3fd0"}, + {file = "setuptools-78.1.1-py3-none-any.whl", hash = "sha256:c3a9c4211ff4c309edb8b8c4f1cbfa7ae324c4ba9f91ff254e3d305b9fd54561"}, + {file = "setuptools-78.1.1.tar.gz", hash = "sha256:fcc17fd9cd898242f6b4adfaca46137a9edef687f43e6f78469692a5e70d851d"}, ] [package.extras] -docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "pygments-github-lexers (==0.0.5)", "pyproject-hooks (!=1.1)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-favicon", "sphinx-inline-tabs", "sphinx-lint", "sphinx-notfound-page (>=1,<2)", "sphinx-reredirects", "sphinxcontrib-towncrier"] -testing = ["build[virtualenv] (>=1.0.3)", "filelock (>=3.4.0)", "importlib-metadata", "ini2toml[lite] (>=0.14)", "jaraco.develop (>=7.21)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "mypy (==1.9)", "packaging (>=23.2)", "pip (>=19.1)", "pyproject-hooks (!=1.1)", "pytest (>=6,!=8.1.1)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-home (>=0.5)", "pytest-mypy", "pytest-perf", "pytest-ruff (>=0.2.1)", "pytest-subprocess", "pytest-timeout", "pytest-xdist (>=3)", "tomli", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel"] +check = ["pytest-checkdocs (>=2.4)", "pytest-ruff (>=0.2.1)", "ruff (>=0.8.0)"] +core = ["importlib_metadata (>=6)", "jaraco.functools (>=4)", "jaraco.text (>=3.7)", "more_itertools", "more_itertools (>=8.8)", "packaging (>=24.2)", "platformdirs (>=4.2.2)", "tomli (>=2.0.1)", "wheel (>=0.43.0)"] +cover = ["pytest-cov"] +doc = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift 
(>=1.4)", "pygments-github-lexers (==0.0.5)", "pyproject-hooks (!=1.1)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-favicon", "sphinx-inline-tabs", "sphinx-lint", "sphinx-notfound-page (>=1,<2)", "sphinx-reredirects", "sphinxcontrib-towncrier", "towncrier (<24.7)"] +enabler = ["pytest-enabler (>=2.2)"] +test = ["build[virtualenv] (>=1.0.3)", "filelock (>=3.4.0)", "ini2toml[lite] (>=0.14)", "jaraco.develop (>=7.21)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.7.2)", "jaraco.test (>=5.5)", "packaging (>=24.2)", "pip (>=19.1)", "pyproject-hooks (!=1.1)", "pytest (>=6,!=8.1.*)", "pytest-home (>=0.5)", "pytest-perf", "pytest-subprocess", "pytest-timeout", "pytest-xdist (>=3)", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel (>=0.44.0)"] +type = ["importlib_metadata (>=7.0.2)", "jaraco.develop (>=7.21)", "mypy (==1.14.*)", "pytest-mypy"] [[package]] name = "sflock2" -version = "0.3.66" +version = "0.3.76" description = "Sample staging and detonation utility" optional = false -python-versions = "<4.0,>=3.8" +python-versions = "<4.0,>=3.10" +groups = ["main"] files = [ - {file = "sflock2-0.3.66-py3-none-any.whl", hash = "sha256:3dc74a75491fe2502d70bbc3778b9a6da5cbeb9da479997eef40106208ee639f"}, - {file = "sflock2-0.3.66.tar.gz", hash = "sha256:fb4f8c52df5dfbbb8abde6fb46addf06d6456262ac3e8ad9c332354914381821"}, + {file = "sflock2-0.3.76-py3-none-any.whl", hash = "sha256:3d989d142fc49ebd049f75eb8d402451fcd20148cf27aaa20c540ac95a9c81ff"}, + {file = "sflock2-0.3.76.tar.gz", hash = "sha256:eed75b32adf3c82a60d9339fda63a151355f9be7639d7d583de8f43ea6604e4c"}, ] [package.dependencies] click = "*" -cryptography = ">=2.1" +cryptography = ">=44.0.0" olefile = ">=0.43" pefile = "*" python-magic = {version = ">=0.4.13", optional = true, markers = "sys_platform == \"linux\" and extra == \"linux\""} @@ -3712,20 +4158,20 @@ unicorn = {version = ">=2.0.0", optional = true, markers = "extra == \"shellcode yara-python = {version = ">=4.1.0", optional = true, markers = "extra == \"shellcode\""} [package.extras] -dev = ["mock (>=5.1.0)", "pytest (>=8.2.2)"] linux = ["python-magic (>=0.4.13)"] shellcode = ["unicorn (>=2.0.0)", "yara-python (>=4.1.0)"] -win32 = ["python-magic-bin (>=0.4.14)"] +windows = ["python-magic-bin (>=0.4.14)"] [[package]] name = "six" -version = "1.16.0" +version = "1.17.0" description = "Python 2 and 3 compatibility utilities" optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*" +python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,>=2.7" +groups = ["main", "dev"] files = [ - {file = "six-1.16.0-py2.py3-none-any.whl", hash = "sha256:8abb2f1d86890a2dfb989f9a77cfcfd3e47c2a354b01111771326f8aa26e0254"}, - {file = "six-1.16.0.tar.gz", hash = "sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926"}, + {file = "six-1.17.0-py2.py3-none-any.whl", hash = "sha256:4721f391ed90541fddacab5acf947aa0d3dc7d27b2e1e8eda2be8970586c3274"}, + {file = "six-1.17.0.tar.gz", hash = "sha256:ff70335d468e7eb6ec65b95b99d3a2836546063f63acc5171de367e834932a81"}, ] [[package]] @@ -3734,6 +4180,7 @@ version = "1.3.1" description = "Sniff out which async library your code is running under" optional = false python-versions = ">=3.7" +groups = ["main"] files = [ {file = "sniffio-1.3.1-py3-none-any.whl", hash = "sha256:2f6da418d1f1e0fddd844478f41680e794e6051915791a034ff65e5f100525a2"}, {file = "sniffio-1.3.1.tar.gz", hash = "sha256:f4324edc670a0f49750a81b895f35c3adb843cca46f0530f79fc1babb23789dc"}, @@ -3745,6 +4192,7 @@ version = "2.4.0" description = "Sorted Containers -- Sorted List, Sorted Dict, 
Sorted Set" optional = false python-versions = "*" +groups = ["main"] files = [ {file = "sortedcontainers-2.4.0-py2.py3-none-any.whl", hash = "sha256:a163dcaede0f1c021485e957a39245190e74249897e2ae4b2aa38595db237ee0"}, {file = "sortedcontainers-2.4.0.tar.gz", hash = "sha256:25caa5a06cc30b6b83d11423433f65d1f9d76c4c6a0c90e3379eaa43b9bfdb88"}, @@ -3756,6 +4204,7 @@ version = "2.6" description = "A modern CSS selector implementation for Beautiful Soup." optional = false python-versions = ">=3.8" +groups = ["main"] files = [ {file = "soupsieve-2.6-py3-none-any.whl", hash = "sha256:e72c4ff06e4fb6e4b5a9f0f55fe6e81514581fca1515028625d0f299c602ccc9"}, {file = "soupsieve-2.6.tar.gz", hash = "sha256:e2e68417777af359ec65daac1057404a3c8a5455bb8abc36f1a9866ab1a51abb"}, @@ -3767,6 +4216,7 @@ version = "1.4.50" description = "Database Abstraction Library" optional = false python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,>=2.7" +groups = ["main"] files = [ {file = "SQLAlchemy-1.4.50-cp310-cp310-macosx_11_0_x86_64.whl", hash = "sha256:54138aa80d2dedd364f4e8220eef284c364d3270aaef621570aa2bd99902e2e8"}, {file = "SQLAlchemy-1.4.50-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d00665725063692c42badfd521d0c4392e83c6c826795d38eb88fb108e5660e5"}, @@ -3846,6 +4296,7 @@ version = "0.41.1" description = "Various utility functions for SQLAlchemy." optional = false python-versions = ">=3.6" +groups = ["main"] files = [ {file = "SQLAlchemy-Utils-0.41.1.tar.gz", hash = "sha256:a2181bff01eeb84479e38571d2c0718eb52042f9afd8c194d0d02877e84b7d74"}, {file = "SQLAlchemy_Utils-0.41.1-py3-none-any.whl", hash = "sha256:6c96b0768ea3f15c0dc56b363d386138c562752b84f647fb8d31a2223aaab801"}, @@ -3870,13 +4321,14 @@ url = ["furl (>=0.4.1)"] [[package]] name = "sqlparse" -version = "0.5.1" +version = "0.5.3" description = "A non-validating SQL parser." 
optional = false python-versions = ">=3.8" +groups = ["main"] files = [ - {file = "sqlparse-0.5.1-py3-none-any.whl", hash = "sha256:773dcbf9a5ab44a090f3441e2180efe2560220203dc2f8c0b0fa141e18b505e4"}, - {file = "sqlparse-0.5.1.tar.gz", hash = "sha256:bb6b4df465655ef332548e24f08e205afc81b9ab86cb1c45657a7ff173a3a00e"}, + {file = "sqlparse-0.5.3-py3-none-any.whl", hash = "sha256:cf2196ed3418f3ba5de6af7e82c694a9fbdbfecccdfc72e281548517081f16ca"}, + {file = "sqlparse-0.5.3.tar.gz", hash = "sha256:09f67787f56a0b16ecdbde1bfc7f5d9c3371ca683cfeaa8e6ff60b4807ec9272"}, ] [package.extras] @@ -3884,25 +4336,42 @@ dev = ["build", "hatch"] doc = ["sphinx"] [[package]] -name = "tabulate" -version = "0.9.0" -description = "Pretty-print tabular data" +name = "stpyv8" +version = "13.1.201.22" +description = "Python Wrapper for Google V8 Engine" optional = false -python-versions = ">=3.7" -files = [ - {file = "tabulate-0.9.0-py3-none-any.whl", hash = "sha256:024ca478df22e9340661486f85298cff5f6dcdba14f3813e8830015b9ed1948f"}, - {file = "tabulate-0.9.0.tar.gz", hash = "sha256:0095b12bf5966de529c0feb1fa08671671b3368eec77d7ef7ab114be2c068b3c"}, +python-versions = "*" +groups = ["main"] +files = [ + {file = "stpyv8-13.1.201.22-cp310-cp310-macosx_13_0_x86_64.whl", hash = "sha256:2ce8e85a4a88e80ae561654be1b55ff578c4f1a27d518ac35e6d87b93d91b096"}, + {file = "stpyv8-13.1.201.22-cp310-cp310-macosx_14_0_arm64.whl", hash = "sha256:fe25c004b12f2616ae877e0469f95aa2164e62e6584d253b07f3c9f9ba69a888"}, + {file = "stpyv8-13.1.201.22-cp310-cp310-manylinux_2_31_x86_64.whl", hash = "sha256:90568ff08dfaf0ebd3bf1c79f7d21db06d82eada412a6e914b995bead7c78666"}, + {file = "stpyv8-13.1.201.22-cp310-cp310-win_amd64.whl", hash = "sha256:8019f19b29621ccde85125d86f60f5814175b17670f5949d2671cf22cf453ea6"}, + {file = "stpyv8-13.1.201.22-cp311-cp311-macosx_13_0_x86_64.whl", hash = "sha256:b2a660a331e82fa89d5938ec8418743ebfb544733269f24cd8461a18472701c2"}, + {file = "stpyv8-13.1.201.22-cp311-cp311-macosx_14_0_arm64.whl", hash = "sha256:4d737935167c52ed72e5a78264d9adfeaf089bf54693b88f12cbdb439a36a102"}, + {file = "stpyv8-13.1.201.22-cp311-cp311-manylinux_2_31_x86_64.whl", hash = "sha256:da6d8f2945bd057057c64bc93ea3c064cc848b75f55d6d651120ee5d115e0761"}, + {file = "stpyv8-13.1.201.22-cp311-cp311-win_amd64.whl", hash = "sha256:b9d9499ed2007cc097a5d2ae0cb18226b2bf3ca429301811b2e12a787a8f137e"}, + {file = "stpyv8-13.1.201.22-cp312-cp312-macosx_13_0_x86_64.whl", hash = "sha256:fc2b956bfaf23531c490845edb7d80fc998fe8aee1c7cf1337317dae01169307"}, + {file = "stpyv8-13.1.201.22-cp312-cp312-macosx_14_0_arm64.whl", hash = "sha256:6dc40b656cea7fe541f6bdbad83b6b4ed51e5ead985b54c139319a731253a55e"}, + {file = "stpyv8-13.1.201.22-cp312-cp312-manylinux_2_31_x86_64.whl", hash = "sha256:c24aa4215c64db7d67fc6c42c0d7731cabcf300596bf9c826ae74f426fe3b771"}, + {file = "stpyv8-13.1.201.22-cp312-cp312-win_amd64.whl", hash = "sha256:c4292843c8133fc99833aceef25925a97edf01031e186335582deb077b99d2bf"}, + {file = "stpyv8-13.1.201.22-cp313-cp313-macosx_13_0_x86_64.whl", hash = "sha256:b53df6114a88698ee6f3820cf46476e83ee09c9a67dd9f7cf58ca6a2928238b0"}, + {file = "stpyv8-13.1.201.22-cp313-cp313-macosx_14_0_arm64.whl", hash = "sha256:6cb5e8751aee2487cc3b5f21eac6d459041a7180a779941b64db5736e27276ee"}, + {file = "stpyv8-13.1.201.22-cp313-cp313-manylinux_2_31_x86_64.whl", hash = "sha256:834b9761bb7f49da8b887847c7647495a2cf6c45f69e2124ae0e3f024493bc15"}, + {file = "stpyv8-13.1.201.22-cp313-cp313-win_amd64.whl", hash = 
"sha256:c8189b8c4d87579f353705441757f11e2f2260578b82000925dadf0ed59a47e3"}, + {file = "stpyv8-13.1.201.22-cp39-cp39-macosx_13_0_x86_64.whl", hash = "sha256:05c3ecaaf2dd8dbe06bdb70f3192b7e6161337ee04e6830a57b58eb4be7c70bd"}, + {file = "stpyv8-13.1.201.22-cp39-cp39-manylinux_2_31_x86_64.whl", hash = "sha256:bf51578ec84dba6519d75ca81a154a070910e638da0ec384f4bf6d535f9b5218"}, + {file = "stpyv8-13.1.201.22-cp39-cp39-win_amd64.whl", hash = "sha256:d00a220268d63d68490682b571d082d5b197de1f19d6f478a88357c61da94f7a"}, + {file = "stpyv8-13.1.201.22.tar.gz", hash = "sha256:2bdbc7307dbf86cfc4bd0a11f0ae1f1c6f91d1b3366a7b409f9fc5d01f69441d"}, ] -[package.extras] -widechars = ["wcwidth"] - [[package]] name = "tenacity" version = "8.1.0" description = "Retry code until it succeeds" optional = false python-versions = ">=3.6" +groups = ["dev"] files = [ {file = "tenacity-8.1.0-py3-none-any.whl", hash = "sha256:35525cd47f82830069f0d6b73f7eb83bc5b73ee2fff0437952cedf98b27653ac"}, {file = "tenacity-8.1.0.tar.gz", hash = "sha256:e48c437fdf9340f5666b92cd7990e96bc5fc955e1298baf4a907e3972067a445"}, @@ -3911,29 +4380,16 @@ files = [ [package.extras] doc = ["reno", "sphinx", "tornado (>=4.5)"] -[[package]] -name = "termcolor" -version = "2.4.0" -description = "ANSI color formatting for output in terminal" -optional = false -python-versions = ">=3.8" -files = [ - {file = "termcolor-2.4.0-py3-none-any.whl", hash = "sha256:9297c0df9c99445c2412e832e882a7884038a25617c60cea2ad69488d4040d63"}, - {file = "termcolor-2.4.0.tar.gz", hash = "sha256:aab9e56047c8ac41ed798fa36d892a37aca6b3e9159f3e0c24bc64a9b3ac7b7a"}, -] - -[package.extras] -tests = ["pytest", "pytest-cov"] - [[package]] name = "tldextract" -version = "3.5.0" +version = "5.1.3" description = "Accurately separates a URL's subdomain, domain, and public suffix, using the Public Suffix List (PSL). By default, this includes the public ICANN TLDs and their exceptions. You can optionally support the Public Suffix List's private domains as well." 
optional = false -python-versions = ">=3.7" +python-versions = ">=3.9" +groups = ["main"] files = [ - {file = "tldextract-3.5.0-py3-none-any.whl", hash = "sha256:2cb271ca8d06ea1630a1361b58edad14e0cf81f34ce3c90b052854528fe2a281"}, - {file = "tldextract-3.5.0.tar.gz", hash = "sha256:4df1c65b95be61d59428e8611e955e54e6f1d4483d3e8d5733d3a9062155e910"}, + {file = "tldextract-5.1.3-py3-none-any.whl", hash = "sha256:78de310cc2ca018692de5ddf320f9d6bd7c5cf857d0fd4f2175f0cdf4440ea75"}, + {file = "tldextract-5.1.3.tar.gz", hash = "sha256:d43c7284c23f5dc8a42fd0fee2abede2ff74cc622674e4cb07f514ab3330c338"}, ] [package.dependencies] @@ -3942,51 +4398,68 @@ idna = "*" requests = ">=2.1.0" requests-file = ">=1.4" +[package.extras] +release = ["build", "twine"] +testing = ["mypy", "pytest", "pytest-gitignore", "pytest-mock", "responses", "ruff", "syrupy", "tox", "tox-uv", "types-filelock", "types-requests"] + [[package]] name = "tomli" -version = "2.0.1" +version = "2.2.1" description = "A lil' TOML parser" optional = false -python-versions = ">=3.7" -files = [ - {file = "tomli-2.0.1-py3-none-any.whl", hash = "sha256:939de3e7a6161af0c887ef91b7d41a53e7c5a1ca976325f429cb46ea9bc30ecc"}, - {file = "tomli-2.0.1.tar.gz", hash = "sha256:de526c12914f0c550d15924c62d72abc48d6fe7364aa87328337a31007fe8a4f"}, -] - -[[package]] -name = "tqdm" -version = "4.66.5" -description = "Fast, Extensible Progress Meter" -optional = false -python-versions = ">=3.7" -files = [ - {file = "tqdm-4.66.5-py3-none-any.whl", hash = "sha256:90279a3770753eafc9194a0364852159802111925aa30eb3f9d85b0e805ac7cd"}, - {file = "tqdm-4.66.5.tar.gz", hash = "sha256:e1020aef2e5096702d8a025ac7d16b1577279c9d63f8375b63083e9a5f0fcbad"}, -] - -[package.dependencies] -colorama = {version = "*", markers = "platform_system == \"Windows\""} - -[package.extras] -dev = ["pytest (>=6)", "pytest-cov", "pytest-timeout", "pytest-xdist"] -notebook = ["ipywidgets (>=6)"] -slack = ["slack-sdk"] -telegram = ["requests"] +python-versions = ">=3.8" +groups = ["main", "dev"] +files = [ + {file = "tomli-2.2.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:678e4fa69e4575eb77d103de3df8a895e1591b48e740211bd1067378c69e8249"}, + {file = "tomli-2.2.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:023aa114dd824ade0100497eb2318602af309e5a55595f76b626d6d9f3b7b0a6"}, + {file = "tomli-2.2.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ece47d672db52ac607a3d9599a9d48dcb2f2f735c6c2d1f34130085bb12b112a"}, + {file = "tomli-2.2.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6972ca9c9cc9f0acaa56a8ca1ff51e7af152a9f87fb64623e31d5c83700080ee"}, + {file = "tomli-2.2.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c954d2250168d28797dd4e3ac5cf812a406cd5a92674ee4c8f123c889786aa8e"}, + {file = "tomli-2.2.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:8dd28b3e155b80f4d54beb40a441d366adcfe740969820caf156c019fb5c7ec4"}, + {file = "tomli-2.2.1-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:e59e304978767a54663af13c07b3d1af22ddee3bb2fb0618ca1593e4f593a106"}, + {file = "tomli-2.2.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:33580bccab0338d00994d7f16f4c4ec25b776af3ffaac1ed74e0b3fc95e885a8"}, + {file = "tomli-2.2.1-cp311-cp311-win32.whl", hash = "sha256:465af0e0875402f1d226519c9904f37254b3045fc5084697cefb9bdde1ff99ff"}, + {file = "tomli-2.2.1-cp311-cp311-win_amd64.whl", hash = "sha256:2d0f2fdd22b02c6d81637a3c95f8cd77f995846af7414c5c4b8d0545afa1bc4b"}, + 
{file = "tomli-2.2.1-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:4a8f6e44de52d5e6c657c9fe83b562f5f4256d8ebbfe4ff922c495620a7f6cea"}, + {file = "tomli-2.2.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:8d57ca8095a641b8237d5b079147646153d22552f1c637fd3ba7f4b0b29167a8"}, + {file = "tomli-2.2.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4e340144ad7ae1533cb897d406382b4b6fede8890a03738ff1683af800d54192"}, + {file = "tomli-2.2.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:db2b95f9de79181805df90bedc5a5ab4c165e6ec3fe99f970d0e302f384ad222"}, + {file = "tomli-2.2.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:40741994320b232529c802f8bc86da4e1aa9f413db394617b9a256ae0f9a7f77"}, + {file = "tomli-2.2.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:400e720fe168c0f8521520190686ef8ef033fb19fc493da09779e592861b78c6"}, + {file = "tomli-2.2.1-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:02abe224de6ae62c19f090f68da4e27b10af2b93213d36cf44e6e1c5abd19fdd"}, + {file = "tomli-2.2.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:b82ebccc8c8a36f2094e969560a1b836758481f3dc360ce9a3277c65f374285e"}, + {file = "tomli-2.2.1-cp312-cp312-win32.whl", hash = "sha256:889f80ef92701b9dbb224e49ec87c645ce5df3fa2cc548664eb8a25e03127a98"}, + {file = "tomli-2.2.1-cp312-cp312-win_amd64.whl", hash = "sha256:7fc04e92e1d624a4a63c76474610238576942d6b8950a2d7f908a340494e67e4"}, + {file = "tomli-2.2.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:f4039b9cbc3048b2416cc57ab3bda989a6fcf9b36cf8937f01a6e731b64f80d7"}, + {file = "tomli-2.2.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:286f0ca2ffeeb5b9bd4fcc8d6c330534323ec51b2f52da063b11c502da16f30c"}, + {file = "tomli-2.2.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a92ef1a44547e894e2a17d24e7557a5e85a9e1d0048b0b5e7541f76c5032cb13"}, + {file = "tomli-2.2.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9316dc65bed1684c9a98ee68759ceaed29d229e985297003e494aa825ebb0281"}, + {file = "tomli-2.2.1-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e85e99945e688e32d5a35c1ff38ed0b3f41f43fad8df0bdf79f72b2ba7bc5272"}, + {file = "tomli-2.2.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:ac065718db92ca818f8d6141b5f66369833d4a80a9d74435a268c52bdfa73140"}, + {file = "tomli-2.2.1-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:d920f33822747519673ee656a4b6ac33e382eca9d331c87770faa3eef562aeb2"}, + {file = "tomli-2.2.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:a198f10c4d1b1375d7687bc25294306e551bf1abfa4eace6650070a5c1ae2744"}, + {file = "tomli-2.2.1-cp313-cp313-win32.whl", hash = "sha256:d3f5614314d758649ab2ab3a62d4f2004c825922f9e370b29416484086b264ec"}, + {file = "tomli-2.2.1-cp313-cp313-win_amd64.whl", hash = "sha256:a38aa0308e754b0e3c67e344754dff64999ff9b513e691d0e786265c93583c69"}, + {file = "tomli-2.2.1-py3-none-any.whl", hash = "sha256:cb55c73c5f4408779d0cf3eef9f762b9c9f147a77de7b258bef0a5628adc85cc"}, + {file = "tomli-2.2.1.tar.gz", hash = "sha256:cd45e1dc79c835ce60f7404ec8119f2eb06d38b1deba146f07ced3bbc44505ff"}, +] +markers = {main = "python_version < \"3.11\"", dev = "python_full_version <= \"3.11.0a6\""} [[package]] name = "twisted" -version = "24.7.0" +version = "24.11.0" description = "An asynchronous networking framework written in Python" optional = false python-versions = 
">=3.8.0" +groups = ["main"] files = [ - {file = "twisted-24.7.0-py3-none-any.whl", hash = "sha256:734832ef98108136e222b5230075b1079dad8a3fc5637319615619a7725b0c81"}, - {file = "twisted-24.7.0.tar.gz", hash = "sha256:5a60147f044187a127ec7da96d170d49bcce50c6fd36f594e60f4587eff4d394"}, + {file = "twisted-24.11.0-py3-none-any.whl", hash = "sha256:fe403076c71f04d5d2d789a755b687c5637ec3bcd3b2b8252d76f2ba65f54261"}, + {file = "twisted-24.11.0.tar.gz", hash = "sha256:695d0556d5ec579dcc464d2856b634880ed1319f45b10d19043f2b57eb0115b5"}, ] [package.dependencies] -attrs = ">=21.3.0" -automat = ">=0.8.0" +attrs = ">=22.2.0" +automat = ">=24.8.0" constantly = ">=15.1" hyperlink = ">=17.1.1" idna = {version = ">=2.4", optional = true, markers = "extra == \"tls\""} @@ -3997,19 +4470,19 @@ typing-extensions = ">=4.2.0" zope-interface = ">=5" [package.extras] -all-non-platform = ["appdirs (>=1.4.0)", "appdirs (>=1.4.0)", "bcrypt (>=3.1.3)", "bcrypt (>=3.1.3)", "cryptography (>=3.3)", "cryptography (>=3.3)", "cython-test-exception-raiser (>=1.0.2,<2)", "cython-test-exception-raiser (>=1.0.2,<2)", "h2 (>=3.0,<5.0)", "h2 (>=3.0,<5.0)", "hypothesis (>=6.56)", "hypothesis (>=6.56)", "idna (>=2.4)", "idna (>=2.4)", "priority (>=1.1.0,<2.0)", "priority (>=1.1.0,<2.0)", "pyhamcrest (>=2)", "pyhamcrest (>=2)", "pyopenssl (>=21.0.0)", "pyopenssl (>=21.0.0)", "pyserial (>=3.0)", "pyserial (>=3.0)", "pywin32 (!=226)", "pywin32 (!=226)", "service-identity (>=18.1.0)", "service-identity (>=18.1.0)"] +all-non-platform = ["appdirs (>=1.4.0)", "appdirs (>=1.4.0)", "bcrypt (>=3.1.3)", "bcrypt (>=3.1.3)", "cryptography (>=3.3)", "cryptography (>=3.3)", "cython-test-exception-raiser (>=1.0.2,<2)", "cython-test-exception-raiser (>=1.0.2,<2)", "h2 (>=3.2,<5.0)", "h2 (>=3.2,<5.0)", "httpx[http2] (>=0.27)", "httpx[http2] (>=0.27)", "hypothesis (>=6.56)", "hypothesis (>=6.56)", "idna (>=2.4)", "idna (>=2.4)", "priority (>=1.1.0,<2.0)", "priority (>=1.1.0,<2.0)", "pyhamcrest (>=2)", "pyhamcrest (>=2)", "pyopenssl (>=21.0.0)", "pyopenssl (>=21.0.0)", "pyserial (>=3.0)", "pyserial (>=3.0)", "pywin32 (!=226)", "pywin32 (!=226)", "service-identity (>=18.1.0)", "service-identity (>=18.1.0)"] conch = ["appdirs (>=1.4.0)", "bcrypt (>=3.1.3)", "cryptography (>=3.3)"] -dev = ["coverage (>=7.5,<8.0)", "cython-test-exception-raiser (>=1.0.2,<2)", "hypothesis (>=6.56)", "pydoctor (>=23.9.0,<23.10.0)", "pyflakes (>=2.2,<3.0)", "pyhamcrest (>=2)", "python-subunit (>=1.4,<2.0)", "sphinx (>=6,<7)", "sphinx-rtd-theme (>=1.3,<2.0)", "towncrier (>=23.6,<24.0)", "twistedchecker (>=0.7,<1.0)"] +dev = ["coverage (>=7.5,<8.0)", "cython-test-exception-raiser (>=1.0.2,<2)", "httpx[http2] (>=0.27)", "hypothesis (>=6.56)", "pydoctor (>=23.9.0,<23.10.0)", "pyflakes (>=2.2,<3.0)", "pyhamcrest (>=2)", "python-subunit (>=1.4,<2.0)", "sphinx (>=6,<7)", "sphinx-rtd-theme (>=1.3,<2.0)", "towncrier (>=23.6,<24.0)", "twistedchecker (>=0.7,<1.0)"] dev-release = ["pydoctor (>=23.9.0,<23.10.0)", "pydoctor (>=23.9.0,<23.10.0)", "sphinx (>=6,<7)", "sphinx (>=6,<7)", "sphinx-rtd-theme (>=1.3,<2.0)", "sphinx-rtd-theme (>=1.3,<2.0)", "towncrier (>=23.6,<24.0)", "towncrier (>=23.6,<24.0)"] -gtk-platform = ["appdirs (>=1.4.0)", "appdirs (>=1.4.0)", "bcrypt (>=3.1.3)", "bcrypt (>=3.1.3)", "cryptography (>=3.3)", "cryptography (>=3.3)", "cython-test-exception-raiser (>=1.0.2,<2)", "cython-test-exception-raiser (>=1.0.2,<2)", "h2 (>=3.0,<5.0)", "h2 (>=3.0,<5.0)", "hypothesis (>=6.56)", "hypothesis (>=6.56)", "idna (>=2.4)", "idna (>=2.4)", "priority (>=1.1.0,<2.0)", "priority 
(>=1.1.0,<2.0)", "pygobject", "pygobject", "pyhamcrest (>=2)", "pyhamcrest (>=2)", "pyopenssl (>=21.0.0)", "pyopenssl (>=21.0.0)", "pyserial (>=3.0)", "pyserial (>=3.0)", "pywin32 (!=226)", "pywin32 (!=226)", "service-identity (>=18.1.0)", "service-identity (>=18.1.0)"] -http2 = ["h2 (>=3.0,<5.0)", "priority (>=1.1.0,<2.0)"] -macos-platform = ["appdirs (>=1.4.0)", "appdirs (>=1.4.0)", "bcrypt (>=3.1.3)", "bcrypt (>=3.1.3)", "cryptography (>=3.3)", "cryptography (>=3.3)", "cython-test-exception-raiser (>=1.0.2,<2)", "cython-test-exception-raiser (>=1.0.2,<2)", "h2 (>=3.0,<5.0)", "h2 (>=3.0,<5.0)", "hypothesis (>=6.56)", "hypothesis (>=6.56)", "idna (>=2.4)", "idna (>=2.4)", "priority (>=1.1.0,<2.0)", "priority (>=1.1.0,<2.0)", "pyhamcrest (>=2)", "pyhamcrest (>=2)", "pyobjc-core", "pyobjc-core", "pyobjc-framework-cfnetwork", "pyobjc-framework-cfnetwork", "pyobjc-framework-cocoa", "pyobjc-framework-cocoa", "pyopenssl (>=21.0.0)", "pyopenssl (>=21.0.0)", "pyserial (>=3.0)", "pyserial (>=3.0)", "pywin32 (!=226)", "pywin32 (!=226)", "service-identity (>=18.1.0)", "service-identity (>=18.1.0)"] -mypy = ["appdirs (>=1.4.0)", "bcrypt (>=3.1.3)", "coverage (>=7.5,<8.0)", "cryptography (>=3.3)", "cython-test-exception-raiser (>=1.0.2,<2)", "h2 (>=3.0,<5.0)", "hypothesis (>=6.56)", "idna (>=2.4)", "mypy (>=1.8,<2.0)", "mypy-zope (>=1.0.3,<1.1.0)", "priority (>=1.1.0,<2.0)", "pydoctor (>=23.9.0,<23.10.0)", "pyflakes (>=2.2,<3.0)", "pyhamcrest (>=2)", "pyopenssl (>=21.0.0)", "pyserial (>=3.0)", "python-subunit (>=1.4,<2.0)", "pywin32 (!=226)", "service-identity (>=18.1.0)", "sphinx (>=6,<7)", "sphinx-rtd-theme (>=1.3,<2.0)", "towncrier (>=23.6,<24.0)", "twistedchecker (>=0.7,<1.0)", "types-pyopenssl", "types-setuptools"] -osx-platform = ["appdirs (>=1.4.0)", "appdirs (>=1.4.0)", "bcrypt (>=3.1.3)", "bcrypt (>=3.1.3)", "cryptography (>=3.3)", "cryptography (>=3.3)", "cython-test-exception-raiser (>=1.0.2,<2)", "cython-test-exception-raiser (>=1.0.2,<2)", "h2 (>=3.0,<5.0)", "h2 (>=3.0,<5.0)", "hypothesis (>=6.56)", "hypothesis (>=6.56)", "idna (>=2.4)", "idna (>=2.4)", "priority (>=1.1.0,<2.0)", "priority (>=1.1.0,<2.0)", "pyhamcrest (>=2)", "pyhamcrest (>=2)", "pyobjc-core", "pyobjc-core", "pyobjc-framework-cfnetwork", "pyobjc-framework-cfnetwork", "pyobjc-framework-cocoa", "pyobjc-framework-cocoa", "pyopenssl (>=21.0.0)", "pyopenssl (>=21.0.0)", "pyserial (>=3.0)", "pyserial (>=3.0)", "pywin32 (!=226)", "pywin32 (!=226)", "service-identity (>=18.1.0)", "service-identity (>=18.1.0)"] +gtk-platform = ["appdirs (>=1.4.0)", "appdirs (>=1.4.0)", "bcrypt (>=3.1.3)", "bcrypt (>=3.1.3)", "cryptography (>=3.3)", "cryptography (>=3.3)", "cython-test-exception-raiser (>=1.0.2,<2)", "cython-test-exception-raiser (>=1.0.2,<2)", "h2 (>=3.2,<5.0)", "h2 (>=3.2,<5.0)", "httpx[http2] (>=0.27)", "httpx[http2] (>=0.27)", "hypothesis (>=6.56)", "hypothesis (>=6.56)", "idna (>=2.4)", "idna (>=2.4)", "priority (>=1.1.0,<2.0)", "priority (>=1.1.0,<2.0)", "pygobject", "pygobject", "pyhamcrest (>=2)", "pyhamcrest (>=2)", "pyopenssl (>=21.0.0)", "pyopenssl (>=21.0.0)", "pyserial (>=3.0)", "pyserial (>=3.0)", "pywin32 (!=226)", "pywin32 (!=226)", "service-identity (>=18.1.0)", "service-identity (>=18.1.0)"] +http2 = ["h2 (>=3.2,<5.0)", "priority (>=1.1.0,<2.0)"] +macos-platform = ["appdirs (>=1.4.0)", "appdirs (>=1.4.0)", "bcrypt (>=3.1.3)", "bcrypt (>=3.1.3)", "cryptography (>=3.3)", "cryptography (>=3.3)", "cython-test-exception-raiser (>=1.0.2,<2)", "cython-test-exception-raiser (>=1.0.2,<2)", "h2 (>=3.2,<5.0)", "h2 
(>=3.2,<5.0)", "httpx[http2] (>=0.27)", "httpx[http2] (>=0.27)", "hypothesis (>=6.56)", "hypothesis (>=6.56)", "idna (>=2.4)", "idna (>=2.4)", "priority (>=1.1.0,<2.0)", "priority (>=1.1.0,<2.0)", "pyhamcrest (>=2)", "pyhamcrest (>=2)", "pyobjc-core", "pyobjc-core", "pyobjc-framework-cfnetwork", "pyobjc-framework-cfnetwork", "pyobjc-framework-cocoa", "pyobjc-framework-cocoa", "pyopenssl (>=21.0.0)", "pyopenssl (>=21.0.0)", "pyserial (>=3.0)", "pyserial (>=3.0)", "pywin32 (!=226)", "pywin32 (!=226)", "service-identity (>=18.1.0)", "service-identity (>=18.1.0)"] +mypy = ["appdirs (>=1.4.0)", "bcrypt (>=3.1.3)", "coverage (>=7.5,<8.0)", "cryptography (>=3.3)", "cython-test-exception-raiser (>=1.0.2,<2)", "h2 (>=3.2,<5.0)", "httpx[http2] (>=0.27)", "hypothesis (>=6.56)", "idna (>=2.4)", "mypy (==1.10.1)", "mypy-zope (==1.0.6)", "priority (>=1.1.0,<2.0)", "pydoctor (>=23.9.0,<23.10.0)", "pyflakes (>=2.2,<3.0)", "pyhamcrest (>=2)", "pyopenssl (>=21.0.0)", "pyserial (>=3.0)", "python-subunit (>=1.4,<2.0)", "pywin32 (!=226)", "service-identity (>=18.1.0)", "sphinx (>=6,<7)", "sphinx-rtd-theme (>=1.3,<2.0)", "towncrier (>=23.6,<24.0)", "twistedchecker (>=0.7,<1.0)", "types-pyopenssl", "types-setuptools"] +osx-platform = ["appdirs (>=1.4.0)", "appdirs (>=1.4.0)", "bcrypt (>=3.1.3)", "bcrypt (>=3.1.3)", "cryptography (>=3.3)", "cryptography (>=3.3)", "cython-test-exception-raiser (>=1.0.2,<2)", "cython-test-exception-raiser (>=1.0.2,<2)", "h2 (>=3.2,<5.0)", "h2 (>=3.2,<5.0)", "httpx[http2] (>=0.27)", "httpx[http2] (>=0.27)", "hypothesis (>=6.56)", "hypothesis (>=6.56)", "idna (>=2.4)", "idna (>=2.4)", "priority (>=1.1.0,<2.0)", "priority (>=1.1.0,<2.0)", "pyhamcrest (>=2)", "pyhamcrest (>=2)", "pyobjc-core", "pyobjc-core", "pyobjc-framework-cfnetwork", "pyobjc-framework-cfnetwork", "pyobjc-framework-cocoa", "pyobjc-framework-cocoa", "pyopenssl (>=21.0.0)", "pyopenssl (>=21.0.0)", "pyserial (>=3.0)", "pyserial (>=3.0)", "pywin32 (!=226)", "pywin32 (!=226)", "service-identity (>=18.1.0)", "service-identity (>=18.1.0)"] serial = ["pyserial (>=3.0)", "pywin32 (!=226)"] -test = ["cython-test-exception-raiser (>=1.0.2,<2)", "hypothesis (>=6.56)", "pyhamcrest (>=2)"] +test = ["cython-test-exception-raiser (>=1.0.2,<2)", "httpx[http2] (>=0.27)", "hypothesis (>=6.56)", "pyhamcrest (>=2)"] tls = ["idna (>=2.4)", "pyopenssl (>=21.0.0)", "service-identity (>=18.1.0)"] -windows-platform = ["appdirs (>=1.4.0)", "appdirs (>=1.4.0)", "bcrypt (>=3.1.3)", "bcrypt (>=3.1.3)", "cryptography (>=3.3)", "cryptography (>=3.3)", "cython-test-exception-raiser (>=1.0.2,<2)", "cython-test-exception-raiser (>=1.0.2,<2)", "h2 (>=3.0,<5.0)", "h2 (>=3.0,<5.0)", "hypothesis (>=6.56)", "hypothesis (>=6.56)", "idna (>=2.4)", "idna (>=2.4)", "priority (>=1.1.0,<2.0)", "priority (>=1.1.0,<2.0)", "pyhamcrest (>=2)", "pyhamcrest (>=2)", "pyopenssl (>=21.0.0)", "pyopenssl (>=21.0.0)", "pyserial (>=3.0)", "pyserial (>=3.0)", "pywin32 (!=226)", "pywin32 (!=226)", "pywin32 (!=226)", "pywin32 (!=226)", "service-identity (>=18.1.0)", "service-identity (>=18.1.0)", "twisted-iocpsupport (>=1.0.2)", "twisted-iocpsupport (>=1.0.2)"] +windows-platform = ["appdirs (>=1.4.0)", "appdirs (>=1.4.0)", "bcrypt (>=3.1.3)", "bcrypt (>=3.1.3)", "cryptography (>=3.3)", "cryptography (>=3.3)", "cython-test-exception-raiser (>=1.0.2,<2)", "cython-test-exception-raiser (>=1.0.2,<2)", "h2 (>=3.2,<5.0)", "h2 (>=3.2,<5.0)", "httpx[http2] (>=0.27)", "httpx[http2] (>=0.27)", "hypothesis (>=6.56)", "hypothesis (>=6.56)", "idna (>=2.4)", "idna (>=2.4)", "priority 
(>=1.1.0,<2.0)", "priority (>=1.1.0,<2.0)", "pyhamcrest (>=2)", "pyhamcrest (>=2)", "pyopenssl (>=21.0.0)", "pyopenssl (>=21.0.0)", "pyserial (>=3.0)", "pyserial (>=3.0)", "pywin32 (!=226)", "pywin32 (!=226)", "pywin32 (!=226)", "pywin32 (!=226)", "service-identity (>=18.1.0)", "service-identity (>=18.1.0)", "twisted-iocpsupport (>=1.0.2)", "twisted-iocpsupport (>=1.0.2)"] [[package]] name = "txaio" @@ -4017,6 +4490,7 @@ version = "23.1.1" description = "Compatibility API between asyncio/Twisted/Trollius" optional = false python-versions = ">=3.7" +groups = ["main"] files = [ {file = "txaio-23.1.1-py2.py3-none-any.whl", hash = "sha256:aaea42f8aad50e0ecfb976130ada140797e9dcb85fad2cf72b0f37f8cefcb490"}, {file = "txaio-23.1.1.tar.gz", hash = "sha256:f9a9216e976e5e3246dfd112ad7ad55ca915606b60b84a757ac769bd404ff704"}, @@ -4027,12 +4501,28 @@ all = ["twisted (>=20.3.0)", "zope.interface (>=5.2.0)"] dev = ["pep8 (>=1.6.2)", "pyenchant (>=1.6.6)", "pytest (>=2.6.4)", "pytest-cov (>=1.8.1)", "sphinx (>=1.2.3)", "sphinx-rtd-theme (>=0.1.9)", "sphinxcontrib-spelling (>=2.1.2)", "tox (>=2.1.1)", "tox-gh-actions (>=2.2.0)", "twine (>=1.6.5)", "wheel"] twisted = ["twisted (>=20.3.0)", "zope.interface (>=5.2.0)"] +[[package]] +name = "types-requests" +version = "2.32.0.20241016" +description = "Typing stubs for requests" +optional = false +python-versions = ">=3.8" +groups = ["dev"] +files = [ + {file = "types-requests-2.32.0.20241016.tar.gz", hash = "sha256:0d9cad2f27515d0e3e3da7134a1b6f28fb97129d86b867f24d9c726452634d95"}, + {file = "types_requests-2.32.0.20241016-py3-none-any.whl", hash = "sha256:4195d62d6d3e043a4eaaf08ff8a62184584d2e8684e9d2aa178c7915a7da3747"}, +] + +[package.dependencies] +urllib3 = ">=2" + [[package]] name = "typing-extensions" version = "4.12.2" description = "Backported and Experimental Type Hints for Python 3.8+" optional = false python-versions = ">=3.8" +groups = ["main", "dev"] files = [ {file = "typing_extensions-4.12.2-py3-none-any.whl", hash = "sha256:04e5ca0351e0f3f85c6853954072df659d0d13fac324d0072316b67d7794700d"}, {file = "typing_extensions-4.12.2.tar.gz", hash = "sha256:1a7ead55c7e559dd4dee8856e3a88b41225abfe1ce8df57b7c13915fe121ffb8"}, @@ -4040,13 +4530,15 @@ files = [ [[package]] name = "tzdata" -version = "2024.2" +version = "2025.1" description = "Provider of IANA time zone data" optional = false python-versions = ">=2" +groups = ["main"] +markers = "sys_platform == \"win32\"" files = [ - {file = "tzdata-2024.2-py2.py3-none-any.whl", hash = "sha256:a48093786cdcde33cad18c2555e8532f34422074448fbc874186f0abd79565cd"}, - {file = "tzdata-2024.2.tar.gz", hash = "sha256:7d85cc416e9382e69095b7bdf4afd9e3880418a2413feec7069d533d6b4e31cc"}, + {file = "tzdata-2025.1-py2.py3-none-any.whl", hash = "sha256:7e127113816800496f027041c570f50bcd464a020098a3b6b199517772303639"}, + {file = "tzdata-2025.1.tar.gz", hash = "sha256:24894909e88cdb28bd1636c6887801df64cb485bd593f2fd83ef29075a81d694"}, ] [[package]] @@ -4055,6 +4547,7 @@ version = "2.1.1" description = "Unicorn CPU emulator engine" optional = false python-versions = "*" +groups = ["main"] files = [ {file = "unicorn-2.1.1-py2.py3-none-macosx_11_0_arm64.whl", hash = "sha256:8839c02de1914acb9decb291f89eefb2944148460cf5f09a9a1cde920e32e509"}, {file = "unicorn-2.1.1-py2.py3-none-macosx_12_7_x86_64.whl", hash = "sha256:a52470664a6fe76f735943de5c90865ef049e23522dd1751306fefe2384e5dba"}, @@ -4071,13 +4564,14 @@ files = [ [[package]] name = "urllib3" -version = "2.2.3" +version = "2.3.0" description = "HTTP library with thread-safe 
connection pooling, file post, and more." optional = false -python-versions = ">=3.8" +python-versions = ">=3.9" +groups = ["main", "dev"] files = [ - {file = "urllib3-2.2.3-py3-none-any.whl", hash = "sha256:ca899ca043dcb1bafa3e262d73aa25c465bfb49e0bd9dd5d59f1d0acba2f8fac"}, - {file = "urllib3-2.2.3.tar.gz", hash = "sha256:e7d814a81dad81e6caf2ec9fdedb284ecc9c73076b62654547cc64ccdcae26e9"}, + {file = "urllib3-2.3.0-py3-none-any.whl", hash = "sha256:1cee9ad369867bfdbbb48b7dd50374c0967a0bb7710050facf0dd6911440e3df"}, + {file = "urllib3-2.3.0.tar.gz", hash = "sha256:f8c5449b3cf0861679ce7e0503c7b44b5ec981bec0d1d3795a07f1ba96f0204d"}, ] [package.extras] @@ -4092,6 +4586,7 @@ version = "0.18.3" description = "The lightning-fast ASGI server." optional = false python-versions = ">=3.7" +groups = ["main"] files = [ {file = "uvicorn-0.18.3-py3-none-any.whl", hash = "sha256:0abd429ebb41e604ed8d2be6c60530de3408f250e8d2d84967d85ba9e86fe3af"}, {file = "uvicorn-0.18.3.tar.gz", hash = "sha256:9a66e7c42a2a95222f76ec24a4b754c158261c4696e683b9dadc72b590e0311b"}, @@ -4113,57 +4608,67 @@ standard = ["colorama (>=0.4)", "httptools (>=0.4.0)", "python-dotenv (>=0.13)", [[package]] name = "uvloop" -version = "0.20.0" +version = "0.21.0" description = "Fast implementation of asyncio event loop on top of libuv" optional = false python-versions = ">=3.8.0" -files = [ - {file = "uvloop-0.20.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:9ebafa0b96c62881d5cafa02d9da2e44c23f9f0cd829f3a32a6aff771449c996"}, - {file = "uvloop-0.20.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:35968fc697b0527a06e134999eef859b4034b37aebca537daeb598b9d45a137b"}, - {file = "uvloop-0.20.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b16696f10e59d7580979b420eedf6650010a4a9c3bd8113f24a103dfdb770b10"}, - {file = "uvloop-0.20.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9b04d96188d365151d1af41fa2d23257b674e7ead68cfd61c725a422764062ae"}, - {file = "uvloop-0.20.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:94707205efbe809dfa3a0d09c08bef1352f5d3d6612a506f10a319933757c006"}, - {file = "uvloop-0.20.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:89e8d33bb88d7263f74dc57d69f0063e06b5a5ce50bb9a6b32f5fcbe655f9e73"}, - {file = "uvloop-0.20.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:e50289c101495e0d1bb0bfcb4a60adde56e32f4449a67216a1ab2750aa84f037"}, - {file = "uvloop-0.20.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:e237f9c1e8a00e7d9ddaa288e535dc337a39bcbf679f290aee9d26df9e72bce9"}, - {file = "uvloop-0.20.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:746242cd703dc2b37f9d8b9f173749c15e9a918ddb021575a0205ec29a38d31e"}, - {file = "uvloop-0.20.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:82edbfd3df39fb3d108fc079ebc461330f7c2e33dbd002d146bf7c445ba6e756"}, - {file = "uvloop-0.20.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:80dc1b139516be2077b3e57ce1cb65bfed09149e1d175e0478e7a987863b68f0"}, - {file = "uvloop-0.20.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:4f44af67bf39af25db4c1ac27e82e9665717f9c26af2369c404be865c8818dcf"}, - {file = "uvloop-0.20.0-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:4b75f2950ddb6feed85336412b9a0c310a2edbcf4cf931aa5cfe29034829676d"}, - {file = "uvloop-0.20.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:77fbc69c287596880ecec2d4c7a62346bef08b6209749bf6ce8c22bbaca0239e"}, - {file = 
"uvloop-0.20.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6462c95f48e2d8d4c993a2950cd3d31ab061864d1c226bbf0ee2f1a8f36674b9"}, - {file = "uvloop-0.20.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:649c33034979273fa71aa25d0fe120ad1777c551d8c4cd2c0c9851d88fcb13ab"}, - {file = "uvloop-0.20.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:3a609780e942d43a275a617c0839d85f95c334bad29c4c0918252085113285b5"}, - {file = "uvloop-0.20.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:aea15c78e0d9ad6555ed201344ae36db5c63d428818b4b2a42842b3870127c00"}, - {file = "uvloop-0.20.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:f0e94b221295b5e69de57a1bd4aeb0b3a29f61be6e1b478bb8a69a73377db7ba"}, - {file = "uvloop-0.20.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:fee6044b64c965c425b65a4e17719953b96e065c5b7e09b599ff332bb2744bdf"}, - {file = "uvloop-0.20.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:265a99a2ff41a0fd56c19c3838b29bf54d1d177964c300dad388b27e84fd7847"}, - {file = "uvloop-0.20.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b10c2956efcecb981bf9cfb8184d27d5d64b9033f917115a960b83f11bfa0d6b"}, - {file = "uvloop-0.20.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:e7d61fe8e8d9335fac1bf8d5d82820b4808dd7a43020c149b63a1ada953d48a6"}, - {file = "uvloop-0.20.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:2beee18efd33fa6fdb0976e18475a4042cd31c7433c866e8a09ab604c7c22ff2"}, - {file = "uvloop-0.20.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:d8c36fdf3e02cec92aed2d44f63565ad1522a499c654f07935c8f9d04db69e95"}, - {file = "uvloop-0.20.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:a0fac7be202596c7126146660725157d4813aa29a4cc990fe51346f75ff8fde7"}, - {file = "uvloop-0.20.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9d0fba61846f294bce41eb44d60d58136090ea2b5b99efd21cbdf4e21927c56a"}, - {file = "uvloop-0.20.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:95720bae002ac357202e0d866128eb1ac82545bcf0b549b9abe91b5178d9b541"}, - {file = "uvloop-0.20.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:36c530d8fa03bfa7085af54a48f2ca16ab74df3ec7108a46ba82fd8b411a2315"}, - {file = "uvloop-0.20.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:e97152983442b499d7a71e44f29baa75b3b02e65d9c44ba53b10338e98dedb66"}, - {file = "uvloop-0.20.0.tar.gz", hash = "sha256:4603ca714a754fc8d9b197e325db25b2ea045385e8a3ad05d3463de725fdf469"}, +groups = ["main"] +markers = "(sys_platform != \"win32\" and sys_platform != \"cygwin\") and platform_python_implementation != \"PyPy\"" +files = [ + {file = "uvloop-0.21.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:ec7e6b09a6fdded42403182ab6b832b71f4edaf7f37a9a0e371a01db5f0cb45f"}, + {file = "uvloop-0.21.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:196274f2adb9689a289ad7d65700d37df0c0930fd8e4e743fa4834e850d7719d"}, + {file = "uvloop-0.21.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f38b2e090258d051d68a5b14d1da7203a3c3677321cf32a95a6f4db4dd8b6f26"}, + {file = "uvloop-0.21.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:87c43e0f13022b998eb9b973b5e97200c8b90823454d4bc06ab33829e09fb9bb"}, + {file = "uvloop-0.21.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:10d66943def5fcb6e7b37310eb6b5639fd2ccbc38df1177262b0640c3ca68c1f"}, + {file = 
"uvloop-0.21.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:67dd654b8ca23aed0a8e99010b4c34aca62f4b7fce88f39d452ed7622c94845c"}, + {file = "uvloop-0.21.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:c0f3fa6200b3108919f8bdabb9a7f87f20e7097ea3c543754cabc7d717d95cf8"}, + {file = "uvloop-0.21.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:0878c2640cf341b269b7e128b1a5fed890adc4455513ca710d77d5e93aa6d6a0"}, + {file = "uvloop-0.21.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b9fb766bb57b7388745d8bcc53a359b116b8a04c83a2288069809d2b3466c37e"}, + {file = "uvloop-0.21.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8a375441696e2eda1c43c44ccb66e04d61ceeffcd76e4929e527b7fa401b90fb"}, + {file = "uvloop-0.21.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:baa0e6291d91649c6ba4ed4b2f982f9fa165b5bbd50a9e203c416a2797bab3c6"}, + {file = "uvloop-0.21.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:4509360fcc4c3bd2c70d87573ad472de40c13387f5fda8cb58350a1d7475e58d"}, + {file = "uvloop-0.21.0-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:359ec2c888397b9e592a889c4d72ba3d6befba8b2bb01743f72fffbde663b59c"}, + {file = "uvloop-0.21.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:f7089d2dc73179ce5ac255bdf37c236a9f914b264825fdaacaded6990a7fb4c2"}, + {file = "uvloop-0.21.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:baa4dcdbd9ae0a372f2167a207cd98c9f9a1ea1188a8a526431eef2f8116cc8d"}, + {file = "uvloop-0.21.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:86975dca1c773a2c9864f4c52c5a55631038e387b47eaf56210f873887b6c8dc"}, + {file = "uvloop-0.21.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:461d9ae6660fbbafedd07559c6a2e57cd553b34b0065b6550685f6653a98c1cb"}, + {file = "uvloop-0.21.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:183aef7c8730e54c9a3ee3227464daed66e37ba13040bb3f350bc2ddc040f22f"}, + {file = "uvloop-0.21.0-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:bfd55dfcc2a512316e65f16e503e9e450cab148ef11df4e4e679b5e8253a5281"}, + {file = "uvloop-0.21.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:787ae31ad8a2856fc4e7c095341cccc7209bd657d0e71ad0dc2ea83c4a6fa8af"}, + {file = "uvloop-0.21.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5ee4d4ef48036ff6e5cfffb09dd192c7a5027153948d85b8da7ff705065bacc6"}, + {file = "uvloop-0.21.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f3df876acd7ec037a3d005b3ab85a7e4110422e4d9c1571d4fc89b0fc41b6816"}, + {file = "uvloop-0.21.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:bd53ecc9a0f3d87ab847503c2e1552b690362e005ab54e8a48ba97da3924c0dc"}, + {file = "uvloop-0.21.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:a5c39f217ab3c663dc699c04cbd50c13813e31d917642d459fdcec07555cc553"}, + {file = "uvloop-0.21.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:17df489689befc72c39a08359efac29bbee8eee5209650d4b9f34df73d22e414"}, + {file = "uvloop-0.21.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:bc09f0ff191e61c2d592a752423c767b4ebb2986daa9ed62908e2b1b9a9ae206"}, + {file = "uvloop-0.21.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f0ce1b49560b1d2d8a2977e3ba4afb2414fb46b86a1b64056bc4ab929efdafbe"}, + {file = "uvloop-0.21.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e678ad6fe52af2c58d2ae3c73dc85524ba8abe637f134bf3564ed07f555c5e79"}, 
+ {file = "uvloop-0.21.0-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:460def4412e473896ef179a1671b40c039c7012184b627898eea5072ef6f017a"}, + {file = "uvloop-0.21.0-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:10da8046cc4a8f12c91a1c39d1dd1585c41162a15caaef165c2174db9ef18bdc"}, + {file = "uvloop-0.21.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:c097078b8031190c934ed0ebfee8cc5f9ba9642e6eb88322b9958b649750f72b"}, + {file = "uvloop-0.21.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:46923b0b5ee7fc0020bef24afe7836cb068f5050ca04caf6b487c513dc1a20b2"}, + {file = "uvloop-0.21.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:53e420a3afe22cdcf2a0f4846e377d16e718bc70103d7088a4f7623567ba5fb0"}, + {file = "uvloop-0.21.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:88cb67cdbc0e483da00af0b2c3cdad4b7c61ceb1ee0f33fe00e09c81e3a6cb75"}, + {file = "uvloop-0.21.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:221f4f2a1f46032b403bf3be628011caf75428ee3cc204a22addf96f586b19fd"}, + {file = "uvloop-0.21.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:2d1f581393673ce119355d56da84fe1dd9d2bb8b3d13ce792524e1607139feff"}, + {file = "uvloop-0.21.0.tar.gz", hash = "sha256:3bf12b0fda68447806a7ad847bfa591613177275d35b6724b1ee573faa3704e3"}, ] [package.extras] +dev = ["Cython (>=3.0,<4.0)", "setuptools (>=60)"] docs = ["Sphinx (>=4.1.2,<4.2.0)", "sphinx-rtd-theme (>=0.5.2,<0.6.0)", "sphinxcontrib-asyncio (>=0.3.0,<0.4.0)"] -test = ["Cython (>=0.29.36,<0.30.0)", "aiohttp (==3.9.0b0)", "aiohttp (>=3.8.1)", "flake8 (>=5.0,<6.0)", "mypy (>=0.800)", "psutil", "pyOpenSSL (>=23.0.0,<23.1.0)", "pycodestyle (>=2.9.0,<2.10.0)"] +test = ["aiohttp (>=3.10.5)", "flake8 (>=5.0,<6.0)", "mypy (>=0.800)", "psutil", "pyOpenSSL (>=23.0.0,<23.1.0)", "pycodestyle (>=2.9.0,<2.10.0)"] [[package]] name = "virtualenv" -version = "20.26.6" +version = "20.29.1" description = "Virtual Python Environment builder" optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" +groups = ["dev"] files = [ - {file = "virtualenv-20.26.6-py3-none-any.whl", hash = "sha256:7345cc5b25405607a624d8418154577459c3e0277f5466dd79c49d5e492995f2"}, - {file = "virtualenv-20.26.6.tar.gz", hash = "sha256:280aede09a2a5c317e409a00102e7077c6432c5a38f0ef938e643805a7ad2c48"}, + {file = "virtualenv-20.29.1-py3-none-any.whl", hash = "sha256:4e4cb403c0b0da39e13b46b1b2476e505cb0046b25f242bee80f62bf990b2779"}, + {file = "virtualenv-20.29.1.tar.gz", hash = "sha256:b8b8970138d32fb606192cb97f6cd4bb644fa486be9308fb9b63f81091b5dc35"}, ] [package.dependencies] @@ -4177,26 +4682,27 @@ test = ["covdefaults (>=2.3)", "coverage (>=7.2.7)", "coverage-enable-subprocess [[package]] name = "viv-utils" -version = "0.7.11" +version = "0.8.0" description = "Utilities for binary analysis using vivisect." 
optional = false -python-versions = ">=3.8" +python-versions = ">=3.9" +groups = ["main"] files = [ - {file = "viv_utils-0.7.11-py2.py3-none-any.whl", hash = "sha256:ab44c428315e014a52dbbaab6862876dd0cd2c930a138a1655c1c5be7270d81b"}, + {file = "viv_utils-0.8.0-py2.py3-none-any.whl", hash = "sha256:4fd2d8849529d00dc5b7f736f581beeb79064ee094eb2dfd9f84877bb5a95c12"}, ] [package.dependencies] funcy = ">=2.0" intervaltree = ">=3.1.0" pefile = ">=2023.2.7" -python-flirt = {version = "0.8.10", optional = true, markers = "extra == \"flirt\""} -typing-extensions = ">=4.5.0" +python-flirt = {version = ">=0.9.0", optional = true, markers = "extra == \"flirt\""} +typing_extensions = ">=4.5.0" vivisect = ">=1.1.0" [package.extras] -build = ["build (==1.2.1)", "setuptools (==70.0.0)"] -dev = ["black (==24.4.2)", "isort (==5.11.5)", "mypy (==1.10.0)", "pycodestyle (==2.11.1)", "pytest (==8.2.2)", "pytest-instafail (==0.5.0)", "pytest-sugar (==0.9.6)", "types-setuptools (==70.0.0.20240524)"] -flirt = ["python-flirt (==0.8.10)"] +build = ["build (==1.2.1)", "setuptools (==75.2.0)"] +dev = ["black (==24.4.2)", "isort (==5.13.2)", "mypy (==1.11.2)", "pycodestyle (==2.12.0)", "pytest (==8.2.2)", "pytest-instafail (==0.5.0)", "pytest-sugar (==1.0.0)", "types-setuptools (==75.2.0.20241019)"] +flirt = ["python-flirt (>=0.9.0)"] [[package]] name = "vivisect" @@ -4204,6 +4710,7 @@ version = "1.2.1" description = "Pure python disassembler, debugger, emulator, and static analysis framework" optional = false python-versions = ">=3.8" +groups = ["main"] files = [ {file = "vivisect-1.2.1-py3-none-any.whl", hash = "sha256:62eb383013318efcd42f2565e4ab5323ebb1fb6b2e8e9e7e432bd126743eccda"}, {file = "vivisect-1.2.1.tar.gz", hash = "sha256:cc15ab541b9be3cad8060ee4f420e680258fa4fcf7477f3f9ad5023353f73299"}, @@ -4223,94 +4730,83 @@ gui = ["pyqt5 (==5.15.7)", "pyqtwebengine (==5.15.6)"] [[package]] name = "watchfiles" -version = "0.24.0" +version = "1.0.4" description = "Simple, modern and high performance file watching and code reload in python." 
optional = false -python-versions = ">=3.8" -files = [ - {file = "watchfiles-0.24.0-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:083dc77dbdeef09fa44bb0f4d1df571d2e12d8a8f985dccde71ac3ac9ac067a0"}, - {file = "watchfiles-0.24.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:e94e98c7cb94cfa6e071d401ea3342767f28eb5a06a58fafdc0d2a4974f4f35c"}, - {file = "watchfiles-0.24.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:82ae557a8c037c42a6ef26c494d0631cacca040934b101d001100ed93d43f361"}, - {file = "watchfiles-0.24.0-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:acbfa31e315a8f14fe33e3542cbcafc55703b8f5dcbb7c1eecd30f141df50db3"}, - {file = "watchfiles-0.24.0-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b74fdffce9dfcf2dc296dec8743e5b0332d15df19ae464f0e249aa871fc1c571"}, - {file = "watchfiles-0.24.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:449f43f49c8ddca87c6b3980c9284cab6bd1f5c9d9a2b00012adaaccd5e7decd"}, - {file = "watchfiles-0.24.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:4abf4ad269856618f82dee296ac66b0cd1d71450fc3c98532d93798e73399b7a"}, - {file = "watchfiles-0.24.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9f895d785eb6164678ff4bb5cc60c5996b3ee6df3edb28dcdeba86a13ea0465e"}, - {file = "watchfiles-0.24.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:7ae3e208b31be8ce7f4c2c0034f33406dd24fbce3467f77223d10cd86778471c"}, - {file = "watchfiles-0.24.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:2efec17819b0046dde35d13fb8ac7a3ad877af41ae4640f4109d9154ed30a188"}, - {file = "watchfiles-0.24.0-cp310-none-win32.whl", hash = "sha256:6bdcfa3cd6fdbdd1a068a52820f46a815401cbc2cb187dd006cb076675e7b735"}, - {file = "watchfiles-0.24.0-cp310-none-win_amd64.whl", hash = "sha256:54ca90a9ae6597ae6dc00e7ed0a040ef723f84ec517d3e7ce13e63e4bc82fa04"}, - {file = "watchfiles-0.24.0-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:bdcd5538e27f188dd3c804b4a8d5f52a7fc7f87e7fd6b374b8e36a4ca03db428"}, - {file = "watchfiles-0.24.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:2dadf8a8014fde6addfd3c379e6ed1a981c8f0a48292d662e27cabfe4239c83c"}, - {file = "watchfiles-0.24.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6509ed3f467b79d95fc62a98229f79b1a60d1b93f101e1c61d10c95a46a84f43"}, - {file = "watchfiles-0.24.0-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:8360f7314a070c30e4c976b183d1d8d1585a4a50c5cb603f431cebcbb4f66327"}, - {file = "watchfiles-0.24.0-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:316449aefacf40147a9efaf3bd7c9bdd35aaba9ac5d708bd1eb5763c9a02bef5"}, - {file = "watchfiles-0.24.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:73bde715f940bea845a95247ea3e5eb17769ba1010efdc938ffcb967c634fa61"}, - {file = "watchfiles-0.24.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3770e260b18e7f4e576edca4c0a639f704088602e0bc921c5c2e721e3acb8d15"}, - {file = "watchfiles-0.24.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:aa0fd7248cf533c259e59dc593a60973a73e881162b1a2f73360547132742823"}, - {file = "watchfiles-0.24.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:d7a2e3b7f5703ffbd500dabdefcbc9eafeff4b9444bbdd5d83d79eedf8428fab"}, - {file = "watchfiles-0.24.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = 
"sha256:d831ee0a50946d24a53821819b2327d5751b0c938b12c0653ea5be7dea9c82ec"}, - {file = "watchfiles-0.24.0-cp311-none-win32.whl", hash = "sha256:49d617df841a63b4445790a254013aea2120357ccacbed00253f9c2b5dc24e2d"}, - {file = "watchfiles-0.24.0-cp311-none-win_amd64.whl", hash = "sha256:d3dcb774e3568477275cc76554b5a565024b8ba3a0322f77c246bc7111c5bb9c"}, - {file = "watchfiles-0.24.0-cp311-none-win_arm64.whl", hash = "sha256:9301c689051a4857d5b10777da23fafb8e8e921bcf3abe6448a058d27fb67633"}, - {file = "watchfiles-0.24.0-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:7211b463695d1e995ca3feb38b69227e46dbd03947172585ecb0588f19b0d87a"}, - {file = "watchfiles-0.24.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:4b8693502d1967b00f2fb82fc1e744df128ba22f530e15b763c8d82baee15370"}, - {file = "watchfiles-0.24.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cdab9555053399318b953a1fe1f586e945bc8d635ce9d05e617fd9fe3a4687d6"}, - {file = "watchfiles-0.24.0-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:34e19e56d68b0dad5cff62273107cf5d9fbaf9d75c46277aa5d803b3ef8a9e9b"}, - {file = "watchfiles-0.24.0-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:41face41f036fee09eba33a5b53a73e9a43d5cb2c53dad8e61fa6c9f91b5a51e"}, - {file = "watchfiles-0.24.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5148c2f1ea043db13ce9b0c28456e18ecc8f14f41325aa624314095b6aa2e9ea"}, - {file = "watchfiles-0.24.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7e4bd963a935aaf40b625c2499f3f4f6bbd0c3776f6d3bc7c853d04824ff1c9f"}, - {file = "watchfiles-0.24.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c79d7719d027b7a42817c5d96461a99b6a49979c143839fc37aa5748c322f234"}, - {file = "watchfiles-0.24.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:32aa53a9a63b7f01ed32e316e354e81e9da0e6267435c7243bf8ae0f10b428ef"}, - {file = "watchfiles-0.24.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:ce72dba6a20e39a0c628258b5c308779b8697f7676c254a845715e2a1039b968"}, - {file = "watchfiles-0.24.0-cp312-none-win32.whl", hash = "sha256:d9018153cf57fc302a2a34cb7564870b859ed9a732d16b41a9b5cb2ebed2d444"}, - {file = "watchfiles-0.24.0-cp312-none-win_amd64.whl", hash = "sha256:551ec3ee2a3ac9cbcf48a4ec76e42c2ef938a7e905a35b42a1267fa4b1645896"}, - {file = "watchfiles-0.24.0-cp312-none-win_arm64.whl", hash = "sha256:b52a65e4ea43c6d149c5f8ddb0bef8d4a1e779b77591a458a893eb416624a418"}, - {file = "watchfiles-0.24.0-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:3d2e3ab79a1771c530233cadfd277fcc762656d50836c77abb2e5e72b88e3a48"}, - {file = "watchfiles-0.24.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:327763da824817b38ad125dcd97595f942d720d32d879f6c4ddf843e3da3fe90"}, - {file = "watchfiles-0.24.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bd82010f8ab451dabe36054a1622870166a67cf3fce894f68895db6f74bbdc94"}, - {file = "watchfiles-0.24.0-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:d64ba08db72e5dfd5c33be1e1e687d5e4fcce09219e8aee893a4862034081d4e"}, - {file = "watchfiles-0.24.0-cp313-cp313-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1cf1f6dd7825053f3d98f6d33f6464ebdd9ee95acd74ba2c34e183086900a827"}, - {file = "watchfiles-0.24.0-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:43e3e37c15a8b6fe00c1bce2473cfa8eb3484bbeecf3aefbf259227e487a03df"}, - {file = 
"watchfiles-0.24.0-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:88bcd4d0fe1d8ff43675360a72def210ebad3f3f72cabfeac08d825d2639b4ab"}, - {file = "watchfiles-0.24.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:999928c6434372fde16c8f27143d3e97201160b48a614071261701615a2a156f"}, - {file = "watchfiles-0.24.0-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:30bbd525c3262fd9f4b1865cb8d88e21161366561cd7c9e1194819e0a33ea86b"}, - {file = "watchfiles-0.24.0-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:edf71b01dec9f766fb285b73930f95f730bb0943500ba0566ae234b5c1618c18"}, - {file = "watchfiles-0.24.0-cp313-none-win32.whl", hash = "sha256:f4c96283fca3ee09fb044f02156d9570d156698bc3734252175a38f0e8975f07"}, - {file = "watchfiles-0.24.0-cp313-none-win_amd64.whl", hash = "sha256:a974231b4fdd1bb7f62064a0565a6b107d27d21d9acb50c484d2cdba515b9366"}, - {file = "watchfiles-0.24.0-cp38-cp38-macosx_10_12_x86_64.whl", hash = "sha256:ee82c98bed9d97cd2f53bdb035e619309a098ea53ce525833e26b93f673bc318"}, - {file = "watchfiles-0.24.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:fd92bbaa2ecdb7864b7600dcdb6f2f1db6e0346ed425fbd01085be04c63f0b05"}, - {file = "watchfiles-0.24.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f83df90191d67af5a831da3a33dd7628b02a95450e168785586ed51e6d28943c"}, - {file = "watchfiles-0.24.0-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:fca9433a45f18b7c779d2bae7beeec4f740d28b788b117a48368d95a3233ed83"}, - {file = "watchfiles-0.24.0-cp38-cp38-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b995bfa6bf01a9e09b884077a6d37070464b529d8682d7691c2d3b540d357a0c"}, - {file = "watchfiles-0.24.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ed9aba6e01ff6f2e8285e5aa4154e2970068fe0fc0998c4380d0e6278222269b"}, - {file = "watchfiles-0.24.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e5171ef898299c657685306d8e1478a45e9303ddcd8ac5fed5bd52ad4ae0b69b"}, - {file = "watchfiles-0.24.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4933a508d2f78099162da473841c652ad0de892719043d3f07cc83b33dfd9d91"}, - {file = "watchfiles-0.24.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:95cf3b95ea665ab03f5a54765fa41abf0529dbaf372c3b83d91ad2cfa695779b"}, - {file = "watchfiles-0.24.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:01def80eb62bd5db99a798d5e1f5f940ca0a05986dcfae21d833af7a46f7ee22"}, - {file = "watchfiles-0.24.0-cp38-none-win32.whl", hash = "sha256:4d28cea3c976499475f5b7a2fec6b3a36208656963c1a856d328aeae056fc5c1"}, - {file = "watchfiles-0.24.0-cp38-none-win_amd64.whl", hash = "sha256:21ab23fdc1208086d99ad3f69c231ba265628014d4aed31d4e8746bd59e88cd1"}, - {file = "watchfiles-0.24.0-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:b665caeeda58625c3946ad7308fbd88a086ee51ccb706307e5b1fa91556ac886"}, - {file = "watchfiles-0.24.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:5c51749f3e4e269231510da426ce4a44beb98db2dce9097225c338f815b05d4f"}, - {file = "watchfiles-0.24.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:82b2509f08761f29a0fdad35f7e1638b8ab1adfa2666d41b794090361fb8b855"}, - {file = "watchfiles-0.24.0-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:9a60e2bf9dc6afe7f743e7c9b149d1fdd6dbf35153c78fe3a14ae1a9aee3d98b"}, - {file = "watchfiles-0.24.0-cp39-cp39-manylinux_2_17_i686.manylinux2014_i686.whl", hash = 
"sha256:f7d9b87c4c55e3ea8881dfcbf6d61ea6775fffed1fedffaa60bd047d3c08c430"}, - {file = "watchfiles-0.24.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:78470906a6be5199524641f538bd2c56bb809cd4bf29a566a75051610bc982c3"}, - {file = "watchfiles-0.24.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:07cdef0c84c03375f4e24642ef8d8178e533596b229d32d2bbd69e5128ede02a"}, - {file = "watchfiles-0.24.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d337193bbf3e45171c8025e291530fb7548a93c45253897cd764a6a71c937ed9"}, - {file = "watchfiles-0.24.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:ec39698c45b11d9694a1b635a70946a5bad066b593af863460a8e600f0dff1ca"}, - {file = "watchfiles-0.24.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:2e28d91ef48eab0afb939fa446d8ebe77e2f7593f5f463fd2bb2b14132f95b6e"}, - {file = "watchfiles-0.24.0-cp39-none-win32.whl", hash = "sha256:7138eff8baa883aeaa074359daabb8b6c1e73ffe69d5accdc907d62e50b1c0da"}, - {file = "watchfiles-0.24.0-cp39-none-win_amd64.whl", hash = "sha256:b3ef2c69c655db63deb96b3c3e587084612f9b1fa983df5e0c3379d41307467f"}, - {file = "watchfiles-0.24.0-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:632676574429bee8c26be8af52af20e0c718cc7f5f67f3fb658c71928ccd4f7f"}, - {file = "watchfiles-0.24.0-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:a2a9891723a735d3e2540651184be6fd5b96880c08ffe1a98bae5017e65b544b"}, - {file = "watchfiles-0.24.0-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4a7fa2bc0efef3e209a8199fd111b8969fe9db9c711acc46636686331eda7dd4"}, - {file = "watchfiles-0.24.0-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:01550ccf1d0aed6ea375ef259706af76ad009ef5b0203a3a4cce0f6024f9b68a"}, - {file = "watchfiles-0.24.0-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:96619302d4374de5e2345b2b622dc481257a99431277662c30f606f3e22f42be"}, - {file = "watchfiles-0.24.0-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:85d5f0c7771dcc7a26c7a27145059b6bb0ce06e4e751ed76cdf123d7039b60b5"}, - {file = "watchfiles-0.24.0-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:951088d12d339690a92cef2ec5d3cfd957692834c72ffd570ea76a6790222777"}, - {file = "watchfiles-0.24.0-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:49fb58bcaa343fedc6a9e91f90195b20ccb3135447dc9e4e2570c3a39565853e"}, - {file = "watchfiles-0.24.0.tar.gz", hash = "sha256:afb72325b74fa7a428c009c1b8be4b4d7c2afedafb2982827ef2156646df2fe1"}, +python-versions = ">=3.9" +groups = ["main"] +files = [ + {file = "watchfiles-1.0.4-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:ba5bb3073d9db37c64520681dd2650f8bd40902d991e7b4cfaeece3e32561d08"}, + {file = "watchfiles-1.0.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:9f25d0ba0fe2b6d2c921cf587b2bf4c451860086534f40c384329fb96e2044d1"}, + {file = "watchfiles-1.0.4-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:47eb32ef8c729dbc4f4273baece89398a4d4b5d21a1493efea77a17059f4df8a"}, + {file = "watchfiles-1.0.4-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:076f293100db3b0b634514aa0d294b941daa85fc777f9c698adb1009e5aca0b1"}, + {file = "watchfiles-1.0.4-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1eacd91daeb5158c598fe22d7ce66d60878b6294a86477a4715154990394c9b3"}, + {file = 
"watchfiles-1.0.4-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:13c2ce7b72026cfbca120d652f02c7750f33b4c9395d79c9790b27f014c8a5a2"}, + {file = "watchfiles-1.0.4-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:90192cdc15ab7254caa7765a98132a5a41471cf739513cc9bcf7d2ffcc0ec7b2"}, + {file = "watchfiles-1.0.4-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:278aaa395f405972e9f523bd786ed59dfb61e4b827856be46a42130605fd0899"}, + {file = "watchfiles-1.0.4-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:a462490e75e466edbb9fc4cd679b62187153b3ba804868452ef0577ec958f5ff"}, + {file = "watchfiles-1.0.4-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:8d0d0630930f5cd5af929040e0778cf676a46775753e442a3f60511f2409f48f"}, + {file = "watchfiles-1.0.4-cp310-cp310-win32.whl", hash = "sha256:cc27a65069bcabac4552f34fd2dce923ce3fcde0721a16e4fb1b466d63ec831f"}, + {file = "watchfiles-1.0.4-cp310-cp310-win_amd64.whl", hash = "sha256:8b1f135238e75d075359cf506b27bf3f4ca12029c47d3e769d8593a2024ce161"}, + {file = "watchfiles-1.0.4-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:2a9f93f8439639dc244c4d2902abe35b0279102bca7bbcf119af964f51d53c19"}, + {file = "watchfiles-1.0.4-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:9eea33ad8c418847dd296e61eb683cae1c63329b6d854aefcd412e12d94ee235"}, + {file = "watchfiles-1.0.4-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:31f1a379c9dcbb3f09cf6be1b7e83b67c0e9faabed0471556d9438a4a4e14202"}, + {file = "watchfiles-1.0.4-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:ab594e75644421ae0a2484554832ca5895f8cab5ab62de30a1a57db460ce06c6"}, + {file = "watchfiles-1.0.4-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:fc2eb5d14a8e0d5df7b36288979176fbb39672d45184fc4b1c004d7c3ce29317"}, + {file = "watchfiles-1.0.4-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3f68d8e9d5a321163ddacebe97091000955a1b74cd43724e346056030b0bacee"}, + {file = "watchfiles-1.0.4-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f9ce064e81fe79faa925ff03b9f4c1a98b0bbb4a1b8c1b015afa93030cb21a49"}, + {file = "watchfiles-1.0.4-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b77d5622ac5cc91d21ae9c2b284b5d5c51085a0bdb7b518dba263d0af006132c"}, + {file = "watchfiles-1.0.4-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:1941b4e39de9b38b868a69b911df5e89dc43767feeda667b40ae032522b9b5f1"}, + {file = "watchfiles-1.0.4-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:4f8c4998506241dedf59613082d1c18b836e26ef2a4caecad0ec41e2a15e4226"}, + {file = "watchfiles-1.0.4-cp311-cp311-win32.whl", hash = "sha256:4ebbeca9360c830766b9f0df3640b791be569d988f4be6c06d6fae41f187f105"}, + {file = "watchfiles-1.0.4-cp311-cp311-win_amd64.whl", hash = "sha256:05d341c71f3d7098920f8551d4df47f7b57ac5b8dad56558064c3431bdfc0b74"}, + {file = "watchfiles-1.0.4-cp311-cp311-win_arm64.whl", hash = "sha256:32b026a6ab64245b584acf4931fe21842374da82372d5c039cba6bf99ef722f3"}, + {file = "watchfiles-1.0.4-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:229e6ec880eca20e0ba2f7e2249c85bae1999d330161f45c78d160832e026ee2"}, + {file = "watchfiles-1.0.4-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:5717021b199e8353782dce03bd8a8f64438832b84e2885c4a645f9723bf656d9"}, + {file = "watchfiles-1.0.4-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:0799ae68dfa95136dde7c472525700bd48777875a4abb2ee454e3ab18e9fc712"}, + {file = "watchfiles-1.0.4-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:43b168bba889886b62edb0397cab5b6490ffb656ee2fcb22dec8bfeb371a9e12"}, + {file = "watchfiles-1.0.4-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:fb2c46e275fbb9f0c92e7654b231543c7bbfa1df07cdc4b99fa73bedfde5c844"}, + {file = "watchfiles-1.0.4-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:857f5fc3aa027ff5e57047da93f96e908a35fe602d24f5e5d8ce64bf1f2fc733"}, + {file = "watchfiles-1.0.4-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:55ccfd27c497b228581e2838d4386301227fc0cb47f5a12923ec2fe4f97b95af"}, + {file = "watchfiles-1.0.4-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5c11ea22304d17d4385067588123658e9f23159225a27b983f343fcffc3e796a"}, + {file = "watchfiles-1.0.4-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:74cb3ca19a740be4caa18f238298b9d472c850f7b2ed89f396c00a4c97e2d9ff"}, + {file = "watchfiles-1.0.4-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:c7cce76c138a91e720d1df54014a047e680b652336e1b73b8e3ff3158e05061e"}, + {file = "watchfiles-1.0.4-cp312-cp312-win32.whl", hash = "sha256:b045c800d55bc7e2cadd47f45a97c7b29f70f08a7c2fa13241905010a5493f94"}, + {file = "watchfiles-1.0.4-cp312-cp312-win_amd64.whl", hash = "sha256:c2acfa49dd0ad0bf2a9c0bb9a985af02e89345a7189be1efc6baa085e0f72d7c"}, + {file = "watchfiles-1.0.4-cp312-cp312-win_arm64.whl", hash = "sha256:22bb55a7c9e564e763ea06c7acea24fc5d2ee5dfc5dafc5cfbedfe58505e9f90"}, + {file = "watchfiles-1.0.4-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:8012bd820c380c3d3db8435e8cf7592260257b378b649154a7948a663b5f84e9"}, + {file = "watchfiles-1.0.4-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:aa216f87594f951c17511efe5912808dfcc4befa464ab17c98d387830ce07b60"}, + {file = "watchfiles-1.0.4-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:62c9953cf85529c05b24705639ffa390f78c26449e15ec34d5339e8108c7c407"}, + {file = "watchfiles-1.0.4-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:7cf684aa9bba4cd95ecb62c822a56de54e3ae0598c1a7f2065d51e24637a3c5d"}, + {file = "watchfiles-1.0.4-cp313-cp313-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f44a39aee3cbb9b825285ff979ab887a25c5d336e5ec3574f1506a4671556a8d"}, + {file = "watchfiles-1.0.4-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a38320582736922be8c865d46520c043bff350956dfc9fbaee3b2df4e1740a4b"}, + {file = "watchfiles-1.0.4-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:39f4914548b818540ef21fd22447a63e7be6e24b43a70f7642d21f1e73371590"}, + {file = "watchfiles-1.0.4-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f12969a3765909cf5dc1e50b2436eb2c0e676a3c75773ab8cc3aa6175c16e902"}, + {file = "watchfiles-1.0.4-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:0986902677a1a5e6212d0c49b319aad9cc48da4bd967f86a11bde96ad9676ca1"}, + {file = "watchfiles-1.0.4-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:308ac265c56f936636e3b0e3f59e059a40003c655228c131e1ad439957592303"}, + {file = "watchfiles-1.0.4-cp313-cp313-win32.whl", hash = "sha256:aee397456a29b492c20fda2d8961e1ffb266223625346ace14e4b6d861ba9c80"}, + {file = "watchfiles-1.0.4-cp313-cp313-win_amd64.whl", hash = "sha256:d6097538b0ae5c1b88c3b55afa245a66793a8fec7ada6755322e465fb1a0e8cc"}, + {file 
= "watchfiles-1.0.4-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:d3452c1ec703aa1c61e15dfe9d482543e4145e7c45a6b8566978fbb044265a21"}, + {file = "watchfiles-1.0.4-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:7b75fee5a16826cf5c46fe1c63116e4a156924d668c38b013e6276f2582230f0"}, + {file = "watchfiles-1.0.4-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4e997802d78cdb02623b5941830ab06f8860038faf344f0d288d325cc9c5d2ff"}, + {file = "watchfiles-1.0.4-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:e0611d244ce94d83f5b9aff441ad196c6e21b55f77f3c47608dcf651efe54c4a"}, + {file = "watchfiles-1.0.4-cp39-cp39-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9745a4210b59e218ce64c91deb599ae8775c8a9da4e95fb2ee6fe745fc87d01a"}, + {file = "watchfiles-1.0.4-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4810ea2ae622add560f4aa50c92fef975e475f7ac4900ce5ff5547b2434642d8"}, + {file = "watchfiles-1.0.4-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:740d103cd01458f22462dedeb5a3382b7f2c57d07ff033fbc9465919e5e1d0f3"}, + {file = "watchfiles-1.0.4-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cdbd912a61543a36aef85e34f212e5d2486e7c53ebfdb70d1e0b060cc50dd0bf"}, + {file = "watchfiles-1.0.4-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:0bc80d91ddaf95f70258cf78c471246846c1986bcc5fd33ccc4a1a67fcb40f9a"}, + {file = "watchfiles-1.0.4-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:ab0311bb2ffcd9f74b6c9de2dda1612c13c84b996d032cd74799adb656af4e8b"}, + {file = "watchfiles-1.0.4-cp39-cp39-win32.whl", hash = "sha256:02a526ee5b5a09e8168314c905fc545c9bc46509896ed282aeb5a8ba9bd6ca27"}, + {file = "watchfiles-1.0.4-cp39-cp39-win_amd64.whl", hash = "sha256:a5ae5706058b27c74bac987d615105da17724172d5aaacc6c362a40599b6de43"}, + {file = "watchfiles-1.0.4-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:cdcc92daeae268de1acf5b7befcd6cfffd9a047098199056c72e4623f531de18"}, + {file = "watchfiles-1.0.4-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:d8d3d9203705b5797f0af7e7e5baa17c8588030aaadb7f6a86107b7247303817"}, + {file = "watchfiles-1.0.4-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bdef5a1be32d0b07dcea3318a0be95d42c98ece24177820226b56276e06b63b0"}, + {file = "watchfiles-1.0.4-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:342622287b5604ddf0ed2d085f3a589099c9ae8b7331df3ae9845571586c4f3d"}, + {file = "watchfiles-1.0.4-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:9fe37a2de80aa785d340f2980276b17ef697ab8db6019b07ee4fd28a8359d2f3"}, + {file = "watchfiles-1.0.4-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:9d1ef56b56ed7e8f312c934436dea93bfa3e7368adfcf3df4c0da6d4de959a1e"}, + {file = "watchfiles-1.0.4-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:95b42cac65beae3a362629950c444077d1b44f1790ea2772beaea95451c086bb"}, + {file = "watchfiles-1.0.4-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5e0227b8ed9074c6172cf55d85b5670199c99ab11fd27d2c473aa30aec67ee42"}, + {file = "watchfiles-1.0.4.tar.gz", hash = "sha256:6ba473efd11062d73e4f00c2b730255f9c1bdd73cd5f9fe5b5da8dbd4a717205"}, ] [package.dependencies] @@ -4322,6 +4818,7 @@ version = "0.2.13" description = "Measures the displayed width of unicode strings in a terminal" optional = false python-versions = "*" +groups = ["main"] files = [ {file = 
"wcwidth-0.2.13-py2.py3-none-any.whl", hash = "sha256:3da69048e4540d84af32131829ff948f1e022c1c6bdb8d6102117aac784f6859"}, {file = "wcwidth-0.2.13.tar.gz", hash = "sha256:72ea0c06399eb286d978fdedb6923a9eb47e1c486ce63e9b4e64fc18303972b5"}, @@ -4329,108 +4826,93 @@ files = [ [[package]] name = "websockets" -version = "13.1" +version = "14.2" description = "An implementation of the WebSocket Protocol (RFC 6455 & 7692)" optional = false -python-versions = ">=3.8" -files = [ - {file = "websockets-13.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:f48c749857f8fb598fb890a75f540e3221d0976ed0bf879cf3c7eef34151acee"}, - {file = "websockets-13.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:c7e72ce6bda6fb9409cc1e8164dd41d7c91466fb599eb047cfda72fe758a34a7"}, - {file = "websockets-13.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:f779498eeec470295a2b1a5d97aa1bc9814ecd25e1eb637bd9d1c73a327387f6"}, - {file = "websockets-13.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4676df3fe46956fbb0437d8800cd5f2b6d41143b6e7e842e60554398432cf29b"}, - {file = "websockets-13.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a7affedeb43a70351bb811dadf49493c9cfd1ed94c9c70095fd177e9cc1541fa"}, - {file = "websockets-13.1-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1971e62d2caa443e57588e1d82d15f663b29ff9dfe7446d9964a4b6f12c1e700"}, - {file = "websockets-13.1-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:5f2e75431f8dc4a47f31565a6e1355fb4f2ecaa99d6b89737527ea917066e26c"}, - {file = "websockets-13.1-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:58cf7e75dbf7e566088b07e36ea2e3e2bd5676e22216e4cad108d4df4a7402a0"}, - {file = "websockets-13.1-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:c90d6dec6be2c7d03378a574de87af9b1efea77d0c52a8301dd831ece938452f"}, - {file = "websockets-13.1-cp310-cp310-win32.whl", hash = "sha256:730f42125ccb14602f455155084f978bd9e8e57e89b569b4d7f0f0c17a448ffe"}, - {file = "websockets-13.1-cp310-cp310-win_amd64.whl", hash = "sha256:5993260f483d05a9737073be197371940c01b257cc45ae3f1d5d7adb371b266a"}, - {file = "websockets-13.1-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:61fc0dfcda609cda0fc9fe7977694c0c59cf9d749fbb17f4e9483929e3c48a19"}, - {file = "websockets-13.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:ceec59f59d092c5007e815def4ebb80c2de330e9588e101cf8bd94c143ec78a5"}, - {file = "websockets-13.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:c1dca61c6db1166c48b95198c0b7d9c990b30c756fc2923cc66f68d17dc558fd"}, - {file = "websockets-13.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:308e20f22c2c77f3f39caca508e765f8725020b84aa963474e18c59accbf4c02"}, - {file = "websockets-13.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:62d516c325e6540e8a57b94abefc3459d7dab8ce52ac75c96cad5549e187e3a7"}, - {file = "websockets-13.1-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:87c6e35319b46b99e168eb98472d6c7d8634ee37750d7693656dc766395df096"}, - {file = "websockets-13.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:5f9fee94ebafbc3117c30be1844ed01a3b177bb6e39088bc6b2fa1dc15572084"}, - {file = "websockets-13.1-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:7c1e90228c2f5cdde263253fa5db63e6653f1c00e7ec64108065a0b9713fa1b3"}, - {file 
= "websockets-13.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:6548f29b0e401eea2b967b2fdc1c7c7b5ebb3eeb470ed23a54cd45ef078a0db9"}, - {file = "websockets-13.1-cp311-cp311-win32.whl", hash = "sha256:c11d4d16e133f6df8916cc5b7e3e96ee4c44c936717d684a94f48f82edb7c92f"}, - {file = "websockets-13.1-cp311-cp311-win_amd64.whl", hash = "sha256:d04f13a1d75cb2b8382bdc16ae6fa58c97337253826dfe136195b7f89f661557"}, - {file = "websockets-13.1-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:9d75baf00138f80b48f1eac72ad1535aac0b6461265a0bcad391fc5aba875cfc"}, - {file = "websockets-13.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:9b6f347deb3dcfbfde1c20baa21c2ac0751afaa73e64e5b693bb2b848efeaa49"}, - {file = "websockets-13.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:de58647e3f9c42f13f90ac7e5f58900c80a39019848c5547bc691693098ae1bd"}, - {file = "websockets-13.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a1b54689e38d1279a51d11e3467dd2f3a50f5f2e879012ce8f2d6943f00e83f0"}, - {file = "websockets-13.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:cf1781ef73c073e6b0f90af841aaf98501f975d306bbf6221683dd594ccc52b6"}, - {file = "websockets-13.1-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8d23b88b9388ed85c6faf0e74d8dec4f4d3baf3ecf20a65a47b836d56260d4b9"}, - {file = "websockets-13.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:3c78383585f47ccb0fcf186dcb8a43f5438bd7d8f47d69e0b56f71bf431a0a68"}, - {file = "websockets-13.1-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:d6d300f8ec35c24025ceb9b9019ae9040c1ab2f01cddc2bcc0b518af31c75c14"}, - {file = "websockets-13.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:a9dcaf8b0cc72a392760bb8755922c03e17a5a54e08cca58e8b74f6902b433cf"}, - {file = "websockets-13.1-cp312-cp312-win32.whl", hash = "sha256:2f85cf4f2a1ba8f602298a853cec8526c2ca42a9a4b947ec236eaedb8f2dc80c"}, - {file = "websockets-13.1-cp312-cp312-win_amd64.whl", hash = "sha256:38377f8b0cdeee97c552d20cf1865695fcd56aba155ad1b4ca8779a5b6ef4ac3"}, - {file = "websockets-13.1-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:a9ab1e71d3d2e54a0aa646ab6d4eebfaa5f416fe78dfe4da2839525dc5d765c6"}, - {file = "websockets-13.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:b9d7439d7fab4dce00570bb906875734df13d9faa4b48e261c440a5fec6d9708"}, - {file = "websockets-13.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:327b74e915cf13c5931334c61e1a41040e365d380f812513a255aa804b183418"}, - {file = "websockets-13.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:325b1ccdbf5e5725fdcb1b0e9ad4d2545056479d0eee392c291c1bf76206435a"}, - {file = "websockets-13.1-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:346bee67a65f189e0e33f520f253d5147ab76ae42493804319b5716e46dddf0f"}, - {file = "websockets-13.1-cp313-cp313-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:91a0fa841646320ec0d3accdff5b757b06e2e5c86ba32af2e0815c96c7a603c5"}, - {file = "websockets-13.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:18503d2c5f3943e93819238bf20df71982d193f73dcecd26c94514f417f6b135"}, - {file = "websockets-13.1-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:a9cd1af7e18e5221d2878378fbc287a14cd527fdd5939ed56a18df8a31136bb2"}, - {file = "websockets-13.1-cp313-cp313-musllinux_1_2_x86_64.whl", 
hash = "sha256:70c5be9f416aa72aab7a2a76c90ae0a4fe2755c1816c153c1a2bcc3333ce4ce6"}, - {file = "websockets-13.1-cp313-cp313-win32.whl", hash = "sha256:624459daabeb310d3815b276c1adef475b3e6804abaf2d9d2c061c319f7f187d"}, - {file = "websockets-13.1-cp313-cp313-win_amd64.whl", hash = "sha256:c518e84bb59c2baae725accd355c8dc517b4a3ed8db88b4bc93c78dae2974bf2"}, - {file = "websockets-13.1-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:c7934fd0e920e70468e676fe7f1b7261c1efa0d6c037c6722278ca0228ad9d0d"}, - {file = "websockets-13.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:149e622dc48c10ccc3d2760e5f36753db9cacf3ad7bc7bbbfd7d9c819e286f23"}, - {file = "websockets-13.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:a569eb1b05d72f9bce2ebd28a1ce2054311b66677fcd46cf36204ad23acead8c"}, - {file = "websockets-13.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:95df24ca1e1bd93bbca51d94dd049a984609687cb2fb08a7f2c56ac84e9816ea"}, - {file = "websockets-13.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d8dbb1bf0c0a4ae8b40bdc9be7f644e2f3fb4e8a9aca7145bfa510d4a374eeb7"}, - {file = "websockets-13.1-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:035233b7531fb92a76beefcbf479504db8c72eb3bff41da55aecce3a0f729e54"}, - {file = "websockets-13.1-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:e4450fc83a3df53dec45922b576e91e94f5578d06436871dce3a6be38e40f5db"}, - {file = "websockets-13.1-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:463e1c6ec853202dd3657f156123d6b4dad0c546ea2e2e38be2b3f7c5b8e7295"}, - {file = "websockets-13.1-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:6d6855bbe70119872c05107e38fbc7f96b1d8cb047d95c2c50869a46c65a8e96"}, - {file = "websockets-13.1-cp38-cp38-win32.whl", hash = "sha256:204e5107f43095012b00f1451374693267adbb832d29966a01ecc4ce1db26faf"}, - {file = "websockets-13.1-cp38-cp38-win_amd64.whl", hash = "sha256:485307243237328c022bc908b90e4457d0daa8b5cf4b3723fd3c4a8012fce4c6"}, - {file = "websockets-13.1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:9b37c184f8b976f0c0a231a5f3d6efe10807d41ccbe4488df8c74174805eea7d"}, - {file = "websockets-13.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:163e7277e1a0bd9fb3c8842a71661ad19c6aa7bb3d6678dc7f89b17fbcc4aeb7"}, - {file = "websockets-13.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:4b889dbd1342820cc210ba44307cf75ae5f2f96226c0038094455a96e64fb07a"}, - {file = "websockets-13.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:586a356928692c1fed0eca68b4d1c2cbbd1ca2acf2ac7e7ebd3b9052582deefa"}, - {file = "websockets-13.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7bd6abf1e070a6b72bfeb71049d6ad286852e285f146682bf30d0296f5fbadfa"}, - {file = "websockets-13.1-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6d2aad13a200e5934f5a6767492fb07151e1de1d6079c003ab31e1823733ae79"}, - {file = "websockets-13.1-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:df01aea34b6e9e33572c35cd16bae5a47785e7d5c8cb2b54b2acdb9678315a17"}, - {file = "websockets-13.1-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:e54affdeb21026329fb0744ad187cf812f7d3c2aa702a5edb562b325191fcab6"}, - {file = "websockets-13.1-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:9ef8aa8bdbac47f4968a5d66462a2a0935d044bf35c0e5a8af152d58516dbeb5"}, - {file = 
"websockets-13.1-cp39-cp39-win32.whl", hash = "sha256:deeb929efe52bed518f6eb2ddc00cc496366a14c726005726ad62c2dd9017a3c"}, - {file = "websockets-13.1-cp39-cp39-win_amd64.whl", hash = "sha256:7c65ffa900e7cc958cd088b9a9157a8141c991f8c53d11087e6fb7277a03f81d"}, - {file = "websockets-13.1-pp310-pypy310_pp73-macosx_10_15_x86_64.whl", hash = "sha256:5dd6da9bec02735931fccec99d97c29f47cc61f644264eb995ad6c0c27667238"}, - {file = "websockets-13.1-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:2510c09d8e8df777177ee3d40cd35450dc169a81e747455cc4197e63f7e7bfe5"}, - {file = "websockets-13.1-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f1c3cf67185543730888b20682fb186fc8d0fa6f07ccc3ef4390831ab4b388d9"}, - {file = "websockets-13.1-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:bcc03c8b72267e97b49149e4863d57c2d77f13fae12066622dc78fe322490fe6"}, - {file = "websockets-13.1-pp310-pypy310_pp73-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:004280a140f220c812e65f36944a9ca92d766b6cc4560be652a0a3883a79ed8a"}, - {file = "websockets-13.1-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:e2620453c075abeb0daa949a292e19f56de518988e079c36478bacf9546ced23"}, - {file = "websockets-13.1-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:9156c45750b37337f7b0b00e6248991a047be4aa44554c9886fe6bdd605aab3b"}, - {file = "websockets-13.1-pp38-pypy38_pp73-macosx_11_0_arm64.whl", hash = "sha256:80c421e07973a89fbdd93e6f2003c17d20b69010458d3a8e37fb47874bd67d51"}, - {file = "websockets-13.1-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:82d0ba76371769d6a4e56f7e83bb8e81846d17a6190971e38b5de108bde9b0d7"}, - {file = "websockets-13.1-pp38-pypy38_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e9875a0143f07d74dc5e1ded1c4581f0d9f7ab86c78994e2ed9e95050073c94d"}, - {file = "websockets-13.1-pp38-pypy38_pp73-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a11e38ad8922c7961447f35c7b17bffa15de4d17c70abd07bfbe12d6faa3e027"}, - {file = "websockets-13.1-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:4059f790b6ae8768471cddb65d3c4fe4792b0ab48e154c9f0a04cefaabcd5978"}, - {file = "websockets-13.1-pp39-pypy39_pp73-macosx_10_15_x86_64.whl", hash = "sha256:25c35bf84bf7c7369d247f0b8cfa157f989862c49104c5cf85cb5436a641d93e"}, - {file = "websockets-13.1-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:83f91d8a9bb404b8c2c41a707ac7f7f75b9442a0a876df295de27251a856ad09"}, - {file = "websockets-13.1-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7a43cfdcddd07f4ca2b1afb459824dd3c6d53a51410636a2c7fc97b9a8cf4842"}, - {file = "websockets-13.1-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:48a2ef1381632a2f0cb4efeff34efa97901c9fbc118e01951ad7cfc10601a9bb"}, - {file = "websockets-13.1-pp39-pypy39_pp73-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:459bf774c754c35dbb487360b12c5727adab887f1622b8aed5755880a21c4a20"}, - {file = "websockets-13.1-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:95858ca14a9f6fa8413d29e0a585b31b278388aa775b8a81fa24830123874678"}, - {file = "websockets-13.1-py3-none-any.whl", hash = "sha256:a9a396a6ad26130cdae92ae10c36af09d9bfe6cafe69670fd3b6da9b07b4044f"}, - {file = 
"websockets-13.1.tar.gz", hash = "sha256:a3b3366087c1bc0a2795111edcadddb8b3b59509d5db5d7ea3fdd69f954a8878"}, +python-versions = ">=3.9" +groups = ["main"] +files = [ + {file = "websockets-14.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:e8179f95323b9ab1c11723e5d91a89403903f7b001828161b480a7810b334885"}, + {file = "websockets-14.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:0d8c3e2cdb38f31d8bd7d9d28908005f6fa9def3324edb9bf336d7e4266fd397"}, + {file = "websockets-14.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:714a9b682deb4339d39ffa674f7b674230227d981a37d5d174a4a83e3978a610"}, + {file = "websockets-14.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f2e53c72052f2596fb792a7acd9704cbc549bf70fcde8a99e899311455974ca3"}, + {file = "websockets-14.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e3fbd68850c837e57373d95c8fe352203a512b6e49eaae4c2f4088ef8cf21980"}, + {file = "websockets-14.2-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4b27ece32f63150c268593d5fdb82819584831a83a3f5809b7521df0685cd5d8"}, + {file = "websockets-14.2-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:4daa0faea5424d8713142b33825fff03c736f781690d90652d2c8b053345b0e7"}, + {file = "websockets-14.2-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:bc63cee8596a6ec84d9753fd0fcfa0452ee12f317afe4beae6b157f0070c6c7f"}, + {file = "websockets-14.2-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:7a570862c325af2111343cc9b0257b7119b904823c675b22d4ac547163088d0d"}, + {file = "websockets-14.2-cp310-cp310-win32.whl", hash = "sha256:75862126b3d2d505e895893e3deac0a9339ce750bd27b4ba515f008b5acf832d"}, + {file = "websockets-14.2-cp310-cp310-win_amd64.whl", hash = "sha256:cc45afb9c9b2dc0852d5c8b5321759cf825f82a31bfaf506b65bf4668c96f8b2"}, + {file = "websockets-14.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:3bdc8c692c866ce5fefcaf07d2b55c91d6922ac397e031ef9b774e5b9ea42166"}, + {file = "websockets-14.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:c93215fac5dadc63e51bcc6dceca72e72267c11def401d6668622b47675b097f"}, + {file = "websockets-14.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:1c9b6535c0e2cf8a6bf938064fb754aaceb1e6a4a51a80d884cd5db569886910"}, + {file = "websockets-14.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0a52a6d7cf6938e04e9dceb949d35fbdf58ac14deea26e685ab6368e73744e4c"}, + {file = "websockets-14.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9f05702e93203a6ff5226e21d9b40c037761b2cfb637187c9802c10f58e40473"}, + {file = "websockets-14.2-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:22441c81a6748a53bfcb98951d58d1af0661ab47a536af08920d129b4d1c3473"}, + {file = "websockets-14.2-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:efd9b868d78b194790e6236d9cbc46d68aba4b75b22497eb4ab64fa640c3af56"}, + {file = "websockets-14.2-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:1a5a20d5843886d34ff8c57424cc65a1deda4375729cbca4cb6b3353f3ce4142"}, + {file = "websockets-14.2-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:34277a29f5303d54ec6468fb525d99c99938607bc96b8d72d675dee2b9f5bf1d"}, + {file = "websockets-14.2-cp311-cp311-win32.whl", hash = "sha256:02687db35dbc7d25fd541a602b5f8e451a238ffa033030b172ff86a93cb5dc2a"}, + {file = 
"websockets-14.2-cp311-cp311-win_amd64.whl", hash = "sha256:862e9967b46c07d4dcd2532e9e8e3c2825e004ffbf91a5ef9dde519ee2effb0b"}, + {file = "websockets-14.2-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:1f20522e624d7ffbdbe259c6b6a65d73c895045f76a93719aa10cd93b3de100c"}, + {file = "websockets-14.2-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:647b573f7d3ada919fd60e64d533409a79dcf1ea21daeb4542d1d996519ca967"}, + {file = "websockets-14.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:6af99a38e49f66be5a64b1e890208ad026cda49355661549c507152113049990"}, + {file = "websockets-14.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:091ab63dfc8cea748cc22c1db2814eadb77ccbf82829bac6b2fbe3401d548eda"}, + {file = "websockets-14.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b374e8953ad477d17e4851cdc66d83fdc2db88d9e73abf755c94510ebddceb95"}, + {file = "websockets-14.2-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a39d7eceeea35db85b85e1169011bb4321c32e673920ae9c1b6e0978590012a3"}, + {file = "websockets-14.2-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:0a6f3efd47ffd0d12080594f434faf1cd2549b31e54870b8470b28cc1d3817d9"}, + {file = "websockets-14.2-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:065ce275e7c4ffb42cb738dd6b20726ac26ac9ad0a2a48e33ca632351a737267"}, + {file = "websockets-14.2-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:e9d0e53530ba7b8b5e389c02282f9d2aa47581514bd6049d3a7cffe1385cf5fe"}, + {file = "websockets-14.2-cp312-cp312-win32.whl", hash = "sha256:20e6dd0984d7ca3037afcb4494e48c74ffb51e8013cac71cf607fffe11df7205"}, + {file = "websockets-14.2-cp312-cp312-win_amd64.whl", hash = "sha256:44bba1a956c2c9d268bdcdf234d5e5ff4c9b6dc3e300545cbe99af59dda9dcce"}, + {file = "websockets-14.2-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:6f1372e511c7409a542291bce92d6c83320e02c9cf392223272287ce55bc224e"}, + {file = "websockets-14.2-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:4da98b72009836179bb596a92297b1a61bb5a830c0e483a7d0766d45070a08ad"}, + {file = "websockets-14.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:f8a86a269759026d2bde227652b87be79f8a734e582debf64c9d302faa1e9f03"}, + {file = "websockets-14.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:86cf1aaeca909bf6815ea714d5c5736c8d6dd3a13770e885aafe062ecbd04f1f"}, + {file = "websockets-14.2-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a9b0f6c3ba3b1240f602ebb3971d45b02cc12bd1845466dd783496b3b05783a5"}, + {file = "websockets-14.2-cp313-cp313-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:669c3e101c246aa85bc8534e495952e2ca208bd87994650b90a23d745902db9a"}, + {file = "websockets-14.2-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:eabdb28b972f3729348e632ab08f2a7b616c7e53d5414c12108c29972e655b20"}, + {file = "websockets-14.2-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:2066dc4cbcc19f32c12a5a0e8cc1b7ac734e5b64ac0a325ff8353451c4b15ef2"}, + {file = "websockets-14.2-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:ab95d357cd471df61873dadf66dd05dd4709cae001dd6342edafc8dc6382f307"}, + {file = "websockets-14.2-cp313-cp313-win32.whl", hash = "sha256:a9e72fb63e5f3feacdcf5b4ff53199ec8c18d66e325c34ee4c551ca748623bbc"}, + {file = "websockets-14.2-cp313-cp313-win_amd64.whl", hash = 
"sha256:b439ea828c4ba99bb3176dc8d9b933392a2413c0f6b149fdcba48393f573377f"}, + {file = "websockets-14.2-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:7cd5706caec1686c5d233bc76243ff64b1c0dc445339bd538f30547e787c11fe"}, + {file = "websockets-14.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:ec607328ce95a2f12b595f7ae4c5d71bf502212bddcea528290b35c286932b12"}, + {file = "websockets-14.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:da85651270c6bfb630136423037dd4975199e5d4114cae6d3066641adcc9d1c7"}, + {file = "websockets-14.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c3ecadc7ce90accf39903815697917643f5b7cfb73c96702318a096c00aa71f5"}, + {file = "websockets-14.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1979bee04af6a78608024bad6dfcc0cc930ce819f9e10342a29a05b5320355d0"}, + {file = "websockets-14.2-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2dddacad58e2614a24938a50b85969d56f88e620e3f897b7d80ac0d8a5800258"}, + {file = "websockets-14.2-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:89a71173caaf75fa71a09a5f614f450ba3ec84ad9fca47cb2422a860676716f0"}, + {file = "websockets-14.2-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:6af6a4b26eea4fc06c6818a6b962a952441e0e39548b44773502761ded8cc1d4"}, + {file = "websockets-14.2-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:80c8efa38957f20bba0117b48737993643204645e9ec45512579132508477cfc"}, + {file = "websockets-14.2-cp39-cp39-win32.whl", hash = "sha256:2e20c5f517e2163d76e2729104abc42639c41cf91f7b1839295be43302713661"}, + {file = "websockets-14.2-cp39-cp39-win_amd64.whl", hash = "sha256:b4c8cef610e8d7c70dea92e62b6814a8cd24fbd01d7103cc89308d2bfe1659ef"}, + {file = "websockets-14.2-pp310-pypy310_pp73-macosx_10_15_x86_64.whl", hash = "sha256:d7d9cafbccba46e768be8a8ad4635fa3eae1ffac4c6e7cb4eb276ba41297ed29"}, + {file = "websockets-14.2-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:c76193c1c044bd1e9b3316dcc34b174bbf9664598791e6fb606d8d29000e070c"}, + {file = "websockets-14.2-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fd475a974d5352390baf865309fe37dec6831aafc3014ffac1eea99e84e83fc2"}, + {file = "websockets-14.2-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2c6c0097a41968b2e2b54ed3424739aab0b762ca92af2379f152c1aef0187e1c"}, + {file = "websockets-14.2-pp310-pypy310_pp73-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6d7ff794c8b36bc402f2e07c0b2ceb4a2424147ed4785ff03e2a7af03711d60a"}, + {file = "websockets-14.2-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:dec254fcabc7bd488dab64846f588fc5b6fe0d78f641180030f8ea27b76d72c3"}, + {file = "websockets-14.2-pp39-pypy39_pp73-macosx_10_15_x86_64.whl", hash = "sha256:bbe03eb853e17fd5b15448328b4ec7fb2407d45fb0245036d06a3af251f8e48f"}, + {file = "websockets-14.2-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:a3c4aa3428b904d5404a0ed85f3644d37e2cb25996b7f096d77caeb0e96a3b42"}, + {file = "websockets-14.2-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:577a4cebf1ceaf0b65ffc42c54856214165fb8ceeba3935852fc33f6b0c55e7f"}, + {file = "websockets-14.2-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ad1c1d02357b7665e700eca43a31d52814ad9ad9b89b58118bdabc365454b574"}, + 
{file = "websockets-14.2-pp39-pypy39_pp73-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f390024a47d904613577df83ba700bd189eedc09c57af0a904e5c39624621270"}, + {file = "websockets-14.2-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:3c1426c021c38cf92b453cdf371228d3430acd775edee6bac5a4d577efc72365"}, + {file = "websockets-14.2-py3-none-any.whl", hash = "sha256:7a6ceec4ea84469f15cf15807a747e9efe57e369c384fa86e022b3bea679b79b"}, + {file = "websockets-14.2.tar.gz", hash = "sha256:5059ed9c54945efb321f097084b4c7e52c246f2c869815876a69d1efc4ad6eb5"}, ] [[package]] name = "werkzeug" -version = "3.0.6" +version = "3.1.3" description = "The comprehensive WSGI web application library." optional = false -python-versions = ">=3.8" +python-versions = ">=3.9" +groups = ["main"] files = [ - {file = "werkzeug-3.0.6-py3-none-any.whl", hash = "sha256:1bc0c2310d2fbb07b1dd1105eba2f7af72f322e1e455f2f93c993bee8c8a5f17"}, - {file = "werkzeug-3.0.6.tar.gz", hash = "sha256:a8dd59d4de28ca70471a34cba79bed5f7ef2e036a76b3ab0835474246eb41f8d"}, + {file = "werkzeug-3.1.3-py3-none-any.whl", hash = "sha256:54b78bf3716d19a65be4fceccc0d1d7b89e608834989dfae50ea87564639213e"}, + {file = "werkzeug-3.1.3.tar.gz", hash = "sha256:60723ce945c19328679790e3282cc758aa4a6040e4bb330f53d30fa546d44746"}, ] [package.dependencies] @@ -4439,39 +4921,28 @@ MarkupSafe = ">=2.1.1" [package.extras] watchdog = ["watchdog (>=2.3)"] -[[package]] -name = "wheel" -version = "0.44.0" -description = "A built-package format for Python" -optional = false -python-versions = ">=3.8" -files = [ - {file = "wheel-0.44.0-py3-none-any.whl", hash = "sha256:2376a90c98cc337d18623527a97c31797bd02bad0033d41547043a1cbfbe448f"}, - {file = "wheel-0.44.0.tar.gz", hash = "sha256:a29c3f2817e95ab89aa4660681ad547c0e9547f20e75b0562fe7723c9a2a9d49"}, -] - -[package.extras] -test = ["pytest (>=6.0.0)", "setuptools (>=65)"] - [[package]] name = "win-unicode-console" version = "0.5" description = "Enable Unicode input and display when running Python from Windows console." 
optional = false python-versions = "*" +groups = ["main"] +markers = "platform_system == \"Windows\" and platform_python_implementation != \"PyPy\"" files = [ {file = "win_unicode_console-0.5.zip", hash = "sha256:d4142d4d56d46f449d6f00536a73625a871cba040f0bc1a2e305a04578f07d1e"}, ] [[package]] name = "xmltodict" -version = "0.13.0" +version = "0.14.2" description = "Makes working with XML feel like you are working with JSON" optional = false -python-versions = ">=3.4" +python-versions = ">=3.6" +groups = ["main"] files = [ - {file = "xmltodict-0.13.0-py2.py3-none-any.whl", hash = "sha256:aa89e8fd76320154a40d19a0df04a4695fb9dc5ba977cbb68ab3e4eb225e7852"}, - {file = "xmltodict-0.13.0.tar.gz", hash = "sha256:341595a488e3e01a85a9d8911d8912fd922ede5fecc4dce437eb4b6c8d037e56"}, + {file = "xmltodict-0.14.2-py2.py3-none-any.whl", hash = "sha256:20cc7d723ed729276e808f26fb6b3599f786cbc37e06c65e192ba77c40f20aac"}, + {file = "xmltodict-0.14.2.tar.gz", hash = "sha256:201e7c28bb210e374999d1dde6382923ab0ed1a8a5faeece48ab525b7810a553"}, ] [[package]] @@ -4480,6 +4951,7 @@ version = "4.5.1" description = "Python interface for YARA" optional = false python-versions = "*" +groups = ["main"] files = [ {file = "yara_python-4.5.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:7c92219bf91caea277bc2736df70dda3709834c297a4a5906f1d9a46cd03579a"}, {file = "yara_python-4.5.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:6e8e9eb5a49a70a013bf45e0ec97210b7cb124813271fddc666c3cfb1308a2d5"}, @@ -4569,6 +5041,7 @@ version = "5.0" description = "Very basic event publishing system" optional = false python-versions = ">=3.7" +groups = ["main"] files = [ {file = "zope.event-5.0-py3-none-any.whl", hash = "sha256:2832e95014f4db26c47a13fdaef84cef2f4df37e66b59d8f1f4a8f319a632c26"}, {file = "zope.event-5.0.tar.gz", hash = "sha256:bac440d8d9891b4068e2b5a2c5e2c9765a9df762944bda6955f96bb9b91e67cd"}, @@ -4583,59 +5056,63 @@ test = ["zope.testrunner"] [[package]] name = "zope-interface" -version = "7.0.3" +version = "7.2" description = "Interfaces for Python" optional = false python-versions = ">=3.8" -files = [ - {file = "zope.interface-7.0.3-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:9b9369671a20b8d039b8e5a1a33abd12e089e319a3383b4cc0bf5c67bd05fe7b"}, - {file = "zope.interface-7.0.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:db6237e8fa91ea4f34d7e2d16d74741187e9105a63bbb5686c61fea04cdbacca"}, - {file = "zope.interface-7.0.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:53d678bb1c3b784edbfb0adeebfeea6bf479f54da082854406a8f295d36f8386"}, - {file = "zope.interface-7.0.3-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3aa8fcbb0d3c2be1bfd013a0f0acd636f6ed570c287743ae2bbd467ee967154d"}, - {file = "zope.interface-7.0.3-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6195c3c03fef9f87c0dbee0b3b6451df6e056322463cf35bca9a088e564a3c58"}, - {file = "zope.interface-7.0.3-cp310-cp310-win_amd64.whl", hash = "sha256:11fa1382c3efb34abf16becff8cb214b0b2e3144057c90611621f2d186b7e1b7"}, - {file = "zope.interface-7.0.3-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:af94e429f9d57b36e71ef4e6865182090648aada0cb2d397ae2b3f7fc478493a"}, - {file = "zope.interface-7.0.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:6dd647fcd765030638577fe6984284e0ebba1a1008244c8a38824be096e37fe3"}, - {file = 
"zope.interface-7.0.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1bee1b722077d08721005e8da493ef3adf0b7908e0cd85cc7dc836ac117d6f32"}, - {file = "zope.interface-7.0.3-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2545d6d7aac425d528cd9bf0d9e55fcd47ab7fd15f41a64b1c4bf4c6b24946dc"}, - {file = "zope.interface-7.0.3-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6d04b11ea47c9c369d66340dbe51e9031df2a0de97d68f442305ed7625ad6493"}, - {file = "zope.interface-7.0.3-cp311-cp311-win_amd64.whl", hash = "sha256:064ade95cb54c840647205987c7b557f75d2b2f7d1a84bfab4cf81822ef6e7d1"}, - {file = "zope.interface-7.0.3-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:3fcdc76d0cde1c09c37b7c6b0f8beba2d857d8417b055d4f47df9c34ec518bdd"}, - {file = "zope.interface-7.0.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:3d4b91821305c8d8f6e6207639abcbdaf186db682e521af7855d0bea3047c8ca"}, - {file = "zope.interface-7.0.3-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:35062d93bc49bd9b191331c897a96155ffdad10744ab812485b6bad5b588d7e4"}, - {file = "zope.interface-7.0.3-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c96b3e6b0d4f6ddfec4e947130ec30bd2c7b19db6aa633777e46c8eecf1d6afd"}, - {file = "zope.interface-7.0.3-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7e0c151a6c204f3830237c59ee4770cc346868a7a1af6925e5e38650141a7f05"}, - {file = "zope.interface-7.0.3-cp312-cp312-win_amd64.whl", hash = "sha256:3de1d553ce72868b77a7e9d598c9bff6d3816ad2b4cc81c04f9d8914603814f3"}, - {file = "zope.interface-7.0.3-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ab985c566a99cc5f73bc2741d93f1ed24a2cc9da3890144d37b9582965aff996"}, - {file = "zope.interface-7.0.3-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d976fa7b5faf5396eb18ce6c132c98e05504b52b60784e3401f4ef0b2e66709b"}, - {file = "zope.interface-7.0.3-cp313-cp313-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:21a207c6b2c58def5011768140861a73f5240f4f39800625072ba84e76c9da0b"}, - {file = "zope.interface-7.0.3-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:382d31d1e68877061daaa6499468e9eb38eb7625d4369b1615ac08d3860fe896"}, - {file = "zope.interface-7.0.3-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:2c4316a30e216f51acbd9fb318aa5af2e362b716596d82cbb92f9101c8f8d2e7"}, - {file = "zope.interface-7.0.3-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:01e6e58078ad2799130c14a1d34ec89044ada0e1495329d72ee0407b9ae5100d"}, - {file = "zope.interface-7.0.3-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:799ef7a444aebbad5a145c3b34bff012b54453cddbde3332d47ca07225792ea4"}, - {file = "zope.interface-7.0.3-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d3b7ce6d46fb0e60897d62d1ff370790ce50a57d40a651db91a3dde74f73b738"}, - {file = "zope.interface-7.0.3-cp38-cp38-win_amd64.whl", hash = "sha256:f418c88f09c3ba159b95a9d1cfcdbe58f208443abb1f3109f4b9b12fd60b187c"}, - {file = "zope.interface-7.0.3-cp39-cp39-macosx_10_9_x86_64.whl", hash = 
"sha256:84f8794bd59ca7d09d8fce43ae1b571be22f52748169d01a13d3ece8394d8b5b"}, - {file = "zope.interface-7.0.3-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:7d92920416f31786bc1b2f34cc4fc4263a35a407425319572cbf96b51e835cd3"}, - {file = "zope.interface-7.0.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:95e5913ec718010dc0e7c215d79a9683b4990e7026828eedfda5268e74e73e11"}, - {file = "zope.interface-7.0.3-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1eeeb92cb7d95c45e726e3c1afe7707919370addae7ed14f614e22217a536958"}, - {file = "zope.interface-7.0.3-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ecd32f30f40bfd8511b17666895831a51b532e93fc106bfa97f366589d3e4e0e"}, - {file = "zope.interface-7.0.3-cp39-cp39-win_amd64.whl", hash = "sha256:5112c530fa8aa2108a3196b9c2f078f5738c1c37cfc716970edc0df0414acda8"}, - {file = "zope.interface-7.0.3.tar.gz", hash = "sha256:cd2690d4b08ec9eaf47a85914fe513062b20da78d10d6d789a792c0b20307fb1"}, +groups = ["main"] +files = [ + {file = "zope.interface-7.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:ce290e62229964715f1011c3dbeab7a4a1e4971fd6f31324c4519464473ef9f2"}, + {file = "zope.interface-7.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:05b910a5afe03256b58ab2ba6288960a2892dfeef01336dc4be6f1b9ed02ab0a"}, + {file = "zope.interface-7.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:550f1c6588ecc368c9ce13c44a49b8d6b6f3ca7588873c679bd8fd88a1b557b6"}, + {file = "zope.interface-7.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0ef9e2f865721553c6f22a9ff97da0f0216c074bd02b25cf0d3af60ea4d6931d"}, + {file = "zope.interface-7.2-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:27f926f0dcb058211a3bb3e0e501c69759613b17a553788b2caeb991bed3b61d"}, + {file = "zope.interface-7.2-cp310-cp310-win_amd64.whl", hash = "sha256:144964649eba4c5e4410bb0ee290d338e78f179cdbfd15813de1a664e7649b3b"}, + {file = "zope.interface-7.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:1909f52a00c8c3dcab6c4fad5d13de2285a4b3c7be063b239b8dc15ddfb73bd2"}, + {file = "zope.interface-7.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:80ecf2451596f19fd607bb09953f426588fc1e79e93f5968ecf3367550396b22"}, + {file = "zope.interface-7.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:033b3923b63474800b04cba480b70f6e6243a62208071fc148354f3f89cc01b7"}, + {file = "zope.interface-7.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a102424e28c6b47c67923a1f337ede4a4c2bba3965b01cf707978a801fc7442c"}, + {file = "zope.interface-7.2-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:25e6a61dcb184453bb00eafa733169ab6d903e46f5c2ace4ad275386f9ab327a"}, + {file = "zope.interface-7.2-cp311-cp311-win_amd64.whl", hash = "sha256:3f6771d1647b1fc543d37640b45c06b34832a943c80d1db214a37c31161a93f1"}, + {file = "zope.interface-7.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:086ee2f51eaef1e4a52bd7d3111a0404081dadae87f84c0ad4ce2649d4f708b7"}, + {file = "zope.interface-7.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:21328fcc9d5b80768bf051faa35ab98fb979080c18e6f84ab3f27ce703bce465"}, + {file = 
"zope.interface-7.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f6dd02ec01f4468da0f234da9d9c8545c5412fef80bc590cc51d8dd084138a89"}, + {file = "zope.interface-7.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:8e7da17f53e25d1a3bde5da4601e026adc9e8071f9f6f936d0fe3fe84ace6d54"}, + {file = "zope.interface-7.2-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cab15ff4832580aa440dc9790b8a6128abd0b88b7ee4dd56abacbc52f212209d"}, + {file = "zope.interface-7.2-cp312-cp312-win_amd64.whl", hash = "sha256:29caad142a2355ce7cfea48725aa8bcf0067e2b5cc63fcf5cd9f97ad12d6afb5"}, + {file = "zope.interface-7.2-cp313-cp313-macosx_10_9_x86_64.whl", hash = "sha256:3e0350b51e88658d5ad126c6a57502b19d5f559f6cb0a628e3dc90442b53dd98"}, + {file = "zope.interface-7.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:15398c000c094b8855d7d74f4fdc9e73aa02d4d0d5c775acdef98cdb1119768d"}, + {file = "zope.interface-7.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:802176a9f99bd8cc276dcd3b8512808716492f6f557c11196d42e26c01a69a4c"}, + {file = "zope.interface-7.2-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:eb23f58a446a7f09db85eda09521a498e109f137b85fb278edb2e34841055398"}, + {file = "zope.interface-7.2-cp313-cp313-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a71a5b541078d0ebe373a81a3b7e71432c61d12e660f1d67896ca62d9628045b"}, + {file = "zope.interface-7.2-cp313-cp313-win_amd64.whl", hash = "sha256:4893395d5dd2ba655c38ceb13014fd65667740f09fa5bb01caa1e6284e48c0cd"}, + {file = "zope.interface-7.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:d3a8ffec2a50d8ec470143ea3d15c0c52d73df882eef92de7537e8ce13475e8a"}, + {file = "zope.interface-7.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:31d06db13a30303c08d61d5fb32154be51dfcbdb8438d2374ae27b4e069aac40"}, + {file = "zope.interface-7.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e204937f67b28d2dca73ca936d3039a144a081fc47a07598d44854ea2a106239"}, + {file = "zope.interface-7.2-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:224b7b0314f919e751f2bca17d15aad00ddbb1eadf1cb0190fa8175edb7ede62"}, + {file = "zope.interface-7.2-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:baf95683cde5bc7d0e12d8e7588a3eb754d7c4fa714548adcd96bdf90169f021"}, + {file = "zope.interface-7.2-cp38-cp38-win_amd64.whl", hash = "sha256:7dc5016e0133c1a1ec212fc87a4f7e7e562054549a99c73c8896fa3a9e80cbc7"}, + {file = "zope.interface-7.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:7bd449c306ba006c65799ea7912adbbfed071089461a19091a228998b82b1fdb"}, + {file = "zope.interface-7.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:a19a6cc9c6ce4b1e7e3d319a473cf0ee989cbbe2b39201d7c19e214d2dfb80c7"}, + {file = "zope.interface-7.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:72cd1790b48c16db85d51fbbd12d20949d7339ad84fd971427cf00d990c1f137"}, + {file = "zope.interface-7.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:52e446f9955195440e787596dccd1411f543743c359eeb26e9b2c02b077b0519"}, + {file = 
"zope.interface-7.2-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2ad9913fd858274db8dd867012ebe544ef18d218f6f7d1e3c3e6d98000f14b75"}, + {file = "zope.interface-7.2-cp39-cp39-win_amd64.whl", hash = "sha256:1090c60116b3da3bfdd0c03406e2f14a1ff53e5771aebe33fec1edc0a350175d"}, + {file = "zope.interface-7.2.tar.gz", hash = "sha256:8b49f1a3d1ee4cdaf5b32d2e738362c7f5e40ac8b46dd7d1a65e82a4872728fe"}, ] [package.dependencies] setuptools = "*" [package.extras] -docs = ["Sphinx", "repoze.sphinx.autointerface", "sphinx-rtd-theme"] -test = ["coverage (>=5.0.3)", "zope.event", "zope.testing"] -testing = ["coverage (>=5.0.3)", "zope.event", "zope.testing"] +docs = ["Sphinx", "furo", "repoze.sphinx.autointerface"] +test = ["coverage[toml]", "zope.event", "zope.testing"] +testing = ["coverage[toml]", "zope.event", "zope.testing"] [extras] maco = ["maco"] [metadata] -lock-version = "2.0" +lock-version = "2.1" python-versions = ">=3.10, <4.0" -content-hash = "ab65373ef8c8244e2d8237cb6208783a0276fa62f52545098cb12170c1cd7d76" +content-hash = "63e3b8417b14ca9fca563919021a939c18c21fecbb0b5840806aeebbf5f049c2" diff --git a/pyproject.toml b/pyproject.toml index 2d50b19820a..03dd99b6293 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -4,102 +4,101 @@ version = "0.1.0" description = "CAPE: Malware Configuration And Payload Extraction" authors = ["Kevin O'Reilly ", "doomedraven "] license = "MIT" +package-mode = false +requires-poetry = ">=2.0" [tool.poetry.dependencies] python = ">=3.10, <4.0" alembic = "1.9.4" gevent = "24.2.1" greenlet = "3.0.3" -Pebble = "4.6.3" +Pebble = "5.1.0" # pymisp = "2.4.144" -cryptography = "43.0.1" -requests = {version = "2.32.2", extras = ["security", "socks"]} +cryptography = ">=44.0.1" +requests = {version = "2.32.4", extras = ["security", "socks"]} # pyOpenSSL = "24.0.0" pefile = "*" -tldextract = "3.5.0" -oletools = "0.60" -olefile = "0.46" +tldextract = ">=5.1.2" +oletools = "0.60.2" +olefile = "0.47" # mixbox = "1.0.5" -capstone = "4.0.2" -pycryptodomex = "3.20.0" +capstone = "5.0.5" +pycryptodomex = ">=3.20.0" # xmltodict = "0.12.0" -requests-file = "1.5.1" -orjson = "3.9.15" +requests-file = ">=1.5.1" +orjson = ">=3.9.15" # maec = "4.1.0.17" -regex = "2021.7.6" -SFlock2 = {version = "0.3.66", extras = ["shellcode","linux"]} -# volatility3 = "2.0.0" +# regex = "2021.7.6" +SFlock2 = {version = ">=0.3.76", extras = ["shellcode","linux"]} +# volatility3 = "2.11.0" # XLMMacroDeobfuscator = "0.2.7" -pyzipper = "0.3.5" -flare-capa = "7.3.0" -Cython = "0.29.24" -# pyre2 = "0.3.6" # Dead for python3.11 -Django = "4.2.16" +pyzipper = "0.3.6" +flare-capa = "9.1.0" + +Cython = "3.0.11" +Django = ">=4.2.18" SQLAlchemy = "1.4.50" SQLAlchemy-Utils = "0.41.1" -Jinja2 = "^3.1.4" +Jinja2 = "^3.1.6" chardet = "4.0.0" pygal = "2.4.0" dpkt = "1.9.6" -dnspython = "2.6.1" +dnspython = "2.7.0" pytz = "2021.1" -maxminddb = "2.5.1" +maxminddb = "2.6.3" Pillow = ">=8.2.0" -python-whois = "0.7.3" +python-whois = "0.9.5" bs4 = "0.0.1" pydeep2 = "0.5.1" -django-recaptcha = "3.0.0" # https://pypi.org/project/django-recaptcha/ -django-crispy-forms = "1.14.0" +django-recaptcha = "4.0.0" # https://pypi.org/project/django-recaptcha/ +django-crispy-forms = "2.3" +crispy-bootstrap4 = "2024.10" django-settings-export = "1.2.1" -django-csp = "3.7" -django-extensions = "3.2.1" -django-ratelimit = "3.0.1" +django-csp = "3.8" +django-extensions = "3.2.3" +django-ratelimit = "4.1.0" # qrcode = "7.2" python-tlsh = "4.5.0" djangorestframework = "3.15.2" 
yara-python = "4.5.1" -netstruct = "1.1.2" pymongo = ">=4.0.1" # ImageHash = "4.3.1" -LnkParse3 = "1.2.0" -cachetools = "^5.3.0" -django-allauth = "0.54.0" # https://django-allauth.readthedocs.io/en/latest/configuration.html +LnkParse3 = "1.5.0" +cachetools = "^5.5.1" +django-allauth = "65.3.1" # https://django-allauth.readthedocs.io/en/latest/configuration.html # socks5man = {git = "https://github.com/CAPESandbox/socks5man.git", rev = "7b335d027297b67abdf28f38cc7d5d42c9d810b5"} # httpreplay = {git = "https://github.com/CAPESandbox/httpreplay.git", rev = "0d5a5b3144ab15f93189b83ca8188afde43db134"} # bingraph = {git = "https://github.com/CAPESandbox/binGraph.git", rev = "552d1210ac6770f8b202d0d1fc4610cc14d878ec"} -die-python = "0.1.0" -psycopg2-binary = "^2.9.5" -ruff = "0.0.290" -paramiko = "3.4.0" -psutil = "5.9.8" -# peepdf-3 = "4.0.0" -maco = "1.1.8" - -Werkzeug = "3.0.6" -packaging = "23.1" -setuptools = "70.0.0" +psycopg2-binary = "^2.9.10" +ruff = ">=0.7.2" +paramiko = "3.5.0" +psutil = "6.1.1" +peepdf-3 = "5.0.0" +pyre2-updated = ">=0.3.8" +Werkzeug = "3.1.3" +packaging = "24.2" +setuptools = "78.1.1" # command line config manipulation -crudini = "0.9.4" -python-dateutil = "2.8.2" +crudini = "0.9.5" +python-dateutil = "2.9.0.post0" # guac-session pyguacamole = "^0.11" uvicorn = {extras = ["standard"], version = "^0.18.2"} -gunicorn = "^22.0.0" +gunicorn = "^23.0.0" channels = "^3.0.5" setproctitle = "1.3.2" -# tmp dependency to fix vuln -certifi = "2024.7.4" -rat_king_parser = {git = "https://github.com/jeFF0Falltrades/rat_king_parser", rev = "ab849ec8face38c8dac3f803ae5fe7cf8be26583"} - +CAPE-parsers = ">=0.1.36" +maco = "1.1.8" [tool.poetry.extras] maco = ["maco"] -[tool.poetry.dev-dependencies] +[tool.poetry.group.dev.dependencies] black = "^24.3.0" isort = "^5.10.1" +mypy = "1.14.1" pytest = "7.2.2" pytest-pretty = "1.1.0" pytest-cov = "3.0.0" @@ -110,17 +109,11 @@ pytest-xdist = "3.6.1" pytest-asyncio = "0.18.3" pytest-freezer = "0.4.8" tenacity = "8.1.0" +types-requests = "^2.32" httpretty = "^1.1.4" func-timeout = "^4.3.5" pre-commit = "^2.19.0" -[tool.ruff] -select = ["E", "F"] -ignore = ["E402","E501"] -exclude = [ - "./analyzer/linux/dbus_next", -] - [tool.black] line-length = 132 include = "\\.py(_disabled)?$" @@ -141,7 +134,57 @@ DJANGO_SETTINGS_MODULE = "web.settings" pythonpath = [".", "web"] testpaths = ["tests", "agent"] norecursedirs = "tests/zip_compound" +asyncio_mode = "auto" [build-system] requires = ["poetry-core>=1.0.0"] build-backend = "poetry.core.masonry.api" + +[tool.ruff] +line-length = 132 +exclude = [ + "./analyzer/linux/dbus_next", +] + +[tool.ruff.lint] +select = [ + "F", # pyflakes + "E", # pycodestyle errors + "W", # pycodestyle warnings + # "I", # isort + # "N", # pep8-naming + "G", # flake8-logging-format +] + +ignore = [ + "E501", # ignore due to conflict with formatter + "N818", # exceptions don't need the Error suffix + "E741", # allow ambiguous variable names + "E402", + "W605", # ToDo to fix - Invalid escape sequence +] + +fixable = ["ALL"] + +[tool.ruff.lint.per-file-ignores] +"stubs/*" = [ + "N", # naming conventions don't matter in stubs + "F403", # star imports are okay in stubs + "F405", # star imports are okay in stubs +] + +[tool.ruff.format] +quote-style = "double" +indent-style = "space" +skip-magic-trailing-comma = false +line-ending = "auto" + +[tool.ruff.lint.isort] +known-first-party = ["libqtile", "test"] +default-section = "third-party" + +[tool.mypy] +warn_unused_configs = true +files = [ + "agent/**/*.py", +] diff --git 
a/requirements.txt b/requirements.txt index 4b920a5528a..fc7a58e4141 100644 --- a/requirements.txt +++ b/requirements.txt @@ -4,68 +4,73 @@ alembic==1.9.4 ; python_version >= "3.10" and python_version < "4.0" \ annotated-types==0.7.0 ; python_version >= "3.10" and python_version < "4.0" \ --hash=sha256:1f02e8b43a8fbbc3f3e0d4f0f4bfc8131bcb4eebe8849b8e5c773f3a1c582a53 \ --hash=sha256:aff07c09a53a08bc8cfccb9c85b05f1aa9a2a6f23728d790723543408344ce89 -anyio==4.6.0 ; python_version >= "3.10" and python_version < "4.0" \ - --hash=sha256:137b4559cbb034c477165047febb6ff83f390fc3b20bf181c1fc0a728cb8beeb \ - --hash=sha256:c7d2e9d63e31599eeb636c8c5c03a7e108d73b345f064f1c19fdc87b79036a9a +anyio==4.8.0 ; python_version >= "3.10" and python_version < "4.0" \ + --hash=sha256:1d9fe889df5212298c0c0723fa20479d1b94883a2df44bd3897aa91083316f7a \ + --hash=sha256:b5011f270ab5eb0abf13385f851315585cc37ef330dd88e27ec3d34d651fd47a asgiref==3.8.1 ; python_version >= "3.10" and python_version < "4.0" \ --hash=sha256:3e1e3ecc849832fe52ccf2cb6686b7a55f82bb1d6aee72a58826471390335e47 \ --hash=sha256:c343bd80a0bec947a9860adb4c432ffa7db769836c64238fc34bdc3fec84d590 -attrs==24.2.0 ; python_version >= "3.10" and python_version < "4.0" \ - --hash=sha256:5cfb1b9148b5b086569baec03f20d7b6bf3bcacc9a42bebf87ffaaca362f6346 \ - --hash=sha256:81921eb96de3191c8258c199618104dd27ac608d9366f5e35d011eae1867ede2 +attrs==25.1.0 ; python_version >= "3.10" and python_version < "4.0" \ + --hash=sha256:1c97078a80c814273a76b2a298a932eb681c87415c11dee0a6921de7f1b02c3e \ + --hash=sha256:c75a69e28a550a7e93789579c22aa26b0f5b83b75dc4e08fe092980051e1090a autobahn==24.4.2 ; python_version >= "3.10" and python_version < "4.0" \ --hash=sha256:a2d71ef1b0cf780b6d11f8b205fd2c7749765e65795f2ea7d823796642ee92c9 \ --hash=sha256:c56a2abe7ac78abbfb778c02892d673a4de58fd004d088cd7ab297db25918e81 automat==24.8.1 ; python_version >= "3.10" and python_version < "4.0" \ --hash=sha256:b34227cf63f6325b8ad2399ede780675083e439b20c323d376373d8ee6306d88 \ --hash=sha256:bf029a7bc3da1e2c24da2343e7598affaa9f10bf0ab63ff808566ce90551e02a -bcrypt==4.2.0 ; python_version >= "3.10" and python_version < "4.0" \ - --hash=sha256:096a15d26ed6ce37a14c1ac1e48119660f21b24cba457f160a4b830f3fe6b5cb \ - --hash=sha256:0da52759f7f30e83f1e30a888d9163a81353ef224d82dc58eb5bb52efcabc399 \ - --hash=sha256:1bb429fedbe0249465cdd85a58e8376f31bb315e484f16e68ca4c786dcc04291 \ - --hash=sha256:1d84cf6d877918620b687b8fd1bf7781d11e8a0998f576c7aa939776b512b98d \ - --hash=sha256:1ee38e858bf5d0287c39b7a1fc59eec64bbf880c7d504d3a06a96c16e14058e7 \ - --hash=sha256:1ff39b78a52cf03fdf902635e4c81e544714861ba3f0efc56558979dd4f09170 \ - --hash=sha256:27fe0f57bb5573104b5a6de5e4153c60814c711b29364c10a75a54bb6d7ff48d \ - --hash=sha256:3413bd60460f76097ee2e0a493ccebe4a7601918219c02f503984f0a7ee0aebe \ - --hash=sha256:3698393a1b1f1fd5714524193849d0c6d524d33523acca37cd28f02899285060 \ - --hash=sha256:373db9abe198e8e2c70d12b479464e0d5092cc122b20ec504097b5f2297ed184 \ - --hash=sha256:39e1d30c7233cfc54f5c3f2c825156fe044efdd3e0b9d309512cc514a263ec2a \ - --hash=sha256:3bbbfb2734f0e4f37c5136130405332640a1e46e6b23e000eeff2ba8d005da68 \ - --hash=sha256:3d3a6d28cb2305b43feac298774b997e372e56c7c7afd90a12b3dc49b189151c \ - --hash=sha256:5a1e8aa9b28ae28020a3ac4b053117fb51c57a010b9f969603ed885f23841458 \ - --hash=sha256:61ed14326ee023917ecd093ee6ef422a72f3aec6f07e21ea5f10622b735538a9 \ - --hash=sha256:655ea221910bcac76ea08aaa76df427ef8625f92e55a8ee44fbf7753dbabb328 \ - 
--hash=sha256:762a2c5fb35f89606a9fde5e51392dad0cd1ab7ae64149a8b935fe8d79dd5ed7 \ - --hash=sha256:77800b7147c9dc905db1cba26abe31e504d8247ac73580b4aa179f98e6608f34 \ - --hash=sha256:8ac68872c82f1add6a20bd489870c71b00ebacd2e9134a8aa3f98a0052ab4b0e \ - --hash=sha256:8d7bb9c42801035e61c109c345a28ed7e84426ae4865511eb82e913df18f58c2 \ - --hash=sha256:8f6ede91359e5df88d1f5c1ef47428a4420136f3ce97763e31b86dd8280fbdf5 \ - --hash=sha256:9c1c4ad86351339c5f320ca372dfba6cb6beb25e8efc659bedd918d921956bae \ - --hash=sha256:c02d944ca89d9b1922ceb8a46460dd17df1ba37ab66feac4870f6862a1533c00 \ - --hash=sha256:c52aac18ea1f4a4f65963ea4f9530c306b56ccd0c6f8c8da0c06976e34a6e841 \ - --hash=sha256:cb2a8ec2bc07d3553ccebf0746bbf3d19426d1c6d1adbd4fa48925f66af7b9e8 \ - --hash=sha256:cf69eaf5185fd58f268f805b505ce31f9b9fc2d64b376642164e9244540c1221 \ - --hash=sha256:f4f4acf526fcd1c34e7ce851147deedd4e26e6402369304220250598b26448db +bcrypt==4.2.1 ; python_version >= "3.10" and python_version < "4.0" \ + --hash=sha256:041fa0155c9004eb98a232d54da05c0b41d4b8e66b6fc3cb71b4b3f6144ba837 \ + --hash=sha256:04e56e3fe8308a88b77e0afd20bec516f74aecf391cdd6e374f15cbed32783d6 \ + --hash=sha256:1340411a0894b7d3ef562fb233e4b6ed58add185228650942bdc885362f32c17 \ + --hash=sha256:533e7f3bcf2f07caee7ad98124fab7499cb3333ba2274f7a36cf1daee7409d99 \ + --hash=sha256:6765386e3ab87f569b276988742039baab087b2cdb01e809d74e74503c2faafe \ + --hash=sha256:687cf30e6681eeda39548a93ce9bfbb300e48b4d445a43db4298d2474d2a1e54 \ + --hash=sha256:76132c176a6d9953cdc83c296aeaed65e1a708485fd55abf163e0d9f8f16ce0e \ + --hash=sha256:76d3e352b32f4eeb34703370e370997065d28a561e4a18afe4fef07249cb4396 \ + --hash=sha256:807261df60a8b1ccd13e6599c779014a362ae4e795f5c59747f60208daddd96d \ + --hash=sha256:89df2aea2c43be1e1fa066df5f86c8ce822ab70a30e4c210968669565c0f4685 \ + --hash=sha256:8ad2f4528cbf0febe80e5a3a57d7a74e6635e41af1ea5675282a33d769fba413 \ + --hash=sha256:8c458cd103e6c5d1d85cf600e546a639f234964d0228909d8f8dbeebff82d526 \ + --hash=sha256:8dbd0747208912b1e4ce730c6725cb56c07ac734b3629b60d4398f082ea718ad \ + --hash=sha256:909faa1027900f2252a9ca5dfebd25fc0ef1417943824783d1c8418dd7d6df4a \ + --hash=sha256:aaa2e285be097050dba798d537b6efd9b698aa88eef52ec98d23dcd6d7cf6fea \ + --hash=sha256:adadd36274510a01f33e6dc08f5824b97c9580583bd4487c564fc4617b328005 \ + --hash=sha256:b1ee315739bc8387aa36ff127afc99120ee452924e0df517a8f3e4c0187a0f5f \ + --hash=sha256:b588af02b89d9fad33e5f98f7838bf590d6d692df7153647724a7f20c186f6bf \ + --hash=sha256:b7703ede632dc945ed1172d6f24e9f30f27b1b1a067f32f68bf169c5f08d0425 \ + --hash=sha256:c6f5fa3775966cca251848d4d5393ab016b3afed251163c1436fefdec3b02c84 \ + --hash=sha256:cde78d385d5e93ece5479a0a87f73cd6fa26b171c786a884f955e165032b262c \ + --hash=sha256:cfdf3d7530c790432046c40cda41dfee8c83e29482e6a604f8930b9930e94139 \ + --hash=sha256:e158009a54c4c8bc91d5e0da80920d048f918c61a581f0a63e4e93bb556d362f \ + --hash=sha256:e84e0e6f8e40a242b11bce56c313edc2be121cec3e0ec2d76fce01f6af33c07c \ + --hash=sha256:f85b1ffa09240c89aa2e1ae9f3b1c687104f7b2b9d2098da4e923f1b7082d331 beautifulsoup4==4.12.3 ; python_version >= "3.10" and python_version < "4.0" \ --hash=sha256:74e3d1928edc070d21748185c46e3fb33490f22f52a3addee9aee0f4f7781051 \ --hash=sha256:b80878c9f40111313e55da8ba20bdba06d8fa3969fc68304167741bbf9e082ed bs4==0.0.1 ; python_version >= "3.10" and python_version < "4.0" \ --hash=sha256:36ecea1fd7cc5c0c6e4a1ff075df26d50da647b75376626cc186e2212886dd3a -cachetools==5.5.0 ; python_version >= "3.10" and python_version < "4.0" \ - 
--hash=sha256:02134e8439cdc2ffb62023ce1debca2944c3f289d66bb17ead3ab3dede74b292 \ - --hash=sha256:2cc24fb4cbe39633fb7badd9db9ca6295d766d9c2995f245725a46715d050f2a -capstone==4.0.2 ; python_version >= "3.10" and python_version < "4.0" \ - --hash=sha256:0d65ffe8620920976ceadedc769f22318f6f150a592368d8a735612367ac8a1a \ - --hash=sha256:2842913092c9b69fd903744bc1b87488e1451625460baac173056e1808ec1c66 \ - --hash=sha256:9d1a9096c5f875b11290317722ed44bb6e7c52e50cc79d791f142bce968c49aa \ - --hash=sha256:c3d9b443d1adb40ee2d9a4e7341169b76476ddcf3a54c03793b16cdc7cd35c5a \ - --hash=sha256:da442f979414cf27e4621e70e835880878c858ea438c4f0e957e132593579e37 -cart==1.2.2 ; python_version >= "3.10" and python_version < "4.0" \ - --hash=sha256:c111398038683c85d3edcadaa3b16183461907bdb613e05cbb60d381f2886309 -certifi==2024.7.4 ; python_version >= "3.10" and python_version < "4.0" \ - --hash=sha256:5a1e7645bc0ec61a09e26c36f6106dd4cf40c6db3a1fb6352b0244e7fb057c7b \ - --hash=sha256:c198e21b1289c2ab85ee4e67bb4b4ef3ead0892059901a8d5b622f24a1101e90 +cachetools==5.5.1 ; python_version >= "3.10" and python_version < "4.0" \ + --hash=sha256:70f238fbba50383ef62e55c6aff6d9673175fe59f7c6782c7a0b9e38f4a9df95 \ + --hash=sha256:b76651fdc3b24ead3c648bbdeeb940c1b04d365b38b4af66788f9ec4a81d42bb +cape-parsers==0.1.36 ; python_version >= "3.10" and python_version < "4.0" \ + --hash=sha256:459db9a10a2c09a1dfb386dcec1b7968293d2c6eef417c4179022895e396ddc5 \ + --hash=sha256:c0944f8183aaf53d4a6c8f370423ce1134bdc4a7f7b23e37f92f0e061423e229 +capstone==5.0.5 ; python_version >= "3.10" and python_version < "4.0" \ + --hash=sha256:15f4b85df176999bbf7eb3f53f0cf2cee728254600c1be21442e2581189309e9 \ + --hash=sha256:24db89d74b571659fe6212e756795cd5d394378c50e19e41dbcfb6c087c2f87d \ + --hash=sha256:32346f6019d5351adaaf584ffc60c1e40db6b47d1d049eb924f903eb2b073e87 \ + --hash=sha256:50b646f0c56b0cac5c993dde08b5e5eacf8b1f66031ec8d60154eae6e3c0645e \ + --hash=sha256:5416621ac2d243d89b788f1309b143ea1f400da3eb5c47c6a87f1add99732a83 \ + --hash=sha256:6ef47da78f44de1cdff1519b360186681fca0097e92046a7d7203d56364f99da \ + --hash=sha256:754968f057d9e5d9c383f2918a1d56d455bfb274bbf307f219180b16e6d5aaeb \ + --hash=sha256:89dac65a1c84670ee30ccaf2ae688c4b27ad514d9dc8738a9826579051e29ecb \ + --hash=sha256:933797f7e2a257a77c3a699316deea92efa120a10d41e22725a96fc82f0a769e \ + --hash=sha256:a03b6b42b33bb0739b2436a555e699ac91cd1d1891134269b04e359b607e50e8 \ + --hash=sha256:cd35b666739d7b79066fc69fd0c145d5ceb6a4131df3db1225ec6dcfa3fe322f +certifi==2025.1.31 ; python_version >= "3.10" and python_version < "4.0" \ + --hash=sha256:3d5da6925056f6f18f119200434a4780a94263f10d1c21d032a6f6b2baa20651 \ + --hash=sha256:ca78db4565a652026a4db2bcdf68f2fb589ea80d0be70e03929ed730746b84fe cffi==1.17.1 ; python_version >= "3.10" and python_version < "4.0" \ --hash=sha256:045d61c734659cc045141be4bae381a41d89b741f795af1dd018bfb532fd0df8 \ --hash=sha256:0984a4925a435b1da406122d4d7968dd861c1385afe3b45ba82b750f229811e2 \ @@ -140,100 +145,102 @@ channels==3.0.5 ; python_version >= "3.10" and python_version < "4.0" \ chardet==4.0.0 ; python_version >= "3.10" and python_version < "4.0" \ --hash=sha256:0d6f53a15db4120f2b08c94f11e7d93d2c911ee118b6b30a04ec3ee8310179fa \ --hash=sha256:f864054d66fd9118f2e67044ac8981a54775ec5b67aed0441892edb553d21da5 -charset-normalizer==3.3.2 ; python_version >= "3.10" and python_version < "4.0" \ - --hash=sha256:06435b539f889b1f6f4ac1758871aae42dc3a8c0e24ac9e60c2384973ad73027 \ - --hash=sha256:06a81e93cd441c56a9b65d8e1d043daeb97a3d0856d177d5c90ba85acb3db087 \ - 
--hash=sha256:0a55554a2fa0d408816b3b5cedf0045f4b8e1a6065aec45849de2d6f3f8e9786 \ - --hash=sha256:0b2b64d2bb6d3fb9112bafa732def486049e63de9618b5843bcdd081d8144cd8 \ - --hash=sha256:10955842570876604d404661fbccbc9c7e684caf432c09c715ec38fbae45ae09 \ - --hash=sha256:122c7fa62b130ed55f8f285bfd56d5f4b4a5b503609d181f9ad85e55c89f4185 \ - --hash=sha256:1ceae2f17a9c33cb48e3263960dc5fc8005351ee19db217e9b1bb15d28c02574 \ - --hash=sha256:1d3193f4a680c64b4b6a9115943538edb896edc190f0b222e73761716519268e \ - --hash=sha256:1f79682fbe303db92bc2b1136016a38a42e835d932bab5b3b1bfcfbf0640e519 \ - --hash=sha256:2127566c664442652f024c837091890cb1942c30937add288223dc895793f898 \ - --hash=sha256:22afcb9f253dac0696b5a4be4a1c0f8762f8239e21b99680099abd9b2b1b2269 \ - --hash=sha256:25baf083bf6f6b341f4121c2f3c548875ee6f5339300e08be3f2b2ba1721cdd3 \ - --hash=sha256:2e81c7b9c8979ce92ed306c249d46894776a909505d8f5a4ba55b14206e3222f \ - --hash=sha256:3287761bc4ee9e33561a7e058c72ac0938c4f57fe49a09eae428fd88aafe7bb6 \ - --hash=sha256:34d1c8da1e78d2e001f363791c98a272bb734000fcef47a491c1e3b0505657a8 \ - --hash=sha256:37e55c8e51c236f95b033f6fb391d7d7970ba5fe7ff453dad675e88cf303377a \ - --hash=sha256:3d47fa203a7bd9c5b6cee4736ee84ca03b8ef23193c0d1ca99b5089f72645c73 \ - --hash=sha256:3e4d1f6587322d2788836a99c69062fbb091331ec940e02d12d179c1d53e25fc \ - --hash=sha256:42cb296636fcc8b0644486d15c12376cb9fa75443e00fb25de0b8602e64c1714 \ - --hash=sha256:45485e01ff4d3630ec0d9617310448a8702f70e9c01906b0d0118bdf9d124cf2 \ - --hash=sha256:4a78b2b446bd7c934f5dcedc588903fb2f5eec172f3d29e52a9096a43722adfc \ - --hash=sha256:4ab2fe47fae9e0f9dee8c04187ce5d09f48eabe611be8259444906793ab7cbce \ - --hash=sha256:4d0d1650369165a14e14e1e47b372cfcb31d6ab44e6e33cb2d4e57265290044d \ - --hash=sha256:549a3a73da901d5bc3ce8d24e0600d1fa85524c10287f6004fbab87672bf3e1e \ - --hash=sha256:55086ee1064215781fff39a1af09518bc9255b50d6333f2e4c74ca09fac6a8f6 \ - --hash=sha256:572c3763a264ba47b3cf708a44ce965d98555f618ca42c926a9c1616d8f34269 \ - --hash=sha256:573f6eac48f4769d667c4442081b1794f52919e7edada77495aaed9236d13a96 \ - --hash=sha256:5b4c145409bef602a690e7cfad0a15a55c13320ff7a3ad7ca59c13bb8ba4d45d \ - --hash=sha256:6463effa3186ea09411d50efc7d85360b38d5f09b870c48e4600f63af490e56a \ - --hash=sha256:65f6f63034100ead094b8744b3b97965785388f308a64cf8d7c34f2f2e5be0c4 \ - --hash=sha256:663946639d296df6a2bb2aa51b60a2454ca1cb29835324c640dafb5ff2131a77 \ - --hash=sha256:6897af51655e3691ff853668779c7bad41579facacf5fd7253b0133308cf000d \ - --hash=sha256:68d1f8a9e9e37c1223b656399be5d6b448dea850bed7d0f87a8311f1ff3dabb0 \ - --hash=sha256:6ac7ffc7ad6d040517be39eb591cac5ff87416c2537df6ba3cba3bae290c0fed \ - --hash=sha256:6b3251890fff30ee142c44144871185dbe13b11bab478a88887a639655be1068 \ - --hash=sha256:6c4caeef8fa63d06bd437cd4bdcf3ffefe6738fb1b25951440d80dc7df8c03ac \ - --hash=sha256:6ef1d82a3af9d3eecdba2321dc1b3c238245d890843e040e41e470ffa64c3e25 \ - --hash=sha256:753f10e867343b4511128c6ed8c82f7bec3bd026875576dfd88483c5c73b2fd8 \ - --hash=sha256:7cd13a2e3ddeed6913a65e66e94b51d80a041145a026c27e6bb76c31a853c6ab \ - --hash=sha256:7ed9e526742851e8d5cc9e6cf41427dfc6068d4f5a3bb03659444b4cabf6bc26 \ - --hash=sha256:7f04c839ed0b6b98b1a7501a002144b76c18fb1c1850c8b98d458ac269e26ed2 \ - --hash=sha256:802fe99cca7457642125a8a88a084cef28ff0cf9407060f7b93dca5aa25480db \ - --hash=sha256:80402cd6ee291dcb72644d6eac93785fe2c8b9cb30893c1af5b8fdd753b9d40f \ - --hash=sha256:8465322196c8b4d7ab6d1e049e4c5cb460d0394da4a27d23cc242fbf0034b6b5 \ - 
--hash=sha256:86216b5cee4b06df986d214f664305142d9c76df9b6512be2738aa72a2048f99 \ - --hash=sha256:87d1351268731db79e0f8e745d92493ee2841c974128ef629dc518b937d9194c \ - --hash=sha256:8bdb58ff7ba23002a4c5808d608e4e6c687175724f54a5dade5fa8c67b604e4d \ - --hash=sha256:8c622a5fe39a48f78944a87d4fb8a53ee07344641b0562c540d840748571b811 \ - --hash=sha256:8d756e44e94489e49571086ef83b2bb8ce311e730092d2c34ca8f7d925cb20aa \ - --hash=sha256:8f4a014bc36d3c57402e2977dada34f9c12300af536839dc38c0beab8878f38a \ - --hash=sha256:9063e24fdb1e498ab71cb7419e24622516c4a04476b17a2dab57e8baa30d6e03 \ - --hash=sha256:90d558489962fd4918143277a773316e56c72da56ec7aa3dc3dbbe20fdfed15b \ - --hash=sha256:923c0c831b7cfcb071580d3f46c4baf50f174be571576556269530f4bbd79d04 \ - --hash=sha256:95f2a5796329323b8f0512e09dbb7a1860c46a39da62ecb2324f116fa8fdc85c \ - --hash=sha256:96b02a3dc4381e5494fad39be677abcb5e6634bf7b4fa83a6dd3112607547001 \ - --hash=sha256:9f96df6923e21816da7e0ad3fd47dd8f94b2a5ce594e00677c0013018b813458 \ - --hash=sha256:a10af20b82360ab00827f916a6058451b723b4e65030c5a18577c8b2de5b3389 \ - --hash=sha256:a50aebfa173e157099939b17f18600f72f84eed3049e743b68ad15bd69b6bf99 \ - --hash=sha256:a981a536974bbc7a512cf44ed14938cf01030a99e9b3a06dd59578882f06f985 \ - --hash=sha256:a9a8e9031d613fd2009c182b69c7b2c1ef8239a0efb1df3f7c8da66d5dd3d537 \ - --hash=sha256:ae5f4161f18c61806f411a13b0310bea87f987c7d2ecdbdaad0e94eb2e404238 \ - --hash=sha256:aed38f6e4fb3f5d6bf81bfa990a07806be9d83cf7bacef998ab1a9bd660a581f \ - --hash=sha256:b01b88d45a6fcb69667cd6d2f7a9aeb4bf53760d7fc536bf679ec94fe9f3ff3d \ - --hash=sha256:b261ccdec7821281dade748d088bb6e9b69e6d15b30652b74cbbac25e280b796 \ - --hash=sha256:b2b0a0c0517616b6869869f8c581d4eb2dd83a4d79e0ebcb7d373ef9956aeb0a \ - --hash=sha256:b4a23f61ce87adf89be746c8a8974fe1c823c891d8f86eb218bb957c924bb143 \ - --hash=sha256:bd8f7df7d12c2db9fab40bdd87a7c09b1530128315d047a086fa3ae3435cb3a8 \ - --hash=sha256:beb58fe5cdb101e3a055192ac291b7a21e3b7ef4f67fa1d74e331a7f2124341c \ - --hash=sha256:c002b4ffc0be611f0d9da932eb0f704fe2602a9a949d1f738e4c34c75b0863d5 \ - --hash=sha256:c083af607d2515612056a31f0a8d9e0fcb5876b7bfc0abad3ecd275bc4ebc2d5 \ - --hash=sha256:c180f51afb394e165eafe4ac2936a14bee3eb10debc9d9e4db8958fe36afe711 \ - --hash=sha256:c235ebd9baae02f1b77bcea61bce332cb4331dc3617d254df3323aa01ab47bd4 \ - --hash=sha256:cd70574b12bb8a4d2aaa0094515df2463cb429d8536cfb6c7ce983246983e5a6 \ - --hash=sha256:d0eccceffcb53201b5bfebb52600a5fb483a20b61da9dbc885f8b103cbe7598c \ - --hash=sha256:d965bba47ddeec8cd560687584e88cf699fd28f192ceb452d1d7ee807c5597b7 \ - --hash=sha256:db364eca23f876da6f9e16c9da0df51aa4f104a972735574842618b8c6d999d4 \ - --hash=sha256:ddbb2551d7e0102e7252db79ba445cdab71b26640817ab1e3e3648dad515003b \ - --hash=sha256:deb6be0ac38ece9ba87dea880e438f25ca3eddfac8b002a2ec3d9183a454e8ae \ - --hash=sha256:e06ed3eb3218bc64786f7db41917d4e686cc4856944f53d5bdf83a6884432e12 \ - --hash=sha256:e27ad930a842b4c5eb8ac0016b0a54f5aebbe679340c26101df33424142c143c \ - --hash=sha256:e537484df0d8f426ce2afb2d0f8e1c3d0b114b83f8850e5f2fbea0e797bd82ae \ - --hash=sha256:eb00ed941194665c332bf8e078baf037d6c35d7c4f3102ea2d4f16ca94a26dc8 \ - --hash=sha256:eb6904c354526e758fda7167b33005998fb68c46fbc10e013ca97f21ca5c8887 \ - --hash=sha256:eb8821e09e916165e160797a6c17edda0679379a4be5c716c260e836e122f54b \ - --hash=sha256:efcb3f6676480691518c177e3b465bcddf57cea040302f9f4e6e191af91174d4 \ - --hash=sha256:f27273b60488abe721a075bcca6d7f3964f9f6f067c8c4c605743023d7d3944f \ - 
--hash=sha256:f30c3cb33b24454a82faecaf01b19c18562b1e89558fb6c56de4d9118a032fd5 \ - --hash=sha256:fb69256e180cb6c8a894fee62b3afebae785babc1ee98b81cdf68bbca1987f33 \ - --hash=sha256:fd1abc0d89e30cc4e02e4064dc67fcc51bd941eb395c502aac3ec19fab46b519 \ - --hash=sha256:ff8fa367d09b717b2a17a052544193ad76cd49979c805768879cb63d9ca50561 -click==8.1.7 ; python_version >= "3.10" and python_version < "4.0" \ - --hash=sha256:ae74fb96c20a0277a1d615f1e4d73c8414f5a98db8b799a7931d1582f3390c28 \ - --hash=sha256:ca9853ad459e787e2192211578cc907e7594e294c7ccc834310722b41b9ca6de +charset-normalizer==3.4.1 ; python_version >= "3.10" and python_version < "4.0" \ + --hash=sha256:0167ddc8ab6508fe81860a57dd472b2ef4060e8d378f0cc555707126830f2537 \ + --hash=sha256:01732659ba9b5b873fc117534143e4feefecf3b2078b0a6a2e925271bb6f4cfa \ + --hash=sha256:01ad647cdd609225c5350561d084b42ddf732f4eeefe6e678765636791e78b9a \ + --hash=sha256:04432ad9479fa40ec0f387795ddad4437a2b50417c69fa275e212933519ff294 \ + --hash=sha256:0907f11d019260cdc3f94fbdb23ff9125f6b5d1039b76003b5b0ac9d6a6c9d5b \ + --hash=sha256:0924e81d3d5e70f8126529951dac65c1010cdf117bb75eb02dd12339b57749dd \ + --hash=sha256:09b26ae6b1abf0d27570633b2b078a2a20419c99d66fb2823173d73f188ce601 \ + --hash=sha256:09b5e6733cbd160dcc09589227187e242a30a49ca5cefa5a7edd3f9d19ed53fd \ + --hash=sha256:0af291f4fe114be0280cdd29d533696a77b5b49cfde5467176ecab32353395c4 \ + --hash=sha256:0f55e69f030f7163dffe9fd0752b32f070566451afe180f99dbeeb81f511ad8d \ + --hash=sha256:1a2bc9f351a75ef49d664206d51f8e5ede9da246602dc2d2726837620ea034b2 \ + --hash=sha256:22e14b5d70560b8dd51ec22863f370d1e595ac3d024cb8ad7d308b4cd95f8313 \ + --hash=sha256:234ac59ea147c59ee4da87a0c0f098e9c8d169f4dc2a159ef720f1a61bbe27cd \ + --hash=sha256:2369eea1ee4a7610a860d88f268eb39b95cb588acd7235e02fd5a5601773d4fa \ + --hash=sha256:237bdbe6159cff53b4f24f397d43c6336c6b0b42affbe857970cefbb620911c8 \ + --hash=sha256:28bf57629c75e810b6ae989f03c0828d64d6b26a5e205535585f96093e405ed1 \ + --hash=sha256:2967f74ad52c3b98de4c3b32e1a44e32975e008a9cd2a8cc8966d6a5218c5cb2 \ + --hash=sha256:2a75d49014d118e4198bcee5ee0a6f25856b29b12dbf7cd012791f8a6cc5c496 \ + --hash=sha256:2bdfe3ac2e1bbe5b59a1a63721eb3b95fc9b6817ae4a46debbb4e11f6232428d \ + --hash=sha256:2d074908e1aecee37a7635990b2c6d504cd4766c7bc9fc86d63f9c09af3fa11b \ + --hash=sha256:2fb9bd477fdea8684f78791a6de97a953c51831ee2981f8e4f583ff3b9d9687e \ + --hash=sha256:311f30128d7d333eebd7896965bfcfbd0065f1716ec92bd5638d7748eb6f936a \ + --hash=sha256:329ce159e82018d646c7ac45b01a430369d526569ec08516081727a20e9e4af4 \ + --hash=sha256:345b0426edd4e18138d6528aed636de7a9ed169b4aaf9d61a8c19e39d26838ca \ + --hash=sha256:363e2f92b0f0174b2f8238240a1a30142e3db7b957a5dd5689b0e75fb717cc78 \ + --hash=sha256:3a3bd0dcd373514dcec91c411ddb9632c0d7d92aed7093b8c3bbb6d69ca74408 \ + --hash=sha256:3bed14e9c89dcb10e8f3a29f9ccac4955aebe93c71ae803af79265c9ca5644c5 \ + --hash=sha256:44251f18cd68a75b56585dd00dae26183e102cd5e0f9f1466e6df5da2ed64ea3 \ + --hash=sha256:44ecbf16649486d4aebafeaa7ec4c9fed8b88101f4dd612dcaf65d5e815f837f \ + --hash=sha256:4532bff1b8421fd0a320463030c7520f56a79c9024a4e88f01c537316019005a \ + --hash=sha256:49402233c892a461407c512a19435d1ce275543138294f7ef013f0b63d5d3765 \ + --hash=sha256:4c0907b1928a36d5a998d72d64d8eaa7244989f7aaaf947500d3a800c83a3fd6 \ + --hash=sha256:4d86f7aff21ee58f26dcf5ae81a9addbd914115cdebcbb2217e4f0ed8982e146 \ + --hash=sha256:5777ee0881f9499ed0f71cc82cf873d9a0ca8af166dfa0af8ec4e675b7df48e6 \ + --hash=sha256:5df196eb874dae23dcfb968c83d4f8fdccb333330fe1fc278ac5ceeb101003a9 \ + 
--hash=sha256:619a609aa74ae43d90ed2e89bdd784765de0a25ca761b93e196d938b8fd1dbbd \ + --hash=sha256:6e27f48bcd0957c6d4cb9d6fa6b61d192d0b13d5ef563e5f2ae35feafc0d179c \ + --hash=sha256:6ff8a4a60c227ad87030d76e99cd1698345d4491638dfa6673027c48b3cd395f \ + --hash=sha256:73d94b58ec7fecbc7366247d3b0b10a21681004153238750bb67bd9012414545 \ + --hash=sha256:7461baadb4dc00fd9e0acbe254e3d7d2112e7f92ced2adc96e54ef6501c5f176 \ + --hash=sha256:75832c08354f595c760a804588b9357d34ec00ba1c940c15e31e96d902093770 \ + --hash=sha256:7709f51f5f7c853f0fb938bcd3bc59cdfdc5203635ffd18bf354f6967ea0f824 \ + --hash=sha256:78baa6d91634dfb69ec52a463534bc0df05dbd546209b79a3880a34487f4b84f \ + --hash=sha256:7974a0b5ecd505609e3b19742b60cee7aa2aa2fb3151bc917e6e2646d7667dcf \ + --hash=sha256:7a4f97a081603d2050bfaffdefa5b02a9ec823f8348a572e39032caa8404a487 \ + --hash=sha256:7b1bef6280950ee6c177b326508f86cad7ad4dff12454483b51d8b7d673a2c5d \ + --hash=sha256:7d053096f67cd1241601111b698f5cad775f97ab25d81567d3f59219b5f1adbd \ + --hash=sha256:804a4d582ba6e5b747c625bf1255e6b1507465494a40a2130978bda7b932c90b \ + --hash=sha256:807f52c1f798eef6cf26beb819eeb8819b1622ddfeef9d0977a8502d4db6d534 \ + --hash=sha256:80ed5e856eb7f30115aaf94e4a08114ccc8813e6ed1b5efa74f9f82e8509858f \ + --hash=sha256:8417cb1f36cc0bc7eaba8ccb0e04d55f0ee52df06df3ad55259b9a323555fc8b \ + --hash=sha256:8436c508b408b82d87dc5f62496973a1805cd46727c34440b0d29d8a2f50a6c9 \ + --hash=sha256:89149166622f4db9b4b6a449256291dc87a99ee53151c74cbd82a53c8c2f6ccd \ + --hash=sha256:8bfa33f4f2672964266e940dd22a195989ba31669bd84629f05fab3ef4e2d125 \ + --hash=sha256:8c60ca7339acd497a55b0ea5d506b2a2612afb2826560416f6894e8b5770d4a9 \ + --hash=sha256:91b36a978b5ae0ee86c394f5a54d6ef44db1de0815eb43de826d41d21e4af3de \ + --hash=sha256:955f8851919303c92343d2f66165294848d57e9bba6cf6e3625485a70a038d11 \ + --hash=sha256:97f68b8d6831127e4787ad15e6757232e14e12060bec17091b85eb1486b91d8d \ + --hash=sha256:9b23ca7ef998bc739bf6ffc077c2116917eabcc901f88da1b9856b210ef63f35 \ + --hash=sha256:9f0b8b1c6d84c8034a44893aba5e767bf9c7a211e313a9605d9c617d7083829f \ + --hash=sha256:aabfa34badd18f1da5ec1bc2715cadc8dca465868a4e73a0173466b688f29dda \ + --hash=sha256:ab36c8eb7e454e34e60eb55ca5d241a5d18b2c6244f6827a30e451c42410b5f7 \ + --hash=sha256:b010a7a4fd316c3c484d482922d13044979e78d1861f0e0650423144c616a46a \ + --hash=sha256:b1ac5992a838106edb89654e0aebfc24f5848ae2547d22c2c3f66454daa11971 \ + --hash=sha256:b7b2d86dd06bfc2ade3312a83a5c364c7ec2e3498f8734282c6c3d4b07b346b8 \ + --hash=sha256:b97e690a2118911e39b4042088092771b4ae3fc3aa86518f84b8cf6888dbdb41 \ + --hash=sha256:bc2722592d8998c870fa4e290c2eec2c1569b87fe58618e67d38b4665dfa680d \ + --hash=sha256:c0429126cf75e16c4f0ad00ee0eae4242dc652290f940152ca8c75c3a4b6ee8f \ + --hash=sha256:c30197aa96e8eed02200a83fba2657b4c3acd0f0aa4bdc9f6c1af8e8962e0757 \ + --hash=sha256:c4c3e6da02df6fa1410a7680bd3f63d4f710232d3139089536310d027950696a \ + --hash=sha256:c75cb2a3e389853835e84a2d8fb2b81a10645b503eca9bcb98df6b5a43eb8886 \ + --hash=sha256:c96836c97b1238e9c9e3fe90844c947d5afbf4f4c92762679acfe19927d81d77 \ + --hash=sha256:d7f50a1f8c450f3925cb367d011448c39239bb3eb4117c36a6d354794de4ce76 \ + --hash=sha256:d973f03c0cb71c5ed99037b870f2be986c3c05e63622c017ea9816881d2dd247 \ + --hash=sha256:d98b1668f06378c6dbefec3b92299716b931cd4e6061f3c875a71ced1780ab85 \ + --hash=sha256:d9c3cdf5390dcd29aa8056d13e8e99526cda0305acc038b96b30352aff5ff2bb \ + --hash=sha256:dad3e487649f498dd991eeb901125411559b22e8d7ab25d3aeb1af367df5efd7 \ + 
--hash=sha256:dccbe65bd2f7f7ec22c4ff99ed56faa1e9f785482b9bbd7c717e26fd723a1d1e \ + --hash=sha256:dd78cfcda14a1ef52584dbb008f7ac81c1328c0f58184bf9a84c49c605002da6 \ + --hash=sha256:e218488cd232553829be0664c2292d3af2eeeb94b32bea483cf79ac6a694e037 \ + --hash=sha256:e358e64305fe12299a08e08978f51fc21fac060dcfcddd95453eabe5b93ed0e1 \ + --hash=sha256:ea0d8d539afa5eb2728aa1932a988a9a7af94f18582ffae4bc10b3fbdad0626e \ + --hash=sha256:eab677309cdb30d047996b36d34caeda1dc91149e4fdca0b1a039b3f79d9a807 \ + --hash=sha256:eb8178fe3dba6450a3e024e95ac49ed3400e506fd4e9e5c32d30adda88cbd407 \ + --hash=sha256:ecddf25bee22fe4fe3737a399d0d177d72bc22be6913acfab364b40bce1ba83c \ + --hash=sha256:eea6ee1db730b3483adf394ea72f808b6e18cf3cb6454b4d86e04fa8c4327a12 \ + --hash=sha256:f08ff5e948271dc7e18a35641d2f11a4cd8dfd5634f55228b691e62b37125eb3 \ + --hash=sha256:f30bf9fd9be89ecb2360c7d94a711f00c09b976258846efe40db3d05828e8089 \ + --hash=sha256:fa88b843d6e211393a37219e6a1c1df99d35e8fd90446f1118f4216e307e48cd \ + --hash=sha256:fc54db6c8593ef7d4b2a331b58653356cf04f67c960f584edb7c3d8c97e8f39e \ + --hash=sha256:fd4ec41f914fa74ad1b8304bbc634b3de73d2a0889bd32076342a573e0779e00 \ + --hash=sha256:ffc9202a29ab3920fa812879e95a9e78b2465fd10be7fcbd042899695d75e616 +click==8.1.8 ; python_version >= "3.10" and python_version < "4.0" \ + --hash=sha256:63c132bbbed01578a06712a2d1f497bb62d9c1c0d329b7903a866228027263b2 \ + --hash=sha256:ed53c9d8990d83c2a27deae68e4ee337473f6330c040a31d4225c9574d16096a colorama==0.4.6 ; python_version >= "3.10" and python_version < "4.0" \ --hash=sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44 \ --hash=sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6 @@ -243,151 +250,169 @@ colorclass==2.2.2 ; python_version >= "3.10" and python_version < "4.0" \ constantly==23.10.4 ; python_version >= "3.10" and python_version < "4.0" \ --hash=sha256:3fd9b4d1c3dc1ec9757f3c52aef7e53ad9323dbe39f51dfd4c43853b68dfa3f9 \ --hash=sha256:aa92b70a33e2ac0bb33cd745eb61776594dc48764b06c35e0efd050b7f1c7cbd -crudini==0.9.4 ; python_version >= "3.10" and python_version < "4.0" \ - --hash=sha256:639beb4649be5108bc00dc0947b5641995f40bab7814cbbb3e16e2082905b9c6 \ - --hash=sha256:6fd0eb341b6cbd91e1883030ea9f2102c1c95619eb563af7ddabc2161e019f6b -cryptography==43.0.1 ; python_version >= "3.10" and python_version < "4.0" \ - --hash=sha256:014f58110f53237ace6a408b5beb6c427b64e084eb451ef25a28308270086494 \ - --hash=sha256:1bbcce1a551e262dfbafb6e6252f1ae36a248e615ca44ba302df077a846a8806 \ - --hash=sha256:203e92a75716d8cfb491dc47c79e17d0d9207ccffcbcb35f598fbe463ae3444d \ - --hash=sha256:27e613d7077ac613e399270253259d9d53872aaf657471473ebfc9a52935c062 \ - --hash=sha256:2bd51274dcd59f09dd952afb696bf9c61a7a49dfc764c04dd33ef7a6b502a1e2 \ - --hash=sha256:38926c50cff6f533f8a2dae3d7f19541432610d114a70808f0926d5aaa7121e4 \ - --hash=sha256:511f4273808ab590912a93ddb4e3914dfd8a388fed883361b02dea3791f292e1 \ - --hash=sha256:58d4e9129985185a06d849aa6df265bdd5a74ca6e1b736a77959b498e0505b85 \ - --hash=sha256:5b43d1ea6b378b54a1dc99dd8a2b5be47658fe9a7ce0a58ff0b55f4b43ef2b84 \ - --hash=sha256:61ec41068b7b74268fa86e3e9e12b9f0c21fcf65434571dbb13d954bceb08042 \ - --hash=sha256:666ae11966643886c2987b3b721899d250855718d6d9ce41b521252a17985f4d \ - --hash=sha256:68aaecc4178e90719e95298515979814bda0cbada1256a4485414860bd7ab962 \ - --hash=sha256:7c05650fe8023c5ed0d46793d4b7d7e6cd9c04e68eabe5b0aeea836e37bdcec2 \ - --hash=sha256:80eda8b3e173f0f247f711eef62be51b599b5d425c429b5d4ca6a05e9e856baa \ - 
--hash=sha256:8385d98f6a3bf8bb2d65a73e17ed87a3ba84f6991c155691c51112075f9ffc5d \ - --hash=sha256:88cce104c36870d70c49c7c8fd22885875d950d9ee6ab54df2745f83ba0dc365 \ - --hash=sha256:9d3cdb25fa98afdd3d0892d132b8d7139e2c087da1712041f6b762e4f807cc96 \ - --hash=sha256:a575913fb06e05e6b4b814d7f7468c2c660e8bb16d8d5a1faf9b33ccc569dd47 \ - --hash=sha256:ac119bb76b9faa00f48128b7f5679e1d8d437365c5d26f1c2c3f0da4ce1b553d \ - --hash=sha256:c1332724be35d23a854994ff0b66530119500b6053d0bd3363265f7e5e77288d \ - --hash=sha256:d03a475165f3134f773d1388aeb19c2d25ba88b6a9733c5c590b9ff7bbfa2e0c \ - --hash=sha256:d75601ad10b059ec832e78823b348bfa1a59f6b8d545db3a24fd44362a1564cb \ - --hash=sha256:de41fd81a41e53267cb020bb3a7212861da53a7d39f863585d13ea11049cf277 \ - --hash=sha256:e710bf40870f4db63c3d7d929aa9e09e4e7ee219e703f949ec4073b4294f6172 \ - --hash=sha256:ea25acb556320250756e53f9e20a4177515f012c9eaea17eb7587a8c4d8ae034 \ - --hash=sha256:f98bf604c82c416bc829e490c700ca1553eafdf2912a91e23a79d97d9801372a \ - --hash=sha256:fba1007b3ef89946dbbb515aeeb41e30203b004f0b4b00e5e16078b518563289 +crispy-bootstrap4==2024.10 ; python_version >= "3.10" and python_version < "4.0" \ + --hash=sha256:138a97884044ae4c4799c80595b36c42066e4e933431e2e971611e251c84f96c \ + --hash=sha256:503e8922b0f3b5262a6fdf303a3a94eb2a07514812f1ca130b88f7c02dd25e2b +crudini==0.9.5 ; python_version >= "3.10" and python_version < "4.0" \ + --hash=sha256:59ae650f45af82a64afc33eb876909ee0c4888dc4e8711ef59731c1edfda5e24 \ + --hash=sha256:84bc208dc7d89571bdc3c99274259d0b32d6b3a692d4255524f2eb4b64e9195c +cryptography==44.0.1 ; python_version >= "3.10" and python_version < "4.0" \ + --hash=sha256:00918d859aa4e57db8299607086f793fa7813ae2ff5a4637e318a25ef82730f7 \ + --hash=sha256:1e8d181e90a777b63f3f0caa836844a1182f1f265687fac2115fcf245f5fbec3 \ + --hash=sha256:1f9a92144fa0c877117e9748c74501bea842f93d21ee00b0cf922846d9d0b183 \ + --hash=sha256:21377472ca4ada2906bc313168c9dc7b1d7ca417b63c1c3011d0c74b7de9ae69 \ + --hash=sha256:24979e9f2040c953a94bf3c6782e67795a4c260734e5264dceea65c8f4bae64a \ + --hash=sha256:2a46a89ad3e6176223b632056f321bc7de36b9f9b93b2cc1cccf935a3849dc62 \ + --hash=sha256:322eb03ecc62784536bc173f1483e76747aafeb69c8728df48537eb431cd1911 \ + --hash=sha256:436df4f203482f41aad60ed1813811ac4ab102765ecae7a2bbb1dbb66dcff5a7 \ + --hash=sha256:4f422e8c6a28cf8b7f883eb790695d6d45b0c385a2583073f3cec434cc705e1a \ + --hash=sha256:53f23339864b617a3dfc2b0ac8d5c432625c80014c25caac9082314e9de56f41 \ + --hash=sha256:5fed5cd6102bb4eb843e3315d2bf25fede494509bddadb81e03a859c1bc17b83 \ + --hash=sha256:610a83540765a8d8ce0f351ce42e26e53e1f774a6efb71eb1b41eb01d01c3d12 \ + --hash=sha256:6c8acf6f3d1f47acb2248ec3ea261171a671f3d9428e34ad0357148d492c7864 \ + --hash=sha256:6f76fdd6fd048576a04c5210d53aa04ca34d2ed63336d4abd306d0cbe298fddf \ + --hash=sha256:72198e2b5925155497a5a3e8c216c7fb3e64c16ccee11f0e7da272fa93b35c4c \ + --hash=sha256:887143b9ff6bad2b7570da75a7fe8bbf5f65276365ac259a5d2d5147a73775f2 \ + --hash=sha256:888fcc3fce0c888785a4876ca55f9f43787f4c5c1cc1e2e0da71ad481ff82c5b \ + --hash=sha256:8e6a85a93d0642bd774460a86513c5d9d80b5c002ca9693e63f6e540f1815ed0 \ + --hash=sha256:94f99f2b943b354a5b6307d7e8d19f5c423a794462bde2bf310c770ba052b1c4 \ + --hash=sha256:9b336599e2cb77b1008cb2ac264b290803ec5e8e89d618a5e978ff5eb6f715d9 \ + --hash=sha256:a2d8a7045e1ab9b9f803f0d9531ead85f90c5f2859e653b61497228b18452008 \ + --hash=sha256:b8272f257cf1cbd3f2e120f14c68bff2b6bdfcc157fafdee84a1b795efd72862 \ + --hash=sha256:bf688f615c29bfe9dfc44312ca470989279f0e94bb9f631f85e3459af8efc009 \ + 
--hash=sha256:d9c5b9f698a83c8bd71e0f4d3f9f839ef244798e5ffe96febfa9714717db7af7 \ + --hash=sha256:dd7c7e2d71d908dc0f8d2027e1604102140d84b155e658c20e8ad1304317691f \ + --hash=sha256:df978682c1504fc93b3209de21aeabf2375cb1571d4e61907b3e7a2540e83026 \ + --hash=sha256:e403f7f766ded778ecdb790da786b418a9f2394f36e8cc8b796cc056ab05f44f \ + --hash=sha256:eb3889330f2a4a148abead555399ec9a32b13b7c8ba969b72d8e500eb7ef84cd \ + --hash=sha256:f4daefc971c2d1f82f03097dc6f216744a6cd2ac0f04c68fb935ea2ba2a0d420 \ + --hash=sha256:f51f5705ab27898afda1aaa430f34ad90dc117421057782022edf0600bec5f14 \ + --hash=sha256:fd0ee90072861e276b0ff08bd627abec29e32a53b2be44e41dbcdf87cbee2b00 cxxfilt==0.3.0 ; python_version >= "3.10" and python_version < "4.0" \ --hash=sha256:774e85a8d0157775ed43276d89397d924b104135762d86b3a95f81f203094e07 \ --hash=sha256:7df6464ba5e8efbf0d8974c0b2c78b32546676f06059a83515dbdfa559b34214 -cython==0.29.24 ; python_version >= "3.10" and python_version < "4.0" \ - --hash=sha256:09ac3087ac7a3d489ebcb3fb8402e00c13d1a3a1c6bc73fd3b0d756a3e341e79 \ - --hash=sha256:0a142c6b862e6ed6b02209d543062c038c110585b5e32d1ad7c9717af4f07e41 \ - --hash=sha256:0d414458cb22f8a90d64260da6dace5d5fcebde43f31be52ca51f818c46db8cb \ - --hash=sha256:10cb3def9774fa99e4583617a5616874aed3255dc241fd1f4a3c2978c78e1c53 \ - --hash=sha256:112efa54a58293a4fb0acf0dd8e5b3736e95b595eee24dd88615648e445abe41 \ - --hash=sha256:166f9f29cd0058ce1a14a7b3a2458b849ed34b1ec5fd4108af3fdd2c24afcbb0 \ - --hash=sha256:2d9e61ed1056a3b6a4b9156b62297ad18b357a7948e57a2f49b061217696567e \ - --hash=sha256:2f41ef7edd76dd23315925e003f0c58c8585f3ab24be6885c4b3f60e77c82746 \ - --hash=sha256:37bcfa5df2a3009f49624695d917c3804fccbdfcdc5eda6378754a879711a4d5 \ - --hash=sha256:416046a98255eff97ec02077d20ebeaae52682dfca1c35aadf31260442b92514 \ - --hash=sha256:4cf4452f0e4d50e11701bca38f3857fe6fa16593e7fd6a4d5f7be66f611b7da2 \ - --hash=sha256:55b0ee28c2c8118bfb3ad9b25cf7a6cbd724e442ea96956e32ccd908d5e3e043 \ - --hash=sha256:5dd56d0be50073f0e54825a8bc3393852de0eed126339ecbca0ae149dba55cfc \ - --hash=sha256:5fa12ebafc2f688ea6d26ab6d1d2e634a9872509ba7135b902bb0d8b368fb04b \ - --hash=sha256:5fb977945a2111f6b64501fdf7ed0ec162cc502b84457fd648d6a558ea8de0d6 \ - --hash=sha256:60c958bcab0ff315b4036a949bed1c65334e1f6a69e17e9966d742febb59043a \ - --hash=sha256:661dbdea519d9cfb288867252b75fef73ffa8e8bb674cec27acf70646afb369b \ - --hash=sha256:6a2cf2ccccc25413864928dfd730c29db6f63eaf98206c1e600003a445ca7f58 \ - --hash=sha256:6ade74eece909fd3a437d9a5084829180751d7ade118e281e9824dd75eafaff2 \ - --hash=sha256:73ac33a4379056a02031baa4def255717fadb9181b5ac2b244792d53eae1c925 \ - --hash=sha256:76cbca0188d278e93d12ebdaf5990678e6e436485fdfad49dbe9b07717d41a3c \ - --hash=sha256:774cb8fd931ee1ba52c472bc1c19077cd6895c1b24014ae07bb27df59aed5ebe \ - --hash=sha256:821c2d416ad7d006b069657ee1034c0e0cb45bdbe9ab6ab631e8c495dfcfa4ac \ - --hash=sha256:84826ec1c11cda56261a252ddecac0c7d6b02e47e81b94f40b27b4c23c29c17c \ - --hash=sha256:854fe2193d3ad4c8b61932ff54d6dbe10c5fa8749eb8958d72cc0ab28243f833 \ - --hash=sha256:88dc3c250dec280b0489a83950b15809762e27232f4799b1b8d0bad503f5ab84 \ - --hash=sha256:8cb87777e82d1996aef6c146560a19270684271c9c669ba62ac6803b3cd2ff82 \ - --hash=sha256:91339ee4b465924a3ea4b2a9cec7f7227bc4cadf673ce859d24c2b9ef60b1214 \ - --hash=sha256:9164aeef1af6f837e4fc20402a31d256188ba4d535e262c6cb78caf57ad744f8 \ - --hash=sha256:a102cfa795c6b3b81a29bdb9dbec545367cd7f353c03e6f30a056fdfefd92854 \ - --hash=sha256:ad43e684ade673565f6f9d6638015112f6c7f11aa2a632167b79014f613f0f5f \ - 
--hash=sha256:afb521523cb46ddaa8d269b421f88ea2731fee05e65b952b96d4db760f5a2a1c \ - --hash=sha256:b28f92e617f540d3f21f8fd479a9c6491be920ffff672a4c61b7fc4d7f749f39 \ - --hash=sha256:bc05de569f811be1fcfde6756c9048ae518f0c4b6d9f8f024752c5365d934cac \ - --hash=sha256:cdf04d07c3600860e8c2ebaad4e8f52ac3feb212453c1764a49ac08c827e8443 \ - --hash=sha256:d8d1a087f35e39384303f5e6b75d465d6f29d746d7138eae9d3b6e8e6f769eae \ - --hash=sha256:eb2843f8cc01c645725e6fc690a84e99cdb266ce8ebe427cf3a680ff09f876aa \ - --hash=sha256:f2e9381497b12e8f622af620bde0d1d094035d79b899abb2ddd3a7891f535083 \ - --hash=sha256:f96411f0120b5cae483923aaacd2872af8709be4b46522daedc32f051d778385 +cython==3.0.11 ; python_version >= "3.10" and python_version < "4.0" \ + --hash=sha256:0b1d1f6f94cc5d42a4591f6d60d616786b9cd15576b112bc92a23131fcf38020 \ + --hash=sha256:0e25f6425ad4a700d7f77cd468da9161e63658837d1bc34861a9861a4ef6346d \ + --hash=sha256:0fc6fdd6fa493be7bdda22355689d5446ac944cd71286f6f44a14b0d67ee3ff5 \ + --hash=sha256:104d6f2f2c827ccc5e9e42c80ef6773a6aa94752fe6bc5b24a4eab4306fb7f07 \ + --hash=sha256:11996c40c32abf843ba652a6d53cb15944c88d91f91fc4e6f0028f5df8a8f8a1 \ + --hash=sha256:13062ce556a1e98d2821f7a0253b50569fdc98c36efd6653a65b21e3f8bbbf5f \ + --hash=sha256:14701edb3107a5d9305a82d9d646c4f28bfecbba74b26cc1ee2f4be08f602057 \ + --hash=sha256:187685e25e037320cae513b8cc4bf9dbc4465c037051aede509cbbf207524de2 \ + --hash=sha256:1dd47865f4c0a224da73acf83d113f93488d17624e2457dce1753acdfb1cc40c \ + --hash=sha256:221de0b48bf387f209003508e602ce839a80463522fc6f583ad3c8d5c890d2c1 \ + --hash=sha256:2252b5aa57621848e310fe7fa6f7dce5f73aa452884a183d201a8bcebfa05a00 \ + --hash=sha256:2a8ea2e7e2d3bc0d8630dafe6c4a5a89485598ff8a61885b74f8ed882597efd5 \ + --hash=sha256:301bde949b4f312a1c70e214b0c3bc51a3f955d466010d2f68eb042df36447b0 \ + --hash=sha256:3379c6521e25aa6cd7703bb7d635eaca75c0f9c7f1b0fdd6dd15a03bfac5f68d \ + --hash=sha256:351955559b37e6c98b48aecb178894c311be9d731b297782f2b78d111f0c9015 \ + --hash=sha256:3699391125ab344d8d25438074d1097d9ba0fb674d0320599316cfe7cf5f002a \ + --hash=sha256:3999fb52d3328a6a5e8c63122b0a8bd110dfcdb98dda585a3def1426b991cba7 \ + --hash=sha256:3f2b062f6df67e8a56c75e500ca330cf62c85ac26dd7fd006f07ef0f83aebfa3 \ + --hash=sha256:3ff8ac1f0ecd4f505db4ab051e58e4531f5d098b6ac03b91c3b902e8d10c67b3 \ + --hash=sha256:421017466e9260aca86823974e26e158e6358622f27c0f4da9c682f3b6d2e624 \ + --hash=sha256:4341d6a64d47112884e0bcf31e6c075268220ee4cd02223047182d4dda94d637 \ + --hash=sha256:44292aae17524abb4b70a25111fe7dec1a0ad718711d47e3786a211d5408fdaa \ + --hash=sha256:46aec30f217bdf096175a1a639203d44ac73a36fe7fa3dd06bd012e8f39eca0f \ + --hash=sha256:473d35681d9f93ce380e6a7c8feb2d65fc6333bd7117fbc62989e404e241dbb0 \ + --hash=sha256:4e9a8d92978b15a0c7ca7f98447c6c578dc8923a0941d9d172d0b077cb69c576 \ + --hash=sha256:52186101d51497519e99b60d955fd5cb3bf747c67f00d742e70ab913f1e42d31 \ + --hash=sha256:52205347e916dd65d2400b977df4c697390c3aae0e96275a438cc4ae85dadc08 \ + --hash=sha256:525d09b3405534763fa73bd78c8e51ac8264036ce4c16d37dfd1555a7da6d3a7 \ + --hash=sha256:53b6072a89049a991d07f42060f65398448365c59c9cb515c5925b9bdc9d71f8 \ + --hash=sha256:598699165cfa7c6d69513ee1bffc9e1fdd63b00b624409174c388538aa217975 \ + --hash=sha256:63f2c892e9f9c1698ecfee78205541623eb31cd3a1b682668be7ac12de94aa8e \ + --hash=sha256:6823aef13669a32caf18bbb036de56065c485d9f558551a9b55061acf9c4c27f \ + --hash=sha256:6fb68cef33684f8cc97987bee6ae919eee7e18ee6a3ad7ed9516b8386ef95ae6 \ + --hash=sha256:7146dd2af8682b4ca61331851e6aebce9fe5158e75300343f80c07ca80b1faff \ 
+ --hash=sha256:75ba1c70b6deeaffbac123856b8d35f253da13552207aa969078611c197377e4 \ + --hash=sha256:780f89c95b8aec1e403005b3bf2f0a2afa060b3eba168c86830f079339adad89 \ + --hash=sha256:790263b74432cb997740d73665f4d8d00b9cd1cecbdd981d93591ddf993d4f12 \ + --hash=sha256:8948802e1f5677a673ea5d22a1e7e273ca5f83e7a452786ca286eebf97cee67c \ + --hash=sha256:8acdc87e9009110adbceb7569765eb0980129055cc954c62f99fe9f094c9505e \ + --hash=sha256:8b14c24f1dc4c4c9d997cca8d1b7fb01187a218aab932328247dcf5694a10102 \ + --hash=sha256:989899a85f0d9a57cebb508bd1f194cb52f0e3f7e22ac259f33d148d6422375c \ + --hash=sha256:9c02361af9bfa10ff1ccf967fc75159e56b1c8093caf565739ed77a559c1f29f \ + --hash=sha256:a0583076c4152b417a3a8a5d81ec02f58c09b67d3f22d5857e64c8734ceada8c \ + --hash=sha256:a1f4cbc70f6b7f0c939522118820e708e0d490edca42d852fa8004ec16780be2 \ + --hash=sha256:a690f2ff460682ea985e8d38ec541be97e0977fa0544aadc21efc116ff8d7579 \ + --hash=sha256:a75d45fbc20651c1b72e4111149fed3b33d270b0a4fb78328c54d965f28d55e1 \ + --hash=sha256:aedceb6090a60854b31bf9571dc55f642a3fa5b91f11b62bcef167c52cac93d8 \ + --hash=sha256:af91497dc098718e634d6ec8f91b182aea6bb3690f333fc9a7777bc70abe8810 \ + --hash=sha256:b4ab2b92a3e6ed552adbe9350fd2ef3aa0cc7853cf91569f9dbed0c0699bbeab \ + --hash=sha256:b8c7e514075696ca0f60c337f9e416e61d7ccbc1aa879a56c39181ed90ec3059 \ + --hash=sha256:bcd29945fafd12484cf37b1d84f12f0e7a33ba3eac5836531c6bd5283a6b3a0c \ + --hash=sha256:bfa550d9ae39e827a6e7198076df763571cb53397084974a6948af558355e028 \ + --hash=sha256:c3d68751668c66c7a140b6023dba5d5d507f72063407bb609d3a5b0f3b8dfbe4 \ + --hash=sha256:c69d5cad51388522b98a99b4be1b77316de85b0c0523fa865e0ea58bbb622e0a \ + --hash=sha256:c8eed5c015685106db15dd103fd040948ddca9197b1dd02222711815ea782a27 \ + --hash=sha256:cee29846471ce60226b18e931d8c1c66a158db94853e3e79bc2da9bd22345008 \ + --hash=sha256:d02f4ebe15aac7cdacce1a628e556c1983f26d140fd2e0ac5e0a090e605a2d38 \ + --hash=sha256:d566a4e09b8979be8ab9f843bac0dd216c81f5e5f45661a9b25cd162ed80508c \ + --hash=sha256:d80a7232938d523c1a12f6b1794ab5efb1ae77ad3fde79de4bb558d8ab261619 \ + --hash=sha256:d89a82937ce4037f092e9848a7bbcc65bc8e9fc9aef2bb74f5c15e7d21a73080 \ + --hash=sha256:da394654c6da15c1d37f0b7ec5afd325c69a15ceafee2afba14b67a5df8a82c8 \ + --hash=sha256:ddd1fe25af330f4e003421636746a546474e4ccd8f239f55d2898d80983d20ed \ + --hash=sha256:e6dd395d1a704e34a9fac00b25f0036dce6654c6b898be6f872ac2bb4f2eda48 \ + --hash=sha256:eeb6860b0f4bfa402de8929833fe5370fa34069c7ebacb2d543cb017f21fb891 \ + --hash=sha256:f3953d2f504176f929862e5579cfc421860c33e9707f585d70d24e1096accdf7 \ + --hash=sha256:f988f7f8164a6079c705c39e2d75dbe9967e3dacafe041420d9af7b9ee424162 daphne==3.0.2 ; python_version >= "3.10" and python_version < "4.0" \ --hash=sha256:76ffae916ba3aa66b46996c14fa713e46004788167a4873d647544e750e0e99f \ --hash=sha256:a9af943c79717bc52fe64a3c236ae5d3adccc8b5be19c881b442d2c3db233393 -defusedxml==0.7.1 ; python_version >= "3.10" and python_version < "4.0" \ - --hash=sha256:1bb3032db185915b62d7c6209c5a8792be6a32ab2fedacc84e01b52c51aa3e69 \ - --hash=sha256:a352e7e428770286cc899e2542b6cdaedb2b4953ff269a210103ec58f6198a61 -die-python==0.1.0 ; python_version >= "3.10" and python_version < "4.0" \ - --hash=sha256:1013431cc76e9de762f0d7c375699b7a5d6ba43cd812664fa9d891300476ba26 \ - --hash=sha256:215896714c97b8b679685d2593afe0508f7af234630b0e591f5ad5ca133106b6 \ - --hash=sha256:21f58eb5da2dabc06c9487705e272cb89a7436f2637a6f79d839b9cb3cb1726c \ - --hash=sha256:2e4d49917ab727f3e89e42713b0b71ce5e45e29c1ad7885d5a92d2a5555fde40 \ - 
--hash=sha256:31d064d6b7a31ac22529ad7cdf3be1de4e6dee7b005efdd9224e8a395f3b2b8e \ - --hash=sha256:4293a7ef32ff539db9abb011192b1ac007229ebb4f5f70cc09902ceea4a2635b \ - --hash=sha256:5a0b9a46f8007d77939415422d573c5e659ce712b6cea2ed9f2c825580073ba4 \ - --hash=sha256:613b9edba2337179df1b52ea97dfdf2ba5da4da1864a3887ec098ffd274540a5 \ - --hash=sha256:6babc19fac759724b316bc1ee4d23ce03235c4aac9dccfb6f174281ebfd353c5 \ - --hash=sha256:721ab6005cab42dee9b318a1f2a7c8e4c3dc6e5290e20a3cb716991f6ddd75e0 \ - --hash=sha256:a1a6932a1fdda99f37676d7d3925595eb44f362632f2168163f14f5637f2734a \ - --hash=sha256:afd9ccfb75adae7e785ac52287b6a017d31a46285cb64e060d6a2a34ad0c381c \ - --hash=sha256:ec8756cd695415b0fe8eccb6b2475ba9bff1a5f677dc1e845325ceed7e7c2512 \ - --hash=sha256:ef51d19d7d91a8bd65abf56ff01adff6cc5289fd8ada2c4362398a76d7d490f4 \ - --hash=sha256:f09c98d928a0f32c72b5bbed90fb40ba1afcb81b3ab3e362104a6cb23738b4ae -django-allauth==0.54.0 ; python_version >= "3.10" and python_version < "4.0" \ - --hash=sha256:120e265f802b65738899c6cb627b827fde46a4d03067034c633f516c2adf3e3e -django-crispy-forms==1.14.0 ; python_version >= "3.10" and python_version < "4.0" \ - --hash=sha256:35887b8851a931374dd697207a8f56c57a9c5cb9dbf0b9fa54314da5666cea5b \ - --hash=sha256:bc4d2037f6de602d39c0bc452ac3029d1f5d65e88458872cc4dbc01c3a400604 -django-csp==3.7 ; python_version >= "3.10" and python_version < "4.0" \ - --hash=sha256:01443a07723f9a479d498bd7bb63571aaa771e690f64bde515db6cdb76e8041a \ - --hash=sha256:01eda02ad3f10261c74131cdc0b5a6a62b7c7ad4fd017fbefb7a14776e0a9727 -django-extensions==3.2.1 ; python_version >= "3.10" and python_version < "4.0" \ - --hash=sha256:2a4f4d757be2563cd1ff7cfdf2e57468f5f931cc88b23cf82ca75717aae504a4 \ - --hash=sha256:421464be390289513f86cb5e18eb43e5dc1de8b4c27ba9faa3b91261b0d67e09 -django-ratelimit==3.0.1 ; python_version >= "3.10" and python_version < "4.0" \ - --hash=sha256:73223d860abd5c5d7b9a807fabb39a6220068129b514be8d78044b52607ab154 \ - --hash=sha256:857e797f23de948b204a31dba9d88aea3ce731b7a5d926d0240c772e19b5486f -django-recaptcha==3.0.0 ; python_version >= "3.10" and python_version < "4.0" \ - --hash=sha256:1aed69fd6ac8fd9e99e52665392ae6748f8b6339ace656fad779fe0c6c915a52 \ - --hash=sha256:253197051288923cae675d7eff91b619e3775311292a5dbaf27a8a55ffebc670 +django-allauth==65.3.1 ; python_version >= "3.10" and python_version < "4.0" \ + --hash=sha256:e02e951b71a2753a746459f2efa114c7c72bf2cef6887dbe8607a577c0350587 +django-crispy-forms==2.3 ; python_version >= "3.10" and python_version < "4.0" \ + --hash=sha256:2db17ae08527201be1273f0df789e5f92819e23dd28fec69cffba7f3762e1a38 \ + --hash=sha256:efc4c31e5202bbec6af70d383a35e12fc80ea769d464fb0e7fe21768bb138a20 +django-csp==3.8 ; python_version >= "3.10" and python_version < "4.0" \ + --hash=sha256:19b2978b03fcd73517d7d67acbc04fbbcaec0facc3e83baa502965892d1e0719 \ + --hash=sha256:ef0f1a9f7d8da68ae6e169c02e9ac661c0ecf04db70e0d1d85640512a68471c0 +django-extensions==3.2.3 ; python_version >= "3.10" and python_version < "4.0" \ + --hash=sha256:44d27919d04e23b3f40231c4ab7af4e61ce832ef46d610cc650d53e68328410a \ + --hash=sha256:9600b7562f79a92cbf1fde6403c04fee314608fefbb595502e34383ae8203401 +django-ratelimit==4.1.0 ; python_version >= "3.10" and python_version < "4.0" \ + --hash=sha256:555943b283045b917ad59f196829530d63be2a39adb72788d985b90c81ba808b \ + --hash=sha256:d047a31cf94d83ef1465d7543ca66c6fc16695559b5f8d814d1b51df15110b92 +django-recaptcha==4.0.0 ; python_version >= "3.10" and python_version < "4.0" \ + 
--hash=sha256:0d912d5c7c009df4e47accd25029133d47a74342dbd2a8edc2877b6bffa971a3 \ + --hash=sha256:5316438f97700c431d65351470d1255047e3f2cd9af0f2f13592b637dad9213e django-settings-export==1.2.1 ; python_version >= "3.10" and python_version < "4.0" \ --hash=sha256:fceeae49fc597f654c1217415d8e049fc81c930b7154f5d8f28c432db738ff79 -django==4.2.16 ; python_version >= "3.10" and python_version < "4.0" \ - --hash=sha256:1ddc333a16fc139fd253035a1606bb24261951bbc3a6ca256717fa06cc41a898 \ - --hash=sha256:6f1616c2786c408ce86ab7e10f792b8f15742f7b7b7460243929cb371e7f1dad +django==5.1.9 ; python_version >= "3.10" and python_version < "4.0" \ + --hash=sha256:2fd1d4a0a66a5ba702699eb692e75b0d828b73cc2f4e1fc4b6a854a918967411 \ + --hash=sha256:565881bdd0eb67da36442e9ac788bda90275386b549070d70aee86327781a4fc djangorestframework==3.15.2 ; python_version >= "3.10" and python_version < "4.0" \ --hash=sha256:2b8871b062ba1aefc2de01f773875441a961fefbf79f5eed1e32b2f096944b20 \ --hash=sha256:36fe88cd2d6c6bec23dca9804bab2ba5517a8bb9d8f47ebc68981b56840107ad dncil==1.0.2 ; python_version >= "3.10" and python_version < "4.0" \ --hash=sha256:1557675c2d1351d3260509881cff0383309f81cda4944ed2c3f5cc352953aa15 \ --hash=sha256:69d389e9b850fa9afa2e37ca252b01476379991eee88fd33ab76f924d36dd68d -dnfile==0.15.0 ; python_version >= "3.10" and python_version < "4.0" \ - --hash=sha256:d60239de76035bed22f3c131925cf8d484f44a9da03c19659a01a615309add55 \ - --hash=sha256:e4ae8803a59d8f845c11524e8b007104b43c90adc2fb0a81dcdc2972c47dfc80 -dnspython==2.6.1 ; python_version >= "3.10" and python_version < "4.0" \ - --hash=sha256:5ef3b9680161f6fa89daf8ad451b5f1a33b18ae8a1c6778cdf4b43f08c0a6e50 \ - --hash=sha256:e8f0f9c23a7b7cb99ded64e6c3a6f3e701d78f50c55e002b839dea7225cff7cc +dnfile==0.15.1 ; python_version >= "3.10" and python_version < "4.0" \ + --hash=sha256:1529cf0f976b1382f60a3c56b2e0def90f3486e41193ffd34677e74563c8426c \ + --hash=sha256:585c8e3e4a29824402430a0a8b7e7ae82c040fc17eeb3a06758fdceebe2d923e +dnspython==2.7.0 ; python_version >= "3.10" and python_version < "4.0" \ + --hash=sha256:b4c34b7d10b51bcc3a5071e7b8dee77939f1e878477eeecc965e9835f63c6c86 \ + --hash=sha256:ce9c432eda0dc91cf618a5cedf1a4e142651196bbcd2c80e89ed5a907e5cfaf1 dpkt==1.9.6 ; python_version >= "3.10" and python_version < "4.0" \ --hash=sha256:b1739c594297b2b6321dfbf133e3f8dcf54c0ef54cb5739d204331d34a0d8fe4 \ --hash=sha256:b5737010fd420d142e02ed04fa616edd1fc05e414980baef594f72287c875eef easygui==0.98.3 ; python_version >= "3.10" and python_version < "4.0" \ --hash=sha256:33498710c68b5376b459cd3fc48d1d1f33822139eb3ed01defbc0528326da3ba \ --hash=sha256:d653ff79ee1f42f63b5a090f2f98ce02335d86ad8963b3ce2661805cafe99a04 -exceptiongroup==1.2.2 ; python_version >= "3.10" and python_version < "3.11" \ +editorconfig==0.17.0 ; python_version >= "3.10" and python_version < "4.0" \ + --hash=sha256:8739052279699840065d3a9f5c125d7d5a98daeefe53b0e5274261d77cb49aa2 \ + --hash=sha256:fe491719c5f65959ec00b167d07740e7ffec9a3f362038c72b289330b9991dfc +exceptiongroup==1.2.2 ; python_version == "3.10" \ --hash=sha256:3111b9d131c238bec2f8f516e123e14ba243563fb135d3fe885990585aa7795b \ --hash=sha256:47c2edf7c6738fafb49fd34290706d1a1a2f4d1c6df275526b62cbb4aa5393cc -filelock==3.16.1 ; python_version >= "3.10" and python_version < "4.0" \ - --hash=sha256:2082e5703d51fbf98ea75855d9d5527e33d8ff23099bec374a134febee6946b0 \ - --hash=sha256:c249fbfcd5db47e5e2d6d62198e565475ee65e4831e2561c8e313fa7eb961435 -flare-capa==7.3.0 ; python_version >= "3.10" and python_version < "4.0" \ - 
--hash=sha256:21be4c9ce0af093bb0590ec7fd807096483d16c68753a375576420fe8ebcfecf \ - --hash=sha256:f7a7f35f4dce1aca723fcc792a6afbc384a696d889ef891649dc823d948e43ff +filelock==3.17.0 ; python_version >= "3.10" and python_version < "4.0" \ + --hash=sha256:533dc2f7ba78dc2f0f531fc6c4940addf7b70a481e269a5a3b93be94ffbe8338 \ + --hash=sha256:ee4e77401ef576ebb38cd7f13b9b28893194acc20a8e68e18730ba9c0e54660e +flare-capa==9.1.0 ; python_version >= "3.10" and python_version < "4.0" \ + --hash=sha256:7df5c8033b9ea568569611b5f1948b8257c055a191a01ab1e67ce3e407c86e21 \ + --hash=sha256:9e3214c66dd13c90e379c74d56a99fee2634c14980d55395535f3a39fe9159bc funcy==2.0 ; python_version >= "3.10" and python_version < "4.0" \ --hash=sha256:3963315d59d41c6f30c04bc910e10ab50a3ac4a225868bfa96feed133df075cb \ --hash=sha256:53df23c8bb1651b12f095df764bfb057935d49537a56de211b098f4c79614bb0 @@ -495,52 +520,59 @@ greenlet==3.0.3 ; python_version >= "3.10" and python_version < "4.0" \ --hash=sha256:fcd2469d6a2cf298f198f0487e0a5b1a47a42ca0fa4dfd1b6862c999f018ebbf \ --hash=sha256:fd096eb7ffef17c456cfa587523c5f92321ae02427ff955bebe9e3c63bc9f0da \ --hash=sha256:fe754d231288e1e64323cfad462fcee8f0288654c10bdf4f603a39ed923bef33 -gunicorn==22.0.0 ; python_version >= "3.10" and python_version < "4.0" \ - --hash=sha256:350679f91b24062c86e386e198a15438d53a7a8207235a78ba1b53df4c4378d9 \ - --hash=sha256:4a0b436239ff76fb33f11c07a16482c521a7e09c1ce3cc293c2330afe01bec63 +gunicorn==23.0.0 ; python_version >= "3.10" and python_version < "4.0" \ + --hash=sha256:ec400d38950de4dfd418cff8328b2c8faed0edb0d517d3394e457c317908ca4d \ + --hash=sha256:f014447a0101dc57e294f6c18ca6b40227a4c90e9bdb586042628030cba004ec h11==0.14.0 ; python_version >= "3.10" and python_version < "4.0" \ --hash=sha256:8f19fbbe99e72420ff35c00b27a34cb9937e902a8b810e2c88300c6f0a3b699d \ --hash=sha256:e3fe4ac4b851c468cc8363d500db52c2ead036020723024a109d37346efaa761 -httptools==0.6.1 ; python_version >= "3.10" and python_version < "4.0" \ - --hash=sha256:00d5d4b68a717765b1fabfd9ca755bd12bf44105eeb806c03d1962acd9b8e563 \ - --hash=sha256:0ac5a0ae3d9f4fe004318d64b8a854edd85ab76cffbf7ef5e32920faef62f142 \ - --hash=sha256:0cf2372e98406efb42e93bfe10f2948e467edfd792b015f1b4ecd897903d3e8d \ - --hash=sha256:1ed99a373e327f0107cb513b61820102ee4f3675656a37a50083eda05dc9541b \ - --hash=sha256:3c3b214ce057c54675b00108ac42bacf2ab8f85c58e3f324a4e963bbc46424f4 \ - --hash=sha256:3e802e0b2378ade99cd666b5bffb8b2a7cc8f3d28988685dc300469ea8dd86cb \ - --hash=sha256:3f30d3ce413088a98b9db71c60a6ada2001a08945cb42dd65a9a9fe228627658 \ - --hash=sha256:405784577ba6540fa7d6ff49e37daf104e04f4b4ff2d1ac0469eaa6a20fde084 \ - --hash=sha256:48ed8129cd9a0d62cf4d1575fcf90fb37e3ff7d5654d3a5814eb3d55f36478c2 \ - --hash=sha256:4bd3e488b447046e386a30f07af05f9b38d3d368d1f7b4d8f7e10af85393db97 \ - --hash=sha256:4f0f8271c0a4db459f9dc807acd0eadd4839934a4b9b892f6f160e94da309837 \ - --hash=sha256:5cceac09f164bcba55c0500a18fe3c47df29b62353198e4f37bbcc5d591172c3 \ - --hash=sha256:639dc4f381a870c9ec860ce5c45921db50205a37cc3334e756269736ff0aac58 \ - --hash=sha256:678fcbae74477a17d103b7cae78b74800d795d702083867ce160fc202104d0da \ - --hash=sha256:6a4f5ccead6d18ec072ac0b84420e95d27c1cdf5c9f1bc8fbd8daf86bd94f43d \ - --hash=sha256:6f58e335a1402fb5a650e271e8c2d03cfa7cea46ae124649346d17bd30d59c90 \ - --hash=sha256:75c8022dca7935cba14741a42744eee13ba05db00b27a4b940f0d646bd4d56d0 \ - --hash=sha256:7a7ea483c1a4485c71cb5f38be9db078f8b0e8b4c4dc0210f531cdd2ddac1ef1 \ - --hash=sha256:7d9ceb2c957320def533671fc9c715a80c47025139c8d1f3797477decbc6edd2 
\ - --hash=sha256:7ebaec1bf683e4bf5e9fbb49b8cc36da482033596a415b3e4ebab5a4c0d7ec5e \ - --hash=sha256:85ed077c995e942b6f1b07583e4eb0a8d324d418954fc6af913d36db7c05a5a0 \ - --hash=sha256:8ae5b97f690badd2ca27cbf668494ee1b6d34cf1c464271ef7bfa9ca6b83ffaf \ - --hash=sha256:8b0bb634338334385351a1600a73e558ce619af390c2b38386206ac6a27fecfc \ - --hash=sha256:8e216a038d2d52ea13fdd9b9c9c7459fb80d78302b257828285eca1c773b99b3 \ - --hash=sha256:93ad80d7176aa5788902f207a4e79885f0576134695dfb0fefc15b7a4648d503 \ - --hash=sha256:95658c342529bba4e1d3d2b1a874db16c7cca435e8827422154c9da76ac4e13a \ - --hash=sha256:95fb92dd3649f9cb139e9c56604cc2d7c7bf0fc2e7c8d7fbd58f96e35eddd2a3 \ - --hash=sha256:97662ce7fb196c785344d00d638fc9ad69e18ee4bfb4000b35a52efe5adcc949 \ - --hash=sha256:9bb68d3a085c2174c2477eb3ffe84ae9fb4fde8792edb7bcd09a1d8467e30a84 \ - --hash=sha256:b512aa728bc02354e5ac086ce76c3ce635b62f5fbc32ab7082b5e582d27867bb \ - --hash=sha256:c6e26c30455600b95d94b1b836085138e82f177351454ee841c148f93a9bad5a \ - --hash=sha256:d2f6c3c4cb1948d912538217838f6e9960bc4a521d7f9b323b3da579cd14532f \ - --hash=sha256:dcbab042cc3ef272adc11220517278519adf8f53fd3056d0e68f0a6f891ba94e \ - --hash=sha256:e0b281cf5a125c35f7f6722b65d8542d2e57331be573e9e88bc8b0115c4a7a81 \ - --hash=sha256:e57997ac7fb7ee43140cc03664de5f268813a481dff6245e0075925adc6aa185 \ - --hash=sha256:fe467eb086d80217b7584e61313ebadc8d187a4d95bb62031b7bab4b205c3ba3 -humanize==4.10.0 ; python_version >= "3.10" and python_version < "4.0" \ - --hash=sha256:06b6eb0293e4b85e8d385397c5868926820db32b9b654b932f57fa41c23c9978 \ - --hash=sha256:39e7ccb96923e732b5c2e27aeaa3b10a8dfeeba3eb965ba7b74a3eb0e30040a6 +httptools==0.6.4 ; python_version >= "3.10" and python_version < "4.0" \ + --hash=sha256:0614154d5454c21b6410fdf5262b4a3ddb0f53f1e1721cfd59d55f32138c578a \ + --hash=sha256:0e563e54979e97b6d13f1bbc05a96109923e76b901f786a5eae36e99c01237bd \ + --hash=sha256:16e603a3bff50db08cd578d54f07032ca1631450ceb972c2f834c2b860c28ea2 \ + --hash=sha256:288cd628406cc53f9a541cfaf06041b4c71d751856bab45e3702191f931ccd17 \ + --hash=sha256:28908df1b9bb8187393d5b5db91435ccc9c8e891657f9cbb42a2541b44c82fc8 \ + --hash=sha256:322d20ea9cdd1fa98bd6a74b77e2ec5b818abdc3d36695ab402a0de8ef2865a3 \ + --hash=sha256:342dd6946aa6bda4b8f18c734576106b8a31f2fe31492881a9a160ec84ff4bd5 \ + --hash=sha256:345c288418f0944a6fe67be8e6afa9262b18c7626c3ef3c28adc5eabc06a68da \ + --hash=sha256:3c73ce323711a6ffb0d247dcd5a550b8babf0f757e86a52558fe5b86d6fefcc0 \ + --hash=sha256:40a5ec98d3f49904b9fe36827dcf1aadfef3b89e2bd05b0e35e94f97c2b14721 \ + --hash=sha256:40b0f7fe4fd38e6a507bdb751db0379df1e99120c65fbdc8ee6c1d044897a636 \ + --hash=sha256:40dc6a8e399e15ea525305a2ddba998b0af5caa2566bcd79dcbe8948181eeaff \ + --hash=sha256:4b36913ba52008249223042dca46e69967985fb4051951f94357ea681e1f5dc0 \ + --hash=sha256:4d87b29bd4486c0093fc64dea80231f7c7f7eb4dc70ae394d70a495ab8436071 \ + --hash=sha256:4e93eee4add6493b59a5c514da98c939b244fce4a0d8879cd3f466562f4b7d5c \ + --hash=sha256:59e724f8b332319e2875efd360e61ac07f33b492889284a3e05e6d13746876f4 \ + --hash=sha256:69422b7f458c5af875922cdb5bd586cc1f1033295aa9ff63ee196a87519ac8e1 \ + --hash=sha256:703c346571fa50d2e9856a37d7cd9435a25e7fd15e236c397bf224afaa355fe9 \ + --hash=sha256:85071a1e8c2d051b507161f6c3e26155b5c790e4e28d7f236422dbacc2a9cc44 \ + --hash=sha256:856f4bc0478ae143bad54a4242fccb1f3f86a6e1be5548fecfd4102061b3a083 \ + --hash=sha256:85797e37e8eeaa5439d33e556662cc370e474445d5fab24dcadc65a8ffb04003 \ + --hash=sha256:90d96a385fa941283ebd231464045187a31ad932ebfa541be8edf5b3c2328959 \ + 
--hash=sha256:94978a49b8f4569ad607cd4946b759d90b285e39c0d4640c6b36ca7a3ddf2efc \ + --hash=sha256:aafe0f1918ed07b67c1e838f950b1c1fabc683030477e60b335649b8020e1076 \ + --hash=sha256:ab9ba8dcf59de5181f6be44a77458e45a578fc99c31510b8c65b7d5acc3cf490 \ + --hash=sha256:ade273d7e767d5fae13fa637f4d53b6e961fb7fd93c7797562663f0171c26660 \ + --hash=sha256:b799de31416ecc589ad79dd85a0b2657a8fe39327944998dea368c1d4c9e55e6 \ + --hash=sha256:c26f313951f6e26147833fc923f78f95604bbec812a43e5ee37f26dc9e5a686c \ + --hash=sha256:ca80b7485c76f768a3bc83ea58373f8db7b015551117375e4918e2aa77ea9b50 \ + --hash=sha256:d1ffd262a73d7c28424252381a5b854c19d9de5f56f075445d33919a637e3547 \ + --hash=sha256:d3f0d369e7ffbe59c4b6116a44d6a8eb4783aae027f2c0b366cf0aa964185dba \ + --hash=sha256:d54efd20338ac52ba31e7da78e4a72570cf729fac82bc31ff9199bedf1dc7440 \ + --hash=sha256:dacdd3d10ea1b4ca9df97a0a303cbacafc04b5cd375fa98732678151643d4988 \ + --hash=sha256:db353d22843cf1028f43c3651581e4bb49374d85692a85f95f7b9a130e1b2cab \ + --hash=sha256:db78cb9ca56b59b016e64b6031eda5653be0589dba2b1b43453f6e8b405a0970 \ + --hash=sha256:deee0e3343f98ee8047e9f4c5bc7cedbf69f5734454a94c38ee829fb2d5fa3c1 \ + --hash=sha256:df017d6c780287d5c80601dafa31f17bddb170232d85c066604d8558683711a2 \ + --hash=sha256:df959752a0c2748a65ab5387d08287abf6779ae9165916fe053e68ae1fbdc47f \ + --hash=sha256:ec4f178901fa1834d4a060320d2f3abc5c9e39766953d038f1458cb885f47e81 \ + --hash=sha256:f47f8ed67cc0ff862b84a1189831d1d33c963fb3ce1ee0c65d3b0cbe7b711069 \ + --hash=sha256:f8787367fbdfccae38e35abf7641dafc5310310a5987b689f4c32cc8cc3ee975 \ + --hash=sha256:f9eb89ecf8b290f2e293325c646a211ff1c2493222798bb80a530c5e7502494f \ + --hash=sha256:fc411e1c0a7dcd2f902c7c48cf079947a7e65b5485dea9decb82b9105ca71a43 +humanize==4.11.0 ; python_version >= "3.10" and python_version < "4.0" \ + --hash=sha256:b53caaec8532bcb2fff70c8826f904c35943f8cecaca29d272d9df38092736c0 \ + --hash=sha256:e66f36020a2d5a974c504bd2555cf770621dbdbb6d82f94a6857c0b1ea2608be hyperlink==21.0.0 ; python_version >= "3.10" and python_version < "4.0" \ --hash=sha256:427af957daa58bc909471c6c40f74c5450fa123dd093fc53efd2e91d2705a56b \ --hash=sha256:e6b14c37ecb73e89c77d78cdb4c2cc8f3fb59a885c5b3f819ff4ed80f25af1b4 @@ -562,131 +594,301 @@ iniparse==0.5 ; python_version >= "3.10" and python_version < "4.0" \ --hash=sha256:db6ef1d8a02395448e0e7b17ac0aa28b8d338b632bbd1ffca08c02ddae32cf97 intervaltree==3.1.0 ; python_version >= "3.10" and python_version < "4.0" \ --hash=sha256:902b1b88936918f9b2a19e0e5eb7ccb430ae45cde4f39ea4b36932920d33952d -jinja2==3.1.4 ; python_version >= "3.10" and python_version < "4.0" \ - --hash=sha256:4a3aee7acbbe7303aede8e9648d13b8bf88a429282aa6122a993f0ac800cb369 \ - --hash=sha256:bc5dd2abb727a5319567b7a813e6a2e7318c39f4f487cfe6c89c6f9c7d25197d -lnkparse3==1.2.0 ; python_version >= "3.10" and python_version < "4.0" \ - --hash=sha256:102b2aba6c2896127cb719f814a8579210368f9277fd5ec0d0151fe070166e1d \ - --hash=sha256:b97f9a3dfffa62ecbd5f1f6561d8b5b75b0045241482b4a980657d5aac696ee3 -maco==1.1.8 ; python_version >= "3.10" and python_version < "4.0" \ - --hash=sha256:ab2d1d8e846c0abc455d16f718ba71dda5492ddc22533484156090aa4439fb06 \ - --hash=sha256:e0985efdf645d3c55e3d4d4f2bf40b8d2260fa4add608bb8e8fdefba0500cb4a -mako==1.3.5 ; python_version >= "3.10" and python_version < "4.0" \ - --hash=sha256:260f1dbc3a519453a9c856dedfe4beb4e50bd5a26d96386cb6c80856556bb91a \ - --hash=sha256:48dbc20568c1d276a2698b36d968fa76161bf127194907ea6fc594fa81f943bc +jinja2==3.1.6 ; python_version >= "3.10" and python_version < "4.0" \ + 
--hash=sha256:0137fb05990d35f1275a587e9aee6d56da821fc83491a0fb838183be43f66d6d \ + --hash=sha256:85ece4451f492d0c13c5dd7c13a64681a86afae63a5f347908daf103ce6d2f67 +jsbeautifier==1.15.1 ; python_version >= "3.10" and python_version < "4.0" \ + --hash=sha256:ebd733b560704c602d744eafc839db60a1ee9326e30a2a80c4adb8718adc1b24 +lnkparse3==1.5.0 ; python_version >= "3.10" and python_version < "4.0" \ + --hash=sha256:3ecbd8f4107be07b8e8d7b770daa53271abf66222ee892618d30f86952e1121a \ + --hash=sha256:56b549389254f4d25375621249aa3a8c31f1dabf375e88bf7dc8c73a0f4f8f1e +lxml==5.3.0 ; python_version >= "3.10" and python_version < "4.0" \ + --hash=sha256:01220dca0d066d1349bd6a1726856a78f7929f3878f7e2ee83c296c69495309e \ + --hash=sha256:02ced472497b8362c8e902ade23e3300479f4f43e45f4105c85ef43b8db85229 \ + --hash=sha256:052d99051e77a4f3e8482c65014cf6372e61b0a6f4fe9edb98503bb5364cfee3 \ + --hash=sha256:07da23d7ee08577760f0a71d67a861019103e4812c87e2fab26b039054594cc5 \ + --hash=sha256:094cb601ba9f55296774c2d57ad68730daa0b13dc260e1f941b4d13678239e70 \ + --hash=sha256:0a7056921edbdd7560746f4221dca89bb7a3fe457d3d74267995253f46343f15 \ + --hash=sha256:0c120f43553ec759f8de1fee2f4794452b0946773299d44c36bfe18e83caf002 \ + --hash=sha256:0d7b36afa46c97875303a94e8f3ad932bf78bace9e18e603f2085b652422edcd \ + --hash=sha256:0fdf3a3059611f7585a78ee10399a15566356116a4288380921a4b598d807a22 \ + --hash=sha256:109fa6fede314cc50eed29e6e56c540075e63d922455346f11e4d7a036d2b8cf \ + --hash=sha256:146173654d79eb1fc97498b4280c1d3e1e5d58c398fa530905c9ea50ea849b22 \ + --hash=sha256:1473427aff3d66a3fa2199004c3e601e6c4500ab86696edffdbc84954c72d832 \ + --hash=sha256:1483fd3358963cc5c1c9b122c80606a3a79ee0875bcac0204149fa09d6ff2727 \ + --hash=sha256:168f2dfcfdedf611eb285efac1516c8454c8c99caf271dccda8943576b67552e \ + --hash=sha256:17e8d968d04a37c50ad9c456a286b525d78c4a1c15dd53aa46c1d8e06bf6fa30 \ + --hash=sha256:18feb4b93302091b1541221196a2155aa296c363fd233814fa11e181adebc52f \ + --hash=sha256:1afe0a8c353746e610bd9031a630a95bcfb1a720684c3f2b36c4710a0a96528f \ + --hash=sha256:1d04f064bebdfef9240478f7a779e8c5dc32b8b7b0b2fc6a62e39b928d428e51 \ + --hash=sha256:1fdc9fae8dd4c763e8a31e7630afef517eab9f5d5d31a278df087f307bf601f4 \ + --hash=sha256:1ffc23010330c2ab67fac02781df60998ca8fe759e8efde6f8b756a20599c5de \ + --hash=sha256:20094fc3f21ea0a8669dc4c61ed7fa8263bd37d97d93b90f28fc613371e7a875 \ + --hash=sha256:213261f168c5e1d9b7535a67e68b1f59f92398dd17a56d934550837143f79c42 \ + --hash=sha256:218c1b2e17a710e363855594230f44060e2025b05c80d1f0661258142b2add2e \ + --hash=sha256:23e0553b8055600b3bf4a00b255ec5c92e1e4aebf8c2c09334f8368e8bd174d6 \ + --hash=sha256:25f1b69d41656b05885aa185f5fdf822cb01a586d1b32739633679699f220391 \ + --hash=sha256:2b3778cb38212f52fac9fe913017deea2fdf4eb1a4f8e4cfc6b009a13a6d3fcc \ + --hash=sha256:2bc9fd5ca4729af796f9f59cd8ff160fe06a474da40aca03fcc79655ddee1a8b \ + --hash=sha256:2c226a06ecb8cdef28845ae976da407917542c5e6e75dcac7cc33eb04aaeb237 \ + --hash=sha256:2c3406b63232fc7e9b8783ab0b765d7c59e7c59ff96759d8ef9632fca27c7ee4 \ + --hash=sha256:2c86bf781b12ba417f64f3422cfc302523ac9cd1d8ae8c0f92a1c66e56ef2e86 \ + --hash=sha256:2d9b8d9177afaef80c53c0a9e30fa252ff3036fb1c6494d427c066a4ce6a282f \ + --hash=sha256:2dec2d1130a9cda5b904696cec33b2cfb451304ba9081eeda7f90f724097300a \ + --hash=sha256:2dfab5fa6a28a0b60a20638dc48e6343c02ea9933e3279ccb132f555a62323d8 \ + --hash=sha256:2ecdd78ab768f844c7a1d4a03595038c166b609f6395e25af9b0f3f26ae1230f \ + --hash=sha256:315f9542011b2c4e1d280e4a20ddcca1761993dda3afc7a73b01235f8641e903 \ + 
--hash=sha256:36aef61a1678cb778097b4a6eeae96a69875d51d1e8f4d4b491ab3cfb54b5a03 \ + --hash=sha256:384aacddf2e5813a36495233b64cb96b1949da72bef933918ba5c84e06af8f0e \ + --hash=sha256:3879cc6ce938ff4eb4900d901ed63555c778731a96365e53fadb36437a131a99 \ + --hash=sha256:3c174dc350d3ec52deb77f2faf05c439331d6ed5e702fc247ccb4e6b62d884b7 \ + --hash=sha256:3eb44520c4724c2e1a57c0af33a379eee41792595023f367ba3952a2d96c2aab \ + --hash=sha256:406246b96d552e0503e17a1006fd27edac678b3fcc9f1be71a2f94b4ff61528d \ + --hash=sha256:41ce1f1e2c7755abfc7e759dc34d7d05fd221723ff822947132dc934d122fe22 \ + --hash=sha256:423b121f7e6fa514ba0c7918e56955a1d4470ed35faa03e3d9f0e3baa4c7e492 \ + --hash=sha256:44264ecae91b30e5633013fb66f6ddd05c006d3e0e884f75ce0b4755b3e3847b \ + --hash=sha256:482c2f67761868f0108b1743098640fbb2a28a8e15bf3f47ada9fa59d9fe08c3 \ + --hash=sha256:4b0c7a688944891086ba192e21c5229dea54382f4836a209ff8d0a660fac06be \ + --hash=sha256:4c1fefd7e3d00921c44dc9ca80a775af49698bbfd92ea84498e56acffd4c5469 \ + --hash=sha256:4e109ca30d1edec1ac60cdbe341905dc3b8f55b16855e03a54aaf59e51ec8c6f \ + --hash=sha256:501d0d7e26b4d261fca8132854d845e4988097611ba2531408ec91cf3fd9d20a \ + --hash=sha256:516f491c834eb320d6c843156440fe7fc0d50b33e44387fcec5b02f0bc118a4c \ + --hash=sha256:51806cfe0279e06ed8500ce19479d757db42a30fd509940b1701be9c86a5ff9a \ + --hash=sha256:562e7494778a69086f0312ec9689f6b6ac1c6b65670ed7d0267e49f57ffa08c4 \ + --hash=sha256:56b9861a71575f5795bde89256e7467ece3d339c9b43141dbdd54544566b3b94 \ + --hash=sha256:5b8f5db71b28b8c404956ddf79575ea77aa8b1538e8b2ef9ec877945b3f46442 \ + --hash=sha256:5c2fb570d7823c2bbaf8b419ba6e5662137f8166e364a8b2b91051a1fb40ab8b \ + --hash=sha256:5c54afdcbb0182d06836cc3d1be921e540be3ebdf8b8a51ee3ef987537455f84 \ + --hash=sha256:5d6a6972b93c426ace71e0be9a6f4b2cfae9b1baed2eed2006076a746692288c \ + --hash=sha256:609251a0ca4770e5a8768ff902aa02bf636339c5a93f9349b48eb1f606f7f3e9 \ + --hash=sha256:62d172f358f33a26d6b41b28c170c63886742f5b6772a42b59b4f0fa10526cb1 \ + --hash=sha256:62f7fdb0d1ed2065451f086519865b4c90aa19aed51081979ecd05a21eb4d1be \ + --hash=sha256:658f2aa69d31e09699705949b5fc4719cbecbd4a97f9656a232e7d6c7be1a367 \ + --hash=sha256:65ab5685d56914b9a2a34d67dd5488b83213d680b0c5d10b47f81da5a16b0b0e \ + --hash=sha256:68934b242c51eb02907c5b81d138cb977b2129a0a75a8f8b60b01cb8586c7b21 \ + --hash=sha256:68b87753c784d6acb8a25b05cb526c3406913c9d988d51f80adecc2b0775d6aa \ + --hash=sha256:69959bd3167b993e6e710b99051265654133a98f20cec1d9b493b931942e9c16 \ + --hash=sha256:6a7095eeec6f89111d03dabfe5883a1fd54da319c94e0fb104ee8f23616b572d \ + --hash=sha256:6b038cc86b285e4f9fea2ba5ee76e89f21ed1ea898e287dc277a25884f3a7dfe \ + --hash=sha256:6ba0d3dcac281aad8a0e5b14c7ed6f9fa89c8612b47939fc94f80b16e2e9bc83 \ + --hash=sha256:6e91cf736959057f7aac7adfc83481e03615a8e8dd5758aa1d95ea69e8931dba \ + --hash=sha256:6ee8c39582d2652dcd516d1b879451500f8db3fe3607ce45d7c5957ab2596040 \ + --hash=sha256:6f651ebd0b21ec65dfca93aa629610a0dbc13dbc13554f19b0113da2e61a4763 \ + --hash=sha256:71a8dd38fbd2f2319136d4ae855a7078c69c9a38ae06e0c17c73fd70fc6caad8 \ + --hash=sha256:74068c601baff6ff021c70f0935b0c7bc528baa8ea210c202e03757c68c5a4ff \ + --hash=sha256:7437237c6a66b7ca341e868cda48be24b8701862757426852c9b3186de1da8a2 \ + --hash=sha256:747a3d3e98e24597981ca0be0fd922aebd471fa99d0043a3842d00cdcad7ad6a \ + --hash=sha256:74bcb423462233bc5d6066e4e98b0264e7c1bed7541fff2f4e34fe6b21563c8b \ + --hash=sha256:78d9b952e07aed35fe2e1a7ad26e929595412db48535921c5013edc8aa4a35ce \ + 
--hash=sha256:7b1cd427cb0d5f7393c31b7496419da594fe600e6fdc4b105a54f82405e6626c \ + --hash=sha256:7d3d1ca42870cdb6d0d29939630dbe48fa511c203724820fc0fd507b2fb46577 \ + --hash=sha256:7e2f58095acc211eb9d8b5771bf04df9ff37d6b87618d1cbf85f92399c98dae8 \ + --hash=sha256:7f41026c1d64043a36fda21d64c5026762d53a77043e73e94b71f0521939cc71 \ + --hash=sha256:81b4e48da4c69313192d8c8d4311e5d818b8be1afe68ee20f6385d0e96fc9512 \ + --hash=sha256:86a6b24b19eaebc448dc56b87c4865527855145d851f9fc3891673ff97950540 \ + --hash=sha256:874a216bf6afaf97c263b56371434e47e2c652d215788396f60477540298218f \ + --hash=sha256:89e043f1d9d341c52bf2af6d02e6adde62e0a46e6755d5eb60dc6e4f0b8aeca2 \ + --hash=sha256:8c72e9563347c7395910de6a3100a4840a75a6f60e05af5e58566868d5eb2d6a \ + --hash=sha256:8dc2c0395bea8254d8daebc76dcf8eb3a95ec2a46fa6fae5eaccee366bfe02ce \ + --hash=sha256:8f0de2d390af441fe8b2c12626d103540b5d850d585b18fcada58d972b74a74e \ + --hash=sha256:92e67a0be1639c251d21e35fe74df6bcc40cba445c2cda7c4a967656733249e2 \ + --hash=sha256:94d6c3782907b5e40e21cadf94b13b0842ac421192f26b84c45f13f3c9d5dc27 \ + --hash=sha256:97acf1e1fd66ab53dacd2c35b319d7e548380c2e9e8c54525c6e76d21b1ae3b1 \ + --hash=sha256:9ada35dd21dc6c039259596b358caab6b13f4db4d4a7f8665764d616daf9cc1d \ + --hash=sha256:9c52100e2c2dbb0649b90467935c4b0de5528833c76a35ea1a2691ec9f1ee7a1 \ + --hash=sha256:9e41506fec7a7f9405b14aa2d5c8abbb4dbbd09d88f9496958b6d00cb4d45330 \ + --hash=sha256:9e4b47ac0f5e749cfc618efdf4726269441014ae1d5583e047b452a32e221920 \ + --hash=sha256:9fb81d2824dff4f2e297a276297e9031f46d2682cafc484f49de182aa5e5df99 \ + --hash=sha256:a0eabd0a81625049c5df745209dc7fcef6e2aea7793e5f003ba363610aa0a3ff \ + --hash=sha256:a3d819eb6f9b8677f57f9664265d0a10dd6551d227afb4af2b9cd7bdc2ccbf18 \ + --hash=sha256:a87de7dd873bf9a792bf1e58b1c3887b9264036629a5bf2d2e6579fe8e73edff \ + --hash=sha256:aa617107a410245b8660028a7483b68e7914304a6d4882b5ff3d2d3eb5948d8c \ + --hash=sha256:aac0bbd3e8dd2d9c45ceb82249e8bdd3ac99131a32b4d35c8af3cc9db1657179 \ + --hash=sha256:ab6dd83b970dc97c2d10bc71aa925b84788c7c05de30241b9e96f9b6d9ea3080 \ + --hash=sha256:ace2c2326a319a0bb8a8b0e5b570c764962e95818de9f259ce814ee666603f19 \ + --hash=sha256:ae5fe5c4b525aa82b8076c1a59d642c17b6e8739ecf852522c6321852178119d \ + --hash=sha256:b11a5d918a6216e521c715b02749240fb07ae5a1fefd4b7bf12f833bc8b4fe70 \ + --hash=sha256:b1c8c20847b9f34e98080da785bb2336ea982e7f913eed5809e5a3c872900f32 \ + --hash=sha256:b369d3db3c22ed14c75ccd5af429086f166a19627e84a8fdade3f8f31426e52a \ + --hash=sha256:b710bc2b8292966b23a6a0121f7a6c51d45d2347edcc75f016ac123b8054d3f2 \ + --hash=sha256:bd96517ef76c8654446fc3db9242d019a1bb5fe8b751ba414765d59f99210b79 \ + --hash=sha256:c00f323cc00576df6165cc9d21a4c21285fa6b9989c5c39830c3903dc4303ef3 \ + --hash=sha256:c162b216070f280fa7da844531169be0baf9ccb17263cf5a8bf876fcd3117fa5 \ + --hash=sha256:c1a69e58a6bb2de65902051d57fde951febad631a20a64572677a1052690482f \ + --hash=sha256:c1f794c02903c2824fccce5b20c339a1a14b114e83b306ff11b597c5f71a1c8d \ + --hash=sha256:c24037349665434f375645fa9d1f5304800cec574d0310f618490c871fd902b3 \ + --hash=sha256:c300306673aa0f3ed5ed9372b21867690a17dba38c68c44b287437c362ce486b \ + --hash=sha256:c56a1d43b2f9ee4786e4658c7903f05da35b923fb53c11025712562d5cc02753 \ + --hash=sha256:c6379f35350b655fd817cd0d6cbeef7f265f3ae5fedb1caae2eb442bbeae9ab9 \ + --hash=sha256:c802e1c2ed9f0c06a65bc4ed0189d000ada8049312cfeab6ca635e39c9608957 \ + --hash=sha256:cb83f8a875b3d9b458cada4f880fa498646874ba4011dc974e071a0a84a1b033 \ + 
--hash=sha256:cf120cce539453ae086eacc0130a324e7026113510efa83ab42ef3fcfccac7fb \ + --hash=sha256:dd36439be765e2dde7660212b5275641edbc813e7b24668831a5c8ac91180656 \ + --hash=sha256:dd5350b55f9fecddc51385463a4f67a5da829bc741e38cf689f38ec9023f54ab \ + --hash=sha256:df5c7333167b9674aa8ae1d4008fa4bc17a313cc490b2cca27838bbdcc6bb15b \ + --hash=sha256:e63601ad5cd8f860aa99d109889b5ac34de571c7ee902d6812d5d9ddcc77fa7d \ + --hash=sha256:e92ce66cd919d18d14b3856906a61d3f6b6a8500e0794142338da644260595cd \ + --hash=sha256:e99f5507401436fdcc85036a2e7dc2e28d962550afe1cbfc07c40e454256a859 \ + --hash=sha256:ea2e2f6f801696ad7de8aec061044d6c8c0dd4037608c7cab38a9a4d316bfb11 \ + --hash=sha256:eafa2c8658f4e560b098fe9fc54539f86528651f61849b22111a9b107d18910c \ + --hash=sha256:ecd4ad8453ac17bc7ba3868371bffb46f628161ad0eefbd0a855d2c8c32dd81a \ + --hash=sha256:ee70d08fd60c9565ba8190f41a46a54096afa0eeb8f76bd66f2c25d3b1b83005 \ + --hash=sha256:eec1bb8cdbba2925bedc887bc0609a80e599c75b12d87ae42ac23fd199445654 \ + --hash=sha256:ef0c1fe22171dd7c7c27147f2e9c3e86f8bdf473fed75f16b0c2e84a5030ce80 \ + --hash=sha256:f2901429da1e645ce548bf9171784c0f74f0718c3f6150ce166be39e4dd66c3e \ + --hash=sha256:f422a209d2455c56849442ae42f25dbaaba1c6c3f501d58761c619c7836642ec \ + --hash=sha256:f65e5120863c2b266dbcc927b306c5b78e502c71edf3295dfcb9501ec96e5fc7 \ + --hash=sha256:f7d4a670107d75dfe5ad080bed6c341d18c4442f9378c9f58e5851e86eb79965 \ + --hash=sha256:f914c03e6a31deb632e2daa881fe198461f4d06e57ac3d0e05bbcab8eae01945 \ + --hash=sha256:fb66442c2546446944437df74379e9cf9e9db353e61301d1a0e26482f43f0dd8 +mako==1.3.8 ; python_version >= "3.10" and python_version < "4.0" \ + --hash=sha256:42f48953c7eb91332040ff567eb7eea69b22e7a4affbc5ba8e845e8f730f6627 \ + --hash=sha256:577b97e414580d3e088d47c2dbbe9594aa7a5146ed2875d4dfa9075af2dd3cc8 markdown-it-py==3.0.0 ; python_version >= "3.10" and python_version < "4.0" \ --hash=sha256:355216845c60bd96232cd8d8c40e8f9765cc86f46880e43a8fd22dc1a1a8cab1 \ --hash=sha256:e3f60a94fa066dc52ec76661e37c851cb232d92f9886b15cb560aaada2df8feb -markupsafe==2.1.5 ; python_version >= "3.10" and python_version < "4.0" \ - --hash=sha256:00e046b6dd71aa03a41079792f8473dc494d564611a8f89bbbd7cb93295ebdcf \ - --hash=sha256:075202fa5b72c86ad32dc7d0b56024ebdbcf2048c0ba09f1cde31bfdd57bcfff \ - --hash=sha256:0e397ac966fdf721b2c528cf028494e86172b4feba51d65f81ffd65c63798f3f \ - --hash=sha256:17b950fccb810b3293638215058e432159d2b71005c74371d784862b7e4683f3 \ - --hash=sha256:1f3fbcb7ef1f16e48246f704ab79d79da8a46891e2da03f8783a5b6fa41a9532 \ - --hash=sha256:2174c595a0d73a3080ca3257b40096db99799265e1c27cc5a610743acd86d62f \ - --hash=sha256:2b7c57a4dfc4f16f7142221afe5ba4e093e09e728ca65c51f5620c9aaeb9a617 \ - --hash=sha256:2d2d793e36e230fd32babe143b04cec8a8b3eb8a3122d2aceb4a371e6b09b8df \ - --hash=sha256:30b600cf0a7ac9234b2638fbc0fb6158ba5bdcdf46aeb631ead21248b9affbc4 \ - --hash=sha256:397081c1a0bfb5124355710fe79478cdbeb39626492b15d399526ae53422b906 \ - --hash=sha256:3a57fdd7ce31c7ff06cdfbf31dafa96cc533c21e443d57f5b1ecc6cdc668ec7f \ - --hash=sha256:3c6b973f22eb18a789b1460b4b91bf04ae3f0c4234a0a6aa6b0a92f6f7b951d4 \ - --hash=sha256:3e53af139f8579a6d5f7b76549125f0d94d7e630761a2111bc431fd820e163b8 \ - --hash=sha256:4096e9de5c6fdf43fb4f04c26fb114f61ef0bf2e5604b6ee3019d51b69e8c371 \ - --hash=sha256:4275d846e41ecefa46e2015117a9f491e57a71ddd59bbead77e904dc02b1bed2 \ - --hash=sha256:4c31f53cdae6ecfa91a77820e8b151dba54ab528ba65dfd235c80b086d68a465 \ - --hash=sha256:4f11aa001c540f62c6166c7726f71f7573b52c68c31f014c25cc7901deea0b52 \ - 
--hash=sha256:5049256f536511ee3f7e1b3f87d1d1209d327e818e6ae1365e8653d7e3abb6a6 \ - --hash=sha256:58c98fee265677f63a4385256a6d7683ab1832f3ddd1e66fe948d5880c21a169 \ - --hash=sha256:598e3276b64aff0e7b3451b72e94fa3c238d452e7ddcd893c3ab324717456bad \ - --hash=sha256:5b7b716f97b52c5a14bffdf688f971b2d5ef4029127f1ad7a513973cfd818df2 \ - --hash=sha256:5dedb4db619ba5a2787a94d877bc8ffc0566f92a01c0ef214865e54ecc9ee5e0 \ - --hash=sha256:619bc166c4f2de5caa5a633b8b7326fbe98e0ccbfacabd87268a2b15ff73a029 \ - --hash=sha256:629ddd2ca402ae6dbedfceeba9c46d5f7b2a61d9749597d4307f943ef198fc1f \ - --hash=sha256:656f7526c69fac7f600bd1f400991cc282b417d17539a1b228617081106feb4a \ - --hash=sha256:6ec585f69cec0aa07d945b20805be741395e28ac1627333b1c5b0105962ffced \ - --hash=sha256:72b6be590cc35924b02c78ef34b467da4ba07e4e0f0454a2c5907f473fc50ce5 \ - --hash=sha256:7502934a33b54030eaf1194c21c692a534196063db72176b0c4028e140f8f32c \ - --hash=sha256:7a68b554d356a91cce1236aa7682dc01df0edba8d043fd1ce607c49dd3c1edcf \ - --hash=sha256:7b2e5a267c855eea6b4283940daa6e88a285f5f2a67f2220203786dfa59b37e9 \ - --hash=sha256:823b65d8706e32ad2df51ed89496147a42a2a6e01c13cfb6ffb8b1e92bc910bb \ - --hash=sha256:8590b4ae07a35970728874632fed7bd57b26b0102df2d2b233b6d9d82f6c62ad \ - --hash=sha256:8dd717634f5a044f860435c1d8c16a270ddf0ef8588d4887037c5028b859b0c3 \ - --hash=sha256:8dec4936e9c3100156f8a2dc89c4b88d5c435175ff03413b443469c7c8c5f4d1 \ - --hash=sha256:97cafb1f3cbcd3fd2b6fbfb99ae11cdb14deea0736fc2b0952ee177f2b813a46 \ - --hash=sha256:a17a92de5231666cfbe003f0e4b9b3a7ae3afb1ec2845aadc2bacc93ff85febc \ - --hash=sha256:a549b9c31bec33820e885335b451286e2969a2d9e24879f83fe904a5ce59d70a \ - --hash=sha256:ac07bad82163452a6884fe8fa0963fb98c2346ba78d779ec06bd7a6262132aee \ - --hash=sha256:ae2ad8ae6ebee9d2d94b17fb62763125f3f374c25618198f40cbb8b525411900 \ - --hash=sha256:b91c037585eba9095565a3556f611e3cbfaa42ca1e865f7b8015fe5c7336d5a5 \ - --hash=sha256:bc1667f8b83f48511b94671e0e441401371dfd0f0a795c7daa4a3cd1dde55bea \ - --hash=sha256:bec0a414d016ac1a18862a519e54b2fd0fc8bbfd6890376898a6c0891dd82e9f \ - --hash=sha256:bf50cd79a75d181c9181df03572cdce0fbb75cc353bc350712073108cba98de5 \ - --hash=sha256:bff1b4290a66b490a2f4719358c0cdcd9bafb6b8f061e45c7a2460866bf50c2e \ - --hash=sha256:c061bb86a71b42465156a3ee7bd58c8c2ceacdbeb95d05a99893e08b8467359a \ - --hash=sha256:c8b29db45f8fe46ad280a7294f5c3ec36dbac9491f2d1c17345be8e69cc5928f \ - --hash=sha256:ce409136744f6521e39fd8e2a24c53fa18ad67aa5bc7c2cf83645cce5b5c4e50 \ - --hash=sha256:d050b3361367a06d752db6ead6e7edeb0009be66bc3bae0ee9d97fb326badc2a \ - --hash=sha256:d283d37a890ba4c1ae73ffadf8046435c76e7bc2247bbb63c00bd1a709c6544b \ - --hash=sha256:d9fad5155d72433c921b782e58892377c44bd6252b5af2f67f16b194987338a4 \ - --hash=sha256:daa4ee5a243f0f20d528d939d06670a298dd39b1ad5f8a72a4275124a7819eff \ - --hash=sha256:db0b55e0f3cc0be60c1f19efdde9a637c32740486004f20d1cff53c3c0ece4d2 \ - --hash=sha256:e61659ba32cf2cf1481e575d0462554625196a1f2fc06a1c777d3f48e8865d46 \ - --hash=sha256:ea3d8a3d18833cf4304cd2fc9cbb1efe188ca9b5efef2bdac7adc20594a0e46b \ - --hash=sha256:ec6a563cff360b50eed26f13adc43e61bc0c04d94b8be985e6fb24b81f6dcfdf \ - --hash=sha256:f5dfb42c4604dddc8e4305050aa6deb084540643ed5804d7455b5df8fe16f5e5 \ - --hash=sha256:fa173ec60341d6bb97a89f5ea19c85c5643c1e7dedebc22f5181eb73573142c5 \ - --hash=sha256:fa9db3f79de01457b03d4f01b34cf91bc0048eb2c3846ff26f66687c2f6d16ab \ - --hash=sha256:fce659a462a1be54d2ffcacea5e3ba2d74daa74f30f5f143fe0c58636e355fdd \ - 
--hash=sha256:ffee1f21e5ef0d712f9033568f8344d5da8cc2869dbd08d87c84656e6a2d2f68 -maxminddb==2.5.1 ; python_version >= "3.10" and python_version < "4.0" \ - --hash=sha256:0702da59b9670a72761b65cb1a52bc3032d8f6799bdab641cb8350ad5740580b \ - --hash=sha256:0a21abd85e10e5e0f60244b49c3db17e7e48befd4972e62a62833d91e2acbb49 \ - --hash=sha256:0cbd272db3202e948c9088e48dec62add071a47971d84ceb11d2cb2880f83e5a \ - --hash=sha256:17ea454f61631b9815d420d48d00663f8718fc7de30be53ffcec0f73989475eb \ - --hash=sha256:188173c07dce0692fd5660a6eb7ea8c126d7b3a4b61496c8a8ee9e8b10186ff5 \ - --hash=sha256:1e091c2b44673c218ee2df23adbc0b6d04fd5c646cfcb6c6fe26fb849434812a \ - --hash=sha256:2a7a73ab4bbc16b81983531c99fa102a0c7dae459db958c17fea48c981f5e764 \ - --hash=sha256:2c2901daebd7c8a702302315e7a58cdc38e626406ad4a05b4d48634897d5f5a3 \ - --hash=sha256:2e20a70c1545d6626dcd4ce2d7ecf3d566d978ea64cb37e7952f93baff66b812 \ - --hash=sha256:2ecb1be961f1969be047d07743093f0dcf2f6d4ec3a06a4555587f380a96f6e7 \ - --hash=sha256:3ce1f42bdfce7b86cb5a56cba730fed611fb879d867e6024f0d520257bef6891 \ - --hash=sha256:3d52c693baf07bba897d109b0ecb067f21fd0cc0fb266d67db456e85b80d699e \ - --hash=sha256:4807d374e645bd68334e4f487ba85a27189dbc1267a98e644aa686a7927e0559 \ - --hash=sha256:4c67621e842c415ce336ab019a9f087305dfcf24c095b68b8e9d27848f6f6d91 \ - --hash=sha256:500d321bdefe4dcd351e4390a79b7786aab49b0536bedfa0788e5ffb0e91e421 \ - --hash=sha256:526744b12075051fa20979090c111cc3a42a3b55e2714818270c7b84a41a8cfe \ - --hash=sha256:5a6751e2e89d62d53217870bcc2a8c887dc56ae370ba1b74e52e880761916e54 \ - --hash=sha256:5d772be68cce812f7c4b15ae8c68e624c8b88ff83071e3903ca5b5f55e343c25 \ - --hash=sha256:607344b1079ea647629bf962dcea7580ec864faaad3f5aae650e2e8652121d89 \ - --hash=sha256:62e93a8e99937bf4307eeece3ca37e1161325ebf9363c4ce195410fb5daf64a0 \ - --hash=sha256:639aee8abd63a95baa12b94b6f3a842d51877d631879c7d08c98c68dc44a84c3 \ - --hash=sha256:6667948e7501a513caef90edda2d367865097239d4c2381eb3998e9905af7209 \ - --hash=sha256:7805ae8c9de433c38939ada2e376706a9f6740239f61fd445927b88f5b42c267 \ - --hash=sha256:85a302d79577efe5bc308647394ffdc535dd5f062644c41103604ccf24931a05 \ - --hash=sha256:892c11a8694394e97d3ac0f8d5974ea588c732d14e721f22095c58b4f584c144 \ - --hash=sha256:8b98ed5c34955c48e72d35daed713ba4a6833a8a6d1204e79d2c85e644049792 \ - --hash=sha256:8cee4315da7cdd3f2a18f1ab1418953a7a9eda65e63095b01f03c7d3645d633e \ - --hash=sha256:910e7b3ad87d5352ed3f496bd42bffbf9f896245278b0d8e76afa1382e42a7ae \ - --hash=sha256:93f7055779caf7753810f1e2c6444af6d727393fd116ffa0767fbd54fb8c9bbf \ - --hash=sha256:969d0057ea5472e0b574c5293c4f3ecf49585362351c543e8ea55dc48b60f1eb \ - --hash=sha256:a01b0341bd6bee431bb8c07c7ac0ed221250c7390b125c025b7d57578e78e8a3 \ - --hash=sha256:a1e1a19f9740f586362f47862d0095b54d50b9d465babcaa8a563746132fe5be \ - --hash=sha256:aae262da1940a67c3ba765c49e2308947ce68ff647f87630002c306433a98ca1 \ - --hash=sha256:b0bbbd58b300aaddf985f763720bdebba9f7a73168ff9f57168117f630ad1c06 \ - --hash=sha256:b223c53077a736c304b63cf5afceb928975fbd12ddae5afd6b71370bab7b4700 \ - --hash=sha256:b477852cf1741d9187b021e23723e64b063794bbf946a9b5b84cc222f3caf58a \ - --hash=sha256:c4e5ca423b1e310f0327536f5ed1a2c6e08d83289a7f909e021590b0b477cae2 \ - --hash=sha256:c97eac5af102cede4b5f57cecb25e8f949fa4e4a8d812bed575539951c60ecaf \ - --hash=sha256:d4d36cf3d390f02d2bdf53d9efefb92be7bd70e07a5a86cdb79020c48c2d81b7 \ - --hash=sha256:d654895b546a47e85f2e071b98e377a60bb03cd643b9423017fa66fcd5adedce \ - --hash=sha256:dd28c434fb44f825dde6a75df2c338d44645791b03480af66a4d993f93801e10 
\ - --hash=sha256:e09b295c401c104ae0e30f66c1a3f3c2aa4ba2cbe12a787576499356a5a4d6c1 \ - --hash=sha256:ea2e27a507b53dfbf2ba2ba85c98682a1ad2dac3f9941a7bffa5cb86150d0c47 \ - --hash=sha256:ef4d508c899ce0f37de731340759c68bfd1102a39a873675c71fae2c8d71ad97 \ - --hash=sha256:f1e5bd58b71f322dc6c16a95a129433b1bc229d4b714f870a61c2367425396ee \ - --hash=sha256:fad45cd2f2e3c5fbebacb8d172a60fb22443222e549bf740a0bc7eeb849e5ce7 \ - --hash=sha256:fbd01fc7d7b5b2befe914e8cdb5ed3a1c5476e57b765197cceff8d897f33d012 \ - --hash=sha256:fe0af3ba9e1a78ed5f2ad32fc18d18b78ef233e7d0c627e1a77a525a7eb0c241 +markupsafe==3.0.2 ; python_version >= "3.10" and python_version < "4.0" \ + --hash=sha256:0bff5e0ae4ef2e1ae4fdf2dfd5b76c75e5c2fa4132d05fc1b0dabcd20c7e28c4 \ + --hash=sha256:0f4ca02bea9a23221c0182836703cbf8930c5e9454bacce27e767509fa286a30 \ + --hash=sha256:1225beacc926f536dc82e45f8a4d68502949dc67eea90eab715dea3a21c1b5f0 \ + --hash=sha256:131a3c7689c85f5ad20f9f6fb1b866f402c445b220c19fe4308c0b147ccd2ad9 \ + --hash=sha256:15ab75ef81add55874e7ab7055e9c397312385bd9ced94920f2802310c930396 \ + --hash=sha256:1a9d3f5f0901fdec14d8d2f66ef7d035f2157240a433441719ac9a3fba440b13 \ + --hash=sha256:1c99d261bd2d5f6b59325c92c73df481e05e57f19837bdca8413b9eac4bd8028 \ + --hash=sha256:1e084f686b92e5b83186b07e8a17fc09e38fff551f3602b249881fec658d3eca \ + --hash=sha256:2181e67807fc2fa785d0592dc2d6206c019b9502410671cc905d132a92866557 \ + --hash=sha256:2cb8438c3cbb25e220c2ab33bb226559e7afb3baec11c4f218ffa7308603c832 \ + --hash=sha256:3169b1eefae027567d1ce6ee7cae382c57fe26e82775f460f0b2778beaad66c0 \ + --hash=sha256:3809ede931876f5b2ec92eef964286840ed3540dadf803dd570c3b7e13141a3b \ + --hash=sha256:38a9ef736c01fccdd6600705b09dc574584b89bea478200c5fbf112a6b0d5579 \ + --hash=sha256:3d79d162e7be8f996986c064d1c7c817f6df3a77fe3d6859f6f9e7be4b8c213a \ + --hash=sha256:444dcda765c8a838eaae23112db52f1efaf750daddb2d9ca300bcae1039adc5c \ + --hash=sha256:48032821bbdf20f5799ff537c7ac3d1fba0ba032cfc06194faffa8cda8b560ff \ + --hash=sha256:4aa4e5faecf353ed117801a068ebab7b7e09ffb6e1d5e412dc852e0da018126c \ + --hash=sha256:52305740fe773d09cffb16f8ed0427942901f00adedac82ec8b67752f58a1b22 \ + --hash=sha256:569511d3b58c8791ab4c2e1285575265991e6d8f8700c7be0e88f86cb0672094 \ + --hash=sha256:57cb5a3cf367aeb1d316576250f65edec5bb3be939e9247ae594b4bcbc317dfb \ + --hash=sha256:5b02fb34468b6aaa40dfc198d813a641e3a63b98c2b05a16b9f80b7ec314185e \ + --hash=sha256:6381026f158fdb7c72a168278597a5e3a5222e83ea18f543112b2662a9b699c5 \ + --hash=sha256:6af100e168aa82a50e186c82875a5893c5597a0c1ccdb0d8b40240b1f28b969a \ + --hash=sha256:6c89876f41da747c8d3677a2b540fb32ef5715f97b66eeb0c6b66f5e3ef6f59d \ + --hash=sha256:6e296a513ca3d94054c2c881cc913116e90fd030ad1c656b3869762b754f5f8a \ + --hash=sha256:70a87b411535ccad5ef2f1df5136506a10775d267e197e4cf531ced10537bd6b \ + --hash=sha256:7e94c425039cde14257288fd61dcfb01963e658efbc0ff54f5306b06054700f8 \ + --hash=sha256:846ade7b71e3536c4e56b386c2a47adf5741d2d8b94ec9dc3e92e5e1ee1e2225 \ + --hash=sha256:88416bd1e65dcea10bc7569faacb2c20ce071dd1f87539ca2ab364bf6231393c \ + --hash=sha256:88b49a3b9ff31e19998750c38e030fc7bb937398b1f78cfa599aaef92d693144 \ + --hash=sha256:8c4e8c3ce11e1f92f6536ff07154f9d49677ebaaafc32db9db4620bc11ed480f \ + --hash=sha256:8e06879fc22a25ca47312fbe7c8264eb0b662f6db27cb2d3bbbc74b1df4b9b87 \ + --hash=sha256:9025b4018f3a1314059769c7bf15441064b2207cb3f065e6ea1e7359cb46db9d \ + --hash=sha256:93335ca3812df2f366e80509ae119189886b0f3c2b81325d39efdb84a1e2ae93 \ + 
--hash=sha256:9778bd8ab0a994ebf6f84c2b949e65736d5575320a17ae8984a77fab08db94cf \ + --hash=sha256:9e2d922824181480953426608b81967de705c3cef4d1af983af849d7bd619158 \ + --hash=sha256:a123e330ef0853c6e822384873bef7507557d8e4a082961e1defa947aa59ba84 \ + --hash=sha256:a904af0a6162c73e3edcb969eeeb53a63ceeb5d8cf642fade7d39e7963a22ddb \ + --hash=sha256:ad10d3ded218f1039f11a75f8091880239651b52e9bb592ca27de44eed242a48 \ + --hash=sha256:b424c77b206d63d500bcb69fa55ed8d0e6a3774056bdc4839fc9298a7edca171 \ + --hash=sha256:b5a6b3ada725cea8a5e634536b1b01c30bcdcd7f9c6fff4151548d5bf6b3a36c \ + --hash=sha256:ba8062ed2cf21c07a9e295d5b8a2a5ce678b913b45fdf68c32d95d6c1291e0b6 \ + --hash=sha256:ba9527cdd4c926ed0760bc301f6728ef34d841f405abf9d4f959c478421e4efd \ + --hash=sha256:bbcb445fa71794da8f178f0f6d66789a28d7319071af7a496d4d507ed566270d \ + --hash=sha256:bcf3e58998965654fdaff38e58584d8937aa3096ab5354d493c77d1fdd66d7a1 \ + --hash=sha256:c0ef13eaeee5b615fb07c9a7dadb38eac06a0608b41570d8ade51c56539e509d \ + --hash=sha256:cabc348d87e913db6ab4aa100f01b08f481097838bdddf7c7a84b7575b7309ca \ + --hash=sha256:cdb82a876c47801bb54a690c5ae105a46b392ac6099881cdfb9f6e95e4014c6a \ + --hash=sha256:cfad01eed2c2e0c01fd0ecd2ef42c492f7f93902e39a42fc9ee1692961443a29 \ + --hash=sha256:d16a81a06776313e817c951135cf7340a3e91e8c1ff2fac444cfd75fffa04afe \ + --hash=sha256:d8213e09c917a951de9d09ecee036d5c7d36cb6cb7dbaece4c71a60d79fb9798 \ + --hash=sha256:e07c3764494e3776c602c1e78e298937c3315ccc9043ead7e685b7f2b8d47b3c \ + --hash=sha256:e17c96c14e19278594aa4841ec148115f9c7615a47382ecb6b82bd8fea3ab0c8 \ + --hash=sha256:e444a31f8db13eb18ada366ab3cf45fd4b31e4db1236a4448f68778c1d1a5a2f \ + --hash=sha256:e6a2a455bd412959b57a172ce6328d2dd1f01cb2135efda2e4576e8a23fa3b0f \ + --hash=sha256:eaa0a10b7f72326f1372a713e73c3f739b524b3af41feb43e4921cb529f5929a \ + --hash=sha256:eb7972a85c54febfb25b5c4b4f3af4dcc731994c7da0d8a0b4a6eb0640e1d178 \ + --hash=sha256:ee55d3edf80167e48ea11a923c7386f4669df67d7994554387f84e7d8b0a2bf0 \ + --hash=sha256:f3818cb119498c0678015754eba762e0d61e5b52d34c8b13d770f0719f7b1d79 \ + --hash=sha256:f8b3d067f2e40fe93e1ccdd6b2e1d16c43140e76f02fb1319a05cf2b79d99430 \ + --hash=sha256:fcabf5ff6eea076f859677f5f0b6b5c1a51e70a376b0579e0eadef8db48c6b50 +maxminddb==2.6.3 ; python_version >= "3.10" and python_version < "4.0" \ + --hash=sha256:01773fee182cc36f6d38c277936accf7c85b8f4c20d13bb630666f6b3f087ad8 \ + --hash=sha256:01b143a38ae38c71ebc9028d67bbcb05c1b954e0f3a28c508eaee46833807903 \ + --hash=sha256:0348c8dadef9493dbcd45f032ae271c7fd2216ed4bb4bab0aff371ffc522f871 \ + --hash=sha256:0b140c1db0c218f485b033b51a086d98d57f55f4a4c2b1cb72fe6a5e1e57359a \ + --hash=sha256:0d82fbddf3a88e6aa6181bd16bc08a6939d6353f97f143eeddec16bc5394e361 \ + --hash=sha256:0dd55d2498d287b6cfd6b857deed9070e53c4b22a1acd69615e88dec92d95fb3 \ + --hash=sha256:1bc2edcef76ce54d4df04f58aec98f4df0377f37aae2217587bfecd663ed5c66 \ + --hash=sha256:1f0b78c40a12588e9e0ca0ffe5306b6dea028dcd21f2c120d1ceb328a3307a98 \ + --hash=sha256:27ba5e22bd09fe324f0a4c5ed97e73c1c7c3ab7e3bae4e1e6fcaa15f175b9f5a \ + --hash=sha256:2849357de35bfed0011ad1ff14a83c946273ae8c75a8867612d22f559df70e7d \ + --hash=sha256:2b0fef825b23df047876d2056cbb69fb8d8e4b965f744f674be75e16fb86a52e \ + --hash=sha256:2d25acb42ef8829e8e3491b6b3b4ced9dbb4eea6c4ec24afdc4028051e7b8803 \ + --hash=sha256:3015afb00e6168837938dbe5fda40ace37442c22b292ccee27c1690fbf6078ed \ + --hash=sha256:34943b4b724a35ef63ec40dcf894a100575d233b23b6cd4f8224017ea1195265 \ + --hash=sha256:39254e173af7b0018c1508c2dd68ecda0c043032176140cfe917587e2d082f42 
\ + --hash=sha256:415dd5de87adc7640d3da2a8e7cf19a313c1a715cb84a3433f0e3b2d27665319 \ + --hash=sha256:448d062e95242e3088df85fe7ed3f2890a9f4aea924bde336e9ff5d2337ca5fd \ + --hash=sha256:45da7549c952f88da39c9f440cb3fa2abbd7472571597699467641af88512730 \ + --hash=sha256:46096646c284835c8a580ec2ccbf0d6d5398191531fa543bb0437983c75cb7ba \ + --hash=sha256:46bdc8dc528a2f64ef34182bf40084e05410344d40097c1e93554d732dfb0e15 \ + --hash=sha256:47828bed767b82c219ba7aa65f0cb03d7f7443d7270259ce931e133a40691d34 \ + --hash=sha256:489c5ae835198a228380b83cc537a5ffb1911f1579d7545baf097e4a8eefcd9a \ + --hash=sha256:4b80275603bba6a95ed69d859d184dfa60bfd8e83cd4c8b722d7f7eaa9d95f8f \ + --hash=sha256:4d470fc4f9c5ed8854a945dc5ea56b2f0644a5c3e5872d0e579d66a5a9238d7f \ + --hash=sha256:4e0865069ef76b4f3eb862c042b107088171cbf43fea3dcaae0dd7253effe6e3 \ + --hash=sha256:4e7e6d5f3c1aa6350303edab8f0dd471e616d69b5d47ff5ecbf2c7c82998b9c6 \ + --hash=sha256:536a39fb917a44b1cd037da624e3d11d49898b5579dfc00c4d7103a057dc51ab \ + --hash=sha256:5a1586260eac831d61c2665b26ca1ae3ad00caca57c8031346767f4527025311 \ + --hash=sha256:6136dc8ad8c8f7e95a7d84174a990c1b47d5e641e3a3a8ae67d7bde625342dbb \ + --hash=sha256:6480ca47db4d8d09296c268e8ff4e6f4c1d455773a67233c9f899dfa6af3e6c6 \ + --hash=sha256:6887315de47f3a9840df19f498a4e68723c160c9448d276c3ef454531555778e \ + --hash=sha256:6c977da32cc72784980da1928a79d38b3e9fe83faa9a40ea9bae598a6bf2f7bb \ + --hash=sha256:6cc002099c9e1637309df772789a36db9a4601c4623dd1ace8145d057358c20b \ + --hash=sha256:6eb23f842a72ab3096f9f9b1c292f4feb55a8d758567cb6d77637c2257a3187c \ + --hash=sha256:77112cb1a2e381de42c443d1bf222c58b9da203183bb2008dd370c3d2a587a4e \ + --hash=sha256:7c3209d7a4b2f50d4b28a1d886d95b19094cdc840208e69dbbc40cae2c1cc65b \ + --hash=sha256:7c5d15a0546821a7e9104b71ca701c01462390d0a1bee5cad75f583cf26c400b \ + --hash=sha256:7d6024d1e40244b5549c5e6063af109399a2f89503a24916b5139c4d0657f1c8 \ + --hash=sha256:81340e52c743cdc3c0f4a9f45f9cf4e3c2ae87bf4bbb34613c5059a5b829eb65 \ + --hash=sha256:83d2324788a31a28bbb38b0dbdece5826f56db4df6e1538cf6f4b72f6a3da66c \ + --hash=sha256:85763c19246dce43044be58cb9119579c2efd0b85a7b79d865b741a698866488 \ + --hash=sha256:8868580f34b483d5b74edd4270db417e211906d57fb13bbeeb11ea8d5cd01829 \ + --hash=sha256:890dd845e371f67edef7b19a2866191d9fff85faf88f4b4c416a0aaa37204416 \ + --hash=sha256:89afed255ac3652db7f91d8f6b278a4c490c47283ddbff5589c22cfdef4b8453 \ + --hash=sha256:8ec674a2c2e4b47ab9f582460670a5c1d7725b1cbf16e6cbb94de1ae51ee9edf \ + --hash=sha256:9580b2cd017185db07baacd9d629ca01f3fe6f236528681c88a0209725376e9c \ + --hash=sha256:98258a295149aadf96ed8d667468722b248fe47bb991891ad01cfa8cb9e9684a \ + --hash=sha256:9d913971187326e59be8a63068128b6439f6717b13c7c451e6d9e1723286d9ff \ + --hash=sha256:a23c7c88f9df0727a3e56f2385ec19fb5f61bb46dcbebb6ddc5c948cf0b73b0a \ + --hash=sha256:a38faf03db15cc285009c0ddaacd04071b84ebd8ff7d773f700c7def695a291c \ + --hash=sha256:a59d72bf373c61da156fd43e2be6da802f68370a50a2205de84ee76916e05f9f \ + --hash=sha256:a6597599cde3916730d69b023045e6c22ff1c076d9cad7fb63641d36d01e3e93 \ + --hash=sha256:a6868438d1771c0bd0bbc95d84480c1ae04df72a85879e1ada42762250a00f59 \ + --hash=sha256:a70d46337c9497a5b3329d9c7fa7f45be33243ffad04924b8f06ffe41a136279 \ + --hash=sha256:a9ebd373a4ef69218bfbce93e9b97f583cfe681b28d4e32e0d64f76ded148fba \ + --hash=sha256:aadb9d12e887a1f52e8214e539e5d78338356fad4ef2a51931f6f7dbe56c2228 \ + --hash=sha256:acf46e20709a27d2b519669888e3f53a37bc4204b98a0c690664c48ff8cb1364 \ + 
--hash=sha256:b09bb7bb98418a620b1ec1881d1594c02e715a68cdc925781de1e79b39cefe77 \ + --hash=sha256:b29cea50b191784e2242227e0fac5bc985972b3849f97fe96c7f37fb7a7426d7 \ + --hash=sha256:b4729936fedb4793d9162b92d6de63e267e388c8938e19e700120b6df6a6ae6c \ + --hash=sha256:d2c3806baa7aa047aa1bac7419e7e353db435f88f09d51106a84dbacf645d254 \ + --hash=sha256:d4bac2b7b7609bed8dcf6beef1ef4a1e411e9e39c311070ffc2ace80d6de6444 \ + --hash=sha256:d69c5493c81f11bca90961b4dfa028c031aa8e7bb156653edf242a03dfc51561 \ + --hash=sha256:d78a02b70ededb3ba7317c24266217d7b68283e3be04cad0c34ee446a0217ee0 \ + --hash=sha256:da584edc3e4465f5417a48602ed7e2bee4f2a7a2b43fcf2c40728cfc9f9fd5aa \ + --hash=sha256:daa20961ad0fb550038c02dbf76a04e1c1958a3b899fa14a7c412aed67380812 \ + --hash=sha256:de8415538d778ae4f4bb40e2cee9581e2d5c860abdbdbba1458953f5b314a6b0 \ + --hash=sha256:deebf098c79ce031069fec1d7202cba0e766b3f12adbb631d16223174994724a \ + --hash=sha256:e28622fd7c4ccd298c3f630161d0801182eb38038ca01319693a70264de40b89 \ + --hash=sha256:e38a449890a976365da1f2c927ac076838aa2715b464593080075a18ae4e0dc8 \ + --hash=sha256:e441478922c2d311b8bc96f35d6e78306802774149fc20d07d96cc5c3b57dd02 \ + --hash=sha256:e5a8cfe71db548aa9a520a3f7e92430b6b7900affadef3b0c83c530c759dd12f \ + --hash=sha256:e867852037a8a26a24cfcf31b697dce63d488e1617af244c2895568d8f6c7a31 \ + --hash=sha256:edab18a50470031fc8447bcd9285c9f5f952abef2b6db5579fe50665bdcda941 \ + --hash=sha256:ef41bfe15692fe15e1799d600366a0faa3673a0d7d7dbe6a305ec3a5b6f07708 \ + --hash=sha256:efd875d43c4207fb90e10d582e4394d8a04f7b55c83c4d6bc0593a7be450e04f \ + --hash=sha256:f06e9c908a9270e882f0d23f041a9674680a7a110412b453f902d22323f86d38 \ + --hash=sha256:f49eefddad781e088969188c606b7988a7da27592590f6c4cc2b64fd2a85ff28 \ + --hash=sha256:fa36f1ca12fd3a37ad758afd0666457a749b2c4b16db0eb3f8c953f55ae6325d mdurl==0.1.2 ; python_version >= "3.10" and python_version < "4.0" \ --hash=sha256:84008a41e51615a49fc9966191ff91509e3c40b939176e643fd50a5c2196b8f8 \ --hash=sha256:bb413d29f5eea38f31dd4754dd7377d4465116fb207585f97bf925588687c1ba @@ -747,310 +949,328 @@ msgpack==1.0.8 ; python_version >= "3.10" and python_version < "4.0" \ --hash=sha256:f51bab98d52739c50c56658cc303f190785f9a2cd97b823357e7aeae54c8f68a \ --hash=sha256:f9904e24646570539a8950400602d66d2b2c492b9010ea7e965025cb71d0c86d \ --hash=sha256:f9af38a89b6a5c04b7d18c492c8ccf2aee7048aff1ce8437c4683bb5a1df893d -msgspec==0.18.6 ; python_version >= "3.10" and python_version < "4.0" \ - --hash=sha256:06acbd6edf175bee0e36295d6b0302c6de3aaf61246b46f9549ca0041a9d7177 \ - --hash=sha256:0e24539b25c85c8f0597274f11061c102ad6b0c56af053373ba4629772b407be \ - --hash=sha256:1003c20bfe9c6114cc16ea5db9c5466e49fae3d7f5e2e59cb70693190ad34da0 \ - --hash=sha256:1a76b60e501b3932782a9da039bd1cd552b7d8dec54ce38332b87136c64852dd \ - --hash=sha256:37f67c1d81272131895bb20d388dd8d341390acd0e192a55ab02d4d6468b434c \ - --hash=sha256:3ac4dd63fd5309dd42a8c8c36c1563531069152be7819518be0a9d03be9788e4 \ - --hash=sha256:40a4df891676d9c28a67c2cc39947c33de516335680d1316a89e8f7218660410 \ - --hash=sha256:41cf758d3f40428c235c0f27bc6f322d43063bc32da7b9643e3f805c21ed57b4 \ - --hash=sha256:46eb2f6b22b0e61c137e65795b97dc515860bf6ec761d8fb65fdb62aa094ba61 \ - --hash=sha256:6aa85198f8f154cf35d6f979998f6dadd3dc46a8a8c714632f53f5d65b315c07 \ - --hash=sha256:7481355a1adcf1f08dedd9311193c674ffb8bf7b79314b4314752b89a2cf7f1c \ - --hash=sha256:77f30b0234eceeff0f651119b9821ce80949b4d667ad38f3bfed0d0ebf9d6d8f \ - --hash=sha256:9080eb12b8f59e177bd1eb5c21e24dd2ba2fa88a1dbc9a98e05ad7779b54c681 \ - 
--hash=sha256:974d3520fcc6b824a6dedbdf2b411df31a73e6e7414301abac62e6b8d03791b4 \ - --hash=sha256:9da21f804c1a1471f26d32b5d9bc0480450ea77fbb8d9db431463ab64aaac2cf \ - --hash=sha256:a59fc3b4fcdb972d09138cb516dbde600c99d07c38fd9372a6ef500d2d031b4e \ - --hash=sha256:a6896f4cd5b4b7d688018805520769a8446df911eb93b421c6c68155cdf9dd5a \ - --hash=sha256:ad237100393f637b297926cae1868b0d500f764ccd2f0623a380e2bcfb2809ca \ - --hash=sha256:b5c390b0b0b7da879520d4ae26044d74aeee5144f83087eb7842ba59c02bc090 \ - --hash=sha256:c3232fabacef86fe8323cecbe99abbc5c02f7698e3f5f2e248e3480b66a3596b \ - --hash=sha256:c61ee4d3be03ea9cd089f7c8e36158786cd06e51fbb62529276452bbf2d52ece \ - --hash=sha256:c8355b55c80ac3e04885d72db515817d9fbb0def3bab936bba104e99ad22cf46 \ - --hash=sha256:cc001cf39becf8d2dcd3f413a4797c55009b3a3cdbf78a8bf5a7ca8fdb76032c \ - --hash=sha256:ce13981bfa06f5eb126a3a5a38b1976bddb49a36e4f46d8e6edecf33ccf11df1 \ - --hash=sha256:d0feb7a03d971c1c0353de1a8fe30bb6579c2dc5ccf29b5f7c7ab01172010492 \ - --hash=sha256:d5351afb216b743df4b6b147691523697ff3a2fc5f3d54f771e91219f5c23aaa \ - --hash=sha256:d70cb3d00d9f4de14d0b31d38dfe60c88ae16f3182988246a9861259c6722af6 \ - --hash=sha256:d86f5071fe33e19500920333c11e2267a31942d18fed4d9de5bc2fbab267d28c \ - --hash=sha256:db1d8626748fa5d29bbd15da58b2d73af25b10aa98abf85aab8028119188ed57 \ - --hash=sha256:e3b524df6ea9998bbc99ea6ee4d0276a101bcc1aa8d14887bb823914d9f60d07 \ - --hash=sha256:e77e56ffe2701e83a96e35770c6adb655ffc074d530018d1b584a8e635b4f36f \ - --hash=sha256:e97dec6932ad5e3ee1e3c14718638ba333befc45e0661caa57033cd4cc489466 \ - --hash=sha256:f7d9faed6dfff654a9ca7d9b0068456517f63dbc3aa704a527f493b9200b210a \ - --hash=sha256:fac5834e14ac4da1fca373753e0c4ec9c8069d1fe5f534fa5208453b6065d5be \ - --hash=sha256:fd62e5818731a66aaa8e9b0a1e5543dc979a46278da01e85c3c9a1a4f047ef7e \ - --hash=sha256:fda4c357145cf0b760000c4ad597e19b53adf01382b711f281720a10a0fe72b7 -msoffcrypto-tool==5.4.2 ; python_version >= "3.10" and python_version < "4.0" and platform_python_implementation != "PyPy" or python_version >= "3.10" and python_version < "4.0" and (platform_system != "Windows" and platform_system != "Darwin") \ +msgspec==0.19.0 ; python_version >= "3.10" and python_version < "4.0" \ + --hash=sha256:00e87ecfa9795ee5214861eab8326b0e75475c2e68a384002aa135ea2a27d909 \ + --hash=sha256:047cfa8675eb3bad68722cfe95c60e7afabf84d1bd8938979dd2b92e9e4a9551 \ + --hash=sha256:0553bbc77662e5708fe66aa75e7bd3e4b0f209709c48b299afd791d711a93c36 \ + --hash=sha256:067f0de1c33cfa0b6a8206562efdf6be5985b988b53dd244a8e06f993f27c8c0 \ + --hash=sha256:0684573a821be3c749912acf5848cce78af4298345cb2d7a8b8948a0a5a27cfe \ + --hash=sha256:0f5c043ace7962ef188746e83b99faaa9e3e699ab857ca3f367b309c8e2c6b12 \ + --hash=sha256:15c1e86fff77184c20a2932cd9742bf33fe23125fa3fcf332df9ad2f7d483044 \ + --hash=sha256:19746b50be214a54239aab822964f2ac81e38b0055cca94808359d779338c10e \ + --hash=sha256:2719647625320b60e2d8af06b35f5b12d4f4d281db30a15a1df22adb2295f633 \ + --hash=sha256:317050bc0f7739cb30d257ff09152ca309bf5a369854bbf1e57dffc310c1f20f \ + --hash=sha256:3b5541b2b3294e5ffabe31a09d604e23a88533ace36ac288fa32a420aa38d229 \ + --hash=sha256:3be5c02e1fee57b54130316a08fe40cca53af92999a302a6054cd451700ea7db \ + --hash=sha256:3c4ec642689da44618f68c90855a10edbc6ac3ff7c1d94395446c65a776e712a \ + --hash=sha256:43bbb237feab761b815ed9df43b266114203f53596f9b6e6f00ebd79d178cdf2 \ + --hash=sha256:45c8fb410670b3b7eb884d44a75589377c341ec1392b778311acdbfa55187716 \ + --hash=sha256:4cfc033c02c3e0aec52b71710d7f84cb3ca5eb407ab2ad23d75631153fdb1f12 \ 
+ --hash=sha256:5f0f65f29b45e2816d8bded36e6b837a4bf5fb60ec4bc3c625fa2c6da4124537 \ + --hash=sha256:604037e7cd475345848116e89c553aa9a233259733ab51986ac924ab1b976f8e \ + --hash=sha256:60ef4bdb0ec8e4ad62e5a1f95230c08efb1f64f32e6e8dd2ced685bcc73858b5 \ + --hash=sha256:695b832d0091edd86eeb535cd39e45f3919f48d997685f7ac31acb15e0a2ed90 \ + --hash=sha256:6c7adf191e4bd3be0e9231c3b6dc20cf1199ada2af523885efc2ed218eafd011 \ + --hash=sha256:70eaef4934b87193a27d802534dc466778ad8d536e296ae2f9334e182ac27b6c \ + --hash=sha256:757b501fa57e24896cf40a831442b19a864f56d253679f34f260dcb002524a6c \ + --hash=sha256:82b2c42c1b9ebc89e822e7e13bbe9d17ede0c23c187469fdd9505afd5a481314 \ + --hash=sha256:a5bc1472223a643f5ffb5bf46ccdede7f9795078194f14edd69e3aab7020d327 \ + --hash=sha256:aa77046904db764b0462036bc63ef71f02b75b8f72e9c9dd4c447d6da1ed8f8e \ + --hash=sha256:ac7f7c377c122b649f7545810c6cd1b47586e3aa3059126ce3516ac7ccc6a6a9 \ + --hash=sha256:ca06aa08e39bf57e39a258e1996474f84d0dd8130d486c00bec26d797b8c5446 \ + --hash=sha256:d8dd848ee7ca7c8153462557655570156c2be94e79acec3561cf379581343259 \ + --hash=sha256:d911c442571605e17658ca2b416fd8579c5050ac9adc5e00c2cb3126c97f73bc \ + --hash=sha256:e695dad6897896e9384cf5e2687d9ae9feaef50e802f93602d35458e20d1fb19 \ + --hash=sha256:e78f46ff39a427e10b4a61614a2777ad69559cc8d603a7c05681f5a595ea98f7 \ + --hash=sha256:f04cad4385e20be7c7176bb8ae3dca54a08e9756cfc97bcdb4f18560c3042063 \ + --hash=sha256:f12d30dd6266557aaaf0aa0f9580a9a8fbeadfa83699c487713e355ec5f0bd86 \ + --hash=sha256:f98bd8962ad549c27d63845b50af3f53ec468b6318400c9f1adfe8b092d7b62f \ + --hash=sha256:fe2c4bf29bf4e89790b3117470dea2c20b59932772483082c468b990d45fb947 +msoffcrypto-tool==5.4.2 ; python_version >= "3.10" and python_version < "4.0" and (platform_python_implementation != "PyPy" or platform_system != "Windows" and platform_system != "Darwin") \ --hash=sha256:274fe2181702d1e5a107ec1b68a4c9fea997a44972ae1cc9ae0cb4f6a50fef0e \ --hash=sha256:44b545adba0407564a0cc3d6dde6ca36b7c0fdf352b85bca51618fa1d4817370 -nanobind==2.1.0 ; python_version >= "3.10" and python_version < "4.0" \ - --hash=sha256:a613a2ce750fee63f03dc8a36593be2bdc2929cb4cea56b38fafeb74b85c3a5f netstruct==1.1.2 ; python_version >= "3.10" and python_version < "4.0" \ --hash=sha256:70b6a5c73f5bbc7ab57b019369642adfb34dd8af41b948c400ce95f952b7df9a -networkx==3.1 ; python_version >= "3.10" and python_version < "4.0" \ - --hash=sha256:4f33f68cb2afcf86f28a45f43efc27a9386b535d567d2127f8f61d51dec58d36 \ - --hash=sha256:de346335408f84de0eada6ff9fafafff9bcda11f0a0dfaa931133debb146ab61 -oauthlib==3.2.2 ; python_version >= "3.10" and python_version < "4.0" \ - --hash=sha256:8139f29aac13e25d502680e9e19963e83f16838d48a0d71c287fe40e7067fbca \ - --hash=sha256:9859c40929662bec5d64f34d01c99e093149682a3f38915dc0655d5a633dd918 -olefile==0.46 ; python_version >= "3.10" and python_version < "4.0" \ - --hash=sha256:133b031eaf8fd2c9399b78b8bc5b8fcbe4c31e85295749bb17a87cba8f3c3964 -oletools==0.60 ; python_version >= "3.10" and python_version < "4.0" \ - --hash=sha256:bad54d3ced34f3475a5bffc0122f8481c66c3f3e09ad946dbda6ec80b75f72cb \ - --hash=sha256:dfad0328ac83b4f8db9f47e706cbd64db739ae4ebf9d98b2dcc465728a35f4a6 -orjson==3.9.15 ; python_version >= "3.10" and python_version < "4.0" \ - --hash=sha256:001f4eb0ecd8e9ebd295722d0cbedf0748680fb9998d3993abaed2f40587257a \ - --hash=sha256:05a1f57fb601c426635fcae9ddbe90dfc1ed42245eb4c75e4960440cac667262 \ - --hash=sha256:10c57bc7b946cf2efa67ac55766e41764b66d40cbd9489041e637c1304400494 \ - 
--hash=sha256:12365576039b1a5a47df01aadb353b68223da413e2e7f98c02403061aad34bde \ - --hash=sha256:2973474811db7b35c30248d1129c64fd2bdf40d57d84beed2a9a379a6f57d0ab \ - --hash=sha256:2b5c0f532905e60cf22a511120e3719b85d9c25d0e1c2a8abb20c4dede3b05a5 \ - --hash=sha256:2c51378d4a8255b2e7c1e5cc430644f0939539deddfa77f6fac7b56a9784160a \ - --hash=sha256:2d99e3c4c13a7b0fb3792cc04c2829c9db07838fb6973e578b85c1745e7d0ce7 \ - --hash=sha256:2f256d03957075fcb5923410058982aea85455d035607486ccb847f095442bda \ - --hash=sha256:34cbcd216e7af5270f2ffa63a963346845eb71e174ea530867b7443892d77180 \ - --hash=sha256:4228aace81781cc9d05a3ec3a6d2673a1ad0d8725b4e915f1089803e9efd2b99 \ - --hash=sha256:4feeb41882e8aa17634b589533baafdceb387e01e117b1ec65534ec724023d04 \ - --hash=sha256:57d5d8cf9c27f7ef6bc56a5925c7fbc76b61288ab674eb352c26ac780caa5b10 \ - --hash=sha256:5bb399e1b49db120653a31463b4a7b27cf2fbfe60469546baf681d1b39f4edf2 \ - --hash=sha256:62482873e0289cf7313461009bf62ac8b2e54bc6f00c6fabcde785709231a5d7 \ - --hash=sha256:67384f588f7f8daf040114337d34a5188346e3fae6c38b6a19a2fe8c663a2f9b \ - --hash=sha256:6ae4e06be04dc00618247c4ae3f7c3e561d5bc19ab6941427f6d3722a0875ef7 \ - --hash=sha256:6f7b65bfaf69493c73423ce9db66cfe9138b2f9ef62897486417a8fcb0a92bfe \ - --hash=sha256:6fc2fe4647927070df3d93f561d7e588a38865ea0040027662e3e541d592811e \ - --hash=sha256:71c6b009d431b3839d7c14c3af86788b3cfac41e969e3e1c22f8a6ea13139404 \ - --hash=sha256:7413070a3e927e4207d00bd65f42d1b780fb0d32d7b1d951f6dc6ade318e1b5a \ - --hash=sha256:76bc6356d07c1d9f4b782813094d0caf1703b729d876ab6a676f3aaa9a47e37c \ - --hash=sha256:7f6cbd8e6e446fb7e4ed5bac4661a29e43f38aeecbf60c4b900b825a353276a1 \ - --hash=sha256:8055ec598605b0077e29652ccfe9372247474375e0e3f5775c91d9434e12d6b1 \ - --hash=sha256:809d653c155e2cc4fd39ad69c08fdff7f4016c355ae4b88905219d3579e31eb7 \ - --hash=sha256:82425dd5c7bd3adfe4e94c78e27e2fa02971750c2b7ffba648b0f5d5cc016a73 \ - --hash=sha256:87f1097acb569dde17f246faa268759a71a2cb8c96dd392cd25c668b104cad2f \ - --hash=sha256:920fa5a0c5175ab14b9c78f6f820b75804fb4984423ee4c4f1e6d748f8b22bc1 \ - --hash=sha256:92255879280ef9c3c0bcb327c5a1b8ed694c290d61a6a532458264f887f052cb \ - --hash=sha256:946c3a1ef25338e78107fba746f299f926db408d34553b4754e90a7de1d44068 \ - --hash=sha256:95cae920959d772f30ab36d3b25f83bb0f3be671e986c72ce22f8fa700dae061 \ - --hash=sha256:9cf1596680ac1f01839dba32d496136bdd5d8ffb858c280fa82bbfeb173bdd40 \ - --hash=sha256:9fe41b6f72f52d3da4db524c8653e46243c8c92df826ab5ffaece2dba9cccd58 \ - --hash=sha256:b17f0f14a9c0ba55ff6279a922d1932e24b13fc218a3e968ecdbf791b3682b25 \ - --hash=sha256:b3d336ed75d17c7b1af233a6561cf421dee41d9204aa3cfcc6c9c65cd5bb69a8 \ - --hash=sha256:b66bcc5670e8a6b78f0313bcb74774c8291f6f8aeef10fe70e910b8040f3ab75 \ - --hash=sha256:b725da33e6e58e4a5d27958568484aa766e825e93aa20c26c91168be58e08cbb \ - --hash=sha256:b72758f3ffc36ca566ba98a8e7f4f373b6c17c646ff8ad9b21ad10c29186f00d \ - --hash=sha256:bcef128f970bb63ecf9a65f7beafd9b55e3aaf0efc271a4154050fc15cdb386e \ - --hash=sha256:c8e8fe01e435005d4421f183038fc70ca85d2c1e490f51fb972db92af6e047c2 \ - --hash=sha256:d61f7ce4727a9fa7680cd6f3986b0e2c732639f46a5e0156e550e35258aa313a \ - --hash=sha256:d6768a327ea1ba44c9114dba5fdda4a214bdb70129065cd0807eb5f010bfcbb5 \ - --hash=sha256:e18668f1bd39e69b7fed19fa7cd1cd110a121ec25439328b5c89934e6d30d357 \ - --hash=sha256:e88b97ef13910e5f87bcbc4dd7979a7de9ba8702b54d3204ac587e83639c0c2b \ - --hash=sha256:ea0b183a5fe6b2b45f3b854b0d19c4e932d6f5934ae1f723b07cf9560edd4ec7 \ - 
--hash=sha256:ede0bde16cc6e9b96633df1631fbcd66491d1063667f260a4f2386a098393790 \ - --hash=sha256:f541587f5c558abd93cb0de491ce99a9ef8d1ae29dd6ab4dbb5a13281ae04cbd \ - --hash=sha256:fbbeb3c9b2edb5fd044b2a070f127a0ac456ffd079cb82746fc84af01ef021a4 \ - --hash=sha256:fdfa97090e2d6f73dced247a2f2d8004ac6449df6568f30e7fa1a045767c69a6 \ - --hash=sha256:ff0f9913d82e1d1fadbd976424c316fbc4d9c525c81d047bbdd16bd27dd98cfc -packaging==23.1 ; python_version >= "3.10" and python_version < "4.0" \ - --hash=sha256:994793af429502c4ea2ebf6bf664629d07c1a9fe974af92966e4b8d2df7edc61 \ - --hash=sha256:a392980d2b6cffa644431898be54b0045151319d1e7ec34f0cfed48767dd334f -paramiko==3.4.0 ; python_version >= "3.10" and python_version < "4.0" \ - --hash=sha256:43f0b51115a896f9c00f59618023484cb3a14b98bbceab43394a39c6739b7ee7 \ - --hash=sha256:aac08f26a31dc4dffd92821527d1682d99d52f9ef6851968114a8728f3c274d3 +networkx==3.4.2 ; python_version >= "3.10" and python_version < "4.0" \ + --hash=sha256:307c3669428c5362aab27c8a1260aa8f47c4e91d3891f48be0141738d8d053e1 \ + --hash=sha256:df5d4365b724cf81b8c6a7312509d0c22386097011ad1abe274afd5e9d3bbc5f +olefile==0.47 ; python_version >= "3.10" and python_version < "4.0" \ + --hash=sha256:543c7da2a7adadf21214938bb79c83ea12b473a4b6ee4ad4bf854e7715e13d1f \ + --hash=sha256:599383381a0bf3dfbd932ca0ca6515acd174ed48870cbf7fee123d698c192c1c +oletools==0.60.2 ; python_version >= "3.10" and python_version < "4.0" \ + --hash=sha256:72ad8bd748fd0c4e7b5b4733af770d11543ebb2bf2697455f99f975fcd50cc96 \ + --hash=sha256:ad452099f4695ffd8855113f453348200d195ee9fa341a09e197d66ee7e0b2c3 +orjson==3.10.15 ; python_version >= "3.10" and python_version < "4.0" \ + --hash=sha256:035fb83585e0f15e076759b6fedaf0abb460d1765b6a36f48018a52858443514 \ + --hash=sha256:05ca7fe452a2e9d8d9d706a2984c95b9c2ebc5db417ce0b7a49b91d50642a23e \ + --hash=sha256:0a4f27ea5617828e6b58922fdbec67b0aa4bb844e2d363b9244c47fa2180e665 \ + --hash=sha256:13242f12d295e83c2955756a574ddd6741c81e5b99f2bef8ed8d53e47a01e4b7 \ + --hash=sha256:17085a6aa91e1cd70ca8533989a18b5433e15d29c574582f76f821737c8d5806 \ + --hash=sha256:1e6d33efab6b71d67f22bf2962895d3dc6f82a6273a965fab762e64fa90dc399 \ + --hash=sha256:208beedfa807c922da4e81061dafa9c8489c6328934ca2a562efa707e049e561 \ + --hash=sha256:295c70f9dc154307777ba30fe29ff15c1bcc9dfc5c48632f37d20a607e9ba85a \ + --hash=sha256:305b38b2b8f8083cc3d618927d7f424349afce5975b316d33075ef0f73576b60 \ + --hash=sha256:33aedc3d903378e257047fee506f11e0833146ca3e57a1a1fb0ddb789876c1e1 \ + --hash=sha256:3614ea508d522a621384c1d6639016a5a2e4f027f3e4a1c93a51867615d28829 \ + --hash=sha256:3766ac4702f8f795ff3fa067968e806b4344af257011858cc3d6d8721588b53f \ + --hash=sha256:3a63bb41559b05360ded9132032239e47983a39b151af1201f07ec9370715c82 \ + --hash=sha256:43e17289ffdbbac8f39243916c893d2ae41a2ea1a9cbb060a56a4d75286351ae \ + --hash=sha256:552c883d03ad185f720d0c09583ebde257e41b9521b74ff40e08b7dec4559c04 \ + --hash=sha256:5dd9ef1639878cc3efffed349543cbf9372bdbd79f478615a1c633fe4e4180d1 \ + --hash=sha256:5e8afd6200e12771467a1a44e5ad780614b86abb4b11862ec54861a82d677746 \ + --hash=sha256:616e3e8d438d02e4854f70bfdc03a6bcdb697358dbaa6bcd19cbe24d24ece1f8 \ + --hash=sha256:63309e3ff924c62404923c80b9e2048c1f74ba4b615e7584584389ada50ed428 \ + --hash=sha256:6875210307d36c94873f553786a808af2788e362bd0cf4c8e66d976791e7b528 \ + --hash=sha256:6fd9bc64421e9fe9bd88039e7ce8e58d4fead67ca88e3a4014b143cec7684fd4 \ + --hash=sha256:7066b74f9f259849629e0d04db6609db4cf5b973248f455ba5d3bd58a4daaa5b \ + 
--hash=sha256:73cb85490aa6bf98abd20607ab5c8324c0acb48d6da7863a51be48505646c814 \ + --hash=sha256:763dadac05e4e9d2bc14938a45a2d0560549561287d41c465d3c58aec818b164 \ + --hash=sha256:7723ad949a0ea502df656948ddd8b392780a5beaa4c3b5f97e525191b102fff0 \ + --hash=sha256:781d54657063f361e89714293c095f506c533582ee40a426cb6489c48a637b81 \ + --hash=sha256:7946922ada8f3e0b7b958cc3eb22cfcf6c0df83d1fe5521b4a100103e3fa84c8 \ + --hash=sha256:7a1c73dcc8fadbd7c55802d9aa093b36878d34a3b3222c41052ce6b0fc65f8e8 \ + --hash=sha256:7c203f6f969210128af3acae0ef9ea6aab9782939f45f6fe02d05958fe761ef9 \ + --hash=sha256:7c2c79fa308e6edb0ffab0a31fd75a7841bf2a79a20ef08a3c6e3b26814c8ca8 \ + --hash=sha256:7c864a80a2d467d7786274fce0e4f93ef2a7ca4ff31f7fc5634225aaa4e9e98c \ + --hash=sha256:88dc3f65a026bd3175eb157fea994fca6ac7c4c8579fc5a86fc2114ad05705b7 \ + --hash=sha256:8918719572d662e18b8af66aef699d8c21072e54b6c82a3f8f6404c1f5ccd5e0 \ + --hash=sha256:9d11c0714fc85bfcf36ada1179400862da3288fc785c30e8297844c867d7505a \ + --hash=sha256:9e590a0477b23ecd5b0ac865b1b907b01b3c5535f5e8a8f6ab0e503efb896334 \ + --hash=sha256:9e992fd5cfb8b9f00bfad2fd7a05a4299db2bbe92e6440d9dd2fab27655b3182 \ + --hash=sha256:a2f708c62d026fb5340788ba94a55c23df4e1869fec74be455e0b2f5363b8507 \ + --hash=sha256:a330b9b4734f09a623f74a7490db713695e13b67c959713b78369f26b3dee6bf \ + --hash=sha256:a61a4622b7ff861f019974f73d8165be1bd9a0855e1cad18ee167acacabeb061 \ + --hash=sha256:a6be38bd103d2fd9bdfa31c2720b23b5d47c6796bcb1d1b598e3924441b4298d \ + --hash=sha256:abc7abecdbf67a173ef1316036ebbf54ce400ef2300b4e26a7b843bd446c2480 \ + --hash=sha256:acd271247691574416b3228db667b84775c497b245fa275c6ab90dc1ffbbd2b3 \ + --hash=sha256:b0482b21d0462eddd67e7fce10b89e0b6ac56570424662b685a0d6fccf581e13 \ + --hash=sha256:b299383825eafe642cbab34be762ccff9fd3408d72726a6b2a4506d410a71ab3 \ + --hash=sha256:b342567e5465bd99faa559507fe45e33fc76b9fb868a63f1642c6bc0735ad02a \ + --hash=sha256:b48f59114fe318f33bbaee8ebeda696d8ccc94c9e90bc27dbe72153094e26f41 \ + --hash=sha256:b7155eb1623347f0f22c38c9abdd738b287e39b9982e1da227503387b81b34ca \ + --hash=sha256:bae0e6ec2b7ba6895198cd981b7cca95d1487d0147c8ed751e5632ad16f031a6 \ + --hash=sha256:bb00b7bfbdf5d34a13180e4805d76b4567025da19a197645ca746fc2fb536586 \ + --hash=sha256:bb5cc3527036ae3d98b65e37b7986a918955f85332c1ee07f9d3f82f3a6899b5 \ + --hash=sha256:c03cd6eea1bd3b949d0d007c8d57049aa2b39bd49f58b4b2af571a5d3833d890 \ + --hash=sha256:c25774c9e88a3e0013d7d1a6c8056926b607a61edd423b50eb5c88fd7f2823ae \ + --hash=sha256:c33be3795e299f565681d69852ac8c1bc5c84863c0b0030b2b3468843be90388 \ + --hash=sha256:c4cc83960ab79a4031f3119cc4b1a1c627a3dc09df125b27c4201dff2af7eaa6 \ + --hash=sha256:cf45e0214c593660339ef63e875f32ddd5aa3b4adc15e662cdb80dc49e194f8e \ + --hash=sha256:d13b7fe322d75bf84464b075eafd8e7dd9eae05649aa2a5354cfa32f43c59f17 \ + --hash=sha256:d433bf32a363823863a96561a555227c18a522a8217a6f9400f00ddc70139ae2 \ + --hash=sha256:d569c1c462912acdd119ccbf719cf7102ea2c67dd03b99edcb1a3048651ac96b \ + --hash=sha256:d5ac11b659fd798228a7adba3e37c010e0152b78b1982897020a8e019a94882e \ + --hash=sha256:da03392674f59a95d03fa5fb9fe3a160b0511ad84b7a3914699ea5a1b3a38da2 \ + --hash=sha256:da9a18c500f19273e9e104cca8c1f0b40a6470bcccfc33afcc088045d0bf5ea6 \ + --hash=sha256:dadba0e7b6594216c214ef7894c4bd5f08d7c0135f4dd0145600be4fbcc16767 \ + --hash=sha256:dba5a1e85d554e3897fa9fe6fbcff2ed32d55008973ec9a2b992bd9a65d2352d \ + --hash=sha256:dd0099ae6aed5eb1fc84c9eb72b95505a3df4267e6962eb93cdd5af03be71c98 \ + 
--hash=sha256:ddbeef2481d895ab8be5185f2432c334d6dec1f5d1933a9c83014d188e102cef \ + --hash=sha256:e117eb299a35f2634e25ed120c37c641398826c2f5a3d3cc39f5993b96171b9e \ + --hash=sha256:e4759b109c37f635aa5c5cc93a1b26927bfde24b254bcc0e1149a9fada253d2d \ + --hash=sha256:e78c211d0074e783d824ce7bb85bf459f93a233eb67a5b5003498232ddfb0e8a \ + --hash=sha256:eca81f83b1b8c07449e1d6ff7074e82e3fd6777e588f1a6632127f286a968825 \ + --hash=sha256:eea80037b9fae5339b214f59308ef0589fc06dc870578b7cce6d71eb2096764c \ + --hash=sha256:ef5b87e7aa9545ddadd2309efe6824bd3dd64ac101c15dae0f2f597911d46eaa \ + --hash=sha256:efcf6c735c3d22ef60c4aa27a5238f1a477df85e9b15f2142f9d669beb2d13fd \ + --hash=sha256:f71eae9651465dff70aa80db92586ad5b92df46a9373ee55252109bb6b703307 \ + --hash=sha256:f93ce145b2db1252dd86af37d4165b6faa83072b46e3995ecc95d4b2301b725a \ + --hash=sha256:f95fb363d79366af56c3f26b71df40b9a583b07bbaaf5b317407c4d58497852e \ + --hash=sha256:f9875f5fea7492da8ec2444839dcc439b0ef298978f311103d0b7dfd775898ab \ + --hash=sha256:fd56a26a04f6ba5fb2045b0acc487a63162a958ed837648c5781e1fe3316cfbf \ + --hash=sha256:ff4f6edb1578960ed628a3b998fa54d78d9bb3e2eb2cfc5c2a09732431c678d0 \ + --hash=sha256:ffe19f3e8d68111e8644d4f4e267a069ca427926855582ff01fc012496d19969 +packaging==24.2 ; python_version >= "3.10" and python_version < "4.0" \ + --hash=sha256:09abb1bccd265c01f4a3aa3f7a7db064b36514d2cba19a2f694fe6150451a759 \ + --hash=sha256:c228a6dc5e932d346bc5739379109d49e8853dd8223571c7c5b55260edc0b97f +paramiko==3.5.0 ; python_version >= "3.10" and python_version < "4.0" \ + --hash=sha256:1fedf06b085359051cd7d0d270cebe19e755a8a921cc2ddbfa647fb0cd7d68f9 \ + --hash=sha256:ad11e540da4f55cedda52931f1a3f812a8238a7af7f62a60de538cd80bb28124 pcodedmp==1.2.6 ; python_version >= "3.10" and python_version < "4.0" \ --hash=sha256:025f8c809a126f45a082ffa820893e6a8d990d9d7ddb68694b5a9f0a6dbcd955 \ --hash=sha256:4441f7c0ab4cbda27bd4668db3b14f36261d86e5059ce06c0828602cbe1c4278 -pebble==4.6.3 ; python_version >= "3.10" and python_version < "4.0" \ - --hash=sha256:46e02767b239a29b8150466514fabb5c6632bea8c9b7456dfdb715f4636fc8a3 \ - --hash=sha256:694e1105db888f3576b8f00662f90b057cf3780e6f8b7f57955a568008d0f497 +pebble==5.1.0 ; python_version >= "3.10" and python_version < "4.0" \ + --hash=sha256:530a398299ecd3a4ed1baf2e4b8045d8280b1e665560b0b409f8d8e58db60111 \ + --hash=sha256:5c30376f1827b21ecec4126ff90e7f22ad5501cac1ff2b32c86ff2601681f932 +peepdf-3==5.0.0 ; python_version >= "3.10" and python_version < "4.0" \ + --hash=sha256:1c6dbb873be22633535bb7a0edfcc005781837869bb5d5b99c9a36591d1e9e95 \ + --hash=sha256:2c9220c39c1ffc7d1db4268874d5c68329e9f9f331643b0bb1e6e84798439bd8 pefile==2024.8.26 ; python_version >= "3.10" and python_version < "4.0" \ --hash=sha256:3ff6c5d8b43e8c37bb6e6dd5085658d658a7a0bdcd20b6a07b1fcfc1c4e9d632 \ --hash=sha256:76f8b485dcd3b1bb8166f1128d395fa3d87af26360c2358fb75b80019b957c6f -pillow==10.4.0 ; python_version >= "3.10" and python_version < "4.0" \ - --hash=sha256:02a2be69f9c9b8c1e97cf2713e789d4e398c751ecfd9967c18d0ce304efbf885 \ - --hash=sha256:030abdbe43ee02e0de642aee345efa443740aa4d828bfe8e2eb11922ea6a21ea \ - --hash=sha256:06b2f7898047ae93fad74467ec3d28fe84f7831370e3c258afa533f81ef7f3df \ - --hash=sha256:0755ffd4a0c6f267cccbae2e9903d95477ca2f77c4fcf3a3a09570001856c8a5 \ - --hash=sha256:0a9ec697746f268507404647e531e92889890a087e03681a3606d9b920fbee3c \ - --hash=sha256:0ae24a547e8b711ccaaf99c9ae3cd975470e1a30caa80a6aaee9a2f19c05701d \ - --hash=sha256:134ace6dc392116566980ee7436477d844520a26a4b1bd4053f6f47d096997fd \ - 
--hash=sha256:166c1cd4d24309b30d61f79f4a9114b7b2313d7450912277855ff5dfd7cd4a06 \ - --hash=sha256:1b5dea9831a90e9d0721ec417a80d4cbd7022093ac38a568db2dd78363b00908 \ - --hash=sha256:1d846aea995ad352d4bdcc847535bd56e0fd88d36829d2c90be880ef1ee4668a \ - --hash=sha256:1ef61f5dd14c300786318482456481463b9d6b91ebe5ef12f405afbba77ed0be \ - --hash=sha256:297e388da6e248c98bc4a02e018966af0c5f92dfacf5a5ca22fa01cb3179bca0 \ - --hash=sha256:298478fe4f77a4408895605f3482b6cc6222c018b2ce565c2b6b9c354ac3229b \ - --hash=sha256:29dbdc4207642ea6aad70fbde1a9338753d33fb23ed6956e706936706f52dd80 \ - --hash=sha256:2db98790afc70118bd0255c2eeb465e9767ecf1f3c25f9a1abb8ffc8cfd1fe0a \ - --hash=sha256:32cda9e3d601a52baccb2856b8ea1fc213c90b340c542dcef77140dfa3278a9e \ - --hash=sha256:37fb69d905be665f68f28a8bba3c6d3223c8efe1edf14cc4cfa06c241f8c81d9 \ - --hash=sha256:416d3a5d0e8cfe4f27f574362435bc9bae57f679a7158e0096ad2beb427b8696 \ - --hash=sha256:43efea75eb06b95d1631cb784aa40156177bf9dd5b4b03ff38979e048258bc6b \ - --hash=sha256:4b35b21b819ac1dbd1233317adeecd63495f6babf21b7b2512d244ff6c6ce309 \ - --hash=sha256:4d9667937cfa347525b319ae34375c37b9ee6b525440f3ef48542fcf66f2731e \ - --hash=sha256:5161eef006d335e46895297f642341111945e2c1c899eb406882a6c61a4357ab \ - --hash=sha256:543f3dc61c18dafb755773efc89aae60d06b6596a63914107f75459cf984164d \ - --hash=sha256:551d3fd6e9dc15e4c1eb6fc4ba2b39c0c7933fa113b220057a34f4bb3268a060 \ - --hash=sha256:59291fb29317122398786c2d44427bbd1a6d7ff54017075b22be9d21aa59bd8d \ - --hash=sha256:5b001114dd152cfd6b23befeb28d7aee43553e2402c9f159807bf55f33af8a8d \ - --hash=sha256:5b4815f2e65b30f5fbae9dfffa8636d992d49705723fe86a3661806e069352d4 \ - --hash=sha256:5dc6761a6efc781e6a1544206f22c80c3af4c8cf461206d46a1e6006e4429ff3 \ - --hash=sha256:5e84b6cc6a4a3d76c153a6b19270b3526a5a8ed6b09501d3af891daa2a9de7d6 \ - --hash=sha256:6209bb41dc692ddfee4942517c19ee81b86c864b626dbfca272ec0f7cff5d9fb \ - --hash=sha256:673655af3eadf4df6b5457033f086e90299fdd7a47983a13827acf7459c15d94 \ - --hash=sha256:6c762a5b0997f5659a5ef2266abc1d8851ad7749ad9a6a5506eb23d314e4f46b \ - --hash=sha256:7086cc1d5eebb91ad24ded9f58bec6c688e9f0ed7eb3dbbf1e4800280a896496 \ - --hash=sha256:73664fe514b34c8f02452ffb73b7a92c6774e39a647087f83d67f010eb9a0cf0 \ - --hash=sha256:76a911dfe51a36041f2e756b00f96ed84677cdeb75d25c767f296c1c1eda1319 \ - --hash=sha256:780c072c2e11c9b2c7ca37f9a2ee8ba66f44367ac3e5c7832afcfe5104fd6d1b \ - --hash=sha256:7928ecbf1ece13956b95d9cbcfc77137652b02763ba384d9ab508099a2eca856 \ - --hash=sha256:7970285ab628a3779aecc35823296a7869f889b8329c16ad5a71e4901a3dc4ef \ - --hash=sha256:7a8d4bade9952ea9a77d0c3e49cbd8b2890a399422258a77f357b9cc9be8d680 \ - --hash=sha256:7c1ee6f42250df403c5f103cbd2768a28fe1a0ea1f0f03fe151c8741e1469c8b \ - --hash=sha256:7dfecdbad5c301d7b5bde160150b4db4c659cee2b69589705b6f8a0c509d9f42 \ - --hash=sha256:812f7342b0eee081eaec84d91423d1b4650bb9828eb53d8511bcef8ce5aecf1e \ - --hash=sha256:866b6942a92f56300012f5fbac71f2d610312ee65e22f1aa2609e491284e5597 \ - --hash=sha256:86dcb5a1eb778d8b25659d5e4341269e8590ad6b4e8b44d9f4b07f8d136c414a \ - --hash=sha256:87dd88ded2e6d74d31e1e0a99a726a6765cda32d00ba72dc37f0651f306daaa8 \ - --hash=sha256:8bc1a764ed8c957a2e9cacf97c8b2b053b70307cf2996aafd70e91a082e70df3 \ - --hash=sha256:8d4d5063501b6dd4024b8ac2f04962d661222d120381272deea52e3fc52d3736 \ - --hash=sha256:8f0aef4ef59694b12cadee839e2ba6afeab89c0f39a3adc02ed51d109117b8da \ - --hash=sha256:930044bb7679ab003b14023138b50181899da3f25de50e9dbee23b61b4de2126 \ - 
--hash=sha256:950be4d8ba92aca4b2bb0741285a46bfae3ca699ef913ec8416c1b78eadd64cd \ - --hash=sha256:961a7293b2457b405967af9c77dcaa43cc1a8cd50d23c532e62d48ab6cdd56f5 \ - --hash=sha256:9b885f89040bb8c4a1573566bbb2f44f5c505ef6e74cec7ab9068c900047f04b \ - --hash=sha256:9f4727572e2918acaa9077c919cbbeb73bd2b3ebcfe033b72f858fc9fbef0026 \ - --hash=sha256:a02364621fe369e06200d4a16558e056fe2805d3468350df3aef21e00d26214b \ - --hash=sha256:a985e028fc183bf12a77a8bbf36318db4238a3ded7fa9df1b9a133f1cb79f8fc \ - --hash=sha256:ac1452d2fbe4978c2eec89fb5a23b8387aba707ac72810d9490118817d9c0b46 \ - --hash=sha256:b15e02e9bb4c21e39876698abf233c8c579127986f8207200bc8a8f6bb27acf2 \ - --hash=sha256:b2724fdb354a868ddf9a880cb84d102da914e99119211ef7ecbdc613b8c96b3c \ - --hash=sha256:bbc527b519bd3aa9d7f429d152fea69f9ad37c95f0b02aebddff592688998abe \ - --hash=sha256:bcd5e41a859bf2e84fdc42f4edb7d9aba0a13d29a2abadccafad99de3feff984 \ - --hash=sha256:bd2880a07482090a3bcb01f4265f1936a903d70bc740bfcb1fd4e8a2ffe5cf5a \ - --hash=sha256:bee197b30783295d2eb680b311af15a20a8b24024a19c3a26431ff83eb8d1f70 \ - --hash=sha256:bf2342ac639c4cf38799a44950bbc2dfcb685f052b9e262f446482afaf4bffca \ - --hash=sha256:c76e5786951e72ed3686e122d14c5d7012f16c8303a674d18cdcd6d89557fc5b \ - --hash=sha256:cbed61494057c0f83b83eb3a310f0bf774b09513307c434d4366ed64f4128a91 \ - --hash=sha256:cfdd747216947628af7b259d274771d84db2268ca062dd5faf373639d00113a3 \ - --hash=sha256:d7480af14364494365e89d6fddc510a13e5a2c3584cb19ef65415ca57252fb84 \ - --hash=sha256:dbc6ae66518ab3c5847659e9988c3b60dc94ffb48ef9168656e0019a93dbf8a1 \ - --hash=sha256:dc3e2db6ba09ffd7d02ae9141cfa0ae23393ee7687248d46a7507b75d610f4f5 \ - --hash=sha256:dfe91cb65544a1321e631e696759491ae04a2ea11d36715eca01ce07284738be \ - --hash=sha256:e4d49b85c4348ea0b31ea63bc75a9f3857869174e2bf17e7aba02945cd218e6f \ - --hash=sha256:e4db64794ccdf6cb83a59d73405f63adbe2a1887012e308828596100a0b2f6cc \ - --hash=sha256:e553cad5179a66ba15bb18b353a19020e73a7921296a7979c4a2b7f6a5cd57f9 \ - --hash=sha256:e88d5e6ad0d026fba7bdab8c3f225a69f063f116462c49892b0149e21b6c0a0e \ - --hash=sha256:ecd85a8d3e79cd7158dec1c9e5808e821feea088e2f69a974db5edf84dc53141 \ - --hash=sha256:f5b92f4d70791b4a67157321c4e8225d60b119c5cc9aee8ecf153aace4aad4ef \ - --hash=sha256:f5f0c3e969c8f12dd2bb7e0b15d5c468b51e5017e01e2e867335c81903046a22 \ - --hash=sha256:f7baece4ce06bade126fb84b8af1c33439a76d8a6fd818970215e0560ca28c27 \ - --hash=sha256:ff25afb18123cea58a591ea0244b92eb1e61a1fd497bf6d6384f09bc3262ec3e \ - --hash=sha256:ff337c552345e95702c5fde3158acb0625111017d0e5f24bf3acdb9cc16b90d1 -protobuf==5.28.2 ; python_version >= "3.10" and python_version < "4.0" \ - --hash=sha256:2c69461a7fcc8e24be697624c09a839976d82ae75062b11a0972e41fd2cd9132 \ - --hash=sha256:35cfcb15f213449af7ff6198d6eb5f739c37d7e4f1c09b5d0641babf2cc0c68f \ - --hash=sha256:52235802093bd8a2811abbe8bf0ab9c5f54cca0a751fdd3f6ac2a21438bffece \ - --hash=sha256:59379674ff119717404f7454647913787034f03fe7049cbef1d74a97bb4593f0 \ - --hash=sha256:5e8a95246d581eef20471b5d5ba010d55f66740942b95ba9b872d918c459452f \ - --hash=sha256:87317e9bcda04a32f2ee82089a204d3a2f0d3c8aeed16568c7daf4756e4f1fe0 \ - --hash=sha256:8ddc60bf374785fb7cb12510b267f59067fa10087325b8e1855b898a0d81d276 \ - --hash=sha256:a8b9403fc70764b08d2f593ce44f1d2920c5077bf7d311fefec999f8c40f78b7 \ - --hash=sha256:c0ea0123dac3399a2eeb1a1443d82b7afc9ff40241433296769f7da42d142ec3 \ - --hash=sha256:ca53faf29896c526863366a52a8f4d88e69cd04ec9571ed6082fa117fac3ab36 \ - --hash=sha256:eeea10f3dc0ac7e6b4933d32db20662902b4ab81bf28df12218aa389e9c2102d 
-psutil==5.9.8 ; python_version >= "3.10" and python_version < "4.0" \ - --hash=sha256:02615ed8c5ea222323408ceba16c60e99c3f91639b07da6373fb7e6539abc56d \ - --hash=sha256:05806de88103b25903dff19bb6692bd2e714ccf9e668d050d144012055cbca73 \ - --hash=sha256:26bd09967ae00920df88e0352a91cff1a78f8d69b3ecabbfe733610c0af486c8 \ - --hash=sha256:27cc40c3493bb10de1be4b3f07cae4c010ce715290a5be22b98493509c6299e2 \ - --hash=sha256:36f435891adb138ed3c9e58c6af3e2e6ca9ac2f365efe1f9cfef2794e6c93b4e \ - --hash=sha256:50187900d73c1381ba1454cf40308c2bf6f34268518b3f36a9b663ca87e65e36 \ - --hash=sha256:611052c4bc70432ec770d5d54f64206aa7203a101ec273a0cd82418c86503bb7 \ - --hash=sha256:6be126e3225486dff286a8fb9a06246a5253f4c7c53b475ea5f5ac934e64194c \ - --hash=sha256:7d79560ad97af658a0f6adfef8b834b53f64746d45b403f225b85c5c2c140eee \ - --hash=sha256:8cb6403ce6d8e047495a701dc7c5bd788add903f8986d523e3e20b98b733e421 \ - --hash=sha256:8db4c1b57507eef143a15a6884ca10f7c73876cdf5d51e713151c1236a0e68cf \ - --hash=sha256:aee678c8720623dc456fa20659af736241f575d79429a0e5e9cf88ae0605cc81 \ - --hash=sha256:bc56c2a1b0d15aa3eaa5a60c9f3f8e3e565303b465dbf57a1b730e7a2b9844e0 \ - --hash=sha256:bd1184ceb3f87651a67b2708d4c3338e9b10c5df903f2e3776b62303b26cb631 \ - --hash=sha256:d06016f7f8625a1825ba3732081d77c94589dca78b7a3fc072194851e88461a4 \ - --hash=sha256:d16bbddf0693323b8c6123dd804100241da461e41d6e332fb0ba6058f630f8c8 -psycopg2-binary==2.9.9 ; python_version >= "3.10" and python_version < "4.0" \ - --hash=sha256:03ef7df18daf2c4c07e2695e8cfd5ee7f748a1d54d802330985a78d2a5a6dca9 \ - --hash=sha256:0a602ea5aff39bb9fac6308e9c9d82b9a35c2bf288e184a816002c9fae930b77 \ - --hash=sha256:0c009475ee389757e6e34611d75f6e4f05f0cf5ebb76c6037508318e1a1e0d7e \ - --hash=sha256:0ef4854e82c09e84cc63084a9e4ccd6d9b154f1dbdd283efb92ecd0b5e2b8c84 \ - --hash=sha256:1236ed0952fbd919c100bc839eaa4a39ebc397ed1c08a97fc45fee2a595aa1b3 \ - --hash=sha256:143072318f793f53819048fdfe30c321890af0c3ec7cb1dfc9cc87aa88241de2 \ - --hash=sha256:15208be1c50b99203fe88d15695f22a5bed95ab3f84354c494bcb1d08557df67 \ - --hash=sha256:1873aade94b74715be2246321c8650cabf5a0d098a95bab81145ffffa4c13876 \ - --hash=sha256:18d0ef97766055fec15b5de2c06dd8e7654705ce3e5e5eed3b6651a1d2a9a152 \ - --hash=sha256:1ea665f8ce695bcc37a90ee52de7a7980be5161375d42a0b6c6abedbf0d81f0f \ - --hash=sha256:2293b001e319ab0d869d660a704942c9e2cce19745262a8aba2115ef41a0a42a \ - --hash=sha256:246b123cc54bb5361588acc54218c8c9fb73068bf227a4a531d8ed56fa3ca7d6 \ - --hash=sha256:275ff571376626195ab95a746e6a04c7df8ea34638b99fc11160de91f2fef503 \ - --hash=sha256:281309265596e388ef483250db3640e5f414168c5a67e9c665cafce9492eda2f \ - --hash=sha256:2d423c8d8a3c82d08fe8af900ad5b613ce3632a1249fd6a223941d0735fce493 \ - --hash=sha256:2e5afae772c00980525f6d6ecf7cbca55676296b580c0e6abb407f15f3706996 \ - --hash=sha256:30dcc86377618a4c8f3b72418df92e77be4254d8f89f14b8e8f57d6d43603c0f \ - --hash=sha256:31a34c508c003a4347d389a9e6fcc2307cc2150eb516462a7a17512130de109e \ - --hash=sha256:323ba25b92454adb36fa425dc5cf6f8f19f78948cbad2e7bc6cdf7b0d7982e59 \ - --hash=sha256:34eccd14566f8fe14b2b95bb13b11572f7c7d5c36da61caf414d23b91fcc5d94 \ - --hash=sha256:3a58c98a7e9c021f357348867f537017057c2ed7f77337fd914d0bedb35dace7 \ - --hash=sha256:3f78fd71c4f43a13d342be74ebbc0666fe1f555b8837eb113cb7416856c79682 \ - --hash=sha256:4154ad09dac630a0f13f37b583eae260c6aa885d67dfbccb5b02c33f31a6d420 \ - --hash=sha256:420f9bbf47a02616e8554e825208cb947969451978dceb77f95ad09c37791dae \ - --hash=sha256:4686818798f9194d03c9129a4d9a702d9e113a89cb03bffe08c6cf799e053291 \ - 
--hash=sha256:57fede879f08d23c85140a360c6a77709113efd1c993923c59fde17aa27599fe \ - --hash=sha256:60989127da422b74a04345096c10d416c2b41bd7bf2a380eb541059e4e999980 \ - --hash=sha256:64cf30263844fa208851ebb13b0732ce674d8ec6a0c86a4e160495d299ba3c93 \ - --hash=sha256:68fc1f1ba168724771e38bee37d940d2865cb0f562380a1fb1ffb428b75cb692 \ - --hash=sha256:6e6f98446430fdf41bd36d4faa6cb409f5140c1c2cf58ce0bbdaf16af7d3f119 \ - --hash=sha256:729177eaf0aefca0994ce4cffe96ad3c75e377c7b6f4efa59ebf003b6d398716 \ - --hash=sha256:72dffbd8b4194858d0941062a9766f8297e8868e1dd07a7b36212aaa90f49472 \ - --hash=sha256:75723c3c0fbbf34350b46a3199eb50638ab22a0228f93fb472ef4d9becc2382b \ - --hash=sha256:77853062a2c45be16fd6b8d6de2a99278ee1d985a7bd8b103e97e41c034006d2 \ - --hash=sha256:78151aa3ec21dccd5cdef6c74c3e73386dcdfaf19bced944169697d7ac7482fc \ - --hash=sha256:7f01846810177d829c7692f1f5ada8096762d9172af1b1a28d4ab5b77c923c1c \ - --hash=sha256:804d99b24ad523a1fe18cc707bf741670332f7c7412e9d49cb5eab67e886b9b5 \ - --hash=sha256:81ff62668af011f9a48787564ab7eded4e9fb17a4a6a74af5ffa6a457400d2ab \ - --hash=sha256:8359bf4791968c5a78c56103702000105501adb557f3cf772b2c207284273984 \ - --hash=sha256:83791a65b51ad6ee6cf0845634859d69a038ea9b03d7b26e703f94c7e93dbcf9 \ - --hash=sha256:8532fd6e6e2dc57bcb3bc90b079c60de896d2128c5d9d6f24a63875a95a088cf \ - --hash=sha256:876801744b0dee379e4e3c38b76fc89f88834bb15bf92ee07d94acd06ec890a0 \ - --hash=sha256:8dbf6d1bc73f1d04ec1734bae3b4fb0ee3cb2a493d35ede9badbeb901fb40f6f \ - --hash=sha256:8f8544b092a29a6ddd72f3556a9fcf249ec412e10ad28be6a0c0d948924f2212 \ - --hash=sha256:911dda9c487075abd54e644ccdf5e5c16773470a6a5d3826fda76699410066fb \ - --hash=sha256:977646e05232579d2e7b9c59e21dbe5261f403a88417f6a6512e70d3f8a046be \ - --hash=sha256:9dba73be7305b399924709b91682299794887cbbd88e38226ed9f6712eabee90 \ - --hash=sha256:a148c5d507bb9b4f2030a2025c545fccb0e1ef317393eaba42e7eabd28eb6041 \ - --hash=sha256:a6cdcc3ede532f4a4b96000b6362099591ab4a3e913d70bcbac2b56c872446f7 \ - --hash=sha256:ac05fb791acf5e1a3e39402641827780fe44d27e72567a000412c648a85ba860 \ - --hash=sha256:b0605eaed3eb239e87df0d5e3c6489daae3f7388d455d0c0b4df899519c6a38d \ - --hash=sha256:b58b4710c7f4161b5e9dcbe73bb7c62d65670a87df7bcce9e1faaad43e715245 \ - --hash=sha256:b6356793b84728d9d50ead16ab43c187673831e9d4019013f1402c41b1db9b27 \ - --hash=sha256:b76bedd166805480ab069612119ea636f5ab8f8771e640ae103e05a4aae3e417 \ - --hash=sha256:bc7bb56d04601d443f24094e9e31ae6deec9ccb23581f75343feebaf30423359 \ - --hash=sha256:c2470da5418b76232f02a2fcd2229537bb2d5a7096674ce61859c3229f2eb202 \ - --hash=sha256:c332c8d69fb64979ebf76613c66b985414927a40f8defa16cf1bc028b7b0a7b0 \ - --hash=sha256:c6af2a6d4b7ee9615cbb162b0738f6e1fd1f5c3eda7e5da17861eacf4c717ea7 \ - --hash=sha256:c77e3d1862452565875eb31bdb45ac62502feabbd53429fdc39a1cc341d681ba \ - --hash=sha256:ca08decd2697fdea0aea364b370b1249d47336aec935f87b8bbfd7da5b2ee9c1 \ - --hash=sha256:ca49a8119c6cbd77375ae303b0cfd8c11f011abbbd64601167ecca18a87e7cdd \ - --hash=sha256:cb16c65dcb648d0a43a2521f2f0a2300f40639f6f8c1ecbc662141e4e3e1ee07 \ - --hash=sha256:d2997c458c690ec2bc6b0b7ecbafd02b029b7b4283078d3b32a852a7ce3ddd98 \ - --hash=sha256:d3f82c171b4ccd83bbaf35aa05e44e690113bd4f3b7b6cc54d2219b132f3ae55 \ - --hash=sha256:dc4926288b2a3e9fd7b50dc6a1909a13bbdadfc67d93f3374d984e56f885579d \ - --hash=sha256:ead20f7913a9c1e894aebe47cccf9dc834e1618b7aa96155d2091a626e59c972 \ - --hash=sha256:ebdc36bea43063116f0486869652cb2ed7032dbc59fbcb4445c4862b5c1ecf7f \ - 
--hash=sha256:ed1184ab8f113e8d660ce49a56390ca181f2981066acc27cf637d5c1e10ce46e \ - --hash=sha256:ee825e70b1a209475622f7f7b776785bd68f34af6e7a46e2e42f27b659b5bc26 \ - --hash=sha256:f7ae5d65ccfbebdfa761585228eb4d0df3a8b15cfb53bd953e713e09fbb12957 \ - --hash=sha256:f7fc5a5acafb7d6ccca13bfa8c90f8c51f13d8fb87d95656d3950f0158d3ce53 \ - --hash=sha256:f9b5571d33660d5009a8b3c25dc1db560206e2d2f89d3df1cb32d72c0d117d52 +pillow==11.1.0 ; python_version >= "3.10" and python_version < "4.0" \ + --hash=sha256:015c6e863faa4779251436db398ae75051469f7c903b043a48f078e437656f83 \ + --hash=sha256:0a2f91f8a8b367e7a57c6e91cd25af510168091fb89ec5146003e424e1558a96 \ + --hash=sha256:11633d58b6ee5733bde153a8dafd25e505ea3d32e261accd388827ee987baf65 \ + --hash=sha256:2062ffb1d36544d42fcaa277b069c88b01bb7298f4efa06731a7fd6cc290b81a \ + --hash=sha256:31eba6bbdd27dde97b0174ddf0297d7a9c3a507a8a1480e1e60ef914fe23d352 \ + --hash=sha256:3362c6ca227e65c54bf71a5f88b3d4565ff1bcbc63ae72c34b07bbb1cc59a43f \ + --hash=sha256:368da70808b36d73b4b390a8ffac11069f8a5c85f29eff1f1b01bcf3ef5b2a20 \ + --hash=sha256:36ba10b9cb413e7c7dfa3e189aba252deee0602c86c309799da5a74009ac7a1c \ + --hash=sha256:3764d53e09cdedd91bee65c2527815d315c6b90d7b8b79759cc48d7bf5d4f114 \ + --hash=sha256:3a5fe20a7b66e8135d7fd617b13272626a28278d0e578c98720d9ba4b2439d49 \ + --hash=sha256:3cdcdb0b896e981678eee140d882b70092dac83ac1cdf6b3a60e2216a73f2b91 \ + --hash=sha256:4637b88343166249fe8aa94e7c4a62a180c4b3898283bb5d3d2fd5fe10d8e4e0 \ + --hash=sha256:4db853948ce4e718f2fc775b75c37ba2efb6aaea41a1a5fc57f0af59eee774b2 \ + --hash=sha256:4dd43a78897793f60766563969442020e90eb7847463eca901e41ba186a7d4a5 \ + --hash=sha256:54251ef02a2309b5eec99d151ebf5c9904b77976c8abdcbce7891ed22df53884 \ + --hash=sha256:54ce1c9a16a9561b6d6d8cb30089ab1e5eb66918cb47d457bd996ef34182922e \ + --hash=sha256:593c5fd6be85da83656b93ffcccc2312d2d149d251e98588b14fbc288fd8909c \ + --hash=sha256:5bb94705aea800051a743aa4874bb1397d4695fb0583ba5e425ee0328757f196 \ + --hash=sha256:67cd427c68926108778a9005f2a04adbd5e67c442ed21d95389fe1d595458756 \ + --hash=sha256:70ca5ef3b3b1c4a0812b5c63c57c23b63e53bc38e758b37a951e5bc466449861 \ + --hash=sha256:73ddde795ee9b06257dac5ad42fcb07f3b9b813f8c1f7f870f402f4dc54b5269 \ + --hash=sha256:758e9d4ef15d3560214cddbc97b8ef3ef86ce04d62ddac17ad39ba87e89bd3b1 \ + --hash=sha256:7d33d2fae0e8b170b6a6c57400e077412240f6f5bb2a342cf1ee512a787942bb \ + --hash=sha256:7fdadc077553621911f27ce206ffcbec7d3f8d7b50e0da39f10997e8e2bb7f6a \ + --hash=sha256:8000376f139d4d38d6851eb149b321a52bb8893a88dae8ee7d95840431977081 \ + --hash=sha256:837060a8599b8f5d402e97197d4924f05a2e0d68756998345c829c33186217b1 \ + --hash=sha256:89dbdb3e6e9594d512780a5a1c42801879628b38e3efc7038094430844e271d8 \ + --hash=sha256:8c730dc3a83e5ac137fbc92dfcfe1511ce3b2b5d7578315b63dbbb76f7f51d90 \ + --hash=sha256:8e275ee4cb11c262bd108ab2081f750db2a1c0b8c12c1897f27b160c8bd57bbc \ + --hash=sha256:9044b5e4f7083f209c4e35aa5dd54b1dd5b112b108648f5c902ad586d4f945c5 \ + --hash=sha256:93a18841d09bcdd774dcdc308e4537e1f867b3dec059c131fde0327899734aa1 \ + --hash=sha256:9409c080586d1f683df3f184f20e36fb647f2e0bc3988094d4fd8c9f4eb1b3b3 \ + --hash=sha256:96f82000e12f23e4f29346e42702b6ed9a2f2fea34a740dd5ffffcc8c539eb35 \ + --hash=sha256:9aa9aeddeed452b2f616ff5507459e7bab436916ccb10961c4a382cd3e03f47f \ + --hash=sha256:9ee85f0696a17dd28fbcfceb59f9510aa71934b483d1f5601d1030c3c8304f3c \ + --hash=sha256:a07dba04c5e22824816b2615ad7a7484432d7f540e6fa86af60d2de57b0fcee2 \ + --hash=sha256:a3cd561ded2cf2bbae44d4605837221b987c216cff94f49dfeed63488bb228d2 \ 
+ --hash=sha256:a697cd8ba0383bba3d2d3ada02b34ed268cb548b369943cd349007730c92bddf \ + --hash=sha256:a76da0a31da6fcae4210aa94fd779c65c75786bc9af06289cd1c184451ef7a65 \ + --hash=sha256:a85b653980faad27e88b141348707ceeef8a1186f75ecc600c395dcac19f385b \ + --hash=sha256:a8d65b38173085f24bc07f8b6c505cbb7418009fa1a1fcb111b1f4961814a442 \ + --hash=sha256:aa8dd43daa836b9a8128dbe7d923423e5ad86f50a7a14dc688194b7be5c0dea2 \ + --hash=sha256:ab8a209b8485d3db694fa97a896d96dd6533d63c22829043fd9de627060beade \ + --hash=sha256:abc56501c3fd148d60659aae0af6ddc149660469082859fa7b066a298bde9482 \ + --hash=sha256:ad5db5781c774ab9a9b2c4302bbf0c1014960a0a7be63278d13ae6fdf88126fe \ + --hash=sha256:ae98e14432d458fc3de11a77ccb3ae65ddce70f730e7c76140653048c71bfcbc \ + --hash=sha256:b20be51b37a75cc54c2c55def3fa2c65bb94ba859dde241cd0a4fd302de5ae0a \ + --hash=sha256:b523466b1a31d0dcef7c5be1f20b942919b62fd6e9a9be199d035509cbefc0ec \ + --hash=sha256:b5d658fbd9f0d6eea113aea286b21d3cd4d3fd978157cbf2447a6035916506d3 \ + --hash=sha256:b6123aa4a59d75f06e9dd3dac5bf8bc9aa383121bb3dd9a7a612e05eabc9961a \ + --hash=sha256:bd165131fd51697e22421d0e467997ad31621b74bfc0b75956608cb2906dda07 \ + --hash=sha256:bf902d7413c82a1bfa08b06a070876132a5ae6b2388e2712aab3a7cbc02205c6 \ + --hash=sha256:c12fc111ef090845de2bb15009372175d76ac99969bdf31e2ce9b42e4b8cd88f \ + --hash=sha256:c1eec9d950b6fe688edee07138993e54ee4ae634c51443cfb7c1e7613322718e \ + --hash=sha256:c640e5a06869c75994624551f45e5506e4256562ead981cce820d5ab39ae2192 \ + --hash=sha256:cc1331b6d5a6e144aeb5e626f4375f5b7ae9934ba620c0ac6b3e43d5e683a0f0 \ + --hash=sha256:cfd5cd998c2e36a862d0e27b2df63237e67273f2fc78f47445b14e73a810e7e6 \ + --hash=sha256:d3d8da4a631471dfaf94c10c85f5277b1f8e42ac42bade1ac67da4b4a7359b73 \ + --hash=sha256:d44ff19eea13ae4acdaaab0179fa68c0c6f2f45d66a4d8ec1eda7d6cecbcc15f \ + --hash=sha256:dd0052e9db3474df30433f83a71b9b23bd9e4ef1de13d92df21a52c0303b8ab6 \ + --hash=sha256:dd0e081319328928531df7a0e63621caf67652c8464303fd102141b785ef9547 \ + --hash=sha256:dda60aa465b861324e65a78c9f5cf0f4bc713e4309f83bc387be158b077963d9 \ + --hash=sha256:e06695e0326d05b06833b40b7ef477e475d0b1ba3a6d27da1bb48c23209bf457 \ + --hash=sha256:e1abe69aca89514737465752b4bcaf8016de61b3be1397a8fc260ba33321b3a8 \ + --hash=sha256:e267b0ed063341f3e60acd25c05200df4193e15a4a5807075cd71225a2386e26 \ + --hash=sha256:e5449ca63da169a2e6068dd0e2fcc8d91f9558aba89ff6d02121ca8ab11e79e5 \ + --hash=sha256:e63e4e5081de46517099dc30abe418122f54531a6ae2ebc8680bcd7096860eab \ + --hash=sha256:f189805c8be5ca5add39e6f899e6ce2ed824e65fb45f3c28cb2841911da19070 \ + --hash=sha256:f7955ecf5609dee9442cbface754f2c6e541d9e6eda87fad7f7a989b0bdb9d71 \ + --hash=sha256:f86d3a7a9af5d826744fabf4afd15b9dfef44fe69a98541f666f66fbb8d3fef9 \ + --hash=sha256:fbd43429d0d7ed6533b25fc993861b8fd512c42d04514a0dd6337fb3ccf22761 +prettytable==3.14.0 ; python_version >= "3.10" and python_version < "4.0" \ + --hash=sha256:61d5c68f04a94acc73c7aac64f0f380f5bed4d2959d59edc6e4cbb7a0e7b55c4 \ + --hash=sha256:b804b8d51db23959b96b329094debdbbdf10c8c3aa75958c5988cfd7f78501dd +protobuf==5.29.3 ; python_version >= "3.10" and python_version < "4.0" \ + --hash=sha256:0a18ed4a24198528f2333802eb075e59dea9d679ab7a6c5efb017a59004d849f \ + --hash=sha256:0eb32bfa5219fc8d4111803e9a690658aa2e6366384fd0851064b963b6d1f2a7 \ + --hash=sha256:3ea51771449e1035f26069c4c7fd51fba990d07bc55ba80701c78f886bf9c888 \ + --hash=sha256:5da0f41edaf117bde316404bad1a486cb4ededf8e4a54891296f648e8e076620 \ + --hash=sha256:6ce8cc3389a20693bfde6c6562e03474c40851b44975c9b2bf6df7d8c4f864da \ + 
--hash=sha256:84a57163a0ccef3f96e4b6a20516cedcf5bb3a95a657131c5c3ac62200d23252 \ + --hash=sha256:a4fa6f80816a9a0678429e84973f2f98cbc218cca434abe8db2ad0bffc98503a \ + --hash=sha256:a8434404bbf139aa9e1300dbf989667a83d42ddda9153d8ab76e0d5dcaca484e \ + --hash=sha256:b89c115d877892a512f79a8114564fb435943b59067615894c3b13cd3e1fa107 \ + --hash=sha256:c027e08a08be10b67c06bf2370b99c811c466398c357e615ca88c91c07f0910f \ + --hash=sha256:daaf63f70f25e8689c072cfad4334ca0ac1d1e05a92fc15c54eb9cf23c3efd84 +psutil==6.1.1 ; python_version >= "3.10" and python_version < "4.0" \ + --hash=sha256:018aeae2af92d943fdf1da6b58665124897cfc94faa2ca92098838f83e1b1bca \ + --hash=sha256:0bdd4eab935276290ad3cb718e9809412895ca6b5b334f5a9111ee6d9aff9377 \ + --hash=sha256:1924e659d6c19c647e763e78670a05dbb7feaf44a0e9c94bf9e14dfc6ba50468 \ + --hash=sha256:33431e84fee02bc84ea36d9e2c4a6d395d479c9dd9bba2376c1f6ee8f3a4e0b3 \ + --hash=sha256:384636b1a64b47814437d1173be1427a7c83681b17a450bfc309a1953e329603 \ + --hash=sha256:6d4281f5bbca041e2292be3380ec56a9413b790579b8e593b1784499d0005dac \ + --hash=sha256:8be07491f6ebe1a693f17d4f11e69d0dc1811fa082736500f649f79df7735303 \ + --hash=sha256:8df0178ba8a9e5bc84fed9cfa61d54601b371fbec5c8eebad27575f1e105c0d4 \ + --hash=sha256:97f7cb9921fbec4904f522d972f0c0e1f4fabbdd4e0287813b21215074a0f160 \ + --hash=sha256:9ccc4316f24409159897799b83004cb1e24f9819b0dcf9c0b68bdcb6cefee6a8 \ + --hash=sha256:b6e06c20c05fe95a3d7302d74e7097756d4ba1247975ad6905441ae1b5b66003 \ + --hash=sha256:c777eb75bb33c47377c9af68f30e9f11bc78e0f07fbf907be4a5d70b2fe5f030 \ + --hash=sha256:ca9609c77ea3b8481ab005da74ed894035936223422dc591d6772b147421f777 \ + --hash=sha256:cf8496728c18f2d0b45198f06895be52f36611711746b7f30c464b422b50e2f5 \ + --hash=sha256:eaa912e0b11848c4d9279a93d7e2783df352b082f40111e078388701fd479e53 \ + --hash=sha256:f35cfccb065fff93529d2afb4a2e89e363fe63ca1e4a5da22b603a85833c2649 \ + --hash=sha256:fc0ed7fe2231a444fc219b9c42d0376e0a9a1a72f16c5cfa0f68d19f1a0663e8 +psycopg2-binary==2.9.10 ; python_version >= "3.10" and python_version < "4.0" \ + --hash=sha256:04392983d0bb89a8717772a193cfaac58871321e3ec69514e1c4e0d4957b5aff \ + --hash=sha256:056470c3dc57904bbf63d6f534988bafc4e970ffd50f6271fc4ee7daad9498a5 \ + --hash=sha256:0ea8e3d0ae83564f2fc554955d327fa081d065c8ca5cc6d2abb643e2c9c1200f \ + --hash=sha256:155e69561d54d02b3c3209545fb08938e27889ff5a10c19de8d23eb5a41be8a5 \ + --hash=sha256:18c5ee682b9c6dd3696dad6e54cc7ff3a1a9020df6a5c0f861ef8bfd338c3ca0 \ + --hash=sha256:19721ac03892001ee8fdd11507e6a2e01f4e37014def96379411ca99d78aeb2c \ + --hash=sha256:1a6784f0ce3fec4edc64e985865c17778514325074adf5ad8f80636cd029ef7c \ + --hash=sha256:2286791ececda3a723d1910441c793be44625d86d1a4e79942751197f4d30341 \ + --hash=sha256:230eeae2d71594103cd5b93fd29d1ace6420d0b86f4778739cb1a5a32f607d1f \ + --hash=sha256:245159e7ab20a71d989da00f280ca57da7641fa2cdcf71749c193cea540a74f7 \ + --hash=sha256:26540d4a9a4e2b096f1ff9cce51253d0504dca5a85872c7f7be23be5a53eb18d \ + --hash=sha256:270934a475a0e4b6925b5f804e3809dd5f90f8613621d062848dd82f9cd62007 \ + --hash=sha256:2ad26b467a405c798aaa1458ba09d7e2b6e5f96b1ce0ac15d82fd9f95dc38a92 \ + --hash=sha256:2b3d2491d4d78b6b14f76881905c7a8a8abcf974aad4a8a0b065273a0ed7a2cb \ + --hash=sha256:2ce3e21dc3437b1d960521eca599d57408a695a0d3c26797ea0f72e834c7ffe5 \ + --hash=sha256:30e34c4e97964805f715206c7b789d54a78b70f3ff19fbe590104b71c45600e5 \ + --hash=sha256:3216ccf953b3f267691c90c6fe742e45d890d8272326b4a8b20850a03d05b7b8 \ + --hash=sha256:32581b3020c72d7a421009ee1c6bf4a131ef5f0a968fab2e2de0c9d2bb4577f1 \ + 
--hash=sha256:35958ec9e46432d9076286dda67942ed6d968b9c3a6a2fd62b48939d1d78bf68 \ + --hash=sha256:3abb691ff9e57d4a93355f60d4f4c1dd2d68326c968e7db17ea96df3c023ef73 \ + --hash=sha256:3c18f74eb4386bf35e92ab2354a12c17e5eb4d9798e4c0ad3a00783eae7cd9f1 \ + --hash=sha256:3c4745a90b78e51d9ba06e2088a2fe0c693ae19cc8cb051ccda44e8df8a6eb53 \ + --hash=sha256:3c4ded1a24b20021ebe677b7b08ad10bf09aac197d6943bfe6fec70ac4e4690d \ + --hash=sha256:3e9c76f0ac6f92ecfc79516a8034a544926430f7b080ec5a0537bca389ee0906 \ + --hash=sha256:48b338f08d93e7be4ab2b5f1dbe69dc5e9ef07170fe1f86514422076d9c010d0 \ + --hash=sha256:4b3df0e6990aa98acda57d983942eff13d824135fe2250e6522edaa782a06de2 \ + --hash=sha256:512d29bb12608891e349af6a0cccedce51677725a921c07dba6342beaf576f9a \ + --hash=sha256:5a507320c58903967ef7384355a4da7ff3f28132d679aeb23572753cbf2ec10b \ + --hash=sha256:5c370b1e4975df846b0277b4deba86419ca77dbc25047f535b0bb03d1a544d44 \ + --hash=sha256:6b269105e59ac96aba877c1707c600ae55711d9dcd3fc4b5012e4af68e30c648 \ + --hash=sha256:6d4fa1079cab9018f4d0bd2db307beaa612b0d13ba73b5c6304b9fe2fb441ff7 \ + --hash=sha256:6dc08420625b5a20b53551c50deae6e231e6371194fa0651dbe0fb206452ae1f \ + --hash=sha256:73aa0e31fa4bb82578f3a6c74a73c273367727de397a7a0f07bd83cbea696baa \ + --hash=sha256:7559bce4b505762d737172556a4e6ea8a9998ecac1e39b5233465093e8cee697 \ + --hash=sha256:79625966e176dc97ddabc142351e0409e28acf4660b88d1cf6adb876d20c490d \ + --hash=sha256:7a813c8bdbaaaab1f078014b9b0b13f5de757e2b5d9be6403639b298a04d218b \ + --hash=sha256:7b2c956c028ea5de47ff3a8d6b3cc3330ab45cf0b7c3da35a2d6ff8420896526 \ + --hash=sha256:7f4152f8f76d2023aac16285576a9ecd2b11a9895373a1f10fd9db54b3ff06b4 \ + --hash=sha256:7f5d859928e635fa3ce3477704acee0f667b3a3d3e4bb109f2b18d4005f38287 \ + --hash=sha256:851485a42dbb0bdc1edcdabdb8557c09c9655dfa2ca0460ff210522e073e319e \ + --hash=sha256:8608c078134f0b3cbd9f89b34bd60a943b23fd33cc5f065e8d5f840061bd0673 \ + --hash=sha256:880845dfe1f85d9d5f7c412efea7a08946a46894537e4e5d091732eb1d34d9a0 \ + --hash=sha256:8aabf1c1a04584c168984ac678a668094d831f152859d06e055288fa515e4d30 \ + --hash=sha256:8aecc5e80c63f7459a1a2ab2c64df952051df196294d9f739933a9f6687e86b3 \ + --hash=sha256:8cd9b4f2cfab88ed4a9106192de509464b75a906462fb846b936eabe45c2063e \ + --hash=sha256:8de718c0e1c4b982a54b41779667242bc630b2197948405b7bd8ce16bcecac92 \ + --hash=sha256:9440fa522a79356aaa482aa4ba500b65f28e5d0e63b801abf6aa152a29bd842a \ + --hash=sha256:b5f86c56eeb91dc3135b3fd8a95dc7ae14c538a2f3ad77a19645cf55bab1799c \ + --hash=sha256:b73d6d7f0ccdad7bc43e6d34273f70d587ef62f824d7261c4ae9b8b1b6af90e8 \ + --hash=sha256:bb89f0a835bcfc1d42ccd5f41f04870c1b936d8507c6df12b7737febc40f0909 \ + --hash=sha256:c3cc28a6fd5a4a26224007712e79b81dbaee2ffb90ff406256158ec4d7b52b47 \ + --hash=sha256:ce5ab4bf46a211a8e924d307c1b1fcda82368586a19d0a24f8ae166f5c784864 \ + --hash=sha256:d00924255d7fc916ef66e4bf22f354a940c67179ad3fd7067d7a0a9c84d2fbfc \ + --hash=sha256:d7cd730dfa7c36dbe8724426bf5612798734bff2d3c3857f36f2733f5bfc7c00 \ + --hash=sha256:e217ce4d37667df0bc1c397fdcd8de5e81018ef305aed9415c3b093faaeb10fb \ + --hash=sha256:e3923c1d9870c49a2d44f795df0c889a22380d36ef92440ff618ec315757e539 \ + --hash=sha256:e5720a5d25e3b99cd0dc5c8a440570469ff82659bb09431c1439b92caf184d3b \ + --hash=sha256:e8b58f0a96e7a1e341fc894f62c1177a7c83febebb5ff9123b579418fdc8a481 \ + --hash=sha256:e984839e75e0b60cfe75e351db53d6db750b00de45644c5d1f7ee5d1f34a1ce5 \ + --hash=sha256:eb09aa7f9cecb45027683bb55aebaaf45a0df8bf6de68801a6afdc7947bb09d4 \ + 
--hash=sha256:ec8a77f521a17506a24a5f626cb2aee7850f9b69a0afe704586f63a464f3cd64 \ + --hash=sha256:ecced182e935529727401b24d76634a357c71c9275b356efafd8a2a91ec07392 \ + --hash=sha256:ee0e8c683a7ff25d23b55b11161c2663d4b099770f6085ff0a20d4505778d6b4 \ + --hash=sha256:f0c2d907a1e102526dd2986df638343388b94c33860ff3bbe1384130828714b1 \ + --hash=sha256:f758ed67cab30b9a8d2833609513ce4d3bd027641673d4ebc9c067e4d208eec1 \ + --hash=sha256:f8157bed2f51db683f31306aa497311b560f2265998122abe1dce6428bd86567 \ + --hash=sha256:ffe8ed017e4ed70f68b7b371d84b7d4a790368db9203dfc2d222febd3a9c8863 pyasn1-modules==0.3.0 ; python_version >= "3.10" and python_version < "4.0" \ --hash=sha256:5bd01446b736eb9d31512a30d46c1ac3395d676c6f3cafa4c03eb54b9925631c \ --hash=sha256:d3ccd6ed470d9ffbc716be08bd90efbd44d0734bc9303818f7336070984a162d @@ -1060,165 +1280,143 @@ pyasn1==0.5.1 ; python_version >= "3.10" and python_version < "4.0" \ pycparser==2.22 ; python_version >= "3.10" and python_version < "4.0" \ --hash=sha256:491c8be9c040f5390f5bf44a5b07752bd07f56edf992381b05c701439eec10f6 \ --hash=sha256:c3702b6d3dd8c7abc1afa565d7e63d53a1d0bd86cdc24edd75470f4de499cfcc -pycryptodome==3.21.0 ; python_version >= "3.10" and python_version < "4.0" \ - --hash=sha256:0714206d467fc911042d01ea3a1847c847bc10884cf674c82e12915cfe1649f8 \ - --hash=sha256:0fa0a05a6a697ccbf2a12cec3d6d2650b50881899b845fac6e87416f8cb7e87d \ - --hash=sha256:0fd54003ec3ce4e0f16c484a10bc5d8b9bd77fa662a12b85779a2d2d85d67ee0 \ - --hash=sha256:18caa8cfbc676eaaf28613637a89980ad2fd96e00c564135bf90bc3f0b34dd93 \ - --hash=sha256:2480ec2c72438430da9f601ebc12c518c093c13111a5c1644c82cdfc2e50b1e4 \ - --hash=sha256:26412b21df30b2861424a6c6d5b1d8ca8107612a4cfa4d0183e71c5d200fb34a \ - --hash=sha256:280b67d20e33bb63171d55b1067f61fbd932e0b1ad976b3a184303a3dad22764 \ - --hash=sha256:2cb635b67011bc147c257e61ce864879ffe6d03342dc74b6045059dfbdedafca \ - --hash=sha256:2de4b7263a33947ff440412339cb72b28a5a4c769b5c1ca19e33dd6cd1dcec6e \ - --hash=sha256:3ba4cc304eac4d4d458f508d4955a88ba25026890e8abff9b60404f76a62c55e \ - --hash=sha256:4c26a2f0dc15f81ea3afa3b0c87b87e501f235d332b7f27e2225ecb80c0b1cdd \ - --hash=sha256:590ef0898a4b0a15485b05210b4a1c9de8806d3ad3d47f74ab1dc07c67a6827f \ - --hash=sha256:5dfafca172933506773482b0e18f0cd766fd3920bd03ec85a283df90d8a17bc6 \ - --hash=sha256:6cce52e196a5f1d6797ff7946cdff2038d3b5f0aba4a43cb6bf46b575fd1b5bb \ - --hash=sha256:7cb087b8612c8a1a14cf37dd754685be9a8d9869bed2ffaaceb04850a8aeef7e \ - --hash=sha256:7d85c1b613121ed3dbaa5a97369b3b757909531a959d229406a75b912dd51dd1 \ - --hash=sha256:7ee86cbde706be13f2dec5a42b52b1c1d1cbb90c8e405c68d0755134735c8dc6 \ - --hash=sha256:8898a66425a57bcf15e25fc19c12490b87bd939800f39a03ea2de2aea5e3611a \ - --hash=sha256:8acd7d34af70ee63f9a849f957558e49a98f8f1634f86a59d2be62bb8e93f71c \ - --hash=sha256:932c905b71a56474bff8a9c014030bc3c882cee696b448af920399f730a650c2 \ - --hash=sha256:a1752eca64c60852f38bb29e2c86fca30d7672c024128ef5d70cc15868fa10f4 \ - --hash=sha256:a3804675283f4764a02db05f5191eb8fec2bb6ca34d466167fc78a5f05bbe6b3 \ - --hash=sha256:a4e74c522d630766b03a836c15bff77cb657c5fdf098abf8b1ada2aebc7d0819 \ - --hash=sha256:a915597ffccabe902e7090e199a7bf7a381c5506a747d5e9d27ba55197a2c568 \ - --hash=sha256:b7aa25fc0baa5b1d95b7633af4f5f1838467f1815442b22487426f94e0d66c53 \ - --hash=sha256:cc2269ab4bce40b027b49663d61d816903a4bd90ad88cb99ed561aadb3888dd3 \ - --hash=sha256:d5ebe0763c982f069d3877832254f64974139f4f9655058452603ff559c482e8 \ - --hash=sha256:dad9bf36eda068e89059d1f07408e397856be9511d7113ea4b586642a429a4fd \ - 
--hash=sha256:de18954104667f565e2fbb4783b56667f30fb49c4d79b346f52a29cb198d5b6b \ - --hash=sha256:f35e442630bc4bc2e1878482d6f59ea22e280d7121d7adeaedba58c23ab6386b \ - --hash=sha256:f7787e0d469bdae763b876174cf2e6c0f7be79808af26b1da96f1a64bcf47297 \ - --hash=sha256:ff99f952db3db2fbe98a0b355175f93ec334ba3d01bbde25ad3a5a33abc02b58 -pycryptodomex==3.20.0 ; python_version >= "3.10" and python_version < "4.0" \ - --hash=sha256:0daad007b685db36d977f9de73f61f8da2a7104e20aca3effd30752fd56f73e1 \ - --hash=sha256:108e5f1c1cd70ffce0b68739c75734437c919d2eaec8e85bffc2c8b4d2794305 \ - --hash=sha256:19764605feea0df966445d46533729b645033f134baeb3ea26ad518c9fdf212c \ - --hash=sha256:1be97461c439a6af4fe1cf8bf6ca5936d3db252737d2f379cc6b2e394e12a458 \ - --hash=sha256:25cd61e846aaab76d5791d006497134602a9e451e954833018161befc3b5b9ed \ - --hash=sha256:2a47bcc478741b71273b917232f521fd5704ab4b25d301669879e7273d3586cc \ - --hash=sha256:59af01efb011b0e8b686ba7758d59cf4a8263f9ad35911bfe3f416cee4f5c08c \ - --hash=sha256:5dcac11031a71348faaed1f403a0debd56bf5404232284cf8c761ff918886ebc \ - --hash=sha256:62a5ec91388984909bb5398ea49ee61b68ecb579123694bffa172c3b0a107079 \ - --hash=sha256:645bd4ca6f543685d643dadf6a856cc382b654cc923460e3a10a49c1b3832aeb \ - --hash=sha256:653b29b0819605fe0898829c8ad6400a6ccde096146730c2da54eede9b7b8baa \ - --hash=sha256:69138068268127cd605e03438312d8f271135a33140e2742b417d027a0539427 \ - --hash=sha256:6e186342cfcc3aafaad565cbd496060e5a614b441cacc3995ef0091115c1f6c5 \ - --hash=sha256:76bd15bb65c14900d98835fcd10f59e5e0435077431d3a394b60b15864fddd64 \ - --hash=sha256:7805830e0c56d88f4d491fa5ac640dfc894c5ec570d1ece6ed1546e9df2e98d6 \ - --hash=sha256:7a710b79baddd65b806402e14766c721aee8fb83381769c27920f26476276c1e \ - --hash=sha256:7a7a8f33a1f1fb762ede6cc9cbab8f2a9ba13b196bfaf7bc6f0b39d2ba315a43 \ - --hash=sha256:82ee7696ed8eb9a82c7037f32ba9b7c59e51dda6f105b39f043b6ef293989cb3 \ - --hash=sha256:88afd7a3af7ddddd42c2deda43d53d3dfc016c11327d0915f90ca34ebda91499 \ - --hash=sha256:8af1a451ff9e123d0d8bd5d5e60f8e3315c3a64f3cdd6bc853e26090e195cdc8 \ - --hash=sha256:8ee606964553c1a0bc74057dd8782a37d1c2bc0f01b83193b6f8bb14523b877b \ - --hash=sha256:91852d4480a4537d169c29a9d104dda44094c78f1f5b67bca76c29a91042b623 \ - --hash=sha256:9c682436c359b5ada67e882fec34689726a09c461efd75b6ea77b2403d5665b7 \ - --hash=sha256:bc3ee1b4d97081260d92ae813a83de4d2653206967c4a0a017580f8b9548ddbc \ - --hash=sha256:bca649483d5ed251d06daf25957f802e44e6bb6df2e8f218ae71968ff8f8edc4 \ - --hash=sha256:c39778fd0548d78917b61f03c1fa8bfda6cfcf98c767decf360945fe6f97461e \ - --hash=sha256:cbe71b6712429650e3883dc81286edb94c328ffcd24849accac0a4dbcc76958a \ - --hash=sha256:d00fe8596e1cc46b44bf3907354e9377aa030ec4cd04afbbf6e899fc1e2a7781 \ - --hash=sha256:d3584623e68a5064a04748fb6d76117a21a7cb5eaba20608a41c7d0c61721794 \ - --hash=sha256:e48217c7901edd95f9f097feaa0388da215ed14ce2ece803d3f300b4e694abea \ - --hash=sha256:f2e497413560e03421484189a6b65e33fe800d3bd75590e6d78d4dfdb7accf3b \ - --hash=sha256:ff5c9a67f8a4fba4aed887216e32cbc48f2a6fb2673bb10a99e43be463e15913 -pydantic-core==2.23.4 ; python_version >= "3.10" and python_version < "4.0" \ - --hash=sha256:0a7df63886be5e270da67e0966cf4afbae86069501d35c8c1b3b6c168f42cb36 \ - --hash=sha256:0cb3da3fd1b6a5d0279a01877713dbda118a2a4fc6f0d821a57da2e464793f05 \ - --hash=sha256:0dbd8dbed2085ed23b5c04afa29d8fd2771674223135dc9bc937f3c09284d071 \ - --hash=sha256:0dff76e0602ca7d4cdaacc1ac4c005e0ce0dcfe095d5b5259163a80d3a10d327 \ - --hash=sha256:1278e0d324f6908e872730c9102b0112477a7f7cf88b308e4fc36ce1bdb6d58c \ 
- --hash=sha256:128585782e5bfa515c590ccee4b727fb76925dd04a98864182b22e89a4e6ed36 \ - --hash=sha256:1498bec4c05c9c787bde9125cfdcc63a41004ff167f495063191b863399b1a29 \ - --hash=sha256:19442362866a753485ba5e4be408964644dd6a09123d9416c54cd49171f50744 \ - --hash=sha256:1b84d168f6c48fabd1f2027a3d1bdfe62f92cade1fb273a5d68e621da0e44e6d \ - --hash=sha256:1e90d2e3bd2c3863d48525d297cd143fe541be8bbf6f579504b9712cb6b643ec \ - --hash=sha256:20152074317d9bed6b7a95ade3b7d6054845d70584216160860425f4fbd5ee9e \ - --hash=sha256:216f9b2d7713eb98cb83c80b9c794de1f6b7e3145eef40400c62e86cee5f4e1e \ - --hash=sha256:233710f069d251feb12a56da21e14cca67994eab08362207785cf8c598e74577 \ - --hash=sha256:255a8ef062cbf6674450e668482456abac99a5583bbafb73f9ad469540a3a232 \ - --hash=sha256:2584f7cf844ac4d970fba483a717dbe10c1c1c96a969bf65d61ffe94df1b2863 \ - --hash=sha256:2971bb5ffe72cc0f555c13e19b23c85b654dd2a8f7ab493c262071377bfce9f6 \ - --hash=sha256:29d2c342c4bc01b88402d60189f3df065fb0dda3654744d5a165a5288a657368 \ - --hash=sha256:2e203fdf807ac7e12ab59ca2bfcabb38c7cf0b33c41efeb00f8e5da1d86af480 \ - --hash=sha256:33e3d65a85a2a4a0dc3b092b938a4062b1a05f3a9abde65ea93b233bca0e03f2 \ - --hash=sha256:374a5e5049eda9e0a44c696c7ade3ff355f06b1fe0bb945ea3cac2bc336478a2 \ - --hash=sha256:37b0fe330e4a58d3c58b24d91d1eb102aeec675a3db4c292ec3928ecd892a9a6 \ - --hash=sha256:3d5639516376dce1940ea36edf408c554475369f5da2abd45d44621cb616f769 \ - --hash=sha256:42c6dcb030aefb668a2b7009c85b27f90e51e6a3b4d5c9bc4c57631292015b0d \ - --hash=sha256:4a7cd62e831afe623fbb7aabbb4fe583212115b3ef38a9f6b71869ba644624a2 \ - --hash=sha256:4ba762ed58e8d68657fc1281e9bb72e1c3e79cc5d464be146e260c541ec12d84 \ - --hash=sha256:4fc714bdbfb534f94034efaa6eadd74e5b93c8fa6315565a222f7b6f42ca1166 \ - --hash=sha256:4ffa2ebd4c8530079140dd2d7f794a9d9a73cbb8e9d59ffe24c63436efa8f271 \ - --hash=sha256:5a1504ad17ba4210df3a045132a7baeeba5a200e930f57512ee02909fc5c4cb5 \ - --hash=sha256:5c364564d17da23db1106787675fc7af45f2f7b58b4173bfdd105564e132e6fb \ - --hash=sha256:5e11661ce0fd30a6790e8bcdf263b9ec5988e95e63cf901972107efc49218b13 \ - --hash=sha256:5f54b118ce5de9ac21c363d9b3caa6c800341e8c47a508787e5868c6b79c9323 \ - --hash=sha256:5f5ff8d839f4566a474a969508fe1c5e59c31c80d9e140566f9a37bba7b8d556 \ - --hash=sha256:61817945f2fe7d166e75fbfb28004034b48e44878177fc54d81688e7b85a3665 \ - --hash=sha256:624e278a7d29b6445e4e813af92af37820fafb6dcc55c012c834f9e26f9aaaef \ - --hash=sha256:63e46b3169866bd62849936de036f901a9356e36376079b05efa83caeaa02ceb \ - --hash=sha256:6531b7ca5f951d663c339002e91aaebda765ec7d61b7d1e3991051906ddde119 \ - --hash=sha256:68665f4c17edcceecc112dfed5dbe6f92261fb9d6054b47d01bf6371a6196126 \ - --hash=sha256:696dd8d674d6ce621ab9d45b205df149399e4bb9aa34102c970b721554828510 \ - --hash=sha256:6f783e0ec4803c787bcea93e13e9932edab72068f68ecffdf86a99fd5918878b \ - --hash=sha256:723314c1d51722ab28bfcd5240d858512ffd3116449c557a1336cbe3919beb87 \ - --hash=sha256:74b9127ffea03643e998e0c5ad9bd3811d3dac8c676e47db17b0ee7c3c3bf35f \ - --hash=sha256:7530e201d10d7d14abce4fb54cfe5b94a0aefc87da539d0346a484ead376c3cc \ - --hash=sha256:77733e3892bb0a7fa797826361ce8a9184d25c8dffaec60b7ffe928153680ba8 \ - --hash=sha256:78ddaaa81421a29574a682b3179d4cf9e6d405a09b99d93ddcf7e5239c742e21 \ - --hash=sha256:7c9129eb40958b3d4500fa2467e6a83356b3b61bfff1b414c7361d9220f9ae8f \ - --hash=sha256:7d32706badfe136888bdea71c0def994644e09fff0bfe47441deaed8e96fdbc6 \ - --hash=sha256:81965a16b675b35e1d09dd14df53f190f9129c0202356ed44ab2728b1c905658 \ - 
--hash=sha256:8394d940e5d400d04cad4f75c0598665cbb81aecefaca82ca85bd28264af7f9b \ - --hash=sha256:86d2f57d3e1379a9525c5ab067b27dbb8a0642fb5d454e17a9ac434f9ce523e3 \ - --hash=sha256:883a91b5dd7d26492ff2f04f40fbb652de40fcc0afe07e8129e8ae779c2110eb \ - --hash=sha256:88ad334a15b32a791ea935af224b9de1bf99bcd62fabf745d5f3442199d86d59 \ - --hash=sha256:9261d3ce84fa1d38ed649c3638feefeae23d32ba9182963e465d58d62203bd24 \ - --hash=sha256:97df63000f4fea395b2824da80e169731088656d1818a11b95f3b173747b6cd9 \ - --hash=sha256:98d134c954828488b153d88ba1f34e14259284f256180ce659e8d83e9c05eaa3 \ - --hash=sha256:996a38a83508c54c78a5f41456b0103c30508fed9abcad0a59b876d7398f25fd \ - --hash=sha256:9a5bce9d23aac8f0cf0836ecfc033896aa8443b501c58d0602dbfd5bd5b37753 \ - --hash=sha256:9a6b5099eeec78827553827f4c6b8615978bb4b6a88e5d9b93eddf8bb6790f55 \ - --hash=sha256:9d18368b137c6295db49ce7218b1a9ba15c5bc254c96d7c9f9e924a9bc7825ad \ - --hash=sha256:a4fa4fc04dff799089689f4fd502ce7d59de529fc2f40a2c8836886c03e0175a \ - --hash=sha256:a5c7ba8ffb6d6f8f2ab08743be203654bb1aaa8c9dcb09f82ddd34eadb695605 \ - --hash=sha256:aea443fffa9fbe3af1a9ba721a87f926fe548d32cab71d188a6ede77d0ff244e \ - --hash=sha256:b10bd51f823d891193d4717448fab065733958bdb6a6b351967bd349d48d5c9b \ - --hash=sha256:ba1a0996f6c2773bd83e63f18914c1de3c9dd26d55f4ac302a7efe93fb8e7433 \ - --hash=sha256:bb2802e667b7051a1bebbfe93684841cc9351004e2badbd6411bf357ab8d5ac8 \ - --hash=sha256:cfdd16ab5e59fc31b5e906d1a3f666571abc367598e3e02c83403acabc092e07 \ - --hash=sha256:d06b0c8da4f16d1d1e352134427cb194a0a6e19ad5db9161bf32b2113409e728 \ - --hash=sha256:d0776dea117cf5272382634bd2a5c1b6eb16767c223c6a5317cd3e2a757c61a0 \ - --hash=sha256:d18ca8148bebe1b0a382a27a8ee60350091a6ddaf475fa05ef50dc35b5df6327 \ - --hash=sha256:d4488a93b071c04dc20f5cecc3631fc78b9789dd72483ba15d423b5b3689b555 \ - --hash=sha256:d5f7a395a8cf1621939692dba2a6b6a830efa6b3cee787d82c7de1ad2930de64 \ - --hash=sha256:d7a80d21d613eec45e3d41eb22f8f94ddc758a6c4720842dc74c0581f54993d6 \ - --hash=sha256:d97683ddee4723ae8c95d1eddac7c192e8c552da0c73a925a89fa8649bf13eea \ - --hash=sha256:dcedcd19a557e182628afa1d553c3895a9f825b936415d0dbd3cd0bbcfd29b4b \ - --hash=sha256:de6d1d1b9e5101508cb37ab0d972357cac5235f5c6533d1071964c47139257df \ - --hash=sha256:df49e7a0861a8c36d089c1ed57d308623d60416dab2647a4a17fe050ba85de0e \ - --hash=sha256:df933278128ea1cd77772673c73954e53a1c95a4fdf41eef97c2b779271bd0bd \ - --hash=sha256:e08277a400de01bc72436a0ccd02bdf596631411f592ad985dcee21445bd0068 \ - --hash=sha256:e38e63e6f3d1cec5a27e0afe90a085af8b6806ee208b33030e65b6516353f1a3 \ - --hash=sha256:e55541f756f9b3ee346b840103f32779c695a19826a4c442b7954550a0972040 \ - --hash=sha256:ec4e55f79b1c4ffb2eecd8a0cfba9955a2588497d96851f4c8f99aa4a1d39b12 \ - --hash=sha256:ed1a53de42fbe34853ba90513cea21673481cd81ed1be739f7f2efb931b24916 \ - --hash=sha256:ed541d70698978a20eb63d8c5d72f2cc6d7079d9d90f6b50bad07826f1320f5f \ - --hash=sha256:f09e2ff1f17c2b51f2bc76d1cc33da96298f0a036a137f5440ab3ec5360b624f \ - --hash=sha256:f220b0eea5965dec25480b6333c788fb72ce5f9129e8759ef876a1d805d00801 \ - --hash=sha256:f3e0da4ebaef65158d4dfd7d3678aad692f7666877df0002b8a522cdf088f231 \ - --hash=sha256:f455ee30a9d61d3e1a15abd5068827773d6e4dc513e795f380cdd59932c782d5 \ - --hash=sha256:f5ef8f42bec47f21d07668a043f077d507e5bf4e668d5c6dfe6aaba89de1a5b8 \ - --hash=sha256:f69a8e0b033b747bb3e36a44e7732f0c99f7edd5cea723d45bc0d6e95377ffee \ - --hash=sha256:ff02b6d461a6de369f07ec15e465a88895f3223eb75073ffea56b84d9331f607 -pydantic==2.9.2 ; python_version >= "3.10" and python_version < "4.0" \ 
- --hash=sha256:d155cef71265d1e9807ed1c32b4c8deec042a44a50a4188b25ac67ecd81a9c0f \ - --hash=sha256:f048cec7b26778210e28a0459867920654d48e5e62db0958433636cde4254f12 +pycryptodomex==3.21.0 ; python_version >= "3.10" and python_version < "4.0" \ + --hash=sha256:0df2608682db8279a9ebbaf05a72f62a321433522ed0e499bc486a6889b96bf3 \ + --hash=sha256:103c133d6cd832ae7266feb0a65b69e3a5e4dbbd6f3a3ae3211a557fd653f516 \ + --hash=sha256:1233443f19d278c72c4daae749872a4af3787a813e05c3561c73ab0c153c7b0f \ + --hash=sha256:222d0bd05381dd25c32dd6065c071ebf084212ab79bab4599ba9e6a3e0009e6c \ + --hash=sha256:27e84eeff24250ffec32722334749ac2a57a5fd60332cd6a0680090e7c42877e \ + --hash=sha256:34325b84c8b380675fd2320d0649cdcbc9cf1e0d1526edbe8fce43ed858cdc7e \ + --hash=sha256:365aa5a66d52fd1f9e0530ea97f392c48c409c2f01ff8b9a39c73ed6f527d36c \ + --hash=sha256:3efddfc50ac0ca143364042324046800c126a1d63816d532f2e19e6f2d8c0c31 \ + --hash=sha256:46eb1f0c8d309da63a2064c28de54e5e614ad17b7e2f88df0faef58ce192fc7b \ + --hash=sha256:5241bdb53bcf32a9568770a6584774b1b8109342bd033398e4ff2da052123832 \ + --hash=sha256:52e23a0a6e61691134aa8c8beba89de420602541afaae70f66e16060fdcd677e \ + --hash=sha256:56435c7124dd0ce0c8bdd99c52e5d183a0ca7fdcd06c5d5509423843f487dd0b \ + --hash=sha256:5823d03e904ea3e53aebd6799d6b8ec63b7675b5d2f4a4bd5e3adcb512d03b37 \ + --hash=sha256:65d275e3f866cf6fe891411be9c1454fb58809ccc5de6d3770654c47197acd65 \ + --hash=sha256:770d630a5c46605ec83393feaa73a9635a60e55b112e1fb0c3cea84c2897aa0a \ + --hash=sha256:77ac2ea80bcb4b4e1c6a596734c775a1615d23e31794967416afc14852a639d3 \ + --hash=sha256:7a1058e6dfe827f4209c5cae466e67610bcd0d66f2f037465daa2a29d92d952b \ + --hash=sha256:8a9d8342cf22b74a746e3c6c9453cb0cfbb55943410e3a2619bd9164b48dc9d9 \ + --hash=sha256:8ef436cdeea794015263853311f84c1ff0341b98fc7908e8a70595a68cefd971 \ + --hash=sha256:9aa0cf13a1a1128b3e964dc667e5fe5c6235f7d7cfb0277213f0e2a783837cc2 \ + --hash=sha256:9ba09a5b407cbb3bcb325221e346a140605714b5e880741dc9a1e9ecf1688d42 \ + --hash=sha256:a192fb46c95489beba9c3f002ed7d93979423d1b2a53eab8771dbb1339eb3ddd \ + --hash=sha256:a3d77919e6ff56d89aada1bd009b727b874d464cb0e2e3f00a49f7d2e709d76e \ + --hash=sha256:b0e9765f93fe4890f39875e6c90c96cb341767833cfa767f41b490b506fa9ec0 \ + --hash=sha256:bbb07f88e277162b8bfca7134b34f18b400d84eac7375ce73117f865e3c80d4c \ + --hash=sha256:c07e64867a54f7e93186a55bec08a18b7302e7bee1b02fd84c6089ec215e723a \ + --hash=sha256:cc7e111e66c274b0df5f4efa679eb31e23c7545d702333dfd2df10ab02c2a2ce \ + --hash=sha256:da76ebf6650323eae7236b54b1b1f0e57c16483be6e3c1ebf901d4ada47563b6 \ + --hash=sha256:dbeb84a399373df84a69e0919c1d733b89e049752426041deeb30d68e9867822 \ + --hash=sha256:e859e53d983b7fe18cb8f1b0e29d991a5c93be2c8dd25db7db1fe3bd3617f6f9 \ + --hash=sha256:ef046b2e6c425647971b51424f0f88d8a2e0a2a63d3531817968c42078895c00 \ + --hash=sha256:feaecdce4e5c0045e7a287de0c4351284391fe170729aa9182f6bd967631b3a8 +pydantic-core==2.27.2 ; python_version >= "3.10" and python_version < "4.0" \ + --hash=sha256:00bad2484fa6bda1e216e7345a798bd37c68fb2d97558edd584942aa41b7d278 \ + --hash=sha256:0296abcb83a797db256b773f45773da397da75a08f5fcaef41f2044adec05f50 \ + --hash=sha256:03d0f86ea3184a12f41a2d23f7ccb79cdb5a18e06993f8a45baa8dfec746f0e9 \ + --hash=sha256:044a50963a614ecfae59bb1eaf7ea7efc4bc62f49ed594e18fa1e5d953c40e9f \ + --hash=sha256:05e3a55d124407fffba0dd6b0c0cd056d10e983ceb4e5dbd10dda135c31071d6 \ + --hash=sha256:08e125dbdc505fa69ca7d9c499639ab6407cfa909214d500897d02afb816e7cc \ + --hash=sha256:097830ed52fd9e427942ff3b9bc17fab52913b2f50f2880dc4a5611446606a54 
\ + --hash=sha256:0d1e85068e818c73e048fe28cfc769040bb1f475524f4745a5dc621f75ac7630 \ + --hash=sha256:0d75070718e369e452075a6017fbf187f788e17ed67a3abd47fa934d001863d9 \ + --hash=sha256:14d4a5c49d2f009d62a2a7140d3064f686d17a5d1a268bc641954ba181880236 \ + --hash=sha256:172fce187655fece0c90d90a678424b013f8fbb0ca8b036ac266749c09438cb7 \ + --hash=sha256:18a101c168e4e092ab40dbc2503bdc0f62010e95d292b27827871dc85450d7ee \ + --hash=sha256:1a4207639fb02ec2dbb76227d7c751a20b1a6b4bc52850568e52260cae64ca3b \ + --hash=sha256:1c1fd185014191700554795c99b347d64f2bb637966c4cfc16998a0ca700d048 \ + --hash=sha256:1e2cb691ed9834cd6a8be61228471d0a503731abfb42f82458ff27be7b2186fc \ + --hash=sha256:1ebaf1d0481914d004a573394f4be3a7616334be70261007e47c2a6fe7e50130 \ + --hash=sha256:220f892729375e2d736b97d0e51466252ad84c51857d4d15f5e9692f9ef12be4 \ + --hash=sha256:251136cdad0cb722e93732cb45ca5299fb56e1344a833640bf93b2803f8d1bfd \ + --hash=sha256:26f0d68d4b235a2bae0c3fc585c585b4ecc51382db0e3ba402a22cbc440915e4 \ + --hash=sha256:26f32e0adf166a84d0cb63be85c562ca8a6fa8de28e5f0d92250c6b7e9e2aff7 \ + --hash=sha256:280d219beebb0752699480fe8f1dc61ab6615c2046d76b7ab7ee38858de0a4e7 \ + --hash=sha256:28ccb213807e037460326424ceb8b5245acb88f32f3d2777427476e1b32c48c4 \ + --hash=sha256:2bf14caea37e91198329b828eae1618c068dfb8ef17bb33287a7ad4b61ac314e \ + --hash=sha256:2d367ca20b2f14095a8f4fa1210f5a7b78b8a20009ecced6b12818f455b1e9fa \ + --hash=sha256:30c5f68ded0c36466acede341551106821043e9afaad516adfb6e8fa80a4e6a6 \ + --hash=sha256:337b443af21d488716f8d0b6164de833e788aa6bd7e3a39c005febc1284f4962 \ + --hash=sha256:3911ac9284cd8a1792d3cb26a2da18f3ca26c6908cc434a18f730dc0db7bfa3b \ + --hash=sha256:3d591580c34f4d731592f0e9fe40f9cc1b430d297eecc70b962e93c5c668f15f \ + --hash=sha256:3de3ce3c9ddc8bbd88f6e0e304dea0e66d843ec9de1b0042b0911c1663ffd474 \ + --hash=sha256:3de9961f2a346257caf0aa508a4da705467f53778e9ef6fe744c038119737ef5 \ + --hash=sha256:40d02e7d45c9f8af700f3452f329ead92da4c5f4317ca9b896de7ce7199ea459 \ + --hash=sha256:42c5f762659e47fdb7b16956c71598292f60a03aa92f8b6351504359dbdba6cf \ + --hash=sha256:47956ae78b6422cbd46f772f1746799cbb862de838fd8d1fbd34a82e05b0983a \ + --hash=sha256:491a2b73db93fab69731eaee494f320faa4e093dbed776be1a829c2eb222c34c \ + --hash=sha256:4c9775e339e42e79ec99c441d9730fccf07414af63eac2f0e48e08fd38a64d76 \ + --hash=sha256:4e0b4220ba5b40d727c7f879eac379b822eee5d8fff418e9d3381ee45b3b0362 \ + --hash=sha256:50a68f3e3819077be2c98110c1f9dcb3817e93f267ba80a2c05bb4f8799e2ff4 \ + --hash=sha256:519f29f5213271eeeeb3093f662ba2fd512b91c5f188f3bb7b27bc5973816934 \ + --hash=sha256:521eb9b7f036c9b6187f0b47318ab0d7ca14bd87f776240b90b21c1f4f149320 \ + --hash=sha256:57762139821c31847cfb2df63c12f725788bd9f04bc2fb392790959b8f70f118 \ + --hash=sha256:5e4f4bb20d75e9325cc9696c6802657b58bc1dbbe3022f32cc2b2b632c3fbb96 \ + --hash=sha256:5e68c4446fe0810e959cdff46ab0a41ce2f2c86d227d96dc3847af0ba7def306 \ + --hash=sha256:669e193c1c576a58f132e3158f9dfa9662969edb1a250c54d8fa52590045f046 \ + --hash=sha256:688d3fd9fcb71f41c4c015c023d12a79d1c4c0732ec9eb35d96e3388a120dcf3 \ + --hash=sha256:6fb4aadc0b9a0c063206846d603b92030eb6f03069151a625667f982887153e2 \ + --hash=sha256:7041c36f5680c6e0f08d922aed302e98b3745d97fe1589db0a3eebf6624523af \ + --hash=sha256:71b24c7d61131bb83df10cc7e687433609963a944ccf45190cfc21e0887b08c9 \ + --hash=sha256:77d1bca19b0f7021b3a982e6f903dcd5b2b06076def36a652e3907f596e29f67 \ + --hash=sha256:7969e133a6f183be60e9f6f56bfae753585680f3b7307a8e555a948d443cc05a \ + 
--hash=sha256:7a66efda2387de898c8f38c0cf7f14fca0b51a8ef0b24bfea5849f1b3c95af27 \ + --hash=sha256:7d0c8399fcc1848491f00e0314bd59fb34a9c008761bcb422a057670c3f65e35 \ + --hash=sha256:7d14bd329640e63852364c306f4d23eb744e0f8193148d4044dd3dacdaacbd8b \ + --hash=sha256:7e17b560be3c98a8e3aa66ce828bdebb9e9ac6ad5466fba92eb74c4c95cb1151 \ + --hash=sha256:8083d4e875ebe0b864ffef72a4304827015cff328a1be6e22cc850753bfb122b \ + --hash=sha256:82f91663004eb8ed30ff478d77c4d1179b3563df6cdb15c0817cd1cdaf34d154 \ + --hash=sha256:82f986faf4e644ffc189a7f1aafc86e46ef70372bb153e7001e8afccc6e54133 \ + --hash=sha256:83097677b8e3bd7eaa6775720ec8e0405f1575015a463285a92bfdfe254529ef \ + --hash=sha256:85210c4d99a0114f5a9481b44560d7d1e35e32cc5634c656bc48e590b669b145 \ + --hash=sha256:8c19d1ea0673cd13cc2f872f6c9ab42acc4e4f492a7ca9d3795ce2b112dd7e15 \ + --hash=sha256:8d9b3388db186ba0c099a6d20f0604a44eabdeef1777ddd94786cdae158729e4 \ + --hash=sha256:8e10c99ef58cfdf2a66fc15d66b16c4a04f62bca39db589ae8cba08bc55331bc \ + --hash=sha256:953101387ecf2f5652883208769a79e48db18c6df442568a0b5ccd8c2723abee \ + --hash=sha256:9c3ed807c7b91de05e63930188f19e921d1fe90de6b4f5cd43ee7fcc3525cb8c \ + --hash=sha256:9e0c8cfefa0ef83b4da9588448b6d8d2a2bf1a53c3f1ae5fca39eb3061e2f0b0 \ + --hash=sha256:9fdbe7629b996647b99c01b37f11170a57ae675375b14b8c13b8518b8320ced5 \ + --hash=sha256:a0fcd29cd6b4e74fe8ddd2c90330fd8edf2e30cb52acda47f06dd615ae72da57 \ + --hash=sha256:ac4dbfd1691affb8f48c2c13241a2e3b60ff23247cbcf981759c768b6633cf8b \ + --hash=sha256:b0cb791f5b45307caae8810c2023a184c74605ec3bcbb67d13846c28ff731ff8 \ + --hash=sha256:ba5dd002f88b78a4215ed2f8ddbdf85e8513382820ba15ad5ad8955ce0ca19a1 \ + --hash=sha256:bca101c00bff0adb45a833f8451b9105d9df18accb8743b08107d7ada14bd7da \ + --hash=sha256:bd8086fa684c4775c27f03f062cbb9eaa6e17f064307e86b21b9e0abc9c0f02e \ + --hash=sha256:bec317a27290e2537f922639cafd54990551725fc844249e64c523301d0822fc \ + --hash=sha256:c10eb4f1659290b523af58fa7cffb452a61ad6ae5613404519aee4bfbf1df993 \ + --hash=sha256:c33939a82924da9ed65dab5a65d427205a73181d8098e79b6b426bdf8ad4e656 \ + --hash=sha256:c61709a844acc6bf0b7dce7daae75195a10aac96a596ea1b776996414791ede4 \ + --hash=sha256:c70c26d2c99f78b125a3459f8afe1aed4d9687c24fd677c6a4436bc042e50d6c \ + --hash=sha256:c817e2b40aba42bac6f457498dacabc568c3b7a986fc9ba7c8d9d260b71485fb \ + --hash=sha256:cabb9bcb7e0d97f74df8646f34fc76fbf793b7f6dc2438517d7a9e50eee4f14d \ + --hash=sha256:cc3f1a99a4f4f9dd1de4fe0312c114e740b5ddead65bb4102884b384c15d8bc9 \ + --hash=sha256:cca63613e90d001b9f2f9a9ceb276c308bfa2a43fafb75c8031c4f66039e8c6e \ + --hash=sha256:ce8918cbebc8da707ba805b7fd0b382816858728ae7fe19a942080c24e5b7cd1 \ + --hash=sha256:d2088237af596f0a524d3afc39ab3b036e8adb054ee57cbb1dcf8e09da5b29cc \ + --hash=sha256:d262606bf386a5ba0b0af3b97f37c83d7011439e3dc1a9298f21efb292e42f1a \ + --hash=sha256:d2d63f1215638d28221f664596b1ccb3944f6e25dd18cd3b86b0a4c408d5ebb9 \ + --hash=sha256:d3e8d504bdd3f10835468f29008d72fc8359d95c9c415ce6e767203db6127506 \ + --hash=sha256:d4041c0b966a84b4ae7a09832eb691a35aec90910cd2dbe7a208de59be77965b \ + --hash=sha256:d716e2e30c6f140d7560ef1538953a5cd1a87264c737643d481f2779fc247fe1 \ + --hash=sha256:d81d2068e1c1228a565af076598f9e7451712700b673de8f502f0334f281387d \ + --hash=sha256:d9640b0059ff4f14d1f37321b94061c6db164fbe49b334b31643e0528d100d99 \ + --hash=sha256:de3cd1899e2c279b140adde9357c4495ed9d47131b4a4eaff9052f23398076b3 \ + --hash=sha256:e0fd26b16394ead34a424eecf8a31a1f5137094cabe84a1bcb10fa6ba39d3d31 \ + 
--hash=sha256:e2bb4d3e5873c37bb3dd58714d4cd0b0e6238cebc4177ac8fe878f8b3aa8e74c \ + --hash=sha256:eb026e5a4c1fee05726072337ff51d1efb6f59090b7da90d30ea58625b1ffb39 \ + --hash=sha256:eda3f5c2a021bbc5d976107bb302e0131351c2ba54343f8a496dc8783d3d3a6a \ + --hash=sha256:ef592d4bad47296fb11f96cd7dc898b92e795032b4894dfb4076cfccd43a9308 \ + --hash=sha256:f141ee28a0ad2123b6611b6ceff018039df17f32ada8b534e6aa039545a3efb2 \ + --hash=sha256:f66d89ba397d92f840f8654756196d93804278457b5fbede59598a1f9f90b228 \ + --hash=sha256:f6f8e111843bbb0dee4cb6594cdc73e79b3329b526037ec242a3e49012495b3b \ + --hash=sha256:fa8e459d4954f608fa26116118bb67f56b93b209c39b008277ace29937453dc9 \ + --hash=sha256:fd1aea04935a508f62e0d0ef1f5ae968774a32afc306fb8545e06f5ff5cdf3ad +pydantic==2.10.6 ; python_version >= "3.10" and python_version < "4.0" \ + --hash=sha256:427d664bf0b8a2b34ff5dd0f5a18df00591adcee7198fbd71981054cef37b584 \ + --hash=sha256:ca5daa827cce33de7a42be142548b0096bf05a7e7b365aebfa5f8eeec7128236 pydeep2==0.5.1 ; python_version >= "3.10" and python_version < "4.0" \ --hash=sha256:0fedc1c9660cb5d0b73ad0b5f1dbffe16990e6721cbfc6454571a4b9882d0ea4 \ --hash=sha256:199d05d8b4b7544509a2ba4802ead4b41dfe7859e0ecea9d9be9e41939f11660 \ @@ -1241,75 +1439,58 @@ pyelftools==0.31 ; python_version >= "3.10" and python_version < "4.0" \ pygal==2.4.0 ; python_version >= "3.10" and python_version < "4.0" \ --hash=sha256:27abab93cbc31e21f3c6bdecc05bda6cd3570cbdbd8297b7caa6904051b50d72 \ --hash=sha256:9204f05380b02a8a32f9bf99d310b51aa2a932cba5b369f7a4dc3705f0a4ce83 -pygments==2.18.0 ; python_version >= "3.10" and python_version < "4.0" \ - --hash=sha256:786ff802f32e91311bff3889f6e9a86e81505fe99f2735bb6d60ae0c5004f199 \ - --hash=sha256:b8e6aca0523f3ab76fee51799c488e38782ac06eafcf95e7ba832985c8e7b13a +pygments==2.19.1 ; python_version >= "3.10" and python_version < "4.0" \ + --hash=sha256:61c16d2a8576dc0649d9f39e089b5f02bcd27fba10d8fb4dcc28173f7a45151f \ + --hash=sha256:9ea1544ad55cecf4b8242fab6dd35a93bbce657034b0611ee383099054ab6d8c pyguacamole==0.11 ; python_version >= "3.10" and python_version < "4.0" \ --hash=sha256:7f8d8652ce2e86473d72a50e0c9d8a8e0c3c74e373c6b926ca4c851774cae608 \ --hash=sha256:d6facde097a1b1a3048b20fb2ff88b024744ceb2865fb912525da7ebb7779695 -pyjwt[crypto]==2.9.0 ; python_version >= "3.10" and python_version < "4.0" \ - --hash=sha256:3b02fb0f44517787776cf48f2ae25d8e14f300e6d7545a4315cee571a415e850 \ - --hash=sha256:7e1e5b56cc735432a7369cbfa0efe50fa113ebecdc04ae6922deba8b84582d0c -pymongo==4.9.1 ; python_version >= "3.10" and python_version < "4.0" \ - --hash=sha256:0492ef43f3342354cf581712e431621c221f60c877ebded84e3f3e53b71bbbe0 \ - --hash=sha256:08fbab69f3fb6f8088c81f4c4a8abd84a99c132034f5e27e47f894bbcb6bf439 \ - --hash=sha256:16d2efe559d0d96bc0b74b3ff76701ad6f6e1a65f6581b573dcacc29158131c8 \ - --hash=sha256:172d8ba0f567e351a18765db23dab7dbcfdffd91a8788d90d46b350f80a40781 \ - --hash=sha256:1b4b961fce213f2bcdc92268f85111a3668c61b9b4d4e7ece27dce3a137cfcbd \ - --hash=sha256:1d79f20f9c7cbc1c708fb80b648b6fbd3220fd3437a9bd6017c1eb592e03b361 \ - --hash=sha256:1d7aa9cc2d92e73bdb036c578ba019da94ea165eb147e691cd910a6fab7ce3b7 \ - --hash=sha256:1dfd2aa30174d36a3ef1dae4ee4c89710c2d65cac52ce6e13f17c710edbd61cf \ - --hash=sha256:1fac1def9e9073f1c80198c99f0ec39c2528236c8912d96d7fd3b0237f4c523a \ - --hash=sha256:286fb275267f0293364ba579f6354452599161f1902ad411061c7f744ab88328 \ - --hash=sha256:34e4993ae78be56f9e27a141168a1ab78253576fa3e893fa335a719ce204c3ef \ - 
--hash=sha256:375765ec81b1f0a26d08928afea0c3dff897c36080a090be53fc7b70cc51d497 \ - --hash=sha256:3a846423c4535428f69a90a1451df3718bc59f0c4ab685b9e96d3071951e0be4 \ - --hash=sha256:42c19d2b094cdd0ead7dbb38860bbe8268c140334ce55d8b39204ddb4ebd4904 \ - --hash=sha256:4327c0d9bd616b8289691360f2d4a09a72fe35479795832eae0d4ff78af53923 \ - --hash=sha256:432ad395d2233056b042ccc73234e7136aa65d944d6bd8b5138394bd38aaff79 \ - --hash=sha256:47b4896544095d172c366dd4d4ea1da6b0ab1a77d8416897cc1801e2421b1e67 \ - --hash=sha256:4cddb51cead9700c4dccc916952bc0321b8d766bf782d374bfa0e93ef47c1d20 \ - --hash=sha256:4d1b959a3dda0775d9111622ee47ad47772aed3a9da2e7d5f2f513fa68175dea \ - --hash=sha256:51dbc6251c6783dfcc7d657c346986d8bad7210989b2fe15de16db5204a8e7ae \ - --hash=sha256:56877cfcdf7dfc5c6408e4551ec0d6d65ebbca4d744a0bc90400f09ef6bbcc8a \ - --hash=sha256:679b8d55854da7c7fdb82aa5e092ab4de0144daf6758defed8ab00ff9ce05360 \ - --hash=sha256:687cf70e096381bc65b4273a6a9319617618f7ace65caffc356e1099c4a68511 \ - --hash=sha256:6bb3d5282278594753089dc7da48bfae4a7f337a2dd4d397eabb591c649e58d0 \ - --hash=sha256:75d5974f874acdb2f125bdbe785045b23a39ecce1d3143dd5712800c7b6d25eb \ - --hash=sha256:7f962d74201c772555f7a78792fed820a5ea76db5c7ee6cf43748e411b44e430 \ - --hash=sha256:8089003a99127f917bdbeec177d41cef019cda8ec70534c1018cb60aacd23c2a \ - --hash=sha256:8b632e01617f2608880f7b9926f54a5f5ebb51631996e0540fff7fc7980663c9 \ - --hash=sha256:8f0d5258bc85a4e6b5bcae8160628168e71ec4625a58ceb53327c3280a0b6914 \ - --hash=sha256:91b1a92214c3912af5467f77c2f6435cd76f6de64c70cba7bb4ee43eba7f459e \ - --hash=sha256:95418e334629440f70fe5ceeefc6cbbd50defb566901c8d68179ffbaec8d5f01 \ - --hash=sha256:96462fb2175f740701d229f52018ea6e4adc4148c4112e6628bb359dd534a3df \ - --hash=sha256:99b611ff75b5d9e17183dcf9584a7b04f9db07e51a162f23ea05e485e0735c0a \ - --hash=sha256:9d78adf25967c06298c7e488f4cfab79a390fc32c2b1d428613976f99031603d \ - --hash=sha256:9fbe9fad27619ac4cfda5df0ade26a99906da7dfe7b01deddc25997eb1804e4c \ - --hash=sha256:a0b10cf51ec14a487c94709d294c00e1fb6a0a4c38cdc3acfb2ced5ef60972a0 \ - --hash=sha256:a2b12c74cfd90147babb77f9728646bcedfdbd2bd2a5b4130a00e3a0af1a3d34 \ - --hash=sha256:a40ea8bc9cffb61c5c9c426c430d22235e085e610ee81ae075ddf51f12f76236 \ - --hash=sha256:a7689da1d1b444284e4ea9ab2eb64a15307b6b795918c0f3cd7774dd1d8a7556 \ - --hash=sha256:aa4493f304b33c5d2ecee3055c98889ac6724d56f5f922d47420a45d0d4099c9 \ - --hash=sha256:b23211c031b45d0f32de83ab7d77f9c26f1025c2d2c91463a5d8594a16103655 \ - --hash=sha256:b347052d510989d1f52b8553b31297f21cf74bd9f6aed71ee84e563492f4ff17 \ - --hash=sha256:b4c793db8457c856f333f396798470b9bfe405e17c307d581532c74cec70150c \ - --hash=sha256:b7f2d34390acf60e229c30037d1473fcf69f4536cd7f48f6f78c0c931c61c505 \ - --hash=sha256:c4204fad54830a3173a5c939cd052d0561fba03dba7e0ff6852fd631f3314aa4 \ - --hash=sha256:d476d91a5c9e6c37bc8ec3fb294e1c01d95736ccf01a59bb1540fe2f710f826e \ - --hash=sha256:db5b299e11284f8d82ce2983d8e19fcc28f98f902a179709ef1982b4cca6f8b8 \ - --hash=sha256:dc3d070d746ab79e9b393a5c236df20e56607389af2b79bf1bfe9a841117558e \ - --hash=sha256:dd3352eaf578f8e9bdea7a5692910eedad1e8680f60726fc70e99c8af51a5449 \ - --hash=sha256:e02b03e3815b80a63e773e4c32aed3cf5633d406f376477be74550295c211256 \ - --hash=sha256:e1f346811d4a2369f88ab7a6f886fa9c3bbc9ed4e4f4a3becca8717a73d465cb \ - --hash=sha256:e3645aff8419ca60f9ccd08966b2f6b0d78053f9f98a814d025426f1d874c19a \ - --hash=sha256:ea3f0196e7c311b9944a609ac175bd91ab97952164a1246716fdd38d53ca3bcc \ - 
--hash=sha256:f05e34d401be871d7c87cb10727d49315444e4ded07ff876a595e4c23b7436da \ - --hash=sha256:f23a046531030318622414f21198e232cf93c5640da9a80b45596a059c8cc090 \ - --hash=sha256:f838f613e74b4dad8ace0d90f42346005bece4eda5bf6d389cfadb8322d39316 \ - --hash=sha256:f8e8b8deba6a4bff3dd5421071083219521c74d2acae0322de5c06f1a66c56af \ - --hash=sha256:fbb1c7dfcf6c44e9e1928290631c7603817991cdf570691c9e15fca594918435 \ - --hash=sha256:fe709d05654c12fc513617c8d5c8d05b7e9cf1d5d94ada68add4e89530c867d2 +pymongo==4.11 ; python_version >= "3.10" and python_version < "4.0" \ + --hash=sha256:0197fe47e31bee488e82e7ab73e6a351a191bbd6e25cf4a380622e4b1ffcd143 \ + --hash=sha256:06e82968ea031aebc18820898b102efed1ea8dc21b51eff2a81dc9ba4191fa6b \ + --hash=sha256:17966e98d0350b578a337f053cc541458c296236d4b5b4b8adb76f5e82644fb8 \ + --hash=sha256:17d4b787afed3163e5faa147b028d8ec7996a5388328499588c5c5858f26ab8a \ + --hash=sha256:1971039a8e3aab139e0382b26a9670cd34f43c5301da267360b9a640b637d09b \ + --hash=sha256:212f14a882659a45db472f0bc0fdf83c16fad4e7e4d4257187797779c461320e \ + --hash=sha256:21b9969e155c4b16a160fbe90c390a07ca7514479af6c3811b1d15ead26e10ba \ + --hash=sha256:254aa90fafead13aca8dbcfbc407e2f6f7c125fce726925bd96adc74b6226f9e \ + --hash=sha256:314648ec4d5eaa755233343d6d110348371988257de35f5a84667b709262deab \ + --hash=sha256:38f777f5e43f3acd651a79aaf95abf29e8e7f85429a12ce32c4b7f2f877b2200 \ + --hash=sha256:488e3440f5bedcbf494fd02c0a433cb5be7e55ba44dc72202813e1007a865e6a \ + --hash=sha256:4c0ab91c06bf79dd3d3fd5f2a53fe22db8036084ddcfaacd80e83828b6eb4f25 \ + --hash=sha256:5837ee73ffa4a943360320d7b9b917da2752a4cb536e23a6eaca418c1c64f60a \ + --hash=sha256:5c7e5e500cc408f7f2aa38a4a6ed96d78a4c4cce3bcc6f1a55113f0b9d7823dd \ + --hash=sha256:64ad12ae8d79f18ec30d807b9b9b9802c30427c39599d8b1833bc00e63f0e4bb \ + --hash=sha256:74f02b7dbb3b15c2e48c7bb28941f6198dc73ced09683738180a9fdbfc0983b2 \ + --hash=sha256:7ac81987985275abeb53f5faa5c4a7f7890df9368c0f730fe37460a6301d94d7 \ + --hash=sha256:7cf9d18444259d86cd7028ac428f4fe7fbdefa3450575126ad2ee4447db0f52f \ + --hash=sha256:8436f90f26379c167d47b61ac78c48224af98f3136ece380b74158cbef2ad6a7 \ + --hash=sha256:85cfc99af2ae0fb8699e15b1f3479018216eb75d80f0082973a876ecfdf1022e \ + --hash=sha256:8e5a56a508612c64906d5375ebee413cee2fc432c09abd9cb4d9e23ac8bcdd3a \ + --hash=sha256:981ae0caa245bedf9af3af9159ae71b05444e35dd61a0250235b5c58dcd69178 \ + --hash=sha256:9a04f49d6f1fa4f4e6a98ef76b4881b4a9c7decd8a90ee5c9aeff087662e2d8a \ + --hash=sha256:a308ad2eeaee370b3b4154a82840c8f8f9b18ccc76b71812323d243a7bda98a2 \ + --hash=sha256:ae7f636010ce54885f1ea19c37ea2cb744d976ce689361a776fd0e991d020b81 \ + --hash=sha256:b4878e92ae255a05756399a4e2b428f0fd3529561eacd9f4781a70ad5311397e \ + --hash=sha256:b7246f2060ba03dcc25ade803bc6a3c39f1cc7db93297875edd251ca8cf5cac7 \ + --hash=sha256:b7af60e034e04c3206dfbe2a7bbcc46057ca4f9ad8ed66869bf669c02a1e6e99 \ + --hash=sha256:b885210269455e05f5704887a4c7b3b99d03c9ab37f320005acc21d1761f1656 \ + --hash=sha256:bb3ce0ce49cfe5c31db046d567403adb2d158459cf10d7c71f6d72f2324c900a \ + --hash=sha256:bedcf4a602042cb2c19ae3ee450fbdd5832095ae5b563e004f7c0d307a22c9e1 \ + --hash=sha256:c3a7ead31317d3a3eabc17486c75ad109f3d514a702002e52da474b59dd8acce \ + --hash=sha256:c3b67a2949240d63a0b8b1094e1d2d4b94dedf0317ac6e14164adc9aaf260963 \ + --hash=sha256:c72165e4ee884c3c9ac85a443f20dd01849c9df26b38ff25b08260330ed78202 \ + --hash=sha256:c7f2309384de675e7e25104fc5947acb65ad3ba69dc8246095d57ff1220dfcbb \ + --hash=sha256:cf0a36fae1187fc2bdf3c527dc68c6bfd7c89726fbbf3215460b82e28bd8b81e \ + 
--hash=sha256:d33c80afcbfdd2934917fedfa005251484ca8d98767c74793c35d5f24b8e48f1 \ + --hash=sha256:d4a275ea70f597d3fd77bdc83054307a65749b2c669bc5045cb4620930eed596 \ + --hash=sha256:d4db6f7515f5087ae9deecc3b60d3d8a512b1a0a9bff93113bc9edf20ed29ab9 \ + --hash=sha256:e43b0533f291a8883e52fd7a8919353ae7038d50ef17873983877c2f6b76330e \ + --hash=sha256:e84e27d6fa4b3e532043daf89d52d2cfbd7b4697b44b86a7b3db8cacdcfcc58c \ + --hash=sha256:ec86da9ce746de890b8d470a1f12f3d2634786fee002a4c56bdbbfd59e23f1f7 \ + --hash=sha256:ee3cc9bce848a1024d1c96717540f3f30cba885df9610be70c0653764e30ae6e \ + --hash=sha256:f300d1811bd33940b2dd1907dbe2b4ae473003a384d6a764babb3ea5a4edede4 \ + --hash=sha256:ff5176a083252a769e5bfb1a950d0e37da585c313e1a4af98903e22e7cf4c475 pynacl==1.5.0 ; python_version >= "3.10" and python_version < "4.0" \ --hash=sha256:06b8f6fa7f5de8d5d2f7573fe8c863c051225a27b61e6860fd047b1775807858 \ --hash=sha256:0c84947a22519e013607c9be43706dd42513f9e6ae5d39d3613ca1e142fba44d \ @@ -1321,73 +1502,105 @@ pynacl==1.5.0 ; python_version >= "3.10" and python_version < "4.0" \ --hash=sha256:a36d4a9dda1f19ce6e03c9a784a2921a4b726b02e1c736600ca9c22029474394 \ --hash=sha256:a422368fc821589c228f4c49438a368831cb5bbc0eab5ebe1d7fac9dded6567b \ --hash=sha256:e46dae94e34b085175f8abb3b0aaa7da40767865ac82c928eeb9e57e1ea8a543 -pyopenssl==24.2.1 ; python_version >= "3.10" and python_version < "4.0" \ - --hash=sha256:4247f0dbe3748d560dcbb2ff3ea01af0f9a1a001ef5f7c4c647956ed8cbf0e95 \ - --hash=sha256:967d5719b12b243588573f39b0c677637145c7a1ffedcd495a487e58177fbb8d -pyparsing==2.4.7 ; python_version >= "3.10" and python_version < "4.0" \ - --hash=sha256:c203ec8783bf771a155b207279b9bccb8dea02d8f0c9e5f8ead507bc3246ecc1 \ - --hash=sha256:ef9d7589ef3c200abe66653d3f1ab1033c3c419ae9b9bdb1240a85b024efc88b +pyopenssl==25.0.0 ; python_version >= "3.10" and python_version < "4.0" \ + --hash=sha256:424c247065e46e76a37411b9ab1782541c23bb658bf003772c3405fbaa128e90 \ + --hash=sha256:cd2cef799efa3936bb08e8ccb9433a575722b9dd986023f1cabc4ae64e9dac16 +pyparsing==3.2.1 ; python_version >= "3.10" and python_version < "4.0" \ + --hash=sha256:506ff4f4386c4cec0590ec19e6302d3aedb992fdc02c761e90416f158dacf8e1 \ + --hash=sha256:61980854fd66de3a90028d679a954d5f2623e83144b5afe5ee86f43d762e5f0a +pypdf==5.2.0 ; python_version >= "3.10" and python_version < "4.0" \ + --hash=sha256:7c38e68420f038f2c4998fd9d6717b6db4f6cef1642e9cf384d519c9cf094663 \ + --hash=sha256:d107962ec45e65e3bd10c1d9242bdbbedaa38193c9e3a6617bd6d996e5747b19 +pyre2-updated==0.3.8 ; python_version >= "3.10" and python_version < "4.0" \ + --hash=sha256:2bda9bf4d59568152e085450ffc1c08fcf659000d06766861f7ff340ba601c3e \ + --hash=sha256:350be9580700b67af87f5227453d1123bc9f4513f0bcc60450574f1bc46cb24f \ + --hash=sha256:407ca7082e2049aeae0b2c716cc53cd92217d17886694fe0047ce5d636161155 \ + --hash=sha256:45c0940dbda4a2c45652e69ab52946c2395171f3e3a96bce456a8ddb45b337e1 \ + --hash=sha256:4db83d0f148d91f9b67b71eb3fd04a7e1d09397e7ecea75972632cd46c27ba6e \ + --hash=sha256:53d27f552fac2149c9dbe45faf1f6236c55fba42ea579799a07cdc853e3732d6 \ + --hash=sha256:54de291bb8b3aa2223f864293e0a9e62ea2877fea72a632fe5cd36a60012f7bc \ + --hash=sha256:648196c6fe7b115431f2bedc48660333a61f6628bde7efefc14098d67f86b7b4 \ + --hash=sha256:65e67527eb472cd5045966df42414205cbfc187633a844ba7d9f59480f46e748 \ + --hash=sha256:6d6aaa2f41a085095993b2d09562511cf40d4aedfc5bd00d78f53112be051e19 \ + --hash=sha256:802dec5801912c76b21dcc3d91810ae9ff0cc308e78fb0aa32d93e921783f5d8 \ + 
--hash=sha256:8331fc7bb1fa57d2654046ed189631d70db1170d935264ce82a7291413ce60e4 \ + --hash=sha256:99942f75a252691117880fc60941f20170d35bcb3ccb72aff9a1bbce951d4db0 \ + --hash=sha256:ab79cd5c663d20eca146361f2b2b7cf23c631109e43064d4bb35a43cb0607ffd \ + --hash=sha256:b0820d8420caca762184f5eebe57631be5b985e405c53e807aac24897daa892a \ + --hash=sha256:daf366a83b70b08cc4c59477455a28b2a108484d282824d350217a8a7378d229 \ + --hash=sha256:f1c406e30aed02699888ae6938b83058f0845650991cd97db09e1686ce8a181a \ + --hash=sha256:ff764613bd436689cf5e4d3c14a25cf8465474e1db9f2a39738bbf481dd07300 pysocks==1.7.1 ; python_version >= "3.10" and python_version < "4.0" \ --hash=sha256:08e69f092cc6dbe92a0fdd16eeb9b9ffbc13cadfe5ca4c7bd92ffb078b293299 \ --hash=sha256:2725bd0a9925919b9b51739eea5f9e2bae91e83288108a9ad338b2e3a4435ee5 \ --hash=sha256:3f8804571ebe159c380ac6de37643bb4685970655d3bba243530d6558b799aa0 -python-dateutil==2.8.2 ; python_version >= "3.10" and python_version < "4.0" \ - --hash=sha256:0123cacc1627ae19ddf3c27a5de5bd67ee4586fbdd6440d9748f8abb483d3e86 \ - --hash=sha256:961d03dc3453ebbc59dbdea9e4e11c5651520a876d0f4db161e8674aae935da9 +python-dateutil==2.9.0.post0 ; python_version >= "3.10" and python_version < "4.0" \ + --hash=sha256:37dd54208da7e1cd875388217d5e00ebd4179249f90fb72437e91a35459a0ad3 \ + --hash=sha256:a8b2bc7bffae282281c8140a97d3aa9c14da0b136dfe83f850eea9a5f7470427 python-dotenv==1.0.1 ; python_version >= "3.10" and python_version < "4.0" \ --hash=sha256:e324ee90a023d808f1959c46bcbc04446a10ced277783dc6ee09987c37ec10ca \ --hash=sha256:f7b63ef50f1b690dddf550d03497b66d609393b40b564ed0d674909a68ebf16a -python-flirt==0.8.10 ; python_version >= "3.10" and python_version < "4.0" \ - --hash=sha256:02792127c579d624a98c8f65a1b6f2b27c2d8be673a4ae3e26d9b42543051cf3 \ - --hash=sha256:055412a990b471acc2ce16d9372e4a1af379a81bc54bedd864d35f5f63c8b010 \ - --hash=sha256:0b79ee2a6b0a098225510ab5cc0d54f1534ecfd4c647b3a614ce9dca9615d13a \ - --hash=sha256:0d2f444799bc2b73271fb6e0715f9ccb831308999051ca58f121282499a955b1 \ - --hash=sha256:0db5c5d9a8920cdb32fd1dbff8fe00e99d32fd9d478b15317093e23942c0aef6 \ - --hash=sha256:166d27e034118562aa95d7b403a5b111d5ba8469fe14dd4077b081bee7266d00 \ - --hash=sha256:19e38ea51af68113d88e180c339873d202bef8637532a2c9173dbac38ff4aece \ - --hash=sha256:2547d34b146ab18bf64c1eceb46be0643d903bbf396aabe79d07ed8475893cea \ - --hash=sha256:2b0e285a582576c2ddba9af17a97fbfdd52187fbf098a797df9e00034000796d \ - --hash=sha256:2d8ab2d51b26502415a7a3cd82eb825252be508d7f32d870440fd1096b444232 \ - --hash=sha256:2f4cebbf3cba105f5f0db64a030109c966f4aecb29f94c7ac6e4594f3d4b3c63 \ - --hash=sha256:30960a682673bb25ae2bd8ba7c754e9e18faa4427fe75fe29d5fc341fa974346 \ - --hash=sha256:354814081fcba58f5448ef8bd3b3a7814cce2cb1ed7ad9c1d572447bc85f9e7a \ - --hash=sha256:38e96fdf79b6e4b6b86b37d33278c8522eb41972a17d98d8924d3772c527ecd8 \ - --hash=sha256:4bfe6e2b8eb0e587476fab7db21a15fb40e96e944f404e872c906d89450a52bc \ - --hash=sha256:4fb8d48f304ef6394dee74a5d6b1e31f9b91bc046305ea6ee1f26d8e28229cb4 \ - --hash=sha256:547fc7d8567db0332b229be0551c3aa65c839d439fdd317b7184edb491dde322 \ - --hash=sha256:5d3172c9ac87d36add8eb15d42f33636d408516c7e4834dd572836ef5f4e9e26 \ - --hash=sha256:6745709e936c19100190c52220adda8290dd6bad31d8f708cf3c52deb87546d9 \ - --hash=sha256:6897be2d12fed520524259640648f5022c3c1bff2bf2da10f018315823cc4134 \ - --hash=sha256:6c3185790d3f3749ea2cb984f10259d533ad54036b5878cedcd4149ce3122d5e \ - --hash=sha256:729002c7a6a15905a8dd60b646d5a28605ca73ed8a62886caaf3a49cfeb5a7bf \ - 
--hash=sha256:82b820c27a4f8f94804aa42617e8d5cb80e76ca55bd89b656ff3924dae8af293 \ - --hash=sha256:877a99236e7e37f4e94f223297e9029ab14426a4d4d5f7a621a1719b0375dda1 \ - --hash=sha256:8da95d051a6bbcebb3043541adfb3dd635b792a837e8ac829c017549ba00d977 \ - --hash=sha256:9b5a285ea80a63d68af56fb406de5ab263ab1b6c988def94c76ebe91d1e9eeec \ - --hash=sha256:a19cbf0a2c87decb8d861b74a499eb4b3fd6fc65ac73227d6330641a5d070c6c \ - --hash=sha256:a59b5c26a12038484ece6bb66eb2e45c286fe476f5a4bed5e8f4e5dabba63991 \ - --hash=sha256:acedddf5acd384626c55b7ed2bda71a5651eb7aed3be1d7098507ea0d3adaabb \ - --hash=sha256:b17592ac73c49515f4c7ae4945060dea18685bdfb320f0bbcd0f613d11492dbb \ - --hash=sha256:b927124c042863d7d488c3b64796c6c690252f1b4f9cbe1135681c6918e53d99 \ - --hash=sha256:b9bb40cb897458ac0d9af121e35aa28ea33ea1a9eadf33bbf79ff84a81f2eaf4 \ - --hash=sha256:c14ff6b3a9dd1d0629d247688e8e69d348dab9df2f654b0f2424343f66446ce5 \ - --hash=sha256:c1f8e406d15f31049ff558534b4be952fb073aadc2e7174e650f4a71256282c6 \ - --hash=sha256:cb173f1030cd05f586f20b68c594bdc54f745d4fb12dc8db100dc70127a771d7 \ - --hash=sha256:cc1b020e54f119916f7cdac4a24f692643915af83d51e2f73f35472da16213ac \ - --hash=sha256:ce6f443bec1ac970e41c8238e945b6f08af402bab32256c6b818143acd0a86d6 \ - --hash=sha256:d0102bfdda68ce2050cfb61cd2e8bbc8225bc2b5a0535fb492e12c42d9dbf23a \ - --hash=sha256:f21d7e23a82ba6cfcde4fc252cd998af8848b7e5bbd7b8c670384073d3b7ad66 \ - --hash=sha256:fc7e6041c7e146328a6daf05303590aa18251c0e5cf92f8b93e8f1eafaadb7a7 -python-magic==0.4.27 ; python_version >= "3.10" and sys_platform == "linux" and python_version < "4.0" \ +python-flirt==0.9.2 ; python_version >= "3.10" and python_version < "4.0" \ + --hash=sha256:113c0d865380117bbb5f52870b1459f9ee8fe8f6c5317d2206357c0834a2e9f3 \ + --hash=sha256:191f325a137339db07b83112a3a1117d4a8db2c1d17c37bbc7b9078c3fc032c7 \ + --hash=sha256:1c41c4990971e6d7ea343726762a59d5731b1e8df11e72612409ba34083d888c \ + --hash=sha256:23c1a513ab31760ffcd21200c96f7027e97dd37bdfc35a09948e291c8ebe53af \ + --hash=sha256:2a90f41c172afe99b149d1468514ac233534078fcd78864a6964382d24fd2c5f \ + --hash=sha256:2affcc3cf23f102ee6b2143801b956869307e52d6588896e2b6ee1a8dc8595f5 \ + --hash=sha256:2d9ae9332fa14c88a36b731bd178336989f27284bd2ec1e07215cd42389723bd \ + --hash=sha256:3df5da165ab577fb056918342bbb45fd5952ae926609527a8fb172174d67cf8d \ + --hash=sha256:3e533bd2fbd5c6bab4ab1f46b4e7dd00f550f70ab6e811e5df96328aef124538 \ + --hash=sha256:3ede761720ac608b2a46482cbc04b130f89ac2d3e53575b303373e1b281c5ff2 \ + --hash=sha256:4107b1bc92a5b5fa772828d692e319e6d6bbf595feceb89436ed6f0e0ebcafec \ + --hash=sha256:45f498b5021ed799f8188de04b77cf091d9386f6b26274e57b2bf661d4209a06 \ + --hash=sha256:49993fed8fec8fc497f178ef75630870a910cd454d5169185282c1a92ccfe4cc \ + --hash=sha256:5840aa59c218b91e70b463e0d5fe0178539bc714e001d3f3130379cc505e6344 \ + --hash=sha256:5c82c2b7a1f54c9dc020e222b4d60cb02876ed7b9ff08e2cf34c27a7d7bf86cc \ + --hash=sha256:626de70de4a49f5f252c07e6d5db1c7a3aa624922107564554da9e864e848253 \ + --hash=sha256:66a6b73afec2d33923b652071d2c0c3006ad2186fff4a54c7733acb32a566473 \ + --hash=sha256:757224a7c3ab0483086b12101ab42bbe73038b1cab4612e9dc120ff8384d36e3 \ + --hash=sha256:75d6586af3791705c1d8f20795ea8dee696c47b69632ce55d8680e063802b802 \ + --hash=sha256:79b0598076926b6912596e4a79f9fa910aaf63d741ea091bb1a228df20b3895f \ + --hash=sha256:8376882edb2fa931aebc89aedb9c27be0d1d5cf5ab85e266ba2f7c5dfe4cb637 \ + --hash=sha256:83e41452dedc730713058bb8dbbd01664c1f2cd89594cf7892f4243079cf72fa \ + 
--hash=sha256:87c17e413d675dcb34bb3af0e40296d20b971ff53f09f105b0135f30d96ae965 \ + --hash=sha256:8936b389261b7ddbf738900a114a37b1af23e428d31bbd39f1cea40c0bc82a6c \ + --hash=sha256:8f5461360d5723368733cb214c12f92a85340ea64c522ebe34d780422b26aaf8 \ + --hash=sha256:92fe65eab1587573b5b13ffb755dbb67d1fdec411425850c984566d44864c104 \ + --hash=sha256:98e9da4492303ef21d2224b7380fc22d510f7eccddc13f7a9476ce079cc30ce8 \ + --hash=sha256:9fac75e70aebaf282c5af2d3f9457c12c843992027079a6ba923875d169c09ea \ + --hash=sha256:a511b947ae64c840f136a00adca38850cadfde7dd969420f8693c2112260c7e5 \ + --hash=sha256:a853b217bc930738fafddd1b6c2d4e4387ffc33812f5695368b976f37f5a59ba \ + --hash=sha256:a98ecc2d1c0e4c77bd0878c515a295332d972034b44c5e829ef694baddd6fe16 \ + --hash=sha256:aafb9802eb337b7bf63cd5bfadcc6d2686772a3a5213a43d82d8a256a9063b72 \ + --hash=sha256:ac0662be73a482fdb09ff95cf9a408b2f8e3110f63d99f6fdff00c845e25543e \ + --hash=sha256:b029cb6d1db4b422f3547aa6f153894f100e400182a05f2d87aa9270b643b2c4 \ + --hash=sha256:b2a3ddc1fb778f99ee254ab534aaa94387536cd0a38f5daa93921b19232fae69 \ + --hash=sha256:b411dbd17f8b411113f109203b3cff608d9e6cc13f516694e34436e35f6c0196 \ + --hash=sha256:b5fad3e4814c39799198d4f01fad89b28d45dc2ee162393936b29ba5ba65964b \ + --hash=sha256:b81b4661a907c3e9b9c60d943e404af9f198a8358c0194714da83ab6b4599d36 \ + --hash=sha256:c369efcb1774a91f810484e4907f938ea69f18b811162c95f04907b6670937c0 \ + --hash=sha256:c3e16582059cab76c5f23c103028ea09429f2af7f365086ddcbad293e1c65317 \ + --hash=sha256:cd1e90da5de9ce4b77928f4ffaddac9b0d051e094cb82158d3ec2d4d0dd9b5b9 \ + --hash=sha256:d48712f72e5769cb5ab096218924d68146b2341291373df70ec9b543124665ad \ + --hash=sha256:d6c7c7f7ed7c0cdcec67cb06552d098f482a73683a23c319fa2f9fcbb787a1cc \ + --hash=sha256:db7aac6bcce62d5e9aa6f3490a0df2689cda6674ad9a3f9ebe193bf6d09b4464 \ + --hash=sha256:dd3edb4cceb818558969481e60917ee607401bc86f2c079e053f6db758e2644e \ + --hash=sha256:e46800fe6fcaaf0af1f59b7b64fe23f65cce7946c372687bc024d163390cb283 \ + --hash=sha256:e604dd2b47362cc8379afd6c160a04ee5911c21985da5781ff9df96357b8553d \ + --hash=sha256:ecb08316ead1567c53873725106f9925c766e9dbf35af11aa650f78ba589165b \ + --hash=sha256:f75ade2755a4de967296e06105f8250c364ed0072e09107a6794cabc3b7de80d \ + --hash=sha256:fe0a8dcc0bda98d235359cb75fc3960b04721ae50ec8b518e68dabdebadcd21a +python-magic==0.4.27 ; python_version >= "3.10" and python_version < "4.0" and sys_platform == "linux" \ --hash=sha256:c1ba14b08e4a5f5c31a302b7721239695b2f0f058d125bd5ce1ee36b9d9d3c3b \ --hash=sha256:c212960ad306f700aa0d01e5d7a325d20548ff97eb9920dcd29513174f0294d3 python-tlsh==4.5.0 ; python_version >= "3.10" and python_version < "4.0" \ --hash=sha256:55e3df72cabdf48bf576b4580d7fa2feba9c14aacde0a8bd7b1ef40b03cb7969 -python-whois==0.7.3 ; python_version >= "3.10" and python_version < "4.0" \ - --hash=sha256:656a1100b8757f29daf010ec5a893a3d6349ccf097884021988c174eedea4a16 -python3-openid==3.2.0 ; python_version >= "3.10" and python_version < "4.0" \ - --hash=sha256:33fbf6928f401e0b790151ed2b5290b02545e8775f982485205a066f874aaeaf \ - --hash=sha256:6626f771e0417486701e0b4daff762e7212e820ca5b29fcc0d05f6f8736dfa6b +python-whois==0.9.5 ; python_version >= "3.10" and python_version < "4.0" \ + --hash=sha256:18968c21484752fcc4b9a5f0af477ef6b8dc2e8bb7f1bd5c33831499c0dd41ca \ + --hash=sha256:d435cf8012659745ce3b4e7c15428df9405648696727812d9eaacc36782d14e3 +pythonaes==1.0 ; python_version >= "3.10" and python_version < "4.0" \ + --hash=sha256:71dd31c03500b8cba06f83f17603dcca1dd1c1308fbdaef752f353ed1aaf9f67 pytz==2021.1 ; 
python_version >= "3.10" and python_version < "4.0" \ --hash=sha256:83a4a90894bf38e243cf052c8b58f381bfe9a7a483f6a9cab140bc7f702ac4da \ --hash=sha256:eb10ce3e7736052ed3623d49975ce333bcd712c7bb19a58b9e2089d4057d0798 @@ -1445,158 +1658,88 @@ pyyaml==6.0.2 ; python_version >= "3.10" and python_version < "4.0" \ --hash=sha256:efdca5630322a10774e8e98e1af481aad470dd62c3170801852d752aa7a783ba \ --hash=sha256:f753120cb8181e736c57ef7636e83f31b9c0d1722c516f7e86cf15b7aa57ff12 \ --hash=sha256:ff3824dc5261f50c9b0dfb3be22b4567a6f938ccce4587b38952d85fd9e9afe4 -pyzipper==0.3.5 ; python_version >= "3.10" and python_version < "4.0" \ - --hash=sha256:6040069654dad040cf8708d4db78ce5829238e2091ad8006a47d97d6ffe275d6 \ - --hash=sha256:e696e9d306427400e23e13a766c7614b64d9fc3316bdc71bbcc8f0070a14f150 -rat-king-parser @ git+https://github.com/jeFF0Falltrades/rat_king_parser@ab849ec8face38c8dac3f803ae5fe7cf8be26583 ; python_version >= "3.10" and python_version < "4.0" -regex==2021.7.6 ; python_version >= "3.10" and python_version < "4.0" \ - --hash=sha256:0eb2c6e0fcec5e0f1d3bcc1133556563222a2ffd2211945d7b1480c1b1a42a6f \ - --hash=sha256:15dddb19823f5147e7517bb12635b3c82e6f2a3a6b696cc3e321522e8b9308ad \ - --hash=sha256:173bc44ff95bc1e96398c38f3629d86fa72e539c79900283afa895694229fe6a \ - --hash=sha256:17d8a3f99b18d87ac54a449b836d485cc8c195bb6f5e4379c84c8519045facc9 \ - --hash=sha256:1947e7de155063e1c495c50590229fb98720d4c383af5031bbcb413db33fa1be \ - --hash=sha256:1c78780bf46d620ff4fff40728f98b8afd8b8e35c3efd638c7df67be2d5cddbf \ - --hash=sha256:2366fe0479ca0e9afa534174faa2beae87847d208d457d200183f28c74eaea59 \ - --hash=sha256:2bceeb491b38225b1fee4517107b8491ba54fba77cf22a12e996d96a3c55613d \ - --hash=sha256:2ddeabc7652024803666ea09f32dd1ed40a0579b6fbb2a213eba590683025895 \ - --hash=sha256:2fe5e71e11a54e3355fa272137d521a40aace5d937d08b494bed4529964c19c4 \ - --hash=sha256:319eb2a8d0888fa6f1d9177705f341bc9455a2c8aca130016e52c7fe8d6c37a3 \ - --hash=sha256:3f5716923d3d0bfb27048242a6e0f14eecdb2e2a7fac47eda1d055288595f222 \ - --hash=sha256:3fabb19c82ecf39832a3f5060dfea9a7ab270ef156039a1143a29a83a09a62de \ - --hash=sha256:422dec1e7cbb2efbbe50e3f1de36b82906def93ed48da12d1714cabcd993d7f0 \ - --hash=sha256:4c9c3155fe74269f61e27617529b7f09552fbb12e44b1189cebbdb24294e6e1c \ - --hash=sha256:4f64fc59fd5b10557f6cd0937e1597af022ad9b27d454e182485f1db3008f417 \ - --hash=sha256:564a4c8a29435d1f2256ba247a0315325ea63335508ad8ed938a4f14c4116a5d \ - --hash=sha256:56fc7045a1999a8d9dd1896715bc5c802dfec5b9b60e883d2cbdecb42adedea4 \ - --hash=sha256:59506c6e8bd9306cd8a41511e32d16d5d1194110b8cfe5a11d102d8b63cf945d \ - --hash=sha256:598c0a79b4b851b922f504f9f39a863d83ebdfff787261a5ed061c21e67dd761 \ - --hash=sha256:598ee917dbe961dcf827217bf2466bb86e4ee5a8559705af57cbabb3489dd37e \ - --hash=sha256:59c00bb8dd8775473cbfb967925ad2c3ecc8886b3b2d0c90a8e2707e06c743f0 \ - --hash=sha256:6110bab7eab6566492618540c70edd4d2a18f40ca1d51d704f1d81c52d245026 \ - --hash=sha256:6afe6a627888c9a6cfbb603d1d017ce204cebd589d66e0703309b8048c3b0854 \ - --hash=sha256:716a6db91b3641f566531ffcc03ceec00b2447f0db9942b3c6ea5d2827ad6be3 \ - --hash=sha256:7423aca7cc30a6228ccdcf2ea76f12923d652c5c7c6dc1959a0b004e308f39fb \ - --hash=sha256:791aa1b300e5b6e5d597c37c346fb4d66422178566bbb426dd87eaae475053fb \ - --hash=sha256:8244c681018423a0d1784bc6b9af33bdf55f2ab8acb1f3cd9dd83d90e0813253 \ - --hash=sha256:8394e266005f2d8c6f0bc6780001f7afa3ef81a7a2111fa35058ded6fce79e4d \ - --hash=sha256:875c355360d0f8d3d827e462b29ea7682bf52327d500a4f837e934e9e4656068 \ - 
--hash=sha256:89e5528803566af4df368df2d6f503c84fbfb8249e6631c7b025fe23e6bd0cde \ - --hash=sha256:8a4c742089faf0e51469c6a1ad7e3d3d21afae54a16a6cead85209dfe0a1ce65 \ - --hash=sha256:914e626dc8e75fe4fc9b7214763f141d9f40165d00dfe680b104fa1b24063bbf \ - --hash=sha256:99d8ab206a5270c1002bfcf25c51bf329ca951e5a169f3b43214fdda1f0b5f0d \ - --hash=sha256:9a854b916806c7e3b40e6616ac9e85d3cdb7649d9e6590653deb5b341a736cec \ - --hash=sha256:b85ac458354165405c8a84725de7bbd07b00d9f72c31a60ffbf96bb38d3e25fa \ - --hash=sha256:bb9834c1e77493efd7343b8e38950dee9797d2d6f2d5fd91c008dfaef64684b9 \ - --hash=sha256:bc84fb254a875a9f66616ed4538542fb7965db6356f3df571d783f7c8d256edd \ - --hash=sha256:bf1d2d183abc7faa101ebe0b8d04fd19cb9138820abc8589083035c9440b8ca6 \ - --hash=sha256:c92831dac113a6e0ab28bc98f33781383fe294df1a2c3dfd1e850114da35fd5b \ - --hash=sha256:cbe23b323988a04c3e5b0c387fe3f8f363bf06c0680daf775875d979e376bd26 \ - --hash=sha256:ccb3d2190476d00414aab36cca453e4596e8f70a206e2aa8db3d495a109153d2 \ - --hash=sha256:d30895ec80cc80358392841add9dde81ea1d54a4949049269115e6b0555d0498 \ - --hash=sha256:d8bbce0c96462dbceaa7ac4a7dfbbee92745b801b24bce10a98d2f2b1ea9432f \ - --hash=sha256:db2b7df831c3187a37f3bb80ec095f249fa276dbe09abd3d35297fc250385694 \ - --hash=sha256:dfc0957c4a4b91eff5ad036088769e600a25774256cd0e1154378591ce573f08 \ - --hash=sha256:e586f448df2bbc37dfadccdb7ccd125c62b4348cb90c10840d695592aa1b29e0 \ - --hash=sha256:e5983c19d0beb6af88cb4d47afb92d96751fb3fa1784d8785b1cdf14c6519407 \ - --hash=sha256:e6a1e5ca97d411a461041d057348e578dc344ecd2add3555aedba3b408c9f874 \ - --hash=sha256:e8363ac90ea63c3dd0872dfdb695f38aff3334bfa5712cffb238bd3ffef300e3 \ - --hash=sha256:eaf58b9e30e0e546cdc3ac06cf9165a1ca5b3de8221e9df679416ca667972035 \ - --hash=sha256:ed693137a9187052fc46eedfafdcb74e09917166362af4cc4fddc3b31560e93d \ - --hash=sha256:edd1a68f79b89b0c57339bce297ad5d5ffcc6ae7e1afdb10f1947706ed066c9c \ - --hash=sha256:efb4af05fa4d2fc29766bf516f1f5098d6b5c3ed846fde980c18bf8646ad3979 \ - --hash=sha256:f080248b3e029d052bf74a897b9d74cfb7643537fbde97fe8225a6467fb559b5 \ - --hash=sha256:f9392a4555f3e4cb45310a65b403d86b589adc773898c25a39184b1ba4db8985 \ - --hash=sha256:f98dc35ab9a749276f1a4a38ab3e0e2ba1662ce710f6530f5b0a6656f1c32b58 -requests-file==1.5.1 ; python_version >= "3.10" and python_version < "4.0" \ - --hash=sha256:07d74208d3389d01c38ab89ef403af0cfec63957d53a0081d8eca738d0247d8e \ - --hash=sha256:dfe5dae75c12481f68ba353183c53a65e6044c923e64c24b2209f6c7570ca953 -requests-oauthlib==2.0.0 ; python_version >= "3.10" and python_version < "4.0" \ - --hash=sha256:7dd8a5c40426b779b0868c404bdef9768deccf22749cde15852df527e6269b36 \ - --hash=sha256:b3dffaebd884d8cd778494369603a9e7b58d29111bf6b41bdc2dcd87203af4e9 -requests==2.32.2 ; python_version >= "3.10" and python_version < "4.0" \ - --hash=sha256:dd951ff5ecf3e3b3aa26b40703ba77495dab41da839ae72ef3c8e5d8e2433289 \ - --hash=sha256:fc06670dd0ed212426dfeb94fc1b983d917c4f9847c863f313c9dfaaffb7c23c -requests[security,socks]==2.32.2 ; python_version >= "3.10" and python_version < "4.0" \ - --hash=sha256:dd951ff5ecf3e3b3aa26b40703ba77495dab41da839ae72ef3c8e5d8e2433289 \ - --hash=sha256:fc06670dd0ed212426dfeb94fc1b983d917c4f9847c863f313c9dfaaffb7c23c -rich==13.8.1 ; python_version >= "3.10" and python_version < "4.0" \ - --hash=sha256:1760a3c0848469b97b558fc61c85233e3dafb69c7a071b4d60c38099d3cd4c06 \ - --hash=sha256:8260cda28e3db6bf04d2d1ef4dbc03ba80a824c88b0e7668a0f23126a424844a -ruamel-yaml-clib==0.2.8 ; platform_python_implementation == "CPython" and python_version < "3.13" and 
python_version >= "3.10" \ - --hash=sha256:024cfe1fc7c7f4e1aff4a81e718109e13409767e4f871443cbff3dba3578203d \ - --hash=sha256:03d1162b6d1df1caa3a4bd27aa51ce17c9afc2046c31b0ad60a0a96ec22f8001 \ - --hash=sha256:07238db9cbdf8fc1e9de2489a4f68474e70dffcb32232db7c08fa61ca0c7c462 \ - --hash=sha256:09b055c05697b38ecacb7ac50bdab2240bfca1a0c4872b0fd309bb07dc9aa3a9 \ - --hash=sha256:1707814f0d9791df063f8c19bb51b0d1278b8e9a2353abbb676c2f685dee6afe \ - --hash=sha256:1758ce7d8e1a29d23de54a16ae867abd370f01b5a69e1a3ba75223eaa3ca1a1b \ - --hash=sha256:184565012b60405d93838167f425713180b949e9d8dd0bbc7b49f074407c5a8b \ - --hash=sha256:1b617618914cb00bf5c34d4357c37aa15183fa229b24767259657746c9077615 \ - --hash=sha256:1dc67314e7e1086c9fdf2680b7b6c2be1c0d8e3a8279f2e993ca2a7545fecf62 \ - --hash=sha256:25ac8c08322002b06fa1d49d1646181f0b2c72f5cbc15a85e80b4c30a544bb15 \ - --hash=sha256:25c515e350e5b739842fc3228d662413ef28f295791af5e5110b543cf0b57d9b \ - --hash=sha256:305889baa4043a09e5b76f8e2a51d4ffba44259f6b4c72dec8ca56207d9c6fe1 \ - --hash=sha256:3213ece08ea033eb159ac52ae052a4899b56ecc124bb80020d9bbceeb50258e9 \ - --hash=sha256:3f215c5daf6a9d7bbed4a0a4f760f3113b10e82ff4c5c44bec20a68c8014f675 \ - --hash=sha256:46d378daaac94f454b3a0e3d8d78cafd78a026b1d71443f4966c696b48a6d899 \ - --hash=sha256:4ecbf9c3e19f9562c7fdd462e8d18dd902a47ca046a2e64dba80699f0b6c09b7 \ - --hash=sha256:53a300ed9cea38cf5a2a9b069058137c2ca1ce658a874b79baceb8f892f915a7 \ - --hash=sha256:56f4252222c067b4ce51ae12cbac231bce32aee1d33fbfc9d17e5b8d6966c312 \ - --hash=sha256:5c365d91c88390c8d0a8545df0b5857172824b1c604e867161e6b3d59a827eaa \ - --hash=sha256:700e4ebb569e59e16a976857c8798aee258dceac7c7d6b50cab63e080058df91 \ - --hash=sha256:75e1ed13e1f9de23c5607fe6bd1aeaae21e523b32d83bb33918245361e9cc51b \ - --hash=sha256:77159f5d5b5c14f7c34073862a6b7d34944075d9f93e681638f6d753606c6ce6 \ - --hash=sha256:7f67a1ee819dc4562d444bbafb135832b0b909f81cc90f7aa00260968c9ca1b3 \ - --hash=sha256:840f0c7f194986a63d2c2465ca63af8ccbbc90ab1c6001b1978f05119b5e7334 \ - --hash=sha256:84b554931e932c46f94ab306913ad7e11bba988104c5cff26d90d03f68258cd5 \ - --hash=sha256:87ea5ff66d8064301a154b3933ae406b0863402a799b16e4a1d24d9fbbcbe0d3 \ - --hash=sha256:955eae71ac26c1ab35924203fda6220f84dce57d6d7884f189743e2abe3a9fbe \ - --hash=sha256:a1a45e0bb052edf6a1d3a93baef85319733a888363938e1fc9924cb00c8df24c \ - --hash=sha256:a5aa27bad2bb83670b71683aae140a1f52b0857a2deff56ad3f6c13a017a26ed \ - --hash=sha256:a6a9ffd280b71ad062eae53ac1659ad86a17f59a0fdc7699fd9be40525153337 \ - --hash=sha256:a75879bacf2c987c003368cf14bed0ffe99e8e85acfa6c0bfffc21a090f16880 \ - --hash=sha256:aa2267c6a303eb483de8d02db2871afb5c5fc15618d894300b88958f729ad74f \ - --hash=sha256:aab7fd643f71d7946f2ee58cc88c9b7bfc97debd71dcc93e03e2d174628e7e2d \ - --hash=sha256:b16420e621d26fdfa949a8b4b47ade8810c56002f5389970db4ddda51dbff248 \ - --hash=sha256:b42169467c42b692c19cf539c38d4602069d8c1505e97b86387fcf7afb766e1d \ - --hash=sha256:bba64af9fa9cebe325a62fa398760f5c7206b215201b0ec825005f1b18b9bccf \ - --hash=sha256:beb2e0404003de9a4cab9753a8805a8fe9320ee6673136ed7f04255fe60bb512 \ - --hash=sha256:bef08cd86169d9eafb3ccb0a39edb11d8e25f3dae2b28f5c52fd997521133069 \ - --hash=sha256:c2a72e9109ea74e511e29032f3b670835f8a59bbdc9ce692c5b4ed91ccf1eedb \ - --hash=sha256:c58ecd827313af6864893e7af0a3bb85fd529f862b6adbefe14643947cfe2942 \ - --hash=sha256:c69212f63169ec1cfc9bb44723bf2917cbbd8f6191a00ef3410f5a7fe300722d \ - --hash=sha256:cabddb8d8ead485e255fe80429f833172b4cadf99274db39abc080e068cbcc31 \ - 
--hash=sha256:d176b57452ab5b7028ac47e7b3cf644bcfdc8cacfecf7e71759f7f51a59e5c92 \ - --hash=sha256:da09ad1c359a728e112d60116f626cc9f29730ff3e0e7db72b9a2dbc2e4beed5 \ - --hash=sha256:e2b4c44b60eadec492926a7270abb100ef9f72798e18743939bdbf037aab8c28 \ - --hash=sha256:e79e5db08739731b0ce4850bed599235d601701d5694c36570a99a0c5ca41a9d \ - --hash=sha256:ebc06178e8821efc9692ea7544aa5644217358490145629914d8020042c24aa1 \ - --hash=sha256:edaef1c1200c4b4cb914583150dcaa3bc30e592e907c01117c08b13a07255ec2 \ - --hash=sha256:f481f16baec5290e45aebdc2a5168ebc6d35189ae6fea7a58787613a25f6e875 \ - --hash=sha256:fff3573c2db359f091e1589c3d7c5fc2f86f5bdb6f24252c2d8e539d4e45f412 -ruamel-yaml==0.18.6 ; python_version >= "3.10" and python_version < "4.0" \ - --hash=sha256:57b53ba33def16c4f3d807c0ccbc00f8a6081827e81ba2491691b76882d0c636 \ - --hash=sha256:8b27e6a217e786c6fbe5634d8f3f11bc63e0f80f6a5890f28863d9c45aac311b -ruff==0.0.290 ; python_version >= "3.10" and python_version < "4.0" \ - --hash=sha256:0e2b09ac4213b11a3520221083866a5816616f3ae9da123037b8ab275066fbac \ - --hash=sha256:150bf8050214cea5b990945b66433bf9a5e0cef395c9bc0f50569e7de7540c86 \ - --hash=sha256:1d9be6351b7889462912e0b8185a260c0219c35dfd920fb490c7f256f1d8313e \ - --hash=sha256:2ab41bc0ba359d3f715fc7b705bdeef19c0461351306b70a4e247f836b9350ed \ - --hash=sha256:35e3550d1d9f2157b0fcc77670f7bb59154f223bff281766e61bdd1dd854e0c5 \ - --hash=sha256:461fbd1fb9ca806d4e3d5c745a30e185f7cf3ca77293cdc17abb2f2a990ad3f7 \ - --hash=sha256:4ca6285aa77b3d966be32c9a3cd531655b3d4a0171e1f9bf26d66d0372186767 \ - --hash=sha256:75386ebc15fe5467248c039f5bf6a0cfe7bfc619ffbb8cd62406cd8811815fca \ - --hash=sha256:75cdc7fe32dcf33b7cec306707552dda54632ac29402775b9e212a3c16aad5e6 \ - --hash=sha256:949fecbc5467bb11b8db810a7fa53c7e02633856ee6bd1302b2f43adcd71b88d \ - --hash=sha256:982af5ec67cecd099e2ef5e238650407fb40d56304910102d054c109f390bf3c \ - --hash=sha256:ac93eadf07bc4ab4c48d8bb4e427bf0f58f3a9c578862eb85d99d704669f5da0 \ - --hash=sha256:ae5a92dfbdf1f0c689433c223f8dac0782c2b2584bd502dfdbc76475669f1ba1 \ - --hash=sha256:bbd37352cea4ee007c48a44c9bc45a21f7ba70a57edfe46842e346651e2b995a \ - --hash=sha256:d748c8bd97874f5751aed73e8dde379ce32d16338123d07c18b25c9a2796574a \ - --hash=sha256:eb07f37f7aecdbbc91d759c0c09870ce0fb3eed4025eebedf9c4b98c69abd527 \ - --hash=sha256:f1f49f5ec967fd5778813780b12a5650ab0ebcb9ddcca28d642c689b36920796 -service-identity==24.1.0 ; python_version >= "3.10" and python_version < "4.0" \ - --hash=sha256:6829c9d62fb832c2e1c435629b0a8c476e1929881f28bee4d20bc24161009221 \ - --hash=sha256:a28caf8130c8a5c1c7a6f5293faaf239bbfb7751e4862436920ee6f2616f568a +pyzipper==0.3.6 ; python_version >= "3.10" and python_version < "4.0" \ + --hash=sha256:0adca90a00c36a93fbe49bfa8c5add452bfe4ef85a1b8e3638739dd1c7b26bfc \ + --hash=sha256:6d097f465bfa47796b1494e12ea65d1478107d38e13bc56f6e58eedc4f6c1a87 +rat-king-parser==4.1.6 ; python_version >= "3.10" and python_version < "4.0" \ + --hash=sha256:b5249842b8ea77b3d647cd6a559f39b51c4a15719a2d9cb0285deeb709c5cca9 \ + --hash=sha256:ea3db0274cb158e797953561624b9c9d513fcf34c5a6be53a41971b8008a079a +requests-file==2.1.0 ; python_version >= "3.10" and python_version < "4.0" \ + --hash=sha256:0f549a3f3b0699415ac04d167e9cb39bccfb730cb832b4d20be3d9867356e658 \ + --hash=sha256:cf270de5a4c5874e84599fc5778303d496c10ae5e870bfa378818f35d21bda5c +requests==2.32.4 ; python_version >= "3.10" and python_version < "4.0" \ + --hash=sha256:27babd3cda2a6d50b30443204ee89830707d396671944c998b5975b031ac2b2c \ + 
--hash=sha256:27d0316682c8a29834d3264820024b62a36942083d52caf2f14c0591336d3422 +rich==13.9.4 ; python_version >= "3.10" and python_version < "4.0" \ + --hash=sha256:439594978a49a09530cff7ebc4b5c7103ef57baf48d5ea3184f21d9a2befa098 \ + --hash=sha256:6049d5e6ec054bf2779ab3358186963bac2ea89175919d699e378b99738c2a90 +ruamel-yaml-clib==0.2.12 ; python_version >= "3.10" and python_version < "3.13" and platform_python_implementation == "CPython" \ + --hash=sha256:040ae85536960525ea62868b642bdb0c2cc6021c9f9d507810c0c604e66f5a7b \ + --hash=sha256:0467c5965282c62203273b838ae77c0d29d7638c8a4e3a1c8bdd3602c10904e4 \ + --hash=sha256:0b7e75b4965e1d4690e93021adfcecccbca7d61c7bddd8e22406ef2ff20d74ef \ + --hash=sha256:11f891336688faf5156a36293a9c362bdc7c88f03a8a027c2c1d8e0bcde998e5 \ + --hash=sha256:20b0f8dc160ba83b6dcc0e256846e1a02d044e13f7ea74a3d1d56ede4e48c632 \ + --hash=sha256:22353049ba4181685023b25b5b51a574bce33e7f51c759371a7422dcae5402a6 \ + --hash=sha256:32621c177bbf782ca5a18ba4d7af0f1082a3f6e517ac2a18b3974d4edf349680 \ + --hash=sha256:3bc2a80e6420ca8b7d3590791e2dfc709c88ab9152c00eeb511c9875ce5778bf \ + --hash=sha256:3eac5a91891ceb88138c113f9db04f3cebdae277f5d44eaa3651a4f573e6a5da \ + --hash=sha256:4a6679521a58256a90b0d89e03992c15144c5f3858f40d7c18886023d7943db6 \ + --hash=sha256:4c8c5d82f50bb53986a5e02d1b3092b03622c02c2eb78e29bec33fd9593bae1a \ + --hash=sha256:5a0e060aace4c24dcaf71023bbd7d42674e3b230f7e7b97317baf1e953e5b519 \ + --hash=sha256:6442cb36270b3afb1b4951f060eccca1ce49f3d087ca1ca4563a6eb479cb3de6 \ + --hash=sha256:6c8fbb13ec503f99a91901ab46e0b07ae7941cd527393187039aec586fdfd36f \ + --hash=sha256:749c16fcc4a2b09f28843cda5a193e0283e47454b63ec4b81eaa2242f50e4ccd \ + --hash=sha256:7dd5adc8b930b12c8fc5b99e2d535a09889941aa0d0bd06f4749e9a9397c71d2 \ + --hash=sha256:811ea1594b8a0fb466172c384267a4e5e367298af6b228931f273b111f17ef52 \ + --hash=sha256:932205970b9f9991b34f55136be327501903f7c66830e9760a8ffb15b07f05cd \ + --hash=sha256:943f32bc9dedb3abff9879edc134901df92cfce2c3d5c9348f172f62eb2d771d \ + --hash=sha256:95c3829bb364fdb8e0332c9931ecf57d9be3519241323c5274bd82f709cebc0c \ + --hash=sha256:96777d473c05ee3e5e3c3e999f5d23c6f4ec5b0c38c098b3a5229085f74236c6 \ + --hash=sha256:a274fb2cb086c7a3dea4322ec27f4cb5cc4b6298adb583ab0e211a4682f241eb \ + --hash=sha256:a606ef75a60ecf3d924613892cc603b154178ee25abb3055db5062da811fd969 \ + --hash=sha256:ab007f2f5a87bd08ab1499bdf96f3d5c6ad4dcfa364884cb4549aa0154b13a28 \ + --hash=sha256:bb43a269eb827806502c7c8efb7ae7e9e9d0573257a46e8e952f4d4caba4f31e \ + --hash=sha256:bc5f1e1c28e966d61d2519f2a3d451ba989f9ea0f2307de7bc45baa526de9e45 \ + --hash=sha256:bd0a08f0bab19093c54e18a14a10b4322e1eacc5217056f3c063bd2f59853ce4 \ + --hash=sha256:beffaed67936fbbeffd10966a4eb53c402fafd3d6833770516bf7314bc6ffa12 \ + --hash=sha256:bf165fef1f223beae7333275156ab2022cffe255dcc51c27f066b4370da81e31 \ + --hash=sha256:cf12567a7b565cbf65d438dec6cfbe2917d3c1bdddfce84a9930b7d35ea59642 \ + --hash=sha256:d84318609196d6bd6da0edfa25cedfbabd8dbde5140a0a23af29ad4b8f91fb1e \ + --hash=sha256:d85252669dc32f98ebcd5d36768f5d4faeaeaa2d655ac0473be490ecdae3c285 \ + --hash=sha256:e143ada795c341b56de9418c58d028989093ee611aa27ffb9b7f609c00d813ed \ + --hash=sha256:e188d2699864c11c36cdfdada94d781fd5d6b0071cd9c427bceb08ad3d7c70e1 \ + --hash=sha256:e2f1c3765db32be59d18ab3953f43ab62a761327aafc1594a2a1fbe038b8b8a7 \ + --hash=sha256:e5b8daf27af0b90da7bb903a876477a9e6d7270be6146906b276605997c7e9a3 \ + --hash=sha256:e7e3736715fbf53e9be2a79eb4db68e4ed857017344d697e8b9749444ae57475 \ + 
--hash=sha256:e8c4ebfcfd57177b572e2040777b8abc537cdef58a2120e830124946aa9b42c5 \ + --hash=sha256:f66efbc1caa63c088dead1c4170d148eabc9b80d95fb75b6c92ac0aad2437d76 \ + --hash=sha256:fc4b630cd3fa2cf7fce38afa91d7cfe844a9f75d7f0f36393fa98815e911d987 \ + --hash=sha256:fd5415dded15c3822597455bc02bcd66e81ef8b7a48cb71a33628fc9fdde39df +ruamel-yaml==0.18.10 ; python_version >= "3.10" and python_version < "4.0" \ + --hash=sha256:20c86ab29ac2153f80a428e1254a8adf686d3383df04490514ca3b79a362db58 \ + --hash=sha256:30f22513ab2301b3d2b577adc121c6471f28734d3d9728581245f1e76468b4f1 +ruff==0.9.4 ; python_version >= "3.10" and python_version < "4.0" \ + --hash=sha256:05bebf4cdbe3ef75430d26c375773978950bbf4ee3c95ccb5448940dc092408e \ + --hash=sha256:1d4c8772670aecf037d1bf7a07c39106574d143b26cfe5ed1787d2f31e800214 \ + --hash=sha256:37c892540108314a6f01f105040b5106aeb829fa5fb0561d2dcaf71485021137 \ + --hash=sha256:433dedf6ddfdec7f1ac7575ec1eb9844fa60c4c8c2f8887a070672b8d353d34c \ + --hash=sha256:54499fb08408e32b57360f6f9de7157a5fec24ad79cb3f42ef2c3f3f728dfe2b \ + --hash=sha256:56acd6c694da3695a7461cc55775f3a409c3815ac467279dfa126061d84b314b \ + --hash=sha256:585792f1e81509e38ac5123492f8875fbc36f3ede8185af0a26df348e5154f41 \ + --hash=sha256:64e73d25b954f71ff100bb70f39f1ee09e880728efb4250c632ceed4e4cdf706 \ + --hash=sha256:6907ee3529244bb0ed066683e075f09285b38dd5b4039370df6ff06041ca19e7 \ + --hash=sha256:6ce6743ed64d9afab4fafeaea70d3631b4d4b28b592db21a5c2d1f0ef52934bf \ + --hash=sha256:87c90c32357c74f11deb7fbb065126d91771b207bf9bfaaee01277ca59b574ec \ + --hash=sha256:a6c634fc6f5a0ceae1ab3e13c58183978185d131a29c425e4eaa9f40afe1e6d6 \ + --hash=sha256:bfc5f1d7afeda8d5d37660eeca6d389b142d7f2b5a1ab659d9214ebd0e025231 \ + --hash=sha256:d612dbd0f3a919a8cc1d12037168bfa536862066808960e0cc901404b77968f0 \ + --hash=sha256:db1192ddda2200671f9ef61d9597fcef89d934f5d1705e571a93a67fb13a4402 \ + --hash=sha256:de9edf2ce4b9ddf43fd93e20ef635a900e25f622f87ed6e3047a664d0e8f810e \ + --hash=sha256:e0c93e7d47ed951b9394cf352d6695b31498e68fd5782d6cbc282425655f687a \ + --hash=sha256:faa935fc00ae854d8b638c16a5f1ce881bc3f67446957dd6f2af440a5fc8526b +service-identity==24.2.0 ; python_version >= "3.10" and python_version < "4.0" \ + --hash=sha256:6b047fbd8a84fd0bb0d55ebce4031e400562b9196e1e0d3e0fe2b8a59f6d4a85 \ + --hash=sha256:b8683ba13f0d39c6cd5d625d2c5f65421d6d707b013b375c355751557cbe8e09 setproctitle==1.3.2 ; python_version >= "3.10" and python_version < "4.0" \ --hash=sha256:1c5d5dad7c28bdd1ec4187d818e43796f58a845aa892bb4481587010dc4d362b \ --hash=sha256:1c8d9650154afaa86a44ff195b7b10d683c73509d085339d174e394a22cccbb9 \ @@ -1670,15 +1813,15 @@ setproctitle==1.3.2 ; python_version >= "3.10" and python_version < "4.0" \ --hash=sha256:fe8a988c7220c002c45347430993830666e55bc350179d91fcee0feafe64e1d4 \ --hash=sha256:fed18e44711c5af4b681c2b3b18f85e6f0f1b2370a28854c645d636d5305ccd8 \ --hash=sha256:ffc61a388a5834a97953d6444a2888c24a05f2e333f9ed49f977a87bb1ad4761 -setuptools==70.0.0 ; python_version >= "3.10" and python_version < "4.0" \ - --hash=sha256:54faa7f2e8d2d11bcd2c07bed282eef1046b5c080d1c32add737d7b5817b1ad4 \ - --hash=sha256:f211a66637b8fa059bb28183da127d4e86396c991a942b028c6650d4319c3fd0 -sflock2[linux,shellcode]==0.3.66 ; python_version >= "3.10" and python_version < "4.0" \ - --hash=sha256:3dc74a75491fe2502d70bbc3778b9a6da5cbeb9da479997eef40106208ee639f \ - --hash=sha256:fb4f8c52df5dfbbb8abde6fb46addf06d6456262ac3e8ad9c332354914381821 -six==1.16.0 ; python_version >= "3.10" and python_version < "4.0" \ - 
--hash=sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926 \ - --hash=sha256:8abb2f1d86890a2dfb989f9a77cfcfd3e47c2a354b01111771326f8aa26e0254 +setuptools==78.1.1 ; python_version >= "3.10" and python_version < "4.0" \ + --hash=sha256:c3a9c4211ff4c309edb8b8c4f1cbfa7ae324c4ba9f91ff254e3d305b9fd54561 \ + --hash=sha256:fcc17fd9cd898242f6b4adfaca46137a9edef687f43e6f78469692a5e70d851d +sflock2==0.3.76 ; python_version >= "3.10" and python_version < "4.0" \ + --hash=sha256:3d989d142fc49ebd049f75eb8d402451fcd20148cf27aaa20c540ac95a9c81ff \ + --hash=sha256:eed75b32adf3c82a60d9339fda63a151355f9be7639d7d583de8f43ea6604e4c +six==1.17.0 ; python_version >= "3.10" and python_version < "4.0" \ + --hash=sha256:4721f391ed90541fddacab5acf947aa0d3dc7d27b2e1e8eda2be8970586c3274 \ + --hash=sha256:ff70335d468e7eb6ec65b95b99d3a2836546063f63acc5171de367e834932a81 sniffio==1.3.1 ; python_version >= "3.10" and python_version < "4.0" \ --hash=sha256:2f6da418d1f1e0fddd844478f41680e794e6051915791a034ff65e5f100525a2 \ --hash=sha256:f4324edc670a0f49750a81b895f35c3adb843cca46f0530f79fc1babb23789dc @@ -1738,36 +1881,78 @@ sqlalchemy==1.4.50 ; python_version >= "3.10" and python_version < "4.0" \ --hash=sha256:f5b1fb2943d13aba17795a770d22a2ec2214fc65cff46c487790192dda3a3ee7 \ --hash=sha256:fb9adc4c6752d62c6078c107d23327aa3023ef737938d0135ece8ffb67d07030 \ --hash=sha256:fbaf6643a604aa17e7a7afd74f665f9db882df5c297bdd86c38368f2c471f37d -sqlparse==0.5.1 ; python_version >= "3.10" and python_version < "4.0" \ - --hash=sha256:773dcbf9a5ab44a090f3441e2180efe2560220203dc2f8c0b0fa141e18b505e4 \ - --hash=sha256:bb6b4df465655ef332548e24f08e205afc81b9ab86cb1c45657a7ff173a3a00e -tabulate==0.9.0 ; python_version >= "3.10" and python_version < "4.0" \ - --hash=sha256:0095b12bf5966de529c0feb1fa08671671b3368eec77d7ef7ab114be2c068b3c \ - --hash=sha256:024ca478df22e9340661486f85298cff5f6dcdba14f3813e8830015b9ed1948f -termcolor==2.4.0 ; python_version >= "3.10" and python_version < "4.0" \ - --hash=sha256:9297c0df9c99445c2412e832e882a7884038a25617c60cea2ad69488d4040d63 \ - --hash=sha256:aab9e56047c8ac41ed798fa36d892a37aca6b3e9159f3e0c24bc64a9b3ac7b7a -tldextract==3.5.0 ; python_version >= "3.10" and python_version < "4.0" \ - --hash=sha256:2cb271ca8d06ea1630a1361b58edad14e0cf81f34ce3c90b052854528fe2a281 \ - --hash=sha256:4df1c65b95be61d59428e8611e955e54e6f1d4483d3e8d5733d3a9062155e910 -tomli==2.0.1 ; python_version >= "3.10" and python_version < "3.11" \ - --hash=sha256:939de3e7a6161af0c887ef91b7d41a53e7c5a1ca976325f429cb46ea9bc30ecc \ - --hash=sha256:de526c12914f0c550d15924c62d72abc48d6fe7364aa87328337a31007fe8a4f -tqdm==4.66.5 ; python_version >= "3.10" and python_version < "4.0" \ - --hash=sha256:90279a3770753eafc9194a0364852159802111925aa30eb3f9d85b0e805ac7cd \ - --hash=sha256:e1020aef2e5096702d8a025ac7d16b1577279c9d63f8375b63083e9a5f0fcbad -twisted[tls]==24.7.0 ; python_version >= "3.10" and python_version < "4.0" \ - --hash=sha256:5a60147f044187a127ec7da96d170d49bcce50c6fd36f594e60f4587eff4d394 \ - --hash=sha256:734832ef98108136e222b5230075b1079dad8a3fc5637319615619a7725b0c81 +sqlparse==0.5.3 ; python_version >= "3.10" and python_version < "4.0" \ + --hash=sha256:09f67787f56a0b16ecdbde1bfc7f5d9c3371ca683cfeaa8e6ff60b4807ec9272 \ + --hash=sha256:cf2196ed3418f3ba5de6af7e82c694a9fbdbfecccdfc72e281548517081f16ca +stpyv8==13.1.201.22 ; python_version >= "3.10" and python_version < "4.0" \ + --hash=sha256:05c3ecaaf2dd8dbe06bdb70f3192b7e6161337ee04e6830a57b58eb4be7c70bd \ + 
--hash=sha256:2bdbc7307dbf86cfc4bd0a11f0ae1f1c6f91d1b3366a7b409f9fc5d01f69441d \ + --hash=sha256:2ce8e85a4a88e80ae561654be1b55ff578c4f1a27d518ac35e6d87b93d91b096 \ + --hash=sha256:4d737935167c52ed72e5a78264d9adfeaf089bf54693b88f12cbdb439a36a102 \ + --hash=sha256:6cb5e8751aee2487cc3b5f21eac6d459041a7180a779941b64db5736e27276ee \ + --hash=sha256:6dc40b656cea7fe541f6bdbad83b6b4ed51e5ead985b54c139319a731253a55e \ + --hash=sha256:8019f19b29621ccde85125d86f60f5814175b17670f5949d2671cf22cf453ea6 \ + --hash=sha256:834b9761bb7f49da8b887847c7647495a2cf6c45f69e2124ae0e3f024493bc15 \ + --hash=sha256:90568ff08dfaf0ebd3bf1c79f7d21db06d82eada412a6e914b995bead7c78666 \ + --hash=sha256:b2a660a331e82fa89d5938ec8418743ebfb544733269f24cd8461a18472701c2 \ + --hash=sha256:b53df6114a88698ee6f3820cf46476e83ee09c9a67dd9f7cf58ca6a2928238b0 \ + --hash=sha256:b9d9499ed2007cc097a5d2ae0cb18226b2bf3ca429301811b2e12a787a8f137e \ + --hash=sha256:bf51578ec84dba6519d75ca81a154a070910e638da0ec384f4bf6d535f9b5218 \ + --hash=sha256:c24aa4215c64db7d67fc6c42c0d7731cabcf300596bf9c826ae74f426fe3b771 \ + --hash=sha256:c4292843c8133fc99833aceef25925a97edf01031e186335582deb077b99d2bf \ + --hash=sha256:c8189b8c4d87579f353705441757f11e2f2260578b82000925dadf0ed59a47e3 \ + --hash=sha256:d00a220268d63d68490682b571d082d5b197de1f19d6f478a88357c61da94f7a \ + --hash=sha256:da6d8f2945bd057057c64bc93ea3c064cc848b75f55d6d651120ee5d115e0761 \ + --hash=sha256:fc2b956bfaf23531c490845edb7d80fc998fe8aee1c7cf1337317dae01169307 \ + --hash=sha256:fe25c004b12f2616ae877e0469f95aa2164e62e6584d253b07f3c9f9ba69a888 +tldextract==5.1.3 ; python_version >= "3.10" and python_version < "4.0" \ + --hash=sha256:78de310cc2ca018692de5ddf320f9d6bd7c5cf857d0fd4f2175f0cdf4440ea75 \ + --hash=sha256:d43c7284c23f5dc8a42fd0fee2abede2ff74cc622674e4cb07f514ab3330c338 +tomli==2.2.1 ; python_version == "3.10" \ + --hash=sha256:023aa114dd824ade0100497eb2318602af309e5a55595f76b626d6d9f3b7b0a6 \ + --hash=sha256:02abe224de6ae62c19f090f68da4e27b10af2b93213d36cf44e6e1c5abd19fdd \ + --hash=sha256:286f0ca2ffeeb5b9bd4fcc8d6c330534323ec51b2f52da063b11c502da16f30c \ + --hash=sha256:2d0f2fdd22b02c6d81637a3c95f8cd77f995846af7414c5c4b8d0545afa1bc4b \ + --hash=sha256:33580bccab0338d00994d7f16f4c4ec25b776af3ffaac1ed74e0b3fc95e885a8 \ + --hash=sha256:400e720fe168c0f8521520190686ef8ef033fb19fc493da09779e592861b78c6 \ + --hash=sha256:40741994320b232529c802f8bc86da4e1aa9f413db394617b9a256ae0f9a7f77 \ + --hash=sha256:465af0e0875402f1d226519c9904f37254b3045fc5084697cefb9bdde1ff99ff \ + --hash=sha256:4a8f6e44de52d5e6c657c9fe83b562f5f4256d8ebbfe4ff922c495620a7f6cea \ + --hash=sha256:4e340144ad7ae1533cb897d406382b4b6fede8890a03738ff1683af800d54192 \ + --hash=sha256:678e4fa69e4575eb77d103de3df8a895e1591b48e740211bd1067378c69e8249 \ + --hash=sha256:6972ca9c9cc9f0acaa56a8ca1ff51e7af152a9f87fb64623e31d5c83700080ee \ + --hash=sha256:7fc04e92e1d624a4a63c76474610238576942d6b8950a2d7f908a340494e67e4 \ + --hash=sha256:889f80ef92701b9dbb224e49ec87c645ce5df3fa2cc548664eb8a25e03127a98 \ + --hash=sha256:8d57ca8095a641b8237d5b079147646153d22552f1c637fd3ba7f4b0b29167a8 \ + --hash=sha256:8dd28b3e155b80f4d54beb40a441d366adcfe740969820caf156c019fb5c7ec4 \ + --hash=sha256:9316dc65bed1684c9a98ee68759ceaed29d229e985297003e494aa825ebb0281 \ + --hash=sha256:a198f10c4d1b1375d7687bc25294306e551bf1abfa4eace6650070a5c1ae2744 \ + --hash=sha256:a38aa0308e754b0e3c67e344754dff64999ff9b513e691d0e786265c93583c69 \ + --hash=sha256:a92ef1a44547e894e2a17d24e7557a5e85a9e1d0048b0b5e7541f76c5032cb13 \ + 
--hash=sha256:ac065718db92ca818f8d6141b5f66369833d4a80a9d74435a268c52bdfa73140 \ + --hash=sha256:b82ebccc8c8a36f2094e969560a1b836758481f3dc360ce9a3277c65f374285e \ + --hash=sha256:c954d2250168d28797dd4e3ac5cf812a406cd5a92674ee4c8f123c889786aa8e \ + --hash=sha256:cb55c73c5f4408779d0cf3eef9f762b9c9f147a77de7b258bef0a5628adc85cc \ + --hash=sha256:cd45e1dc79c835ce60f7404ec8119f2eb06d38b1deba146f07ced3bbc44505ff \ + --hash=sha256:d3f5614314d758649ab2ab3a62d4f2004c825922f9e370b29416484086b264ec \ + --hash=sha256:d920f33822747519673ee656a4b6ac33e382eca9d331c87770faa3eef562aeb2 \ + --hash=sha256:db2b95f9de79181805df90bedc5a5ab4c165e6ec3fe99f970d0e302f384ad222 \ + --hash=sha256:e59e304978767a54663af13c07b3d1af22ddee3bb2fb0618ca1593e4f593a106 \ + --hash=sha256:e85e99945e688e32d5a35c1ff38ed0b3f41f43fad8df0bdf79f72b2ba7bc5272 \ + --hash=sha256:ece47d672db52ac607a3d9599a9d48dcb2f2f735c6c2d1f34130085bb12b112a \ + --hash=sha256:f4039b9cbc3048b2416cc57ab3bda989a6fcf9b36cf8937f01a6e731b64f80d7 +twisted==24.11.0 ; python_version >= "3.10" and python_version < "4.0" \ + --hash=sha256:695d0556d5ec579dcc464d2856b634880ed1319f45b10d19043f2b57eb0115b5 \ + --hash=sha256:fe403076c71f04d5d2d789a755b687c5637ec3bcd3b2b8252d76f2ba65f54261 txaio==23.1.1 ; python_version >= "3.10" and python_version < "4.0" \ --hash=sha256:aaea42f8aad50e0ecfb976130ada140797e9dcb85fad2cf72b0f37f8cefcb490 \ --hash=sha256:f9a9216e976e5e3246dfd112ad7ad55ca915606b60b84a757ac769bd404ff704 typing-extensions==4.12.2 ; python_version >= "3.10" and python_version < "4.0" \ --hash=sha256:04e5ca0351e0f3f85c6853954072df659d0d13fac324d0072316b67d7794700d \ --hash=sha256:1a7ead55c7e559dd4dee8856e3a88b41225abfe1ce8df57b7c13915fe121ffb8 -tzdata==2024.2 ; python_version >= "3.10" and python_version < "4.0" and sys_platform == "win32" \ - --hash=sha256:7d85cc416e9382e69095b7bdf4afd9e3880418a2413feec7069d533d6b4e31cc \ - --hash=sha256:a48093786cdcde33cad18c2555e8532f34422074448fbc874186f0abd79565cd +tzdata==2025.1 ; python_version >= "3.10" and python_version < "4.0" and sys_platform == "win32" \ + --hash=sha256:24894909e88cdb28bd1636c6887801df64cb485bd593f2fd83ef29075a81d694 \ + --hash=sha256:7e127113816800496f027041c570f50bcd464a020098a3b6b199517772303639 unicorn==2.1.1 ; python_version >= "3.10" and python_version < "4.0" \ --hash=sha256:06df71752e147107680df7f841360a70cc1ae1f69438dbdc3a87e642fd21eb49 \ --hash=sha256:13e7fb410a4258522dbc5482d24949f1a89d4f113bc7fd30d523cdad1e543c3f \ @@ -1780,234 +1965,208 @@ unicorn==2.1.1 ; python_version >= "3.10" and python_version < "4.0" \ --hash=sha256:b0f139adb1c9406f57d25cab96ad7a6d3cbb9119f5480ebecedd4f5d7cb024fb \ --hash=sha256:d4a08dbf222c5481bc909a9aa404b79874f6e67f5ba7c47036d03c68ab7371a7 \ --hash=sha256:f0ebcfaba67ef0ebcd05ee3560268f1c6f683bdd08ff496888741a163d29735d -urllib3==2.2.3 ; python_version >= "3.10" and python_version < "4.0" \ - --hash=sha256:ca899ca043dcb1bafa3e262d73aa25c465bfb49e0bd9dd5d59f1d0acba2f8fac \ - --hash=sha256:e7d814a81dad81e6caf2ec9fdedb284ecc9c73076b62654547cc64ccdcae26e9 -uvicorn[standard]==0.18.3 ; python_version >= "3.10" and python_version < "4.0" \ +urllib3==2.5.0 ; python_version >= "3.10" and python_version < "4.0" \ + --hash=sha256:3fc47733c7e419d4bc3f6b3dc2b4f890bb743906a30d56ba4a5bfa4bbff92760 \ + --hash=sha256:e6b01673c0fa6a13e374b50871808eb3bf7046c4b125b216f6bf1cc604cff0dc +uvicorn==0.18.3 ; python_version >= "3.10" and python_version < "4.0" \ --hash=sha256:0abd429ebb41e604ed8d2be6c60530de3408f250e8d2d84967d85ba9e86fe3af \ 
--hash=sha256:9a66e7c42a2a95222f76ec24a4b754c158261c4696e683b9dadc72b590e0311b -uvloop==0.20.0 ; (sys_platform != "win32" and sys_platform != "cygwin") and platform_python_implementation != "PyPy" and python_version >= "3.10" and python_version < "4.0" \ - --hash=sha256:265a99a2ff41a0fd56c19c3838b29bf54d1d177964c300dad388b27e84fd7847 \ - --hash=sha256:2beee18efd33fa6fdb0976e18475a4042cd31c7433c866e8a09ab604c7c22ff2 \ - --hash=sha256:35968fc697b0527a06e134999eef859b4034b37aebca537daeb598b9d45a137b \ - --hash=sha256:36c530d8fa03bfa7085af54a48f2ca16ab74df3ec7108a46ba82fd8b411a2315 \ - --hash=sha256:3a609780e942d43a275a617c0839d85f95c334bad29c4c0918252085113285b5 \ - --hash=sha256:4603ca714a754fc8d9b197e325db25b2ea045385e8a3ad05d3463de725fdf469 \ - --hash=sha256:4b75f2950ddb6feed85336412b9a0c310a2edbcf4cf931aa5cfe29034829676d \ - --hash=sha256:4f44af67bf39af25db4c1ac27e82e9665717f9c26af2369c404be865c8818dcf \ - --hash=sha256:6462c95f48e2d8d4c993a2950cd3d31ab061864d1c226bbf0ee2f1a8f36674b9 \ - --hash=sha256:649c33034979273fa71aa25d0fe120ad1777c551d8c4cd2c0c9851d88fcb13ab \ - --hash=sha256:746242cd703dc2b37f9d8b9f173749c15e9a918ddb021575a0205ec29a38d31e \ - --hash=sha256:77fbc69c287596880ecec2d4c7a62346bef08b6209749bf6ce8c22bbaca0239e \ - --hash=sha256:80dc1b139516be2077b3e57ce1cb65bfed09149e1d175e0478e7a987863b68f0 \ - --hash=sha256:82edbfd3df39fb3d108fc079ebc461330f7c2e33dbd002d146bf7c445ba6e756 \ - --hash=sha256:89e8d33bb88d7263f74dc57d69f0063e06b5a5ce50bb9a6b32f5fcbe655f9e73 \ - --hash=sha256:94707205efbe809dfa3a0d09c08bef1352f5d3d6612a506f10a319933757c006 \ - --hash=sha256:95720bae002ac357202e0d866128eb1ac82545bcf0b549b9abe91b5178d9b541 \ - --hash=sha256:9b04d96188d365151d1af41fa2d23257b674e7ead68cfd61c725a422764062ae \ - --hash=sha256:9d0fba61846f294bce41eb44d60d58136090ea2b5b99efd21cbdf4e21927c56a \ - --hash=sha256:9ebafa0b96c62881d5cafa02d9da2e44c23f9f0cd829f3a32a6aff771449c996 \ - --hash=sha256:a0fac7be202596c7126146660725157d4813aa29a4cc990fe51346f75ff8fde7 \ - --hash=sha256:aea15c78e0d9ad6555ed201344ae36db5c63d428818b4b2a42842b3870127c00 \ - --hash=sha256:b10c2956efcecb981bf9cfb8184d27d5d64b9033f917115a960b83f11bfa0d6b \ - --hash=sha256:b16696f10e59d7580979b420eedf6650010a4a9c3bd8113f24a103dfdb770b10 \ - --hash=sha256:d8c36fdf3e02cec92aed2d44f63565ad1522a499c654f07935c8f9d04db69e95 \ - --hash=sha256:e237f9c1e8a00e7d9ddaa288e535dc337a39bcbf679f290aee9d26df9e72bce9 \ - --hash=sha256:e50289c101495e0d1bb0bfcb4a60adde56e32f4449a67216a1ab2750aa84f037 \ - --hash=sha256:e7d61fe8e8d9335fac1bf8d5d82820b4808dd7a43020c149b63a1ada953d48a6 \ - --hash=sha256:e97152983442b499d7a71e44f29baa75b3b02e65d9c44ba53b10338e98dedb66 \ - --hash=sha256:f0e94b221295b5e69de57a1bd4aeb0b3a29f61be6e1b478bb8a69a73377db7ba \ - --hash=sha256:fee6044b64c965c425b65a4e17719953b96e065c5b7e09b599ff332bb2744bdf -viv-utils[flirt]==0.7.11 ; python_version >= "3.10" and python_version < "4.0" \ - --hash=sha256:ab44c428315e014a52dbbaab6862876dd0cd2c930a138a1655c1c5be7270d81b +uvloop==0.21.0 ; python_version >= "3.10" and python_version < "4.0" and sys_platform != "win32" and sys_platform != "cygwin" and platform_python_implementation != "PyPy" \ + --hash=sha256:0878c2640cf341b269b7e128b1a5fed890adc4455513ca710d77d5e93aa6d6a0 \ + --hash=sha256:10d66943def5fcb6e7b37310eb6b5639fd2ccbc38df1177262b0640c3ca68c1f \ + --hash=sha256:10da8046cc4a8f12c91a1c39d1dd1585c41162a15caaef165c2174db9ef18bdc \ + --hash=sha256:17df489689befc72c39a08359efac29bbee8eee5209650d4b9f34df73d22e414 \ + 
--hash=sha256:183aef7c8730e54c9a3ee3227464daed66e37ba13040bb3f350bc2ddc040f22f \ + --hash=sha256:196274f2adb9689a289ad7d65700d37df0c0930fd8e4e743fa4834e850d7719d \ + --hash=sha256:221f4f2a1f46032b403bf3be628011caf75428ee3cc204a22addf96f586b19fd \ + --hash=sha256:2d1f581393673ce119355d56da84fe1dd9d2bb8b3d13ce792524e1607139feff \ + --hash=sha256:359ec2c888397b9e592a889c4d72ba3d6befba8b2bb01743f72fffbde663b59c \ + --hash=sha256:3bf12b0fda68447806a7ad847bfa591613177275d35b6724b1ee573faa3704e3 \ + --hash=sha256:4509360fcc4c3bd2c70d87573ad472de40c13387f5fda8cb58350a1d7475e58d \ + --hash=sha256:460def4412e473896ef179a1671b40c039c7012184b627898eea5072ef6f017a \ + --hash=sha256:461d9ae6660fbbafedd07559c6a2e57cd553b34b0065b6550685f6653a98c1cb \ + --hash=sha256:46923b0b5ee7fc0020bef24afe7836cb068f5050ca04caf6b487c513dc1a20b2 \ + --hash=sha256:53e420a3afe22cdcf2a0f4846e377d16e718bc70103d7088a4f7623567ba5fb0 \ + --hash=sha256:5ee4d4ef48036ff6e5cfffb09dd192c7a5027153948d85b8da7ff705065bacc6 \ + --hash=sha256:67dd654b8ca23aed0a8e99010b4c34aca62f4b7fce88f39d452ed7622c94845c \ + --hash=sha256:787ae31ad8a2856fc4e7c095341cccc7209bd657d0e71ad0dc2ea83c4a6fa8af \ + --hash=sha256:86975dca1c773a2c9864f4c52c5a55631038e387b47eaf56210f873887b6c8dc \ + --hash=sha256:87c43e0f13022b998eb9b973b5e97200c8b90823454d4bc06ab33829e09fb9bb \ + --hash=sha256:88cb67cdbc0e483da00af0b2c3cdad4b7c61ceb1ee0f33fe00e09c81e3a6cb75 \ + --hash=sha256:8a375441696e2eda1c43c44ccb66e04d61ceeffcd76e4929e527b7fa401b90fb \ + --hash=sha256:a5c39f217ab3c663dc699c04cbd50c13813e31d917642d459fdcec07555cc553 \ + --hash=sha256:b9fb766bb57b7388745d8bcc53a359b116b8a04c83a2288069809d2b3466c37e \ + --hash=sha256:baa0e6291d91649c6ba4ed4b2f982f9fa165b5bbd50a9e203c416a2797bab3c6 \ + --hash=sha256:baa4dcdbd9ae0a372f2167a207cd98c9f9a1ea1188a8a526431eef2f8116cc8d \ + --hash=sha256:bc09f0ff191e61c2d592a752423c767b4ebb2986daa9ed62908e2b1b9a9ae206 \ + --hash=sha256:bd53ecc9a0f3d87ab847503c2e1552b690362e005ab54e8a48ba97da3924c0dc \ + --hash=sha256:bfd55dfcc2a512316e65f16e503e9e450cab148ef11df4e4e679b5e8253a5281 \ + --hash=sha256:c097078b8031190c934ed0ebfee8cc5f9ba9642e6eb88322b9958b649750f72b \ + --hash=sha256:c0f3fa6200b3108919f8bdabb9a7f87f20e7097ea3c543754cabc7d717d95cf8 \ + --hash=sha256:e678ad6fe52af2c58d2ae3c73dc85524ba8abe637f134bf3564ed07f555c5e79 \ + --hash=sha256:ec7e6b09a6fdded42403182ab6b832b71f4edaf7f37a9a0e371a01db5f0cb45f \ + --hash=sha256:f0ce1b49560b1d2d8a2977e3ba4afb2414fb46b86a1b64056bc4ab929efdafbe \ + --hash=sha256:f38b2e090258d051d68a5b14d1da7203a3c3677321cf32a95a6f4db4dd8b6f26 \ + --hash=sha256:f3df876acd7ec037a3d005b3ab85a7e4110422e4d9c1571d4fc89b0fc41b6816 \ + --hash=sha256:f7089d2dc73179ce5ac255bdf37c236a9f914b264825fdaacaded6990a7fb4c2 +viv-utils==0.8.0 ; python_version >= "3.10" and python_version < "4.0" \ + --hash=sha256:4fd2d8849529d00dc5b7f736f581beeb79064ee094eb2dfd9f84877bb5a95c12 vivisect==1.2.1 ; python_version >= "3.10" and python_version < "4.0" \ --hash=sha256:62eb383013318efcd42f2565e4ab5323ebb1fb6b2e8e9e7e432bd126743eccda \ --hash=sha256:cc15ab541b9be3cad8060ee4f420e680258fa4fcf7477f3f9ad5023353f73299 -watchfiles==0.24.0 ; python_version >= "3.10" and python_version < "4.0" \ - --hash=sha256:01550ccf1d0aed6ea375ef259706af76ad009ef5b0203a3a4cce0f6024f9b68a \ - --hash=sha256:01def80eb62bd5db99a798d5e1f5f940ca0a05986dcfae21d833af7a46f7ee22 \ - --hash=sha256:07cdef0c84c03375f4e24642ef8d8178e533596b229d32d2bbd69e5128ede02a \ - --hash=sha256:083dc77dbdeef09fa44bb0f4d1df571d2e12d8a8f985dccde71ac3ac9ac067a0 \ - 
--hash=sha256:1cf1f6dd7825053f3d98f6d33f6464ebdd9ee95acd74ba2c34e183086900a827 \ - --hash=sha256:21ab23fdc1208086d99ad3f69c231ba265628014d4aed31d4e8746bd59e88cd1 \ - --hash=sha256:2dadf8a8014fde6addfd3c379e6ed1a981c8f0a48292d662e27cabfe4239c83c \ - --hash=sha256:2e28d91ef48eab0afb939fa446d8ebe77e2f7593f5f463fd2bb2b14132f95b6e \ - --hash=sha256:2efec17819b0046dde35d13fb8ac7a3ad877af41ae4640f4109d9154ed30a188 \ - --hash=sha256:30bbd525c3262fd9f4b1865cb8d88e21161366561cd7c9e1194819e0a33ea86b \ - --hash=sha256:316449aefacf40147a9efaf3bd7c9bdd35aaba9ac5d708bd1eb5763c9a02bef5 \ - --hash=sha256:327763da824817b38ad125dcd97595f942d720d32d879f6c4ddf843e3da3fe90 \ - --hash=sha256:32aa53a9a63b7f01ed32e316e354e81e9da0e6267435c7243bf8ae0f10b428ef \ - --hash=sha256:34e19e56d68b0dad5cff62273107cf5d9fbaf9d75c46277aa5d803b3ef8a9e9b \ - --hash=sha256:3770e260b18e7f4e576edca4c0a639f704088602e0bc921c5c2e721e3acb8d15 \ - --hash=sha256:3d2e3ab79a1771c530233cadfd277fcc762656d50836c77abb2e5e72b88e3a48 \ - --hash=sha256:41face41f036fee09eba33a5b53a73e9a43d5cb2c53dad8e61fa6c9f91b5a51e \ - --hash=sha256:43e3e37c15a8b6fe00c1bce2473cfa8eb3484bbeecf3aefbf259227e487a03df \ - --hash=sha256:449f43f49c8ddca87c6b3980c9284cab6bd1f5c9d9a2b00012adaaccd5e7decd \ - --hash=sha256:4933a508d2f78099162da473841c652ad0de892719043d3f07cc83b33dfd9d91 \ - --hash=sha256:49d617df841a63b4445790a254013aea2120357ccacbed00253f9c2b5dc24e2d \ - --hash=sha256:49fb58bcaa343fedc6a9e91f90195b20ccb3135447dc9e4e2570c3a39565853e \ - --hash=sha256:4a7fa2bc0efef3e209a8199fd111b8969fe9db9c711acc46636686331eda7dd4 \ - --hash=sha256:4abf4ad269856618f82dee296ac66b0cd1d71450fc3c98532d93798e73399b7a \ - --hash=sha256:4b8693502d1967b00f2fb82fc1e744df128ba22f530e15b763c8d82baee15370 \ - --hash=sha256:4d28cea3c976499475f5b7a2fec6b3a36208656963c1a856d328aeae056fc5c1 \ - --hash=sha256:5148c2f1ea043db13ce9b0c28456e18ecc8f14f41325aa624314095b6aa2e9ea \ - --hash=sha256:54ca90a9ae6597ae6dc00e7ed0a040ef723f84ec517d3e7ce13e63e4bc82fa04 \ - --hash=sha256:551ec3ee2a3ac9cbcf48a4ec76e42c2ef938a7e905a35b42a1267fa4b1645896 \ - --hash=sha256:5c51749f3e4e269231510da426ce4a44beb98db2dce9097225c338f815b05d4f \ - --hash=sha256:632676574429bee8c26be8af52af20e0c718cc7f5f67f3fb658c71928ccd4f7f \ - --hash=sha256:6509ed3f467b79d95fc62a98229f79b1a60d1b93f101e1c61d10c95a46a84f43 \ - --hash=sha256:6bdcfa3cd6fdbdd1a068a52820f46a815401cbc2cb187dd006cb076675e7b735 \ - --hash=sha256:7138eff8baa883aeaa074359daabb8b6c1e73ffe69d5accdc907d62e50b1c0da \ - --hash=sha256:7211b463695d1e995ca3feb38b69227e46dbd03947172585ecb0588f19b0d87a \ - --hash=sha256:73bde715f940bea845a95247ea3e5eb17769ba1010efdc938ffcb967c634fa61 \ - --hash=sha256:78470906a6be5199524641f538bd2c56bb809cd4bf29a566a75051610bc982c3 \ - --hash=sha256:7ae3e208b31be8ce7f4c2c0034f33406dd24fbce3467f77223d10cd86778471c \ - --hash=sha256:7e4bd963a935aaf40b625c2499f3f4f6bbd0c3776f6d3bc7c853d04824ff1c9f \ - --hash=sha256:82ae557a8c037c42a6ef26c494d0631cacca040934b101d001100ed93d43f361 \ - --hash=sha256:82b2509f08761f29a0fdad35f7e1638b8ab1adfa2666d41b794090361fb8b855 \ - --hash=sha256:8360f7314a070c30e4c976b183d1d8d1585a4a50c5cb603f431cebcbb4f66327 \ - --hash=sha256:85d5f0c7771dcc7a26c7a27145059b6bb0ce06e4e751ed76cdf123d7039b60b5 \ - --hash=sha256:88bcd4d0fe1d8ff43675360a72def210ebad3f3f72cabfeac08d825d2639b4ab \ - --hash=sha256:9301c689051a4857d5b10777da23fafb8e8e921bcf3abe6448a058d27fb67633 \ - --hash=sha256:951088d12d339690a92cef2ec5d3cfd957692834c72ffd570ea76a6790222777 \ - 
--hash=sha256:95cf3b95ea665ab03f5a54765fa41abf0529dbaf372c3b83d91ad2cfa695779b \ - --hash=sha256:96619302d4374de5e2345b2b622dc481257a99431277662c30f606f3e22f42be \ - --hash=sha256:999928c6434372fde16c8f27143d3e97201160b48a614071261701615a2a156f \ - --hash=sha256:9a60e2bf9dc6afe7f743e7c9b149d1fdd6dbf35153c78fe3a14ae1a9aee3d98b \ - --hash=sha256:9f895d785eb6164678ff4bb5cc60c5996b3ee6df3edb28dcdeba86a13ea0465e \ - --hash=sha256:a2a9891723a735d3e2540651184be6fd5b96880c08ffe1a98bae5017e65b544b \ - --hash=sha256:a974231b4fdd1bb7f62064a0565a6b107d27d21d9acb50c484d2cdba515b9366 \ - --hash=sha256:aa0fd7248cf533c259e59dc593a60973a73e881162b1a2f73360547132742823 \ - --hash=sha256:acbfa31e315a8f14fe33e3542cbcafc55703b8f5dcbb7c1eecd30f141df50db3 \ - --hash=sha256:afb72325b74fa7a428c009c1b8be4b4d7c2afedafb2982827ef2156646df2fe1 \ - --hash=sha256:b3ef2c69c655db63deb96b3c3e587084612f9b1fa983df5e0c3379d41307467f \ - --hash=sha256:b52a65e4ea43c6d149c5f8ddb0bef8d4a1e779b77591a458a893eb416624a418 \ - --hash=sha256:b665caeeda58625c3946ad7308fbd88a086ee51ccb706307e5b1fa91556ac886 \ - --hash=sha256:b74fdffce9dfcf2dc296dec8743e5b0332d15df19ae464f0e249aa871fc1c571 \ - --hash=sha256:b995bfa6bf01a9e09b884077a6d37070464b529d8682d7691c2d3b540d357a0c \ - --hash=sha256:bd82010f8ab451dabe36054a1622870166a67cf3fce894f68895db6f74bbdc94 \ - --hash=sha256:bdcd5538e27f188dd3c804b4a8d5f52a7fc7f87e7fd6b374b8e36a4ca03db428 \ - --hash=sha256:c79d7719d027b7a42817c5d96461a99b6a49979c143839fc37aa5748c322f234 \ - --hash=sha256:cdab9555053399318b953a1fe1f586e945bc8d635ce9d05e617fd9fe3a4687d6 \ - --hash=sha256:ce72dba6a20e39a0c628258b5c308779b8697f7676c254a845715e2a1039b968 \ - --hash=sha256:d337193bbf3e45171c8025e291530fb7548a93c45253897cd764a6a71c937ed9 \ - --hash=sha256:d3dcb774e3568477275cc76554b5a565024b8ba3a0322f77c246bc7111c5bb9c \ - --hash=sha256:d64ba08db72e5dfd5c33be1e1e687d5e4fcce09219e8aee893a4862034081d4e \ - --hash=sha256:d7a2e3b7f5703ffbd500dabdefcbc9eafeff4b9444bbdd5d83d79eedf8428fab \ - --hash=sha256:d831ee0a50946d24a53821819b2327d5751b0c938b12c0653ea5be7dea9c82ec \ - --hash=sha256:d9018153cf57fc302a2a34cb7564870b859ed9a732d16b41a9b5cb2ebed2d444 \ - --hash=sha256:e5171ef898299c657685306d8e1478a45e9303ddcd8ac5fed5bd52ad4ae0b69b \ - --hash=sha256:e94e98c7cb94cfa6e071d401ea3342767f28eb5a06a58fafdc0d2a4974f4f35c \ - --hash=sha256:ec39698c45b11d9694a1b635a70946a5bad066b593af863460a8e600f0dff1ca \ - --hash=sha256:ed9aba6e01ff6f2e8285e5aa4154e2970068fe0fc0998c4380d0e6278222269b \ - --hash=sha256:edf71b01dec9f766fb285b73930f95f730bb0943500ba0566ae234b5c1618c18 \ - --hash=sha256:ee82c98bed9d97cd2f53bdb035e619309a098ea53ce525833e26b93f673bc318 \ - --hash=sha256:f4c96283fca3ee09fb044f02156d9570d156698bc3734252175a38f0e8975f07 \ - --hash=sha256:f7d9b87c4c55e3ea8881dfcbf6d61ea6775fffed1fedffaa60bd047d3c08c430 \ - --hash=sha256:f83df90191d67af5a831da3a33dd7628b02a95450e168785586ed51e6d28943c \ - --hash=sha256:fca9433a45f18b7c779d2bae7beeec4f740d28b788b117a48368d95a3233ed83 \ - --hash=sha256:fd92bbaa2ecdb7864b7600dcdb6f2f1db6e0346ed425fbd01085be04c63f0b05 +watchfiles==1.0.4 ; python_version >= "3.10" and python_version < "4.0" \ + --hash=sha256:02a526ee5b5a09e8168314c905fc545c9bc46509896ed282aeb5a8ba9bd6ca27 \ + --hash=sha256:05d341c71f3d7098920f8551d4df47f7b57ac5b8dad56558064c3431bdfc0b74 \ + --hash=sha256:076f293100db3b0b634514aa0d294b941daa85fc777f9c698adb1009e5aca0b1 \ + --hash=sha256:0799ae68dfa95136dde7c472525700bd48777875a4abb2ee454e3ab18e9fc712 \ + --hash=sha256:0986902677a1a5e6212d0c49b319aad9cc48da4bd967f86a11bde96ad9676ca1 
\ + --hash=sha256:0bc80d91ddaf95f70258cf78c471246846c1986bcc5fd33ccc4a1a67fcb40f9a \ + --hash=sha256:13c2ce7b72026cfbca120d652f02c7750f33b4c9395d79c9790b27f014c8a5a2 \ + --hash=sha256:1941b4e39de9b38b868a69b911df5e89dc43767feeda667b40ae032522b9b5f1 \ + --hash=sha256:1eacd91daeb5158c598fe22d7ce66d60878b6294a86477a4715154990394c9b3 \ + --hash=sha256:229e6ec880eca20e0ba2f7e2249c85bae1999d330161f45c78d160832e026ee2 \ + --hash=sha256:22bb55a7c9e564e763ea06c7acea24fc5d2ee5dfc5dafc5cfbedfe58505e9f90 \ + --hash=sha256:278aaa395f405972e9f523bd786ed59dfb61e4b827856be46a42130605fd0899 \ + --hash=sha256:2a9f93f8439639dc244c4d2902abe35b0279102bca7bbcf119af964f51d53c19 \ + --hash=sha256:308ac265c56f936636e3b0e3f59e059a40003c655228c131e1ad439957592303 \ + --hash=sha256:31f1a379c9dcbb3f09cf6be1b7e83b67c0e9faabed0471556d9438a4a4e14202 \ + --hash=sha256:32b026a6ab64245b584acf4931fe21842374da82372d5c039cba6bf99ef722f3 \ + --hash=sha256:342622287b5604ddf0ed2d085f3a589099c9ae8b7331df3ae9845571586c4f3d \ + --hash=sha256:39f4914548b818540ef21fd22447a63e7be6e24b43a70f7642d21f1e73371590 \ + --hash=sha256:3f68d8e9d5a321163ddacebe97091000955a1b74cd43724e346056030b0bacee \ + --hash=sha256:43b168bba889886b62edb0397cab5b6490ffb656ee2fcb22dec8bfeb371a9e12 \ + --hash=sha256:47eb32ef8c729dbc4f4273baece89398a4d4b5d21a1493efea77a17059f4df8a \ + --hash=sha256:4810ea2ae622add560f4aa50c92fef975e475f7ac4900ce5ff5547b2434642d8 \ + --hash=sha256:4e997802d78cdb02623b5941830ab06f8860038faf344f0d288d325cc9c5d2ff \ + --hash=sha256:4ebbeca9360c830766b9f0df3640b791be569d988f4be6c06d6fae41f187f105 \ + --hash=sha256:4f8c4998506241dedf59613082d1c18b836e26ef2a4caecad0ec41e2a15e4226 \ + --hash=sha256:55ccfd27c497b228581e2838d4386301227fc0cb47f5a12923ec2fe4f97b95af \ + --hash=sha256:5717021b199e8353782dce03bd8a8f64438832b84e2885c4a645f9723bf656d9 \ + --hash=sha256:5c11ea22304d17d4385067588123658e9f23159225a27b983f343fcffc3e796a \ + --hash=sha256:5e0227b8ed9074c6172cf55d85b5670199c99ab11fd27d2c473aa30aec67ee42 \ + --hash=sha256:62c9953cf85529c05b24705639ffa390f78c26449e15ec34d5339e8108c7c407 \ + --hash=sha256:6ba473efd11062d73e4f00c2b730255f9c1bdd73cd5f9fe5b5da8dbd4a717205 \ + --hash=sha256:740d103cd01458f22462dedeb5a3382b7f2c57d07ff033fbc9465919e5e1d0f3 \ + --hash=sha256:74cb3ca19a740be4caa18f238298b9d472c850f7b2ed89f396c00a4c97e2d9ff \ + --hash=sha256:7b75fee5a16826cf5c46fe1c63116e4a156924d668c38b013e6276f2582230f0 \ + --hash=sha256:7cf684aa9bba4cd95ecb62c822a56de54e3ae0598c1a7f2065d51e24637a3c5d \ + --hash=sha256:8012bd820c380c3d3db8435e8cf7592260257b378b649154a7948a663b5f84e9 \ + --hash=sha256:857f5fc3aa027ff5e57047da93f96e908a35fe602d24f5e5d8ce64bf1f2fc733 \ + --hash=sha256:8b1f135238e75d075359cf506b27bf3f4ca12029c47d3e769d8593a2024ce161 \ + --hash=sha256:8d0d0630930f5cd5af929040e0778cf676a46775753e442a3f60511f2409f48f \ + --hash=sha256:90192cdc15ab7254caa7765a98132a5a41471cf739513cc9bcf7d2ffcc0ec7b2 \ + --hash=sha256:95b42cac65beae3a362629950c444077d1b44f1790ea2772beaea95451c086bb \ + --hash=sha256:9745a4210b59e218ce64c91deb599ae8775c8a9da4e95fb2ee6fe745fc87d01a \ + --hash=sha256:9d1ef56b56ed7e8f312c934436dea93bfa3e7368adfcf3df4c0da6d4de959a1e \ + --hash=sha256:9eea33ad8c418847dd296e61eb683cae1c63329b6d854aefcd412e12d94ee235 \ + --hash=sha256:9f25d0ba0fe2b6d2c921cf587b2bf4c451860086534f40c384329fb96e2044d1 \ + --hash=sha256:9fe37a2de80aa785d340f2980276b17ef697ab8db6019b07ee4fd28a8359d2f3 \ + --hash=sha256:a38320582736922be8c865d46520c043bff350956dfc9fbaee3b2df4e1740a4b \ + 
--hash=sha256:a462490e75e466edbb9fc4cd679b62187153b3ba804868452ef0577ec958f5ff \ + --hash=sha256:a5ae5706058b27c74bac987d615105da17724172d5aaacc6c362a40599b6de43 \ + --hash=sha256:aa216f87594f951c17511efe5912808dfcc4befa464ab17c98d387830ce07b60 \ + --hash=sha256:ab0311bb2ffcd9f74b6c9de2dda1612c13c84b996d032cd74799adb656af4e8b \ + --hash=sha256:ab594e75644421ae0a2484554832ca5895f8cab5ab62de30a1a57db460ce06c6 \ + --hash=sha256:aee397456a29b492c20fda2d8961e1ffb266223625346ace14e4b6d861ba9c80 \ + --hash=sha256:b045c800d55bc7e2cadd47f45a97c7b29f70f08a7c2fa13241905010a5493f94 \ + --hash=sha256:b77d5622ac5cc91d21ae9c2b284b5d5c51085a0bdb7b518dba263d0af006132c \ + --hash=sha256:ba5bb3073d9db37c64520681dd2650f8bd40902d991e7b4cfaeece3e32561d08 \ + --hash=sha256:bdef5a1be32d0b07dcea3318a0be95d42c98ece24177820226b56276e06b63b0 \ + --hash=sha256:c2acfa49dd0ad0bf2a9c0bb9a985af02e89345a7189be1efc6baa085e0f72d7c \ + --hash=sha256:c7cce76c138a91e720d1df54014a047e680b652336e1b73b8e3ff3158e05061e \ + --hash=sha256:cc27a65069bcabac4552f34fd2dce923ce3fcde0721a16e4fb1b466d63ec831f \ + --hash=sha256:cdbd912a61543a36aef85e34f212e5d2486e7c53ebfdb70d1e0b060cc50dd0bf \ + --hash=sha256:cdcc92daeae268de1acf5b7befcd6cfffd9a047098199056c72e4623f531de18 \ + --hash=sha256:d3452c1ec703aa1c61e15dfe9d482543e4145e7c45a6b8566978fbb044265a21 \ + --hash=sha256:d6097538b0ae5c1b88c3b55afa245a66793a8fec7ada6755322e465fb1a0e8cc \ + --hash=sha256:d8d3d9203705b5797f0af7e7e5baa17c8588030aaadb7f6a86107b7247303817 \ + --hash=sha256:e0611d244ce94d83f5b9aff441ad196c6e21b55f77f3c47608dcf651efe54c4a \ + --hash=sha256:f12969a3765909cf5dc1e50b2436eb2c0e676a3c75773ab8cc3aa6175c16e902 \ + --hash=sha256:f44a39aee3cbb9b825285ff979ab887a25c5d336e5ec3574f1506a4671556a8d \ + --hash=sha256:f9ce064e81fe79faa925ff03b9f4c1a98b0bbb4a1b8c1b015afa93030cb21a49 \ + --hash=sha256:fb2c46e275fbb9f0c92e7654b231543c7bbfa1df07cdc4b99fa73bedfde5c844 \ + --hash=sha256:fc2eb5d14a8e0d5df7b36288979176fbb39672d45184fc4b1c004d7c3ce29317 wcwidth==0.2.13 ; python_version >= "3.10" and python_version < "4.0" \ --hash=sha256:3da69048e4540d84af32131829ff948f1e022c1c6bdb8d6102117aac784f6859 \ --hash=sha256:72ea0c06399eb286d978fdedb6923a9eb47e1c486ce63e9b4e64fc18303972b5 -websockets==13.1 ; python_version >= "3.10" and python_version < "4.0" \ - --hash=sha256:004280a140f220c812e65f36944a9ca92d766b6cc4560be652a0a3883a79ed8a \ - --hash=sha256:035233b7531fb92a76beefcbf479504db8c72eb3bff41da55aecce3a0f729e54 \ - --hash=sha256:149e622dc48c10ccc3d2760e5f36753db9cacf3ad7bc7bbbfd7d9c819e286f23 \ - --hash=sha256:163e7277e1a0bd9fb3c8842a71661ad19c6aa7bb3d6678dc7f89b17fbcc4aeb7 \ - --hash=sha256:18503d2c5f3943e93819238bf20df71982d193f73dcecd26c94514f417f6b135 \ - --hash=sha256:1971e62d2caa443e57588e1d82d15f663b29ff9dfe7446d9964a4b6f12c1e700 \ - --hash=sha256:204e5107f43095012b00f1451374693267adbb832d29966a01ecc4ce1db26faf \ - --hash=sha256:2510c09d8e8df777177ee3d40cd35450dc169a81e747455cc4197e63f7e7bfe5 \ - --hash=sha256:25c35bf84bf7c7369d247f0b8cfa157f989862c49104c5cf85cb5436a641d93e \ - --hash=sha256:2f85cf4f2a1ba8f602298a853cec8526c2ca42a9a4b947ec236eaedb8f2dc80c \ - --hash=sha256:308e20f22c2c77f3f39caca508e765f8725020b84aa963474e18c59accbf4c02 \ - --hash=sha256:325b1ccdbf5e5725fdcb1b0e9ad4d2545056479d0eee392c291c1bf76206435a \ - --hash=sha256:327b74e915cf13c5931334c61e1a41040e365d380f812513a255aa804b183418 \ - --hash=sha256:346bee67a65f189e0e33f520f253d5147ab76ae42493804319b5716e46dddf0f \ - --hash=sha256:38377f8b0cdeee97c552d20cf1865695fcd56aba155ad1b4ca8779a5b6ef4ac3 \ - 
--hash=sha256:3c78383585f47ccb0fcf186dcb8a43f5438bd7d8f47d69e0b56f71bf431a0a68 \ - --hash=sha256:4059f790b6ae8768471cddb65d3c4fe4792b0ab48e154c9f0a04cefaabcd5978 \ - --hash=sha256:459bf774c754c35dbb487360b12c5727adab887f1622b8aed5755880a21c4a20 \ - --hash=sha256:463e1c6ec853202dd3657f156123d6b4dad0c546ea2e2e38be2b3f7c5b8e7295 \ - --hash=sha256:4676df3fe46956fbb0437d8800cd5f2b6d41143b6e7e842e60554398432cf29b \ - --hash=sha256:485307243237328c022bc908b90e4457d0daa8b5cf4b3723fd3c4a8012fce4c6 \ - --hash=sha256:48a2ef1381632a2f0cb4efeff34efa97901c9fbc118e01951ad7cfc10601a9bb \ - --hash=sha256:4b889dbd1342820cc210ba44307cf75ae5f2f96226c0038094455a96e64fb07a \ - --hash=sha256:586a356928692c1fed0eca68b4d1c2cbbd1ca2acf2ac7e7ebd3b9052582deefa \ - --hash=sha256:58cf7e75dbf7e566088b07e36ea2e3e2bd5676e22216e4cad108d4df4a7402a0 \ - --hash=sha256:5993260f483d05a9737073be197371940c01b257cc45ae3f1d5d7adb371b266a \ - --hash=sha256:5dd6da9bec02735931fccec99d97c29f47cc61f644264eb995ad6c0c27667238 \ - --hash=sha256:5f2e75431f8dc4a47f31565a6e1355fb4f2ecaa99d6b89737527ea917066e26c \ - --hash=sha256:5f9fee94ebafbc3117c30be1844ed01a3b177bb6e39088bc6b2fa1dc15572084 \ - --hash=sha256:61fc0dfcda609cda0fc9fe7977694c0c59cf9d749fbb17f4e9483929e3c48a19 \ - --hash=sha256:624459daabeb310d3815b276c1adef475b3e6804abaf2d9d2c061c319f7f187d \ - --hash=sha256:62d516c325e6540e8a57b94abefc3459d7dab8ce52ac75c96cad5549e187e3a7 \ - --hash=sha256:6548f29b0e401eea2b967b2fdc1c7c7b5ebb3eeb470ed23a54cd45ef078a0db9 \ - --hash=sha256:6d2aad13a200e5934f5a6767492fb07151e1de1d6079c003ab31e1823733ae79 \ - --hash=sha256:6d6855bbe70119872c05107e38fbc7f96b1d8cb047d95c2c50869a46c65a8e96 \ - --hash=sha256:70c5be9f416aa72aab7a2a76c90ae0a4fe2755c1816c153c1a2bcc3333ce4ce6 \ - --hash=sha256:730f42125ccb14602f455155084f978bd9e8e57e89b569b4d7f0f0c17a448ffe \ - --hash=sha256:7a43cfdcddd07f4ca2b1afb459824dd3c6d53a51410636a2c7fc97b9a8cf4842 \ - --hash=sha256:7bd6abf1e070a6b72bfeb71049d6ad286852e285f146682bf30d0296f5fbadfa \ - --hash=sha256:7c1e90228c2f5cdde263253fa5db63e6653f1c00e7ec64108065a0b9713fa1b3 \ - --hash=sha256:7c65ffa900e7cc958cd088b9a9157a8141c991f8c53d11087e6fb7277a03f81d \ - --hash=sha256:80c421e07973a89fbdd93e6f2003c17d20b69010458d3a8e37fb47874bd67d51 \ - --hash=sha256:82d0ba76371769d6a4e56f7e83bb8e81846d17a6190971e38b5de108bde9b0d7 \ - --hash=sha256:83f91d8a9bb404b8c2c41a707ac7f7f75b9442a0a876df295de27251a856ad09 \ - --hash=sha256:87c6e35319b46b99e168eb98472d6c7d8634ee37750d7693656dc766395df096 \ - --hash=sha256:8d23b88b9388ed85c6faf0e74d8dec4f4d3baf3ecf20a65a47b836d56260d4b9 \ - --hash=sha256:9156c45750b37337f7b0b00e6248991a047be4aa44554c9886fe6bdd605aab3b \ - --hash=sha256:91a0fa841646320ec0d3accdff5b757b06e2e5c86ba32af2e0815c96c7a603c5 \ - --hash=sha256:95858ca14a9f6fa8413d29e0a585b31b278388aa775b8a81fa24830123874678 \ - --hash=sha256:95df24ca1e1bd93bbca51d94dd049a984609687cb2fb08a7f2c56ac84e9816ea \ - --hash=sha256:9b37c184f8b976f0c0a231a5f3d6efe10807d41ccbe4488df8c74174805eea7d \ - --hash=sha256:9b6f347deb3dcfbfde1c20baa21c2ac0751afaa73e64e5b693bb2b848efeaa49 \ - --hash=sha256:9d75baf00138f80b48f1eac72ad1535aac0b6461265a0bcad391fc5aba875cfc \ - --hash=sha256:9ef8aa8bdbac47f4968a5d66462a2a0935d044bf35c0e5a8af152d58516dbeb5 \ - --hash=sha256:a11e38ad8922c7961447f35c7b17bffa15de4d17c70abd07bfbe12d6faa3e027 \ - --hash=sha256:a1b54689e38d1279a51d11e3467dd2f3a50f5f2e879012ce8f2d6943f00e83f0 \ - --hash=sha256:a3b3366087c1bc0a2795111edcadddb8b3b59509d5db5d7ea3fdd69f954a8878 \ - 
--hash=sha256:a569eb1b05d72f9bce2ebd28a1ce2054311b66677fcd46cf36204ad23acead8c \ - --hash=sha256:a7affedeb43a70351bb811dadf49493c9cfd1ed94c9c70095fd177e9cc1541fa \ - --hash=sha256:a9a396a6ad26130cdae92ae10c36af09d9bfe6cafe69670fd3b6da9b07b4044f \ - --hash=sha256:a9ab1e71d3d2e54a0aa646ab6d4eebfaa5f416fe78dfe4da2839525dc5d765c6 \ - --hash=sha256:a9cd1af7e18e5221d2878378fbc287a14cd527fdd5939ed56a18df8a31136bb2 \ - --hash=sha256:a9dcaf8b0cc72a392760bb8755922c03e17a5a54e08cca58e8b74f6902b433cf \ - --hash=sha256:b9d7439d7fab4dce00570bb906875734df13d9faa4b48e261c440a5fec6d9708 \ - --hash=sha256:bcc03c8b72267e97b49149e4863d57c2d77f13fae12066622dc78fe322490fe6 \ - --hash=sha256:c11d4d16e133f6df8916cc5b7e3e96ee4c44c936717d684a94f48f82edb7c92f \ - --hash=sha256:c1dca61c6db1166c48b95198c0b7d9c990b30c756fc2923cc66f68d17dc558fd \ - --hash=sha256:c518e84bb59c2baae725accd355c8dc517b4a3ed8db88b4bc93c78dae2974bf2 \ - --hash=sha256:c7934fd0e920e70468e676fe7f1b7261c1efa0d6c037c6722278ca0228ad9d0d \ - --hash=sha256:c7e72ce6bda6fb9409cc1e8164dd41d7c91466fb599eb047cfda72fe758a34a7 \ - --hash=sha256:c90d6dec6be2c7d03378a574de87af9b1efea77d0c52a8301dd831ece938452f \ - --hash=sha256:ceec59f59d092c5007e815def4ebb80c2de330e9588e101cf8bd94c143ec78a5 \ - --hash=sha256:cf1781ef73c073e6b0f90af841aaf98501f975d306bbf6221683dd594ccc52b6 \ - --hash=sha256:d04f13a1d75cb2b8382bdc16ae6fa58c97337253826dfe136195b7f89f661557 \ - --hash=sha256:d6d300f8ec35c24025ceb9b9019ae9040c1ab2f01cddc2bcc0b518af31c75c14 \ - --hash=sha256:d8dbb1bf0c0a4ae8b40bdc9be7f644e2f3fb4e8a9aca7145bfa510d4a374eeb7 \ - --hash=sha256:de58647e3f9c42f13f90ac7e5f58900c80a39019848c5547bc691693098ae1bd \ - --hash=sha256:deeb929efe52bed518f6eb2ddc00cc496366a14c726005726ad62c2dd9017a3c \ - --hash=sha256:df01aea34b6e9e33572c35cd16bae5a47785e7d5c8cb2b54b2acdb9678315a17 \ - --hash=sha256:e2620453c075abeb0daa949a292e19f56de518988e079c36478bacf9546ced23 \ - --hash=sha256:e4450fc83a3df53dec45922b576e91e94f5578d06436871dce3a6be38e40f5db \ - --hash=sha256:e54affdeb21026329fb0744ad187cf812f7d3c2aa702a5edb562b325191fcab6 \ - --hash=sha256:e9875a0143f07d74dc5e1ded1c4581f0d9f7ab86c78994e2ed9e95050073c94d \ - --hash=sha256:f1c3cf67185543730888b20682fb186fc8d0fa6f07ccc3ef4390831ab4b388d9 \ - --hash=sha256:f48c749857f8fb598fb890a75f540e3221d0976ed0bf879cf3c7eef34151acee \ - --hash=sha256:f779498eeec470295a2b1a5d97aa1bc9814ecd25e1eb637bd9d1c73a327387f6 -werkzeug==3.0.6 ; python_version >= "3.10" and python_version < "4.0" \ - --hash=sha256:1bc0c2310d2fbb07b1dd1105eba2f7af72f322e1e455f2f93c993bee8c8a5f17 \ - --hash=sha256:a8dd59d4de28ca70471a34cba79bed5f7ef2e036a76b3ab0835474246eb41f8d -wheel==0.44.0 ; python_version >= "3.10" and python_version < "4.0" \ - --hash=sha256:2376a90c98cc337d18623527a97c31797bd02bad0033d41547043a1cbfbe448f \ - --hash=sha256:a29c3f2817e95ab89aa4660681ad547c0e9547f20e75b0562fe7723c9a2a9d49 -win-unicode-console==0.5 ; platform_system == "Windows" and platform_python_implementation != "PyPy" and python_version >= "3.10" and python_version < "4.0" \ +websockets==14.2 ; python_version >= "3.10" and python_version < "4.0" \ + --hash=sha256:02687db35dbc7d25fd541a602b5f8e451a238ffa033030b172ff86a93cb5dc2a \ + --hash=sha256:065ce275e7c4ffb42cb738dd6b20726ac26ac9ad0a2a48e33ca632351a737267 \ + --hash=sha256:091ab63dfc8cea748cc22c1db2814eadb77ccbf82829bac6b2fbe3401d548eda \ + --hash=sha256:0a52a6d7cf6938e04e9dceb949d35fbdf58ac14deea26e685ab6368e73744e4c \ + --hash=sha256:0a6f3efd47ffd0d12080594f434faf1cd2549b31e54870b8470b28cc1d3817d9 \ + 
--hash=sha256:0d8c3e2cdb38f31d8bd7d9d28908005f6fa9def3324edb9bf336d7e4266fd397 \ + --hash=sha256:1979bee04af6a78608024bad6dfcc0cc930ce819f9e10342a29a05b5320355d0 \ + --hash=sha256:1a5a20d5843886d34ff8c57424cc65a1deda4375729cbca4cb6b3353f3ce4142 \ + --hash=sha256:1c9b6535c0e2cf8a6bf938064fb754aaceb1e6a4a51a80d884cd5db569886910 \ + --hash=sha256:1f20522e624d7ffbdbe259c6b6a65d73c895045f76a93719aa10cd93b3de100c \ + --hash=sha256:2066dc4cbcc19f32c12a5a0e8cc1b7ac734e5b64ac0a325ff8353451c4b15ef2 \ + --hash=sha256:20e6dd0984d7ca3037afcb4494e48c74ffb51e8013cac71cf607fffe11df7205 \ + --hash=sha256:22441c81a6748a53bfcb98951d58d1af0661ab47a536af08920d129b4d1c3473 \ + --hash=sha256:2c6c0097a41968b2e2b54ed3424739aab0b762ca92af2379f152c1aef0187e1c \ + --hash=sha256:2dddacad58e2614a24938a50b85969d56f88e620e3f897b7d80ac0d8a5800258 \ + --hash=sha256:2e20c5f517e2163d76e2729104abc42639c41cf91f7b1839295be43302713661 \ + --hash=sha256:34277a29f5303d54ec6468fb525d99c99938607bc96b8d72d675dee2b9f5bf1d \ + --hash=sha256:3bdc8c692c866ce5fefcaf07d2b55c91d6922ac397e031ef9b774e5b9ea42166 \ + --hash=sha256:3c1426c021c38cf92b453cdf371228d3430acd775edee6bac5a4d577efc72365 \ + --hash=sha256:44bba1a956c2c9d268bdcdf234d5e5ff4c9b6dc3e300545cbe99af59dda9dcce \ + --hash=sha256:4b27ece32f63150c268593d5fdb82819584831a83a3f5809b7521df0685cd5d8 \ + --hash=sha256:4da98b72009836179bb596a92297b1a61bb5a830c0e483a7d0766d45070a08ad \ + --hash=sha256:4daa0faea5424d8713142b33825fff03c736f781690d90652d2c8b053345b0e7 \ + --hash=sha256:5059ed9c54945efb321f097084b4c7e52c246f2c869815876a69d1efc4ad6eb5 \ + --hash=sha256:577a4cebf1ceaf0b65ffc42c54856214165fb8ceeba3935852fc33f6b0c55e7f \ + --hash=sha256:647b573f7d3ada919fd60e64d533409a79dcf1ea21daeb4542d1d996519ca967 \ + --hash=sha256:669c3e101c246aa85bc8534e495952e2ca208bd87994650b90a23d745902db9a \ + --hash=sha256:6af6a4b26eea4fc06c6818a6b962a952441e0e39548b44773502761ded8cc1d4 \ + --hash=sha256:6af99a38e49f66be5a64b1e890208ad026cda49355661549c507152113049990 \ + --hash=sha256:6d7ff794c8b36bc402f2e07c0b2ceb4a2424147ed4785ff03e2a7af03711d60a \ + --hash=sha256:6f1372e511c7409a542291bce92d6c83320e02c9cf392223272287ce55bc224e \ + --hash=sha256:714a9b682deb4339d39ffa674f7b674230227d981a37d5d174a4a83e3978a610 \ + --hash=sha256:75862126b3d2d505e895893e3deac0a9339ce750bd27b4ba515f008b5acf832d \ + --hash=sha256:7a570862c325af2111343cc9b0257b7119b904823c675b22d4ac547163088d0d \ + --hash=sha256:7a6ceec4ea84469f15cf15807a747e9efe57e369c384fa86e022b3bea679b79b \ + --hash=sha256:7cd5706caec1686c5d233bc76243ff64b1c0dc445339bd538f30547e787c11fe \ + --hash=sha256:80c8efa38957f20bba0117b48737993643204645e9ec45512579132508477cfc \ + --hash=sha256:862e9967b46c07d4dcd2532e9e8e3c2825e004ffbf91a5ef9dde519ee2effb0b \ + --hash=sha256:86cf1aaeca909bf6815ea714d5c5736c8d6dd3a13770e885aafe062ecbd04f1f \ + --hash=sha256:89a71173caaf75fa71a09a5f614f450ba3ec84ad9fca47cb2422a860676716f0 \ + --hash=sha256:9f05702e93203a6ff5226e21d9b40c037761b2cfb637187c9802c10f58e40473 \ + --hash=sha256:a39d7eceeea35db85b85e1169011bb4321c32e673920ae9c1b6e0978590012a3 \ + --hash=sha256:a3c4aa3428b904d5404a0ed85f3644d37e2cb25996b7f096d77caeb0e96a3b42 \ + --hash=sha256:a9b0f6c3ba3b1240f602ebb3971d45b02cc12bd1845466dd783496b3b05783a5 \ + --hash=sha256:a9e72fb63e5f3feacdcf5b4ff53199ec8c18d66e325c34ee4c551ca748623bbc \ + --hash=sha256:ab95d357cd471df61873dadf66dd05dd4709cae001dd6342edafc8dc6382f307 \ + --hash=sha256:ad1c1d02357b7665e700eca43a31d52814ad9ad9b89b58118bdabc365454b574 \ + 
--hash=sha256:b374e8953ad477d17e4851cdc66d83fdc2db88d9e73abf755c94510ebddceb95 \ + --hash=sha256:b439ea828c4ba99bb3176dc8d9b933392a2413c0f6b149fdcba48393f573377f \ + --hash=sha256:b4c8cef610e8d7c70dea92e62b6814a8cd24fbd01d7103cc89308d2bfe1659ef \ + --hash=sha256:bbe03eb853e17fd5b15448328b4ec7fb2407d45fb0245036d06a3af251f8e48f \ + --hash=sha256:bc63cee8596a6ec84d9753fd0fcfa0452ee12f317afe4beae6b157f0070c6c7f \ + --hash=sha256:c3ecadc7ce90accf39903815697917643f5b7cfb73c96702318a096c00aa71f5 \ + --hash=sha256:c76193c1c044bd1e9b3316dcc34b174bbf9664598791e6fb606d8d29000e070c \ + --hash=sha256:c93215fac5dadc63e51bcc6dceca72e72267c11def401d6668622b47675b097f \ + --hash=sha256:cc45afb9c9b2dc0852d5c8b5321759cf825f82a31bfaf506b65bf4668c96f8b2 \ + --hash=sha256:d7d9cafbccba46e768be8a8ad4635fa3eae1ffac4c6e7cb4eb276ba41297ed29 \ + --hash=sha256:da85651270c6bfb630136423037dd4975199e5d4114cae6d3066641adcc9d1c7 \ + --hash=sha256:dec254fcabc7bd488dab64846f588fc5b6fe0d78f641180030f8ea27b76d72c3 \ + --hash=sha256:e3fbd68850c837e57373d95c8fe352203a512b6e49eaae4c2f4088ef8cf21980 \ + --hash=sha256:e8179f95323b9ab1c11723e5d91a89403903f7b001828161b480a7810b334885 \ + --hash=sha256:e9d0e53530ba7b8b5e389c02282f9d2aa47581514bd6049d3a7cffe1385cf5fe \ + --hash=sha256:eabdb28b972f3729348e632ab08f2a7b616c7e53d5414c12108c29972e655b20 \ + --hash=sha256:ec607328ce95a2f12b595f7ae4c5d71bf502212bddcea528290b35c286932b12 \ + --hash=sha256:efd9b868d78b194790e6236d9cbc46d68aba4b75b22497eb4ab64fa640c3af56 \ + --hash=sha256:f2e53c72052f2596fb792a7acd9704cbc549bf70fcde8a99e899311455974ca3 \ + --hash=sha256:f390024a47d904613577df83ba700bd189eedc09c57af0a904e5c39624621270 \ + --hash=sha256:f8a86a269759026d2bde227652b87be79f8a734e582debf64c9d302faa1e9f03 \ + --hash=sha256:fd475a974d5352390baf865309fe37dec6831aafc3014ffac1eea99e84e83fc2 +werkzeug==3.1.3 ; python_version >= "3.10" and python_version < "4.0" \ + --hash=sha256:54b78bf3716d19a65be4fceccc0d1d7b89e608834989dfae50ea87564639213e \ + --hash=sha256:60723ce945c19328679790e3282cc758aa4a6040e4bb330f53d30fa546d44746 +win-unicode-console==0.5 ; python_version >= "3.10" and python_version < "4.0" and platform_system == "Windows" and platform_python_implementation != "PyPy" \ --hash=sha256:d4142d4d56d46f449d6f00536a73625a871cba040f0bc1a2e305a04578f07d1e -xmltodict==0.13.0 ; python_version >= "3.10" and python_version < "4.0" \ - --hash=sha256:341595a488e3e01a85a9d8911d8912fd922ede5fecc4dce437eb4b6c8d037e56 \ - --hash=sha256:aa89e8fd76320154a40d19a0df04a4695fb9dc5ba977cbb68ab3e4eb225e7852 +xmltodict==0.14.2 ; python_version >= "3.10" and python_version < "4.0" \ + --hash=sha256:201e7c28bb210e374999d1dde6382923ab0ed1a8a5faeece48ab525b7810a553 \ + --hash=sha256:20cc7d723ed729276e808f26fb6b3599f786cbc37e06c65e192ba77c40f20aac yara-python==4.5.1 ; python_version >= "3.10" and python_version < "4.0" \ --hash=sha256:024c477f182c26265fc447051e09099016e3562ac7f2255e05de2a506dd4d6dc \ --hash=sha256:0324175b06c440eb754b7ff3845b6eb426b5870bbbebbeae32f2e5281fd35860 \ @@ -2092,38 +2251,41 @@ yara-python==4.5.1 ; python_version >= "3.10" and python_version < "4.0" \ zope-event==5.0 ; python_version >= "3.10" and python_version < "4.0" \ --hash=sha256:2832e95014f4db26c47a13fdaef84cef2f4df37e66b59d8f1f4a8f319a632c26 \ --hash=sha256:bac440d8d9891b4068e2b5a2c5e2c9765a9df762944bda6955f96bb9b91e67cd -zope-interface==7.0.3 ; python_version >= "3.10" and python_version < "4.0" \ - --hash=sha256:01e6e58078ad2799130c14a1d34ec89044ada0e1495329d72ee0407b9ae5100d \ - 
--hash=sha256:064ade95cb54c840647205987c7b557f75d2b2f7d1a84bfab4cf81822ef6e7d1 \ - --hash=sha256:11fa1382c3efb34abf16becff8cb214b0b2e3144057c90611621f2d186b7e1b7 \ - --hash=sha256:1bee1b722077d08721005e8da493ef3adf0b7908e0cd85cc7dc836ac117d6f32 \ - --hash=sha256:1eeeb92cb7d95c45e726e3c1afe7707919370addae7ed14f614e22217a536958 \ - --hash=sha256:21a207c6b2c58def5011768140861a73f5240f4f39800625072ba84e76c9da0b \ - --hash=sha256:2545d6d7aac425d528cd9bf0d9e55fcd47ab7fd15f41a64b1c4bf4c6b24946dc \ - --hash=sha256:2c4316a30e216f51acbd9fb318aa5af2e362b716596d82cbb92f9101c8f8d2e7 \ - --hash=sha256:35062d93bc49bd9b191331c897a96155ffdad10744ab812485b6bad5b588d7e4 \ - --hash=sha256:382d31d1e68877061daaa6499468e9eb38eb7625d4369b1615ac08d3860fe896 \ - --hash=sha256:3aa8fcbb0d3c2be1bfd013a0f0acd636f6ed570c287743ae2bbd467ee967154d \ - --hash=sha256:3d4b91821305c8d8f6e6207639abcbdaf186db682e521af7855d0bea3047c8ca \ - --hash=sha256:3de1d553ce72868b77a7e9d598c9bff6d3816ad2b4cc81c04f9d8914603814f3 \ - --hash=sha256:3fcdc76d0cde1c09c37b7c6b0f8beba2d857d8417b055d4f47df9c34ec518bdd \ - --hash=sha256:5112c530fa8aa2108a3196b9c2f078f5738c1c37cfc716970edc0df0414acda8 \ - --hash=sha256:53d678bb1c3b784edbfb0adeebfeea6bf479f54da082854406a8f295d36f8386 \ - --hash=sha256:6195c3c03fef9f87c0dbee0b3b6451df6e056322463cf35bca9a088e564a3c58 \ - --hash=sha256:6d04b11ea47c9c369d66340dbe51e9031df2a0de97d68f442305ed7625ad6493 \ - --hash=sha256:6dd647fcd765030638577fe6984284e0ebba1a1008244c8a38824be096e37fe3 \ - --hash=sha256:799ef7a444aebbad5a145c3b34bff012b54453cddbde3332d47ca07225792ea4 \ - --hash=sha256:7d92920416f31786bc1b2f34cc4fc4263a35a407425319572cbf96b51e835cd3 \ - --hash=sha256:7e0c151a6c204f3830237c59ee4770cc346868a7a1af6925e5e38650141a7f05 \ - --hash=sha256:84f8794bd59ca7d09d8fce43ae1b571be22f52748169d01a13d3ece8394d8b5b \ - --hash=sha256:95e5913ec718010dc0e7c215d79a9683b4990e7026828eedfda5268e74e73e11 \ - --hash=sha256:9b9369671a20b8d039b8e5a1a33abd12e089e319a3383b4cc0bf5c67bd05fe7b \ - --hash=sha256:ab985c566a99cc5f73bc2741d93f1ed24a2cc9da3890144d37b9582965aff996 \ - --hash=sha256:af94e429f9d57b36e71ef4e6865182090648aada0cb2d397ae2b3f7fc478493a \ - --hash=sha256:c96b3e6b0d4f6ddfec4e947130ec30bd2c7b19db6aa633777e46c8eecf1d6afd \ - --hash=sha256:cd2690d4b08ec9eaf47a85914fe513062b20da78d10d6d789a792c0b20307fb1 \ - --hash=sha256:d3b7ce6d46fb0e60897d62d1ff370790ce50a57d40a651db91a3dde74f73b738 \ - --hash=sha256:d976fa7b5faf5396eb18ce6c132c98e05504b52b60784e3401f4ef0b2e66709b \ - --hash=sha256:db6237e8fa91ea4f34d7e2d16d74741187e9105a63bbb5686c61fea04cdbacca \ - --hash=sha256:ecd32f30f40bfd8511b17666895831a51b532e93fc106bfa97f366589d3e4e0e \ - --hash=sha256:f418c88f09c3ba159b95a9d1cfcdbe58f208443abb1f3109f4b9b12fd60b187c +zope-interface==7.2 ; python_version >= "3.10" and python_version < "4.0" \ + --hash=sha256:033b3923b63474800b04cba480b70f6e6243a62208071fc148354f3f89cc01b7 \ + --hash=sha256:05b910a5afe03256b58ab2ba6288960a2892dfeef01336dc4be6f1b9ed02ab0a \ + --hash=sha256:086ee2f51eaef1e4a52bd7d3111a0404081dadae87f84c0ad4ce2649d4f708b7 \ + --hash=sha256:0ef9e2f865721553c6f22a9ff97da0f0216c074bd02b25cf0d3af60ea4d6931d \ + --hash=sha256:1090c60116b3da3bfdd0c03406e2f14a1ff53e5771aebe33fec1edc0a350175d \ + --hash=sha256:144964649eba4c5e4410bb0ee290d338e78f179cdbfd15813de1a664e7649b3b \ + --hash=sha256:15398c000c094b8855d7d74f4fdc9e73aa02d4d0d5c775acdef98cdb1119768d \ + --hash=sha256:1909f52a00c8c3dcab6c4fad5d13de2285a4b3c7be063b239b8dc15ddfb73bd2 \ + 
--hash=sha256:21328fcc9d5b80768bf051faa35ab98fb979080c18e6f84ab3f27ce703bce465 \ + --hash=sha256:224b7b0314f919e751f2bca17d15aad00ddbb1eadf1cb0190fa8175edb7ede62 \ + --hash=sha256:25e6a61dcb184453bb00eafa733169ab6d903e46f5c2ace4ad275386f9ab327a \ + --hash=sha256:27f926f0dcb058211a3bb3e0e501c69759613b17a553788b2caeb991bed3b61d \ + --hash=sha256:29caad142a2355ce7cfea48725aa8bcf0067e2b5cc63fcf5cd9f97ad12d6afb5 \ + --hash=sha256:2ad9913fd858274db8dd867012ebe544ef18d218f6f7d1e3c3e6d98000f14b75 \ + --hash=sha256:31d06db13a30303c08d61d5fb32154be51dfcbdb8438d2374ae27b4e069aac40 \ + --hash=sha256:3e0350b51e88658d5ad126c6a57502b19d5f559f6cb0a628e3dc90442b53dd98 \ + --hash=sha256:3f6771d1647b1fc543d37640b45c06b34832a943c80d1db214a37c31161a93f1 \ + --hash=sha256:4893395d5dd2ba655c38ceb13014fd65667740f09fa5bb01caa1e6284e48c0cd \ + --hash=sha256:52e446f9955195440e787596dccd1411f543743c359eeb26e9b2c02b077b0519 \ + --hash=sha256:550f1c6588ecc368c9ce13c44a49b8d6b6f3ca7588873c679bd8fd88a1b557b6 \ + --hash=sha256:72cd1790b48c16db85d51fbbd12d20949d7339ad84fd971427cf00d990c1f137 \ + --hash=sha256:7bd449c306ba006c65799ea7912adbbfed071089461a19091a228998b82b1fdb \ + --hash=sha256:7dc5016e0133c1a1ec212fc87a4f7e7e562054549a99c73c8896fa3a9e80cbc7 \ + --hash=sha256:802176a9f99bd8cc276dcd3b8512808716492f6f557c11196d42e26c01a69a4c \ + --hash=sha256:80ecf2451596f19fd607bb09953f426588fc1e79e93f5968ecf3367550396b22 \ + --hash=sha256:8b49f1a3d1ee4cdaf5b32d2e738362c7f5e40ac8b46dd7d1a65e82a4872728fe \ + --hash=sha256:8e7da17f53e25d1a3bde5da4601e026adc9e8071f9f6f936d0fe3fe84ace6d54 \ + --hash=sha256:a102424e28c6b47c67923a1f337ede4a4c2bba3965b01cf707978a801fc7442c \ + --hash=sha256:a19a6cc9c6ce4b1e7e3d319a473cf0ee989cbbe2b39201d7c19e214d2dfb80c7 \ + --hash=sha256:a71a5b541078d0ebe373a81a3b7e71432c61d12e660f1d67896ca62d9628045b \ + --hash=sha256:baf95683cde5bc7d0e12d8e7588a3eb754d7c4fa714548adcd96bdf90169f021 \ + --hash=sha256:cab15ff4832580aa440dc9790b8a6128abd0b88b7ee4dd56abacbc52f212209d \ + --hash=sha256:ce290e62229964715f1011c3dbeab7a4a1e4971fd6f31324c4519464473ef9f2 \ + --hash=sha256:d3a8ffec2a50d8ec470143ea3d15c0c52d73df882eef92de7537e8ce13475e8a \ + --hash=sha256:e204937f67b28d2dca73ca936d3039a144a081fc47a07598d44854ea2a106239 \ + --hash=sha256:eb23f58a446a7f09db85eda09521a498e109f137b85fb278edb2e34841055398 \ + --hash=sha256:f6dd02ec01f4468da0f234da9d9c8545c5412fef80bc590cc51d8dd084138a89 diff --git a/systemd/cape-dist.service b/systemd/cape-dist.service index acfcfbc2772..0338e431104 100644 --- a/systemd/cape-dist.service +++ b/systemd/cape-dist.service @@ -4,7 +4,7 @@ Documentation=https://github.com/kevoreilly/CAPEv2 [Service] WorkingDirectory=/opt/CAPEv2/utils/ -ExecStart=/usr/bin/python3 -m poetry run python dist.py -ef +ExecStart=/etc/poetry/bin/poetry run python dist.py -ef User=cape Group=cape Restart=always diff --git a/systemd/cape-fstab.service b/systemd/cape-fstab.service index 958e375230f..925a97f935c 100644 --- a/systemd/cape-fstab.service +++ b/systemd/cape-fstab.service @@ -4,8 +4,8 @@ Documentation=https://github.com/kevoreilly/CAPEv2 [Service] WorkingDirectory=/opt/CAPEv2/utils/ -ExecStartPre=/usr/bin/python3 -m poetry config cache-dir /opt/CAPEv2/.cache/pypoetry -ExecStart=/usr/bin/python3 -m poetry run python fstab.py -g cape +ExecStartPre=/etc/poetry/bin/poetry config cache-dir /opt/CAPEv2/.cache/pypoetry +ExecStart=/etc/poetry/bin/poetry run python fstab.py -g cape User=root Group=root Restart=always diff --git a/systemd/cape-processor.service b/systemd/cape-processor.service index 
cfd42915709..efb27ece5c2 100644 --- a/systemd/cape-processor.service +++ b/systemd/cape-processor.service @@ -6,7 +6,7 @@ After=cape-rooter.service [Service] WorkingDirectory=/opt/CAPEv2/utils/ -ExecStart=/usr/bin/python3 -m poetry run python process.py -p7 auto -pt 900 +ExecStart=/etc/poetry/bin/poetry run python process.py -p7 auto -pt 900 User=cape Group=cape Restart=always diff --git a/systemd/cape-rooter.service b/systemd/cape-rooter.service index 3f6d9985668..ecf5e1253ec 100644 --- a/systemd/cape-rooter.service +++ b/systemd/cape-rooter.service @@ -6,8 +6,8 @@ After=syslog.target network.target [Service] WorkingDirectory=/opt/CAPEv2/utils/ -ExecStartPre=/usr/bin/python3 -m poetry config cache-dir /opt/CAPEv2/.cache/pypoetry -ExecStart=/usr/bin/python3 -m poetry run python rooter.py -g cape +ExecStartPre=/etc/poetry/bin/poetry config cache-dir /opt/CAPEv2/.cache/pypoetry +ExecStart=/etc/poetry/bin/poetry run python rooter.py -g cape User=root Group=root Restart=always diff --git a/systemd/cape-web.service b/systemd/cape-web.service index e4fee3e25a1..71b817935d1 100644 --- a/systemd/cape-web.service +++ b/systemd/cape-web.service @@ -6,7 +6,7 @@ After=cape-rooter.service [Service] WorkingDirectory=/opt/CAPEv2/web -ExecStart=/usr/bin/python3 -m poetry run python manage.py runserver_plus 0.0.0.0:8000 --traceback --keep-meta-shutdown +ExecStart=/etc/poetry/bin/poetry run python manage.py runserver_plus 0.0.0.0:8000 --traceback --keep-meta-shutdown User=cape Group=cape Restart=always diff --git a/systemd/cape.service b/systemd/cape.service index 26bbbac3b40..2b1da435d16 100644 --- a/systemd/cape.service +++ b/systemd/cape.service @@ -6,7 +6,7 @@ After=cape-rooter.service [Service] WorkingDirectory=/opt/CAPEv2/ -ExecStart=/usr/bin/python3 -m poetry run python cuckoo.py +ExecStart=/etc/poetry/bin/poetry run python cuckoo.py User=cape Group=cape Restart=always diff --git a/systemd/guac-web.service b/systemd/guac-web.service index f490f94ca95..ea63e64fe07 100644 --- a/systemd/guac-web.service +++ b/systemd/guac-web.service @@ -3,7 +3,7 @@ Description=Guacamole ASGI app [Service] WorkingDirectory=/opt/CAPEv2/web -ExecStart=/usr/bin/poetry run gunicorn --bind 127.0.0.1:8008 web.asgi -t 180 -w 4 -k uvicorn.workers.UvicornWorker --capture-output --enable-stdio-inheritance +ExecStart=/etc/poetry/bin/poetry run gunicorn --bind 127.0.0.1:8008 web.asgi -t 180 -w 4 -k uvicorn.workers.UvicornWorker --capture-output --enable-stdio-inheritance User=cape Group=cape Restart=always diff --git a/tests/grab_samples.py b/tests/grab_samples.py index d74bfd5747b..87187835f2b 100644 --- a/tests/grab_samples.py +++ b/tests/grab_samples.py @@ -43,7 +43,7 @@ def get_filepaths(directory, args): def load_sample_lists(args): sample_json_list = get_filepaths("tests/Extractors/StandAlone/unit_tests", args) for sample_json_location in sample_json_list: - logging.warning("Found sample.json: " + sample_json_location) + logging.warning("Found sample.json: %s", sample_json_location) with open(sample_json_location, "r") as samples: sample_dict = json.load(samples) for hash_item in sample_dict["hashes"]: @@ -60,7 +60,7 @@ def run(args): if __name__ == "__main__": parser = argparse.ArgumentParser( - description="Grab malicious samples from sample.json files via https://10.203.112.173/centralrepo/" + description="Grab malicious samples from sample.json" ) parser.add_argument("--family", action="store", dest="family", type=str) diff --git a/tests/integrity.py b/tests/integrity.py index b36bba3a635..d725de75b71 100644 --- 
a/tests/integrity.py +++ b/tests/integrity.py @@ -9,6 +9,7 @@ that there are no remaining tasks in the queue this utility will clean the entire database before starting various analyses. """ + import argparse import json import logging diff --git a/tests/tcr_misc.py b/tests/tcr_misc.py index 2406a2236fd..8c7771973eb 100644 --- a/tests/tcr_misc.py +++ b/tests/tcr_misc.py @@ -65,7 +65,7 @@ def get_malware_paths(path): def get_sample(hash, download_location): if os.path.isfile(download_location) and hash == hashlib.sha256(open(download_location, "rb").read()).hexdigest(): - logging.warning(download_location + " already there, skipping!") + logging.warning("%s already there, skipping!", download_location) else: r = s.get(SAMPLE_STORAGE + hash, verify=False, timeout=10) if r and r.status_code == 200: @@ -74,7 +74,7 @@ def get_sample(hash, download_location): raise Exception("Hashes doens't match") with open(download_location, mode="wb+") as file: file.write(r.content) - logging.warning(download_location + " grabbed!") + logging.warning("%s grabbed!", download_location) else: - logging.warning("Status code: {} - content: {}".format(r.status_code, r.content)) + logging.warning("Status code: %d - content: %s", r.status_code, r.text) raise Exception("Non 200 status code") diff --git a/tests/test_analysis_manager.py b/tests/test_analysis_manager.py index 064d7173d00..d48e189176e 100644 --- a/tests/test_analysis_manager.py +++ b/tests/test_analysis_manager.py @@ -129,7 +129,11 @@ def test_init(self, task: Task): "sanitize_to_len": 24, "scaling_semaphore": False, "scaling_semaphore_update_timer": 10, + "task_pending_timeout": 0, + "task_timeout": False, + "task_timeout_scan_interval": 30, "freespace_processing": 15000, + "ignore_signals": True, "periodic_log": False, "fail_unserviceable": True, } @@ -350,6 +354,10 @@ def test_build_options( "upload_max_size": 100000000, "usage": False, "windows_static_route": False, + "windows_static_route_gateway": "192.168.1.1", + "dns_etw": False, + "wmi_etw": False, + "watchdownloads": False, } def test_build_options_pe( @@ -411,6 +419,10 @@ def test_build_options_pe( "upload_max_size": 100000000, "usage": False, "windows_static_route": False, + "windows_static_route_gateway": "192.168.1.1", + "dns_etw": False, + "wmi_etw": False, + "watchdownloads": False, } def test_category_checks( diff --git a/tests/test_aplib.py b/tests/test_aplib.py deleted file mode 100644 index 68299796f1f..00000000000 --- a/tests/test_aplib.py +++ /dev/null @@ -1,10 +0,0 @@ -# Copyright (C) 2010-2015 Cuckoo Foundation. -# This file is part of Cuckoo Sandbox - http://www.cuckoosandbox.org -# See the file 'docs/LICENSE' for copying permission. 
- -from lib.cuckoo.common import aplib - - -def test_apilib_decompress(): - data = b"T\x00he quick\xecb\x0erown\xcef\xaex\x80jumps\xed\xe4veur`t?lazy\xead\xfeg\xc0\x00" - assert aplib.decompress(data) == b"The quick brown fox jumps over the lazy dog" diff --git a/tests/test_cape_utils.py b/tests/test_cape_utils.py new file mode 100644 index 00000000000..5d290e8d081 --- /dev/null +++ b/tests/test_cape_utils.py @@ -0,0 +1,74 @@ +import unittest +from unittest.mock import MagicMock, patch + +from lib.cuckoo.common.cape_utils import cape_name_from_yara, static_config_parsers + + +class TestCapeUtils(unittest.TestCase): + @patch("lib.cuckoo.common.cape_utils.File.yara_hit_provides_detection") + @patch("lib.cuckoo.common.cape_utils.File.get_cape_name_from_yara_hit") + def test_cape_name_from_yara(self, mock_get_cape_name_from_yara_hit, mock_yara_hit_provides_detection): + details = {"cape_yara": [{"rule": "test_rule_1"}, {"rule": "test_rule_2"}]} + pid = 1234 + results = {} + + mock_yara_hit_provides_detection.side_effect = [False, True] + mock_get_cape_name_from_yara_hit.return_value = "test_name" + + name = cape_name_from_yara(details, pid, results) + + self.assertEqual(name, "test_name") + self.assertIn("detections2pid", results) + self.assertIn(str(pid), results["detections2pid"]) + self.assertIn("test_name", results["detections2pid"][str(pid)]) + + @patch("lib.cuckoo.common.cape_utils.File.yara_hit_provides_detection") + def test_cape_name_from_yara_no_detection(self, mock_yara_hit_provides_detection): + details = {"cape_yara": [{"rule": "test_rule_1"}]} + pid = 1234 + results = {} + + mock_yara_hit_provides_detection.return_value = False + + name = cape_name_from_yara(details, pid, results) + + self.assertIsNone(name) + self.assertNotIn("detections2pid", results) + + def test_cape_name_from_yara_no_cape_yara(self): + details = {} + pid = 1234 + results = {} + + name = cape_name_from_yara(details, pid, results) + + self.assertIsNone(name) + self.assertNotIn("detections2pid", results) + + +class TestStaticConfigParsers(unittest.TestCase): + @patch("lib.cuckoo.common.cape_utils.HAVE_CAPE_EXTRACTORS", True) + @patch("lib.cuckoo.common.cape_utils.cape_malware_parsers") + def test_static_config_parsers_cape_extractors(self, mock_cape_malware_parsers): + cape_name = "test_cape" + file_path = "/path/to/file" + file_data = b"test data" + mock_parser = MagicMock() + mock_parser.extract_config.return_value = {"key": "value"} + mock_cape_malware_parsers.__contains__.return_value = True + mock_cape_malware_parsers.__getitem__.return_value = mock_parser + result = static_config_parsers(cape_name, file_path, file_data) + self.assertIn(cape_name, result) + self.assertIn("key", result[cape_name]) + self.assertEqual(result[cape_name]["key"], ["value"]) + + def test_static_config_parsers_no_extractors(self): + cape_name = "test_none" + file_path = "/path/to/file" + file_data = b"test data" + result = static_config_parsers(cape_name, file_path, file_data) + self.assertEqual(result, {}) + + +if __name__ == "__main__": + unittest.main() diff --git a/tests/test_database.py b/tests/test_database.py index 6dbf50d5086..b4e4ecc7d38 100644 --- a/tests/test_database.py +++ b/tests/test_database.py @@ -904,17 +904,23 @@ def test_delete_task(self, db: _Database, temp_filename): assert not db.delete_task(t2) def test_delete_tasks(self, db: _Database, temp_filename): + """Test the delete_tasks method. + + We need a new session after calling delete_tasks. 
+ """ with db.session.begin(): t1 = db.add_url("https://1.com") t2 = db.add_path(temp_filename, tags="x86") t3 = db.add_url("https://3.com") with db.session.begin(): - assert db.delete_tasks([]) - assert db.delete_tasks([t1, t2, t3 + 1]) + assert db.delete_tasks(task_ids=[]) + assert db.delete_tasks(task_ids=[t1, t2, t3 + 1]) + with db.session.begin(): tasks = db.session.query(Task).all() assert len(tasks) == 1 assert tasks[0].id == t3 - assert db.delete_tasks([t1, t2]) + assert db.delete_tasks(task_ids=[t1, t2]) + with db.session.begin(): tasks = db.session.query(Task).all() assert len(tasks) == 1 assert tasks[0].id == t3 diff --git a/tests/test_downloaders.py b/tests/test_downloaders.py new file mode 100644 index 00000000000..07f79bea8c8 --- /dev/null +++ b/tests/test_downloaders.py @@ -0,0 +1,175 @@ +import io +import unittest +from unittest.mock import MagicMock, patch + +import pytest +import pyzipper +import requests + +from lib.downloaders import Downloaders +from lib.downloaders.malwarebazaar import download as mb_downloader +from lib.downloaders.virustotal import download as vt_downloader + + +class TestDownloaders(unittest.TestCase): + @patch('lib.downloaders.load_downloaders') + @patch('lib.downloaders.Config') + @patch('lib.downloaders.path_exists') + @patch('lib.downloaders.path_mkdir') + def setUp(self, mock_mkdir, mock_exists, mock_config, mock_load_downloaders): + mock_exists.return_value = False + self.mock_downloaders = { + 'downloader1': MagicMock(), + 'downloader2': MagicMock() + } + mock_load_downloaders.return_value = self.mock_downloaders + self.mock_config = MagicMock() + mock_config.return_value = self.mock_config + self.dl = Downloaders() + + @pytest.mark.skip(reason="Need to figure out how to test this") + def test_download_success(self): + self.mock_downloaders['downloader1'].is_supported.return_value = True + self.mock_downloaders['downloader1'].download.return_value = b'sample_data' + sample, service = self.dl.download('validhash') + self.assertEqual(sample, b'sample_data') + self.assertEqual(service, 'downloader1') + + def test_download_invalid_hash(self): + self.mock_downloaders['downloader1'].is_supported.return_value = False + self.mock_downloaders['downloader2'].is_supported.return_value = False + sample, service = self.dl.download('invalidhash') + self.assertFalse(sample) + self.assertFalse(service) + + @pytest.mark.skip(reason="Need to figure out how to test this") + def test_download_exception(self): + self.mock_downloaders['downloader1'].is_supported.side_effect = Exception("Test exception") + self.mock_downloaders['downloader2'].is_supported.return_value = True + self.mock_downloaders['downloader2'].download.return_value = b'sample_data' + sample, service = self.dl.download('validhash') + self.assertEqual(sample, b'sample_data') + self.assertEqual(service, 'downloader2') + + def test_download_no_sample(self): + self.mock_downloaders['downloader1'].is_supported.return_value = True + self.mock_downloaders['downloader1'].download.return_value = False + self.mock_downloaders['downloader2'].is_supported.return_value = True + self.mock_downloaders['downloader2'].download.return_value = False + sample, service = self.dl.download('validhash') + self.assertFalse(sample) + self.assertFalse(service) + + + @patch("requests.post") + def test_malwarebazaar_dl_success(self, mock_post): + # Mock the response from requests.post + mock_response = MagicMock() + mock_response.ok = True + mock_response.content = io.BytesIO() + with 
pyzipper.AESZipFile(mock_response.content, "w", encryption=pyzipper.WZ_AES) as zf: + zf.setpassword(b"infected") + zf.writestr("sample.txt", "sample content") + mock_post.return_value = mock_response + + # Call the function + result = mb_downloader("aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa") + + # Check the result + self.assertEqual(result, b"sample content") + + @patch("requests.post") + def test_malwarebazaar_dl_file_not_found(self, mock_post): + # Mock the response from requests.post + mock_response = MagicMock() + mock_response.ok = True + mock_response.content = b"file_not_found" + mock_post.return_value = mock_response + + # Call the function + result = mb_downloader("aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa") + + # Check the result + assert not result + + @patch("requests.post") + def test_malwarebazaar_dl_bad_zip_file(self, mock_post): + # Mock the response from requests.post + mock_response = MagicMock() + mock_response.ok = True + mock_response.content = b"not a zip file" + mock_post.return_value = mock_response + + # Call the function + result = mb_downloader("aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa") + + # Check the result + assert not result + + @patch("requests.post") + def test_malwarebazaar_dl_exception(self, mock_post): + # Mock the response from requests.post to raise an exception + mock_post.side_effect = requests.exceptions.RequestException + + # Call the function + result = mb_downloader("aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa") + + # Check the result + assert not result + + @patch("lib.downloaders.virustotal.requests.get") + def test_download_success_vt(self, mock_get): + mock_response = MagicMock() + mock_response.status_code = 200 + mock_response.content = b"sample content" + mock_get.return_value = mock_response + + result = vt_downloader("d41d8cd98f00b204e9800998ecf8427e", "test_api_key") + self.assertEqual(result, b"sample content") + + @patch("lib.downloaders.virustotal.requests.get") + def test_download_hash_not_present(self, mock_get): + mock_response = MagicMock() + mock_response.status_code = 200 + mock_response.content = b"Hash Not Present" + mock_get.return_value = mock_response + + result = vt_downloader("d41d8cd98f00b204e9800998ecf8427e", "test_api_key") + self.assertEqual(result, b"") + + @patch("lib.downloaders.virustotal.requests.get") + def test_download_forbidden(self, mock_get): + mock_response = MagicMock() + mock_response.status_code = 403 + mock_get.return_value = mock_response + + with self.assertLogs("lib.downloaders.virustotal", level="ERROR") as cm: + result = vt_downloader("d41d8cd98f00b204e9800998ecf8427e", "test_api_key") + self.assertIn("API key provided is not a valid VirusTotal key or is not authorized for downloads", cm.output[0]) + self.assertEqual(result, b"") + + @patch("lib.downloaders.virustotal.requests.get") + def test_download_not_found(self, mock_get): + mock_response = MagicMock() + mock_response.status_code = 404 + mock_get.return_value = mock_response + + with self.assertLogs("lib.downloaders.virustotal", level="ERROR") as cm: + result = vt_downloader("d41d8cd98f00b204e9800998ecf8427e", "test_api_key") + self.assertIn("Hash not found on VirusTotal", cm.output[0]) + self.assertEqual(result, b"") + + @pytest.mark.skip(reason="Need to figure out how to test this") + @patch("lib.downloaders.virustotal.requests.get") + def test_download_request_exception(self, mock_get): + mock_get.side_effect = 
requests.exceptions.RequestException("Request failed") + + with self.assertLogs("lib.downloaders.virustotal", level="ERROR") as cm: + result = vt_downloader("d41d8cd98f00b204e9800998ecf8427e", "test_api_key") + self.assertIn("Request failed", cm.output[0]) + self.assertIsNone(result) + + + +if __name__ == '__main__': + unittest.main() diff --git a/tests/test_file_extra_info.py b/tests/test_file_extra_info.py index a60d2cde5ca..c07f8b123ee 100644 --- a/tests/test_file_extra_info.py +++ b/tests/test_file_extra_info.py @@ -39,13 +39,14 @@ def test_generic_file_extractors(self): duplicated, tests=True, ) - assert data_dictionary["extracted_files_tool"] == "MsiExtract" - assert len(data_dictionary["extracted_files"]) == 2 + assert "MsiExtract" in data_dictionary["selfextract"].keys() + assert len(data_dictionary["selfextract"]["MsiExtract"]["extracted_files"]) == 2 @pytest.mark.skipif( not (self_extraction_dir / "5b354397f6393ed777639b7d40dec3f37215dcb5078c63993e8a9703e819e2bc.inno").exists(), reason="Required data file is not present", ) + @pytest.mark.skip(reason="innoextractor in community repo now") def test_generic_file_extractors_no_tests(self): results = {} data_dictionary = {"die": ["Inno Setup"], "type": ""} @@ -60,8 +61,8 @@ def test_generic_file_extractors_no_tests(self): results, duplicated, ) - assert data_dictionary["extracted_files_tool"] == "InnoExtract" - assert len(data_dictionary["extracted_files"]) == 1 + assert "InnoExtract" in data_dictionary["selfextract"].keys() + assert len(data_dictionary["selfextract"]["InnoExtract"]["extracted_files"]) == 1 @pytest.mark.skip(reason="Not implemented yet") def test_batch_extract(self): @@ -111,6 +112,7 @@ def test_msi_extract(self): not (self_extraction_dir / "5b354397f6393ed777639b7d40dec3f37215dcb5078c63993e8a9703e819e2bc.inno").exists(), reason="Required data file is not present", ) + @pytest.mark.skip(reason="innoextractor in community repo now") def test_Inno_extract(self): extracted_files = file_extra_info.Inno_extract( file=f"{self_extraction_dir}/5b354397f6393ed777639b7d40dec3f37215dcb5078c63993e8a9703e819e2bc.inno", diff --git a/tests/test_objects.py b/tests/test_objects.py index d6859890375..8752f4b02b6 100644 --- a/tests/test_objects.py +++ b/tests/test_objects.py @@ -82,7 +82,7 @@ def test_get_ssdeep(self, empty_file): assert empty_file["file"].get_ssdeep() is not None except ImportError: assert empty_file["file"].get_ssdeep() is None - logging.warn("Need to install pydeep python module") + logging.warning("Need to install pydeep python module") def test_get_type(self, empty_file): assert empty_file["file"].get_type() == "empty" diff --git a/tests/test_parse_office.py b/tests/test_parse_office.py new file mode 100644 index 00000000000..9a909a99e4e --- /dev/null +++ b/tests/test_parse_office.py @@ -0,0 +1,82 @@ +import unittest +from pathlib import Path +from unittest.mock import MagicMock, patch + +import pytest +from lib.cuckoo.common.integrations.parse_office import Office + +data_dir = Path(__file__).parent / "data" / "office" +rtf_path = data_dir / "rtf_exploit.doc" + + +class TestParseOffice(unittest.TestCase): + @patch("lib.cuckoo.common.integrations.parse_office.RtfObjParser") + @patch("lib.cuckoo.common.integrations.parse_office.path_exists") + @patch("lib.cuckoo.common.integrations.parse_office.hashlib.sha256") + def test_parse_rtf(self, mock_sha256, mock_path_exists, MockRtfObjParser): + # Setup + mock_sha256.return_value.hexdigest.return_value = "dummy_sha256" + mock_path_exists.return_value = False + mock_rtfobj 
= MagicMock() + mock_rtfobj.format_id = 1 + mock_rtfobj.is_package = False + mock_rtfobj.is_ole = False + mock_rtfobj.rawdata = b"rawdata" + mock_rtfobj.start = 0 + MockRtfObjParser.return_value.objects = [mock_rtfobj] + + office = Office( + file_path="dummy_path", + task_id="dummy_task_id", + sha256="dummy_sha256", + options={}, + ) + + # Execute + result = office._parse_rtf(b"dummy_data") + + # Verify + expected_result = { + "1": [ + { + "class_name": "", + "size": len(mock_rtfobj.rawdata), + "filename": "object_00000000.raw", + "type_embed": "", + "CVE": "", + "sha256": "dummy_sha256", + "index": "00000000h", + } + ] + } + self.assertEqual(result, expected_result) + + + @pytest.mark.skipif(not data_dir.exists(), reason="Required data file is not present") + @pytest.mark.skipif(not rtf_path.exists(), reason="Required data file is not present") + def test_parse_real_rtf(self): + office = Office( + file_path=rtf_path, + task_id="1", + sha256="5b307600b1ceb84f29315c95e5b21776eb6154b79214528629e4fc2310cd50e3", + options={}, + ) + result = office._parse_rtf(Path(rtf_path).read_bytes()) + + assert result == { + "2": [ + { + "class_name": "Equation.3", + "size": 3584, + "filename": "object_0000272F.bin", + "type_embed": "Embedded", + "CVE": "Microsoft Equation 3.0 (Known Related to CVE-2017-11882 or CVE-2018-0802)", + "sha256": "c00b73082638eda4af3d5318aba64ae32d23f703a02c7338d5e34230a7855e70", + "index": "0000272Fh", + } + ] + } + + +if __name__ == "__main__": + unittest.main() diff --git a/tests/test_peepdf.py b/tests/test_peepdf.py new file mode 100644 index 00000000000..8fd807e9463 --- /dev/null +++ b/tests/test_peepdf.py @@ -0,0 +1,40 @@ +from pathlib import Path + +import pytest + +from lib.cuckoo.common.integrations.peepdf import peepdf_parse + +data_dir = Path(__file__).parent / "data" / "malware" +pdf_path = data_dir / "ad6cedb0d1244c1d740bf5f681850a275c4592281cdebb491ce533edd9d6a77d" + +expected_result = { + "Info": { + "Creator": "Scribus 1.3.3.12", + "Producer": "Scribus PDF Library 1.3.3.12", + "Author": "" + }, + "Dates": [], + "Keywords": {}, + "JSStreams": [ + { + "Object ID": 13, + "Offset": 872, + "Size": 1255, + } + ], + "All_URLs": [], + "JS_URLs": ["http://78.109.30.5/count/005AD56F/load.php?pdf=a684eceee76fc522773286a895bc8436\x00"], +} + +pdfresult = {"Info": {}, "Dates": [], "Keywords": {}, "JSStreams": [], "All_URLs": [], "JS_URLs": []} + + +@pytest.mark.skipif(not data_dir.exists(), reason="Required data file is not present") +class TestPeepdf: + """Class to test peepdf_parse.""" + @pytest.mark.skipif(not pdf_path.exists(), reason="Required data file is not present") + def test_peepdf_parse_valid_pdf(self): + """Test parsing a valid PDF sample.""" + result = peepdf_parse(str(pdf_path), pdfresult) + del result["JSStreams"][0]["Data"] + assert result == expected_result diff --git a/tests/test_quarantine.py b/tests/test_quarantine.py index 657d9a94c6b..9e10d1d6b8c 100644 --- a/tests/test_quarantine.py +++ b/tests/test_quarantine.py @@ -2,12 +2,15 @@ # This file is part of Cuckoo Sandbox - http://www.cuckoosandbox.org # See the file 'docs/LICENSE' for copying permission. 
+import os import pathlib +import struct import tempfile +from unittest import mock import pytest -from lib.cuckoo.common.quarantine import mbam_unquarantine, mse_unquarantine, unquarantine +from lib.cuckoo.common.quarantine import bytearray_xor, mbam_unquarantine, mse_unquarantine, trend_unquarantine, unquarantine # from tcr_misc import get_sample @@ -23,6 +26,8 @@ def _grab_sample(sample_hash): return _grab_sample """ +QUARANTINED_DATA = b"\xff\xee\xdd\xcc\xbb\xaa" + @pytest.fixture def empty_file(): @@ -31,6 +36,42 @@ def empty_file(): empty_file.close() +@pytest.fixture +def temp_trend_qarantined_pe(tmp_path): + def trend_tag(code: int, tag_data: bytes) -> bytes: + return struct.pack("= 3.6") +if sys.version_info[:2] < (3, 8): + sys.exit("You are running an incompatible version of Python, please use >= 3.8") CUCKOO_ROOT = os.path.join(os.path.abspath(os.path.dirname(__file__)), "..") sys.path.append(CUCKOO_ROOT) @@ -12,9 +12,9 @@ def test_suricata_naming(): assert "Sharik" == get_suricata_family("ET MALWARE Sharik/Smoke CnC Beacon 11") - assert "Revenge-Rat" == get_suricata_family("ETPRO TROJAN MSIL/Revenge-RAT CnC Checkin") + assert "Revenge-RAT" == get_suricata_family("ETPRO TROJAN MSIL/Revenge-RAT CnC Checkin") assert "Predator" == get_suricata_family("ETPRO TROJAN Win32/Predator The Thief Initial CnC Checkin") - assert "Medusahttp" == get_suricata_family("ET TROJAN MedusaHTTP Variant CnC Checkin M2") + assert "MedusaHTTP" == get_suricata_family("ET TROJAN MedusaHTTP Variant CnC Checkin M2") assert False is get_suricata_family("ETPRO TROJAN Virus.Win32.Lamer.bd checkin") assert False is get_suricata_family("ETPRO TROJAN Custom Cobalt Strike Beacon UA") assert False is get_suricata_family("ET TROJAN Unit42 PoisonIvy Keepalive to CnC") @@ -47,7 +47,7 @@ def test_suricata_naming(): assert False is get_suricata_family("ETPRO TROJAN MSIL/Owned Bot CnC Checkin") assert False is get_suricata_family("ETPRO TROJAN Win-Python-Backdoor Config Inbound") assert False is get_suricata_family("ETPRO TROJAN VBS/Susp.Enumerator Script Inbound") - assert "Crazycrypt" == get_suricata_family("ETPRO TROJAN CrazyCrypt Ransomware CnC Activity") + assert "CrazyCrypt" == get_suricata_family("ETPRO TROJAN CrazyCrypt Ransomware CnC Activity") assert "Virut" == get_suricata_family("ET TROJAN Win32.Virut - GET") assert False is get_suricata_family("ET TROJAN Trojan.FakeMS Checkin") assert False is get_suricata_family("ET TROJAN iOS/WireLurker CnC Beacon") @@ -59,5 +59,9 @@ def test_suricata_naming(): assert "Tinba" == get_suricata_family("ET MALWARE [PTsecurity] Tinba Checkin 4") assert False is get_suricata_family("ET TROJAN Suspicious User-Agent (WindowsNT) With No Separating Space") assert "Photoloader" == get_suricata_family("ET MALWARE W32/Photoloader.Downloader Request Cookie") - assert "Pcrat" == get_suricata_family("ET MALWARE Backdoor family PCRat/Gh0st CnC traffic") + assert "pcrat" == get_suricata_family("ET MALWARE Backdoor family PCRat/Gh0st CnC traffic") assert "Stealc" == get_suricata_family("ET MALWARE [SEKOIA.IO] Win32/Stealc C2 Check-in") + + +if __name__ == "__main__": + print("Suricata detects as:", get_suricata_family(sys.argv[1])) diff --git a/tests/test_tls_utils.py b/tests/test_tls_utils.py index 5d6a166644a..c0e2d5f48c4 100644 --- a/tests/test_tls_utils.py +++ b/tests/test_tls_utils.py @@ -15,7 +15,6 @@ class TestTlsUtils: - def test_tlslog_to_sslkeylogfile(self, tmpdir): input_log = f"{tmpdir}/tlsdump.log" dest_log = f"{tmpdir}/sslkeys.log" diff --git a/tests/test_utils.py 
b/tests/test_utils.py index cd145863906..fd6b8aa387e 100644 --- a/tests/test_utils.py +++ b/tests/test_utils.py @@ -3,13 +3,14 @@ # See the file 'docs/LICENSE' for copying permission. import os - -import pytest from tcr_misc import random_string -from lib.cuckoo.common import utils +from unittest import mock +import pytest from lib.cuckoo.common.exceptions import CuckooOperationalError from lib.cuckoo.common.path_utils import path_mkdir +from lib.cuckoo.common import utils + def test_get_memdump_path(mocker): @@ -168,7 +169,75 @@ def test_pretty_print_retval_true_status(self): @pytest.mark.skip def test_is_safelisted_domain(): from lib.cuckoo.common.safelist import is_safelisted_domain - assert is_safelisted_domain("java.com") is True assert is_safelisted_domain("java2.com") is False assert is_safelisted_domain("crl.microsoft.com") is True + + + @pytest.fixture + def mock_config(mocker): + mock_config = mocker.patch("lib.cuckoo.common.utils.config") + mock_config.cuckoo.get.return_value = b"/tmp" + return mock_config + + @pytest.fixture + def mock_path_exists(mocker): + return mocker.patch("lib.cuckoo.common.utils.path_exists", return_value=False) + + @pytest.fixture + def mock_path_mkdir(mocker): + return mocker.patch("lib.cuckoo.common.utils.path_mkdir") + + @pytest.fixture + def mock_tempfile(mocker): + return mocker.patch("lib.cuckoo.common.utils.tempfile.mkdtemp", return_value="/tmp/cuckoo-tmp/upload_1234") + + @pytest.fixture + def mock_open(mocker): + return mocker.patch("builtins.open", mock.mock_open()) + + def test_store_temp_file_bytes(mock_config, mock_path_exists, mock_path_mkdir, mock_tempfile, mock_open): + filedata = b"test data" + filename = "testfile.txt" + result = utils.store_temp_file(filedata, filename) + assert result == b"/tmp/cuckoo-tmp/upload_1234/testfile.txt" + mock_open.assert_called_once_with(b"/tmp/cuckoo-tmp/upload_1234/testfile.txt", "wb") + mock_open().write.assert_called_once_with(filedata) + + def test_store_temp_file_filelike(mock_config, mock_path_exists, mock_path_mkdir, mock_tempfile, mock_open): + filedata = mock.Mock() + filedata.read.side_effect = [b"chunk1", b"chunk2", b""] + filename = "testfile.txt" + result = utils.store_temp_file(filedata, filename) + assert result == b"/tmp/cuckoo-tmp/upload_1234/testfile.txt" + mock_open.assert_called_once_with(b"/tmp/cuckoo-tmp/upload_1234/testfile.txt", "wb") + mock_open().write.assert_has_calls([mock.call(b"chunk1"), mock.call(b"chunk2")]) + + def test_store_temp_file_with_path(mock_config, mock_path_exists, mock_path_mkdir, mock_tempfile, mock_open): + filedata = b"test data" + filename = "testfile.txt" + path = b"/custom/path" + result = utils.store_temp_file(filedata, filename, path) + assert result == b"/custom/path/upload_1234/testfile.txt" + mock_open.assert_called_once_with(b"/custom/path/upload_1234/testfile.txt", "wb") + mock_open().write.assert_called_once_with(filedata) + + def test_store_temp_file_path_exists(mock_config, mocker, mock_tempfile, mock_open): + mock_path_exists = mocker.patch("lib.cuckoo.common.utils.path_exists", return_value=True) + filedata = b"test data" + filename = "testfile.txt" + result = utils.store_temp_file(filedata, filename) + assert result == b"/tmp/cuckoo-tmp/upload_1234/testfile.txt" + mock_open.assert_called_once_with(b"/tmp/cuckoo-tmp/upload_1234/testfile.txt", "wb") + mock_open().write.assert_called_once_with(filedata) + mock_path_exists.assert_called_once_with("/tmp/cuckoo-tmp") + + def test_store_temp_file_path_mkdir_error(mock_config, mocker, 
mock_tempfile): + # mock_path_exists = mocker.patch("lib.cuckoo.common.utils.path_exists", return_value=False) + mock_path_mkdir = mocker.patch("lib.cuckoo.common.utils.path_mkdir", side_effect=OSError) + filedata = b"test data" + filename = "testfile.txt" + with pytest.raises(CuckooOperationalError): + utils.store_temp_file(filedata, filename) + mock_path_mkdir.assert_called_once_with("/tmp/cuckoo-tmp") + diff --git a/tests/test_web_utils.py b/tests/test_web_utils.py index b52c5fd5d56..3524e0c1628 100644 --- a/tests/test_web_utils.py +++ b/tests/test_web_utils.py @@ -1,14 +1,19 @@ # Copyright (C) 2010-2015 Cuckoo Foundation. # This file is part of Cuckoo Sandbox - http://www.cuckoosandbox.org # See the file 'docs/LICENSE' for copying permission. - import tempfile +import unittest import httpretty import pytest from lib.cuckoo.common.path_utils import path_delete, path_write_file -from lib.cuckoo.common.web_utils import _download_file, force_int, get_file_content, parse_request_arguments +from lib.cuckoo.common.web_utils import ( + _download_file, + force_int, + get_file_content, + parse_request_arguments, +) @pytest.fixture @@ -90,3 +95,7 @@ def test_parse_request_arguments(mock_request): def test_force_int(): assert force_int(value="1") == 1 assert force_int(value="$") == 0 + + +if __name__ == "__main__": + unittest.main() diff --git a/tests_parsers/readme.md b/tests_parsers/readme.md deleted file mode 100644 index d7205b55664..00000000000 --- a/tests_parsers/readme.md +++ /dev/null @@ -1,2 +0,0 @@ -* Sample goes to: - * https://github.com/CAPESandbox/CAPE-TestFiles/tree/main/malware diff --git a/tests_parsers/test_agenttesla.py b/tests_parsers/test_agenttesla.py deleted file mode 100644 index b52cd064ce6..00000000000 --- a/tests_parsers/test_agenttesla.py +++ /dev/null @@ -1,49 +0,0 @@ -from contextlib import suppress - -from modules.processing.parsers.CAPE.AgentTesla import extract_config - -HAVE_MACO = False -with suppress(ImportError): - from modules.processing.parsers.MACO.AgentTesla import convert_to_MACO - - HAVE_MACO = True - - -def test_agenttesla(): - # AgentTeslaV5 - with open("tests/data/malware/893f4dc8f8a1dcee05a0840988cf90bc93c1cda5b414f35a6adb5e9f40678ce9", "rb") as data: - conf = extract_config(data.read()) - assert conf == { - "Protocol": "SMTP", - "C2": "mail.guestequipment.com.au", - "Username": "sendlog@guestequipment.com.au", - "Password": "Clone89!", - "EmailTo": "info@marethon.com", - "Persistence_Filename": "newfile.exe", - "ExternalIPCheckServices": ["http://ip-api.com/line/?fields=hosting"], - } - - if HAVE_MACO: - assert convert_to_MACO(conf).model_dump(exclude_defaults=True, exclude_none=True) == { - "family": "AgentTesla", - "other": { - "Protocol": "SMTP", - "C2": "mail.guestequipment.com.au", - "Username": "sendlog@guestequipment.com.au", - "Password": "Clone89!", - "EmailTo": "info@marethon.com", - "Persistence_Filename": "newfile.exe", - "ExternalIPCheckServices": ["http://ip-api.com/line/?fields=hosting"], - }, - "smtp": [ - { - "username": "sendlog@guestequipment.com.au", - "password": "Clone89!", - "hostname": "mail.guestequipment.com.au", - "mail_to": ["info@marethon.com"], - "usage": "c2", - } - ], - "http": [{"uri": "http://ip-api.com/line/?fields=hosting", "usage": "other"}], - "paths": [{"path": "newfile.exe", "usage": "storage"}], - } diff --git a/tests_parsers/test_asyncrat.py b/tests_parsers/test_asyncrat.py deleted file mode 100644 index ee49867d95d..00000000000 --- a/tests_parsers/test_asyncrat.py +++ /dev/null @@ -1,44 +0,0 @@ -from 
contextlib import suppress - -from modules.processing.parsers.CAPE.AsyncRAT import extract_config - -HAVE_MACO = False -with suppress(ImportError): - from modules.processing.parsers.MACO.AsyncRAT import convert_to_MACO - - HAVE_MACO = True - - -def test_asyncrat(): - with open("tests/data/malware/f08b325f5322a698e14f97db29d322e9ee91ad636ac688af352d51057fc56526", "rb") as data: - conf = extract_config(data.read()) - assert conf == { - "C2s": ["todfg.duckdns.org"], - "Ports": "6745", - "Version": "0.5.7B", - "Folder": "%AppData%", - "Filename": "updateee.exe", - "Install": "false", - "Mutex": "AsyncMutex_6SI8OkPnk", - "Pastebin": "null", - } - - if HAVE_MACO: - assert convert_to_MACO(conf).model_dump(exclude_defaults=True, exclude_none=True) == { - "family": "AsyncRAT", - "version": "0.5.7B", - "capability_disabled": ["persistence"], - "mutex": ["AsyncMutex_6SI8OkPnk"], - "other": { - "C2s": ["todfg.duckdns.org"], - "Ports": "6745", - "Version": "0.5.7B", - "Folder": "%AppData%", - "Filename": "updateee.exe", - "Install": "false", - "Mutex": "AsyncMutex_6SI8OkPnk", - "Pastebin": "null", - }, - "http": [{"hostname": "todfg.duckdns.org", "port": 6, "usage": "c2"}], - "paths": [{"path": "%AppData%/updateee.exe", "usage": "install"}], - } diff --git a/tests_parsers/test_aurorastealer.py b/tests_parsers/test_aurorastealer.py deleted file mode 100644 index a09cc324614..00000000000 --- a/tests_parsers/test_aurorastealer.py +++ /dev/null @@ -1,38 +0,0 @@ -from contextlib import suppress - -from modules.processing.parsers.CAPE.AuroraStealer import extract_config - -HAVE_MACO = False -with suppress(ImportError): - from modules.processing.parsers.MACO.AuroraStealer import convert_to_MACO - - HAVE_MACO = True - - -def test_aurorastealer(): - with open("tests/data/malware/8da8821d410b94a2811ce7ae80e901d7e150ad3420d677b158e45324a6606ac4", "rb") as data: - conf = extract_config(data.read()) - assert conf == { - "BuildID": "x64pump", - "MD5Hash": "f29f33b296b35ec5e7fc3ee784ef68ee", - "C2": "77.91.85.73", - "Architecture": "X64", - "BuildGroup": "x64pump", - "BuildAccept": "0", - "Date": "2023-04-06 19", - } - - if HAVE_MACO: - assert convert_to_MACO(conf).model_dump(exclude_defaults=True, exclude_none=True) == { - "family": "AuroraStealer", - "other": { - "BuildID": "x64pump", - "MD5Hash": "f29f33b296b35ec5e7fc3ee784ef68ee", - "C2": "77.91.85.73", - "Architecture": "X64", - "BuildGroup": "x64pump", - "BuildAccept": "0", - "Date": "2023-04-06 19", - }, - "http": [{"hostname": "77.91.85.73", "usage": "c2"}], - } diff --git a/tests_parsers/test_blackdropper.py b/tests_parsers/test_blackdropper.py deleted file mode 100644 index cf8326f56cd..00000000000 --- a/tests_parsers/test_blackdropper.py +++ /dev/null @@ -1,36 +0,0 @@ -# Copyright (C) 2010-2015 Cuckoo Foundation. -# This file is part of Cuckoo Sandbox - http://www.cuckoosandbox.org -# See the file 'docs/LICENSE' for copying permission. 
- -from contextlib import suppress - -from modules.processing.parsers.CAPE.BlackDropper import extract_config - -HAVE_MACO = False -with suppress(ImportError): - from modules.processing.parsers.MACO.BlackDropper import convert_to_MACO - - HAVE_MACO = True - - -def test_blackdropper(): - with open("tests/data/malware/f8026ae3237bdd885e5fcaceb86bcab4087d8857e50ba472ca79ce44c12bc257", "rb") as data: - conf = extract_config(data.read()) - assert conf == { - "urls": ["http://72.5.42.222:8568/api/dll/", "http://72.5.42.222:8568/api/fileZip"], - "directories": ["\\Music\\dkcydqtwjv"], - "campaign": "oFwQ0aQ3v", - } - - if HAVE_MACO: - assert convert_to_MACO(conf).model_dump(exclude_defaults=True, exclude_none=True) == { - "family": "BlackDropper", - "campaign_id": ["oFwQ0aQ3v"], - "other": { - "urls": ["http://72.5.42.222:8568/api/dll/", "http://72.5.42.222:8568/api/fileZip"], - "directories": ["\\Music\\dkcydqtwjv"], - "campaign": "oFwQ0aQ3v", - }, - "http": [{"uri": "http://72.5.42.222:8568/api/dll/"}, {"uri": "http://72.5.42.222:8568/api/fileZip"}], - "paths": [{"path": "\\Music\\dkcydqtwjv"}], - } diff --git a/tests_parsers/test_bumblebee.py b/tests_parsers/test_bumblebee.py deleted file mode 100644 index c26509687a4..00000000000 --- a/tests_parsers/test_bumblebee.py +++ /dev/null @@ -1,28 +0,0 @@ -# Copyright (C) 2010-2015 Cuckoo Foundation. -# This file is part of Cuckoo Sandbox - http://www.cuckoosandbox.org -# See the file 'docs/LICENSE' for copying permission. - -from contextlib import suppress - -from modules.processing.parsers.CAPE.BumbleBee import extract_config - -HAVE_MACO = False -with suppress(ImportError): - from modules.processing.parsers.MACO.BumbleBee import convert_to_MACO - - HAVE_MACO = True - - -def test_bumblebee(): - with open("tests/data/malware/f8a6eddcec59934c42ea254cdd942fb62917b5898f71f0feeae6826ba4f3470d", "rb") as data: - conf = extract_config(data.read()) - assert conf == {"Botnet ID": "YTBSBbNTWU", "Campaign ID": "1904r", "Data": "XNgHUGLrCD", "C2s": ["444"]} - if HAVE_MACO: - assert convert_to_MACO(conf).model_dump(exclude_defaults=True, exclude_none=True) == { - "family": "BumbleBee", - "campaign_id": ["1904r"], - "identifier": ["YTBSBbNTWU"], - "other": {"Botnet ID": "YTBSBbNTWU", "Campaign ID": "1904r", "Data": "XNgHUGLrCD", "C2s": ["444"]}, - "binaries": [{"data": "XNgHUGLrCD"}], - "http": [{"hostname": "444", "usage": "c2"}], - } diff --git a/tests_parsers/test_carbanak.py b/tests_parsers/test_carbanak.py deleted file mode 100644 index bb0d512bccf..00000000000 --- a/tests_parsers/test_carbanak.py +++ /dev/null @@ -1,21 +0,0 @@ -from contextlib import suppress - -from modules.processing.parsers.CAPE.Carbanak import extract_config - -HAVE_MACO = False -with suppress(ImportError): - from modules.processing.parsers.MACO.Carbanak import convert_to_MACO - - HAVE_MACO = True - - -def test_carbanak(): - with open("tests/data/malware/c9c1b06cb9c9bd6fc4451f5e2847a1f9524bb2870d7bb6f0ee09b9dd4e3e4c84", "rb") as data: - conf = extract_config(data.read()) - assert conf["C2"] == ["5.161.223.210:443", "207.174.30.226:443"] - if HAVE_MACO: - assert convert_to_MACO(conf).model_dump(exclude_defaults=True, exclude_none=True) == { - "family": "Carbanak", - "other": {"C2": ["5.161.223.210:443", "207.174.30.226:443"]}, - "http": [{"hostname": "5.161.223.210:443", "usage": "c2"}, {"hostname": "207.174.30.226:443", "usage": "c2"}], - } diff --git a/tests_parsers/test_cobaltstrikebeacon.py b/tests_parsers/test_cobaltstrikebeacon.py deleted file mode 100644 index 
12afcdd3677..00000000000 --- a/tests_parsers/test_cobaltstrikebeacon.py +++ /dev/null @@ -1,125 +0,0 @@ -# Copyright (C) 2010-2015 Cuckoo Foundation. -# This file is part of Cuckoo Sandbox - http://www.cuckoosandbox.org -# See the file 'docs/LICENSE' for copying permission. - -from contextlib import suppress - -from modules.processing.parsers.CAPE.CobaltStrikeBeacon import extract_config - -HAVE_MACO = False -with suppress(ImportError): - from modules.processing.parsers.MACO.CobaltStrikeBeacon import convert_to_MACO - - HAVE_MACO = True - - -def test_csb(): - with open("tests/data/malware/2588fd3232138f587e294aea5cc9a0611d1e165b199743552c84bfddc1e4c063", "rb") as data: - conf = extract_config(data.read()) - assert convert_to_MACO(conf).model_dump(exclude_defaults=True, exclude_none=True) == { - "BeaconType": ["HTTP"], - "Port": 4848, - "SleepTime": 60000, - "MaxGetSize": 1048576, - "Jitter": 0, - "MaxDNS": "Not Found", - "PublicKey": "30819f300d06092a864886f70d010101050003818d0030818902818100bebe41805d3c15a738caf3e308a992d4d507ce827996a8c9d783c766963e7e73083111729ae0abc1b49af0bcf803efdcaf83ac694fb53d043a88e9333f169e026a3c4e63cc6d4cd1aa5e199cb95eec500f948ac472c0ab2eda385d35fb8592d74b1154a1c671afb310eccb0b139ee1100907bfcdd8dfbf3385803a11bc252995020301000100000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000", - "C2Server": "192.144.206.100,/load", - "UserAgent": "Not Found", - "HttpPostUri": "/submit.php", - "Malleable_C2_Instructions": [], - "HttpGet_Metadata": "Not Found", - "HttpPost_Metadata": "Not Found", - "SpawnTo": "d7a9ca15a07f82bfd3b63020da38aa16", - "PipeName": "Not Found", - "DNS_Idle": "Not Found", - "DNS_Sleep": "Not Found", - "SSH_Host": "Not Found", - "SSH_Port": "Not Found", - "SSH_Username": "Not Found", - "SSH_Password_Plaintext": "Not Found", - "SSH_Password_Pubkey": "Not Found", - "HttpGet_Verb": "GET", - "HttpPost_Verb": "POST", - "HttpPostChunk": 0, - "Spawnto_x86": "%windir%\\syswow64\\rundll32.exe", - "Spawnto_x64": "%windir%\\sysnative\\rundll32.exe", - "CryptoScheme": 0, - "Proxy_Config": "Not Found", - "Proxy_User": "Not Found", - "Proxy_Password": "Not Found", - "Proxy_Behavior": "Use IE settings", - "Watermark": 391144938, - "bStageCleanup": "False", - "bCFGCaution": "False", - "KillDate": 0, - "bProcInject_StartRWX": "True", - "bProcInject_UseRWX": "True", - "bProcInject_MinAllocSize": 0, - "ProcInject_PrependAppend_x86": "Empty", - "ProcInject_PrependAppend_x64": "Empty", - "ProcInject_Execute": ["CreateThread", "SetThreadContext", "CreateRemoteThread", "RtlCreateUserThread"], - "ProcInject_AllocationMethod": "VirtualAllocEx", - "bUsesCookies": "True", - "HostHeader": "", - } - if HAVE_MACO: - assert convert_to_MACO(conf).model_dump(exclude_defaults=True, exclude_none=True) == { - "family": "CobaltStrikeBeacon", - "capability_enabled": ["ProcInject_StartRWX", "ProcInject_UseRWX", "UsesCookies"], - "capability_disabled": ["StageCleanup", "CFGCaution"], - "sleep_delay": 60000, - "sleep_delay_jitter": 0, - "other": { - "BeaconType": ["HTTP"], - "Port": 4848, - "SleepTime": 60000, - "MaxGetSize": 1048576, - "Jitter": 0, - "MaxDNS": "Not Found", - "PublicKey": 
"30819f300d06092a864886f70d010101050003818d0030818902818100bebe41805d3c15a738caf3e308a992d4d507ce827996a8c9d783c766963e7e73083111729ae0abc1b49af0bcf803efdcaf83ac694fb53d043a88e9333f169e026a3c4e63cc6d4cd1aa5e199cb95eec500f948ac472c0ab2eda385d35fb8592d74b1154a1c671afb310eccb0b139ee1100907bfcdd8dfbf3385803a11bc252995020301000100000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000", - "C2Server": "192.144.206.100,/load", - "UserAgent": "Not Found", - "HttpPostUri": "/submit.php", - "Malleable_C2_Instructions": [], - "HttpGet_Metadata": "Not Found", - "HttpPost_Metadata": "Not Found", - "SpawnTo": "d7a9ca15a07f82bfd3b63020da38aa16", - "PipeName": "Not Found", - "DNS_Idle": "Not Found", - "DNS_Sleep": "Not Found", - "SSH_Host": "Not Found", - "SSH_Port": "Not Found", - "SSH_Username": "Not Found", - "SSH_Password_Plaintext": "Not Found", - "SSH_Password_Pubkey": "Not Found", - "HttpGet_Verb": "GET", - "HttpPost_Verb": "POST", - "HttpPostChunk": 0, - "Spawnto_x86": "%windir%\\syswow64\\rundll32.exe", - "Spawnto_x64": "%windir%\\sysnative\\rundll32.exe", - "CryptoScheme": 0, - "Proxy_Config": "Not Found", - "Proxy_User": "Not Found", - "Proxy_Password": "Not Found", - "Proxy_Behavior": "Use IE settings", - "Watermark": 391144938, - "bStageCleanup": "False", - "bCFGCaution": "False", - "KillDate": 0, - "bProcInject_StartRWX": "True", - "bProcInject_UseRWX": "True", - "bProcInject_MinAllocSize": 0, - "ProcInject_PrependAppend_x86": "Empty", - "ProcInject_PrependAppend_x64": "Empty", - "ProcInject_Execute": ["CreateThread", "SetThreadContext", "CreateRemoteThread", "RtlCreateUserThread"], - "ProcInject_AllocationMethod": "VirtualAllocEx", - "bUsesCookies": "True", - "HostHeader": "", - }, - "http": [ - {"hostname": "192.144.206.100", "port": 4848, "path": "/load", "method": "GET", "usage": "c2"}, - {"hostname": "192.144.206.100", "port": 4848, "path": "/submit.php", "method": "POST", "usage": "c2"}, - ], - "paths": [{"path": "%windir%\\syswow64\\rundll32.exe"}, {"path": "%windir%\\sysnative\\rundll32.exe"}], - } diff --git a/tests_parsers/test_darkgate.py b/tests_parsers/test_darkgate.py deleted file mode 100644 index 7040df6f51f..00000000000 --- a/tests_parsers/test_darkgate.py +++ /dev/null @@ -1,21 +0,0 @@ -from contextlib import suppress - -from modules.processing.parsers.CAPE.DarkGate import extract_config - -HAVE_MACO = False -with suppress(ImportError): - from modules.processing.parsers.MACO.DarkGate import convert_to_MACO - - HAVE_MACO = True - - -def test_darkgate(): - with open("tests/data/malware/1c3ae64795b61034080be00601b947819fe071efd69d7fc791a99ec666c2043d", "rb") as data: - conf = extract_config(data.read()) - assert conf["C2"] == ["http://80.66.88.145"] - if HAVE_MACO: - assert convert_to_MACO(conf).model_dump(exclude_defaults=True, exclude_none=True) == { - "family": "DarkGate", - "other": {"C2": ["http://80.66.88.145"]}, - "http": [{"uri": "http://80.66.88.145", "usage": "c2"}], - } diff --git a/tests_parsers/test_icedid.py b/tests_parsers/test_icedid.py deleted file mode 100644 index 8b8b389a15d..00000000000 --- a/tests_parsers/test_icedid.py +++ /dev/null @@ -1,22 +0,0 @@ -from contextlib import suppress - -from modules.processing.parsers.CAPE.IcedIDLoader import extract_config - -HAVE_MACO = False -with suppress(ImportError): - from modules.processing.parsers.MACO.IcedIDLoader import convert_to_MACO - - HAVE_MACO = True - - -def test_icedid(): - 
with open("tests/data/malware/7aaf80eb1436b946b2bd710ab57d2dcbaad2b1553d45602f2f3af6f2cfca5212", "rb") as data: - conf = extract_config(data.read()) - assert conf == {"C2": "anscowerbrut.com", "Campaign": 2738000827} - if HAVE_MACO: - assert convert_to_MACO(conf).model_dump(exclude_defaults=True, exclude_none=True) == { - "family": "IcedIDLoader", - "campaign_id": ["2738000827"], - "other": {"C2": "anscowerbrut.com", "Campaign": 2738000827}, - "http": [{"hostname": "anscowerbrut.com", "usage": "c2"}], - } diff --git a/tests_parsers/test_koiloader.py b/tests_parsers/test_koiloader.py deleted file mode 100644 index 38a74bf700a..00000000000 --- a/tests_parsers/test_koiloader.py +++ /dev/null @@ -1,28 +0,0 @@ -# Copyright (C) 2010-2015 Cuckoo Foundation. -# This file is part of Cuckoo Sandbox - http://www.cuckoosandbox.org -# See the file 'docs/LICENSE' for copying permission. - -from contextlib import suppress - -from modules.processing.parsers.CAPE.KoiLoader import extract_config - -HAVE_MACO = False -with suppress(ImportError): - from modules.processing.parsers.MACO.KoiLoader import convert_to_MACO - - HAVE_MACO = True - - -def test_koiloader(): - with open("tests/data/malware/b462e3235c7578450b2b56a8aff875a3d99d22f6970a01db3ba98f7ecb6b01a0", "rb") as data: - conf = extract_config(data.read()) - assert conf == {"C2": ["http://91.202.233.209/hypermetropia.php", "https://admiralpub.ca/wp-content/uploads/2017"]} - if HAVE_MACO: - assert convert_to_MACO(conf).model_dump(exclude_defaults=True, exclude_none=True) == { - "family": "KoiLoader", - "other": {"C2": ["http://91.202.233.209/hypermetropia.php", "https://admiralpub.ca/wp-content/uploads/2017"]}, - "http": [ - {"uri": "http://91.202.233.209/hypermetropia.php", "usage": "c2"}, - {"uri": "https://admiralpub.ca/wp-content/uploads/2017", "usage": "c2"}, - ], - } diff --git a/tests_parsers/test_latrodectus.py b/tests_parsers/test_latrodectus.py deleted file mode 100644 index 410bac0a9e2..00000000000 --- a/tests_parsers/test_latrodectus.py +++ /dev/null @@ -1,293 +0,0 @@ -# Copyright (C) 2010-2015 Cuckoo Foundation. -# This file is part of Cuckoo Sandbox - http://www.cuckoosandbox.org -# See the file 'docs/LICENSE' for copying permission. 
- -from contextlib import suppress - -from modules.processing.parsers.CAPE.Latrodectus import extract_config - -HAVE_MACO = False -with suppress(ImportError): - from modules.processing.parsers.MACO.Latrodectus import convert_to_MACO - - HAVE_MACO = True - - -def test_latrodectus(): - with open("tests/data/malware/a547cff9991a713535e5c128a0711ca68acf9298cc2220c4ea0685d580f36811", "rb") as data: - conf = extract_config(data.read()) - assert conf == { - "C2": ["https://arsimonopa.com/live/", "https://lemonimonakio.com/live/"], - "Group name": "Novik", - "Campaign ID": 1053565364, - "Version": "1.1", - "RC4 key": "12345", - "Strings": [ - "/c ipconfig /all", - "C:\\Windows\\System32\\cmd.exe", - "/c systeminfo", - "C:\\Windows\\System32\\cmd.exe", - "/c nltest /domain_trusts", - "C:\\Windows\\System32\\cmd.exe", - "/c nltest /domain_trusts /all_trusts", - "C:\\Windows\\System32\\cmd.exe", - "/c net view /all /domain", - "C:\\Windows\\System32\\cmd.exe", - "/c net view /all", - "C:\\Windows\\System32\\cmd.exe", - '/c net group "Domain Admins" /domain', - "C:\\Windows\\System32\\cmd.exe", - "/Node:localhost /Namespace:\\\\root\\SecurityCenter2 Path AntiVirusProduct Get * /Format:List", - "C:\\Windows\\System32\\wbem\\wmic.exe", - "/c net config workstation", - "C:\\Windows\\System32\\cmd.exe", - "/c wmic.exe /node:localhost /namespace:\\\\root\\SecurityCenter2 path AntiVirusProduct Get DisplayName | findstr /V /B /C:displayName || echo No Antivirus installed", - "C:\\Windows\\System32\\cmd.exe", - "/c whoami /groups", - "C:\\Windows\\System32\\cmd.exe", - ".dll", - ".exe", - '"%s"', - "rundll32.exe", - '"%s", %s %s', - "runnung", - ":wtfbbq", - "%s%s", - "%s\\%d.dll", - "%d.dat", - "%s\\%s", - 'init -zzzz="%s\\%s"', - "front", - "/files/", - "Novik", - ".exe", - "Content-Type: application/x-www-form-urlencoded", - "POST", - "GET", - "curl/7.88.1", - "Mozilla/4.0 (compatible; MSIE 7.0; Windows NT 5.1; Tob 1.1)", - "Mozilla/4.0 (compatible; MSIE 7.0; Windows NT 5.1; Tob 1.1)", - "CLEARURL", - "URLS", - "COMMAND", - "ERROR", - "12345", - "counter=%d&type=%d&guid=%s&os=%d&arch=%d&username=%s&group=%lu&ver=%d.%d&up=%d&direction=%s", - "counter=%d&type=%d&guid=%s&os=%d&arch=%d&username=%s&group=%lu&ver=%d.%d&up=%d&direction=%s", - "counter=%d&type=%d&guid=%s&os=%d&arch=%d&username=%s&group=%lu&ver=%d.%d&up=%d&direction=%s", - "Mozilla/4.0 (compatible; MSIE 7.0; Windows NT 5.1; Tob 1.1)", - "%s%d.dll", - "%s%d.exe", - "LogonTrigger", - "%x%x", - "TimeTrigger", - "PT1H%02dM", - "&mac=", - "%04d-%02d-%02dT%02d:%02d:%02d", - "%02x", - ":%02x", - "PT0S", - "&computername=%s", - "&domain=%s", - "\\*.dll", - "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/", - "%04X%04X%04X%04X%08X%04X", - "%04X%04X%04X%04X%08X%04X", - "\\Registry\\Machine\\", - "AppData", - "Desktop", - "Startup", - "Personal", - "Local AppData", - "Software\\Microsoft\\Windows\\CurrentVersion\\Explorer\\Shell Folders", - "C:\\WINDOWS\\SYSTEM32\\rundll32.exe %s,%s", - "C:\\WINDOWS\\SYSTEM32\\rundll32.exe %s", - "URLS", - "URLS|%d|%s\r\n", - ], - } - if HAVE_MACO: - assert convert_to_MACO(conf).model_dump(exclude_defaults=True, exclude_none=True) == { - "family": "Latrodectus", - "version": "1.1", - "campaign_id": ["1053565364"], - "identifier": ["Novik"], - "decoded_strings": [ - "/c ipconfig /all", - "C:\\Windows\\System32\\cmd.exe", - "/c systeminfo", - "C:\\Windows\\System32\\cmd.exe", - "/c nltest /domain_trusts", - "C:\\Windows\\System32\\cmd.exe", - "/c nltest /domain_trusts /all_trusts", - 
"C:\\Windows\\System32\\cmd.exe", - "/c net view /all /domain", - "C:\\Windows\\System32\\cmd.exe", - "/c net view /all", - "C:\\Windows\\System32\\cmd.exe", - '/c net group "Domain Admins" /domain', - "C:\\Windows\\System32\\cmd.exe", - "/Node:localhost /Namespace:\\\\root\\SecurityCenter2 Path AntiVirusProduct Get * /Format:List", - "C:\\Windows\\System32\\wbem\\wmic.exe", - "/c net config workstation", - "C:\\Windows\\System32\\cmd.exe", - "/c wmic.exe /node:localhost /namespace:\\\\root\\SecurityCenter2 path AntiVirusProduct Get DisplayName | findstr /V /B /C:displayName || echo No Antivirus installed", - "C:\\Windows\\System32\\cmd.exe", - "/c whoami /groups", - "C:\\Windows\\System32\\cmd.exe", - ".dll", - ".exe", - '"%s"', - "rundll32.exe", - '"%s", %s %s', - "runnung", - ":wtfbbq", - "%s%s", - "%s\\%d.dll", - "%d.dat", - "%s\\%s", - 'init -zzzz="%s\\%s"', - "front", - "/files/", - "Novik", - ".exe", - "Content-Type: application/x-www-form-urlencoded", - "POST", - "GET", - "curl/7.88.1", - "Mozilla/4.0 (compatible; MSIE 7.0; Windows NT 5.1; Tob 1.1)", - "Mozilla/4.0 (compatible; MSIE 7.0; Windows NT 5.1; Tob 1.1)", - "CLEARURL", - "URLS", - "COMMAND", - "ERROR", - "12345", - "counter=%d&type=%d&guid=%s&os=%d&arch=%d&username=%s&group=%lu&ver=%d.%d&up=%d&direction=%s", - "counter=%d&type=%d&guid=%s&os=%d&arch=%d&username=%s&group=%lu&ver=%d.%d&up=%d&direction=%s", - "counter=%d&type=%d&guid=%s&os=%d&arch=%d&username=%s&group=%lu&ver=%d.%d&up=%d&direction=%s", - "Mozilla/4.0 (compatible; MSIE 7.0; Windows NT 5.1; Tob 1.1)", - "%s%d.dll", - "%s%d.exe", - "LogonTrigger", - "%x%x", - "TimeTrigger", - "PT1H%02dM", - "&mac=", - "%04d-%02d-%02dT%02d:%02d:%02d", - "%02x", - ":%02x", - "PT0S", - "&computername=%s", - "&domain=%s", - "\\*.dll", - "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/", - "%04X%04X%04X%04X%08X%04X", - "%04X%04X%04X%04X%08X%04X", - "\\Registry\\Machine\\", - "AppData", - "Desktop", - "Startup", - "Personal", - "Local AppData", - "Software\\Microsoft\\Windows\\CurrentVersion\\Explorer\\Shell Folders", - "C:\\WINDOWS\\SYSTEM32\\rundll32.exe %s,%s", - "C:\\WINDOWS\\SYSTEM32\\rundll32.exe %s", - "URLS", - "URLS|%d|%s\r\n", - ], - "other": { - "C2": ["https://arsimonopa.com/live/", "https://lemonimonakio.com/live/"], - "Group name": "Novik", - "Campaign ID": 1053565364, - "Version": "1.1", - "RC4 key": "12345", - "Strings": [ - "/c ipconfig /all", - "C:\\Windows\\System32\\cmd.exe", - "/c systeminfo", - "C:\\Windows\\System32\\cmd.exe", - "/c nltest /domain_trusts", - "C:\\Windows\\System32\\cmd.exe", - "/c nltest /domain_trusts /all_trusts", - "C:\\Windows\\System32\\cmd.exe", - "/c net view /all /domain", - "C:\\Windows\\System32\\cmd.exe", - "/c net view /all", - "C:\\Windows\\System32\\cmd.exe", - '/c net group "Domain Admins" /domain', - "C:\\Windows\\System32\\cmd.exe", - "/Node:localhost /Namespace:\\\\root\\SecurityCenter2 Path AntiVirusProduct Get * /Format:List", - "C:\\Windows\\System32\\wbem\\wmic.exe", - "/c net config workstation", - "C:\\Windows\\System32\\cmd.exe", - "/c wmic.exe /node:localhost /namespace:\\\\root\\SecurityCenter2 path AntiVirusProduct Get DisplayName | findstr /V /B /C:displayName || echo No Antivirus installed", - "C:\\Windows\\System32\\cmd.exe", - "/c whoami /groups", - "C:\\Windows\\System32\\cmd.exe", - ".dll", - ".exe", - '"%s"', - "rundll32.exe", - '"%s", %s %s', - "runnung", - ":wtfbbq", - "%s%s", - "%s\\%d.dll", - "%d.dat", - "%s\\%s", - 'init -zzzz="%s\\%s"', - "front", - "/files/", - "Novik", - ".exe", - 
"Content-Type: application/x-www-form-urlencoded", - "POST", - "GET", - "curl/7.88.1", - "Mozilla/4.0 (compatible; MSIE 7.0; Windows NT 5.1; Tob 1.1)", - "Mozilla/4.0 (compatible; MSIE 7.0; Windows NT 5.1; Tob 1.1)", - "CLEARURL", - "URLS", - "COMMAND", - "ERROR", - "12345", - "counter=%d&type=%d&guid=%s&os=%d&arch=%d&username=%s&group=%lu&ver=%d.%d&up=%d&direction=%s", - "counter=%d&type=%d&guid=%s&os=%d&arch=%d&username=%s&group=%lu&ver=%d.%d&up=%d&direction=%s", - "counter=%d&type=%d&guid=%s&os=%d&arch=%d&username=%s&group=%lu&ver=%d.%d&up=%d&direction=%s", - "Mozilla/4.0 (compatible; MSIE 7.0; Windows NT 5.1; Tob 1.1)", - "%s%d.dll", - "%s%d.exe", - "LogonTrigger", - "%x%x", - "TimeTrigger", - "PT1H%02dM", - "&mac=", - "%04d-%02d-%02dT%02d:%02d:%02d", - "%02x", - ":%02x", - "PT0S", - "&computername=%s", - "&domain=%s", - "\\*.dll", - "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/", - "%04X%04X%04X%04X%08X%04X", - "%04X%04X%04X%04X%08X%04X", - "\\Registry\\Machine\\", - "AppData", - "Desktop", - "Startup", - "Personal", - "Local AppData", - "Software\\Microsoft\\Windows\\CurrentVersion\\Explorer\\Shell Folders", - "C:\\WINDOWS\\SYSTEM32\\rundll32.exe %s,%s", - "C:\\WINDOWS\\SYSTEM32\\rundll32.exe %s", - "URLS", - "URLS|%d|%s\r\n", - ], - }, - "http": [ - {"uri": "https://arsimonopa.com/live/", "usage": "c2"}, - {"uri": "https://lemonimonakio.com/live/", "usage": "c2"}, - ], - "encryption": [{"algorithm": "RC4", "key": "12345"}], - } diff --git a/tests_parsers/test_lumma.py b/tests_parsers/test_lumma.py deleted file mode 100644 index e341169bcdd..00000000000 --- a/tests_parsers/test_lumma.py +++ /dev/null @@ -1,59 +0,0 @@ -# Copyright (C) 2010-2015 Cuckoo Foundation. -# This file is part of Cuckoo Sandbox - http://www.cuckoosandbox.org -# See the file 'docs/LICENSE' for copying permission. 
- -from contextlib import suppress - -from modules.processing.parsers.CAPE.Lumma import extract_config - -HAVE_MACO = False -with suppress(ImportError): - from modules.processing.parsers.MACO.Lumma import convert_to_MACO - - HAVE_MACO = True - - -def test_lumma(): - with open("tests/data/malware/5d58bc449693815f6fb0755a364c4cd3a8e2a81188e431d4801f2fb0b1c2de8f", "rb") as data: - conf = extract_config(data.read()) - assert conf == { - "C2": [ - "delaylacedmn.site", - "writekdmsnu.site", - "agentyanlark.site", - "bellykmrebk.site", - "underlinemdsj.site", - "commandejorsk.site", - "possiwreeste.site", - "famikyjdiag.site", - "agentyanlark.site", - ] - } - if HAVE_MACO: - assert convert_to_MACO(conf).model_dump(exclude_defaults=True, exclude_none=True) == { - "family": "Lumma", - "other": { - "C2": [ - "delaylacedmn.site", - "writekdmsnu.site", - "agentyanlark.site", - "bellykmrebk.site", - "underlinemdsj.site", - "commandejorsk.site", - "possiwreeste.site", - "famikyjdiag.site", - "agentyanlark.site", - ] - }, - "http": [ - {"hostname": "delaylacedmn.site", "usage": "c2"}, - {"hostname": "writekdmsnu.site", "usage": "c2"}, - {"hostname": "agentyanlark.site", "usage": "c2"}, - {"hostname": "bellykmrebk.site", "usage": "c2"}, - {"hostname": "underlinemdsj.site", "usage": "c2"}, - {"hostname": "commandejorsk.site", "usage": "c2"}, - {"hostname": "possiwreeste.site", "usage": "c2"}, - {"hostname": "famikyjdiag.site", "usage": "c2"}, - {"hostname": "agentyanlark.site", "usage": "c2"}, - ], - } diff --git a/tests_parsers/test_nanocore.py b/tests_parsers/test_nanocore.py deleted file mode 100644 index af28b87d8ae..00000000000 --- a/tests_parsers/test_nanocore.py +++ /dev/null @@ -1,108 +0,0 @@ -# Copyright (C) 2010-2015 Cuckoo Foundation. -# This file is part of Cuckoo Sandbox - http://www.cuckoosandbox.org -# See the file 'docs/LICENSE' for copying permission. 
- -from contextlib import suppress - -from modules.processing.parsers.CAPE.NanoCore import extract_config - -HAVE_MACO = False -with suppress(ImportError): - from modules.processing.parsers.MACO.NanoCore import convert_to_MACO - - HAVE_MACO = True - - -def test_nanocore(): - with open("tests/data/malware/f1bd511b69f95c26f489157272884a12225c1cf7a453207bfc46ce48a91eae96", "rb") as data: - conf = extract_config(data.read()) - assert conf == { - "BuildTime": "2023-11-22 00:25:26.569697", - "Version": "1.2.2.0", - "Mutex": "dc5ce709-95b6-4a26-9175-16a1a8446828", - "DefaultGroup": "6coinc", - "PrimaryConnectionHost": "6coinc.zapto.org", - "BackupConnectionHost": "127.0.0.1", - "ConnectionPort": "6696", - "RunOnStartup": "True", - "RequestElevation": "False", - "BypassUserAccountControl": "True", - "ClearZoneIdentifier": "True", - "ClearAccessControl": "False", - "SetCriticalProcess": "False", - "PreventSystemSleep": "True", - "ActivateAwayMode": "False", - "EnableDebugMode": "False", - "RunDelay": "0", - "ConnectDelay": "4000", - "RestartDelay": "5000", - "TimeoutInterval": "5000", - "KeepAliveTimeout": "30000", - "MutexTimeout": "5000", - "LanTimeout": "2500", - "WanTimeout": "8000", - "BufferSize": "65535", - "MaxPacketSize": "10485760", - "GCThreshold": "10485760", - "UseCustomDnsServer": "True", - "PrimaryDnsServer": "8.8.8.8", - "BackupDnsServer": "8.8.4.4", - "cncs": ["6coinc.zapto.org:6696", "127.0.0.1:6696"], - } - if HAVE_MACO: - assert convert_to_MACO(conf).model_dump(exclude_defaults=True, exclude_none=True) == { - "family": "NanoCore", - "version": "1.2.2.0", - "capability_enabled": [ - "RunOnStartup", - "BypassUserAccountControl", - "ClearZoneIdentifier", - "PreventSystemSleep", - "UseCustomDnsServer", - ], - "capability_disabled": [ - "RequestElevation", - "ClearAccessControl", - "SetCriticalProcess", - "ActivateAwayMode", - "EnableDebugMode", - ], - "mutex": ["dc5ce709-95b6-4a26-9175-16a1a8446828"], - "other": { - "BuildTime": "2023-11-22 00:25:26.569697", - "Version": "1.2.2.0", - "Mutex": "dc5ce709-95b6-4a26-9175-16a1a8446828", - "DefaultGroup": "6coinc", - "PrimaryConnectionHost": "6coinc.zapto.org", - "BackupConnectionHost": "127.0.0.1", - "ConnectionPort": "6696", - "RunOnStartup": "True", - "RequestElevation": "False", - "BypassUserAccountControl": "True", - "ClearZoneIdentifier": "True", - "ClearAccessControl": "False", - "SetCriticalProcess": "False", - "PreventSystemSleep": "True", - "ActivateAwayMode": "False", - "EnableDebugMode": "False", - "RunDelay": "0", - "ConnectDelay": "4000", - "RestartDelay": "5000", - "TimeoutInterval": "5000", - "KeepAliveTimeout": "30000", - "MutexTimeout": "5000", - "LanTimeout": "2500", - "WanTimeout": "8000", - "BufferSize": "65535", - "MaxPacketSize": "10485760", - "GCThreshold": "10485760", - "UseCustomDnsServer": "True", - "PrimaryDnsServer": "8.8.8.8", - "BackupDnsServer": "8.8.4.4", - "cncs": ["6coinc.zapto.org:6696", "127.0.0.1:6696"], - }, - "http": [ - {"hostname": "6coinc.zapto.org", "port": 6696, "usage": "c2"}, - {"hostname": "127.0.0.1", "port": 6696, "usage": "c2"}, - ], - } diff --git a/tests_parsers/test_njrat.py b/tests_parsers/test_njrat.py deleted file mode 100644 index 106d9dd1662..00000000000 --- a/tests_parsers/test_njrat.py +++ /dev/null @@ -1,63 +0,0 @@ -# Copyright (C) 2010-2015 Cuckoo Foundation. -# This file is part of Cuckoo Sandbox - http://www.cuckoosandbox.org -# See the file 'docs/LICENSE' for copying permission. 
- -from contextlib import suppress - -from modules.processing.parsers.CAPE.Njrat import extract_config - -HAVE_MACO = False -with suppress(ImportError): - from modules.processing.parsers.MACO.Njrat import convert_to_MACO - - HAVE_MACO = True - - -def test_njrat(): - with open("tests/data/malware/09bf19c00f3d8c63b8896edadd4622724a01f7d74de583733ee57a7d11eacd86", "rb") as data: - conf = extract_config(data.read()) - assert conf == { - "cncs": ["peter-bikini.gl.at.ply.gg:64215"], - "campaign id": "HacKed", - "version": "Njrat 0.7 Golden By Hassan Amiri", - } - if HAVE_MACO: - assert convert_to_MACO(conf).model_dump(exclude_defaults=True, exclude_none=True) == { - "family": "Njrat", - "version": "Njrat 0.7 Golden By Hassan Amiri", - "other": { - "cncs": ["peter-bikini.gl.at.ply.gg:64215"], - "campaign id": "HacKed", - "version": "Njrat 0.7 Golden By Hassan Amiri", - }, - "http": [{"hostname": "peter-bikini.gl.at.ply.gg", "port": 64215, "usage": "c2"}], - } - - -""" -https://github.com/kevoreilly/CAPEv2/pull/1957 - -09bf19c00f3d8c63b8896edadd4622724a01f7d74de583733ee57a7d11eacd86 -2a5eb2f4bb25b89a9c3d325d893b87ed58fe87a6ada67c24f7cdef54b2138567 -2e18a6a4b191741e57d8fb63bddb498f769344130e0f658d8ef5d74bd95c5c9b -4c8198288b00c70aeb7c9fcaae179873c618c1d5a804d36a54ac6e5c7fbacee2 -4e1a8dff073c5648dbeaf55a6b3320461bcb0252cee9f8f5624f46e6d05b6584 -55acd192c7cca3e46b8d1c0a24f98259ae093762722de3493a7da248e83ec07c -59f0979f3123e02ee0a13e3afa6b45d27b2fdbae75edc339d57d473d340851d8 -5b147e624ad96d036c27aa9f526ed2e7daa9ca7bfe6639404dc8e71e1177a145 -614b15eaa2b19e4f9ddb26639dbf5574126f552ae48afd7899a77bd6c7b8980d -646ed3f6856f58b90b4641ab24cdd1b6f9860b44243dfeaec952df7f0954b18a -710507e1f3e61b7010a445728b3c414efe068e22cac28c1dd3b8db56968262d7 -77d1fcf6f8bea79cac80e284a9a5dbcc36b8b57eb86c9b74c538107d4baa2c1a -8b1b215f6a6f9881bc2b76ab409b0dff080dca31c538147a9d273ba7d05919e9 -a4e7f6de5b6c1514b5a4e3361191624127320bcff249ad16207ce79644ffb9c1 -a6c954599bf0b6a3f4e5b1d8bed604a09d1115a6b35b7e9a6de66f11a9977b81 -aeece6134d1a1f0789c8c35d2541164ebc6f23511e2d6781497a82e1bec73abd -af2d5ae5ed7a72a3fa6a36cda93e163b84d8ad70a78afb08bcd1afa63d54f61e -bb7efdb9cb3673c1768a0681989e2662d3f9683b45aded8f5b780a3310bec1bb -c2c788ce1d3e55537c75684ceb961c01d9d9d0eb6b69c915c58433943320ffe5 -e5967d1012f24bad8914ecfbc79af2211ef491a4a16e2ac390d7d26089c5307a -e69befafb01863bce3c730481fa21ff8e57c72351eec8002154538fe01e3cc9e -e8636547c991ba1557cf0532a143ad2316427e773bcbe474a60d8ba2bcf3cea3 -f45abfb1e4d789528a7ce1469255a249a6cdf010045868992689d28c2b791719 -""" diff --git a/tests_parsers/test_oyster.py b/tests_parsers/test_oyster.py deleted file mode 100644 index 85811e2f80e..00000000000 --- a/tests_parsers/test_oyster.py +++ /dev/null @@ -1,35 +0,0 @@ -# Copyright (C) 2010-2015 Cuckoo Foundation. -# This file is part of Cuckoo Sandbox - http://www.cuckoosandbox.org -# See the file 'docs/LICENSE' for copying permission. 
- -from contextlib import suppress - -from modules.processing.parsers.CAPE.Oyster import extract_config - -HAVE_MACO = False -with suppress(ImportError): - from modules.processing.parsers.MACO.Oyster import convert_to_MACO - - HAVE_MACO = True - - -def test_oyster(): - with open("tests/data/malware/8bae0fa9f589cd434a689eebd7a1fde949cc09e6a65e1b56bb620998246a1650", "rb") as data: - conf = extract_config(data.read()) - assert conf == { - "C2": ["https://connectivity-check.linkpc.net/"], - "Dll Version": "v1.0 #ads 2", - "Strings": ["api/connect", "Content-Type: application/json", "api/session"], - } - if HAVE_MACO: - assert convert_to_MACO(conf).model_dump(exclude_defaults=True, exclude_none=True) == { - "family": "Oyster", - "version": "v1.0 #ads 2", - "decoded_strings": ["api/connect", "Content-Type: application/json", "api/session"], - "other": { - "C2": ["https://connectivity-check.linkpc.net/"], - "Dll Version": "v1.0 #ads 2", - "Strings": ["api/connect", "Content-Type: application/json", "api/session"], - }, - "http": [{"uri": "https://connectivity-check.linkpc.net/", "usage": "c2"}], - } diff --git a/tests_parsers/test_pikabot.py b/tests_parsers/test_pikabot.py deleted file mode 100644 index 52d38194e55..00000000000 --- a/tests_parsers/test_pikabot.py +++ /dev/null @@ -1,120 +0,0 @@ -# Copyright (C) 2010-2015 Cuckoo Foundation. -# This file is part of Cuckoo Sandbox - http://www.cuckoosandbox.org -# See the file 'docs/LICENSE' for copying permission. - -from contextlib import suppress - -from modules.processing.parsers.CAPE.PikaBot import extract_config - -HAVE_MACO = False -with suppress(ImportError): - from modules.processing.parsers.MACO.PikaBot import convert_to_MACO - - HAVE_MACO = True - - -def test_pikabot(): - with open("tests/data/malware/7600d0efc92ecef06320a1a6ffd85cd90d3d98470a381b03202e81d93bcdd03c", "rb") as data: - conf = extract_config(data.read()) - assert conf == { - "C2s": [ - "154.53.55.165:13783", - "158.247.240.58:5632", - "70.34.223.164:5000", - "70.34.199.64:9785", - "45.77.63.237:5632", - "198.38.94.213:2224", - "94.72.104.80:5000", - "84.46.240.42:2083", - "154.12.236.248:13786", - "94.72.104.77:13724", - "209.126.86.48:1194", - ], - "Version": "1.8.32-beta", - "User Agent": "Mozilla/4.0 (compatible; MSIE 7.0; Windows NT 6.1; Trident/7.0; SLCC2; .NET CLR 2.0.50727; .NET CLR 3.5.30729; .NET CLR 3.0.30729; Media Center PC 6.0; .NET4.0C; .NET4.0E; A7F; BRI/2; Tablet PC 2.0; wbx 1.0.0; Microsoft Outlook 14.0.7233; ms-office;", - "Campaign Name": "GG24_T@T@f0adda360d2b4ccda11468e026526576", - "Registry Key": "MWnkl", - } - if HAVE_MACO: - assert convert_to_MACO(conf).model_dump(exclude_defaults=True, exclude_none=True) == { - "family": "PikaBot", - "version": "1.8.32-beta", - "campaign_id": ["GG24_T@T@f0adda360d2b4ccda11468e026526576"], - "other": { - "C2s": [ - "154.53.55.165:13783", - "158.247.240.58:5632", - "70.34.223.164:5000", - "70.34.199.64:9785", - "45.77.63.237:5632", - "198.38.94.213:2224", - "94.72.104.80:5000", - "84.46.240.42:2083", - "154.12.236.248:13786", - "94.72.104.77:13724", - "209.126.86.48:1194", - ], - "Version": "1.8.32-beta", - "User Agent": "Mozilla/4.0 (compatible; MSIE 7.0; Windows NT 6.1; Trident/7.0; SLCC2; .NET CLR 2.0.50727; .NET CLR 3.5.30729; .NET CLR 3.0.30729; Media Center PC 6.0; .NET4.0C; .NET4.0E; A7F; BRI/2; Tablet PC 2.0; wbx 1.0.0; Microsoft Outlook 14.0.7233; ms-office;", - "Campaign Name": "GG24_T@T@f0adda360d2b4ccda11468e026526576", - "Registry Key": "MWnkl", - }, - "http": [ - { - "hostname": "154.53.55.165", - 
"port": 13783, - "user_agent": "Mozilla/4.0 (compatible; MSIE 7.0; Windows NT 6.1; Trident/7.0; SLCC2; .NET CLR 2.0.50727; .NET CLR 3.5.30729; .NET CLR 3.0.30729; Media Center PC 6.0; .NET4.0C; .NET4.0E; A7F; BRI/2; Tablet PC 2.0; wbx 1.0.0; Microsoft Outlook 14.0.7233; ms-office;", - }, - { - "hostname": "158.247.240.58", - "port": 5632, - "user_agent": "Mozilla/4.0 (compatible; MSIE 7.0; Windows NT 6.1; Trident/7.0; SLCC2; .NET CLR 2.0.50727; .NET CLR 3.5.30729; .NET CLR 3.0.30729; Media Center PC 6.0; .NET4.0C; .NET4.0E; A7F; BRI/2; Tablet PC 2.0; wbx 1.0.0; Microsoft Outlook 14.0.7233; ms-office;", - }, - { - "hostname": "70.34.223.164", - "port": 5000, - "user_agent": "Mozilla/4.0 (compatible; MSIE 7.0; Windows NT 6.1; Trident/7.0; SLCC2; .NET CLR 2.0.50727; .NET CLR 3.5.30729; .NET CLR 3.0.30729; Media Center PC 6.0; .NET4.0C; .NET4.0E; A7F; BRI/2; Tablet PC 2.0; wbx 1.0.0; Microsoft Outlook 14.0.7233; ms-office;", - }, - { - "hostname": "70.34.199.64", - "port": 9785, - "user_agent": "Mozilla/4.0 (compatible; MSIE 7.0; Windows NT 6.1; Trident/7.0; SLCC2; .NET CLR 2.0.50727; .NET CLR 3.5.30729; .NET CLR 3.0.30729; Media Center PC 6.0; .NET4.0C; .NET4.0E; A7F; BRI/2; Tablet PC 2.0; wbx 1.0.0; Microsoft Outlook 14.0.7233; ms-office;", - }, - { - "hostname": "45.77.63.237", - "port": 5632, - "user_agent": "Mozilla/4.0 (compatible; MSIE 7.0; Windows NT 6.1; Trident/7.0; SLCC2; .NET CLR 2.0.50727; .NET CLR 3.5.30729; .NET CLR 3.0.30729; Media Center PC 6.0; .NET4.0C; .NET4.0E; A7F; BRI/2; Tablet PC 2.0; wbx 1.0.0; Microsoft Outlook 14.0.7233; ms-office;", - }, - { - "hostname": "198.38.94.213", - "port": 2224, - "user_agent": "Mozilla/4.0 (compatible; MSIE 7.0; Windows NT 6.1; Trident/7.0; SLCC2; .NET CLR 2.0.50727; .NET CLR 3.5.30729; .NET CLR 3.0.30729; Media Center PC 6.0; .NET4.0C; .NET4.0E; A7F; BRI/2; Tablet PC 2.0; wbx 1.0.0; Microsoft Outlook 14.0.7233; ms-office;", - }, - { - "hostname": "94.72.104.80", - "port": 5000, - "user_agent": "Mozilla/4.0 (compatible; MSIE 7.0; Windows NT 6.1; Trident/7.0; SLCC2; .NET CLR 2.0.50727; .NET CLR 3.5.30729; .NET CLR 3.0.30729; Media Center PC 6.0; .NET4.0C; .NET4.0E; A7F; BRI/2; Tablet PC 2.0; wbx 1.0.0; Microsoft Outlook 14.0.7233; ms-office;", - }, - { - "hostname": "84.46.240.42", - "port": 2083, - "user_agent": "Mozilla/4.0 (compatible; MSIE 7.0; Windows NT 6.1; Trident/7.0; SLCC2; .NET CLR 2.0.50727; .NET CLR 3.5.30729; .NET CLR 3.0.30729; Media Center PC 6.0; .NET4.0C; .NET4.0E; A7F; BRI/2; Tablet PC 2.0; wbx 1.0.0; Microsoft Outlook 14.0.7233; ms-office;", - }, - { - "hostname": "154.12.236.248", - "port": 13786, - "user_agent": "Mozilla/4.0 (compatible; MSIE 7.0; Windows NT 6.1; Trident/7.0; SLCC2; .NET CLR 2.0.50727; .NET CLR 3.5.30729; .NET CLR 3.0.30729; Media Center PC 6.0; .NET4.0C; .NET4.0E; A7F; BRI/2; Tablet PC 2.0; wbx 1.0.0; Microsoft Outlook 14.0.7233; ms-office;", - }, - { - "hostname": "94.72.104.77", - "port": 13724, - "user_agent": "Mozilla/4.0 (compatible; MSIE 7.0; Windows NT 6.1; Trident/7.0; SLCC2; .NET CLR 2.0.50727; .NET CLR 3.5.30729; .NET CLR 3.0.30729; Media Center PC 6.0; .NET4.0C; .NET4.0E; A7F; BRI/2; Tablet PC 2.0; wbx 1.0.0; Microsoft Outlook 14.0.7233; ms-office;", - }, - { - "hostname": "209.126.86.48", - "port": 1194, - "user_agent": "Mozilla/4.0 (compatible; MSIE 7.0; Windows NT 6.1; Trident/7.0; SLCC2; .NET CLR 2.0.50727; .NET CLR 3.5.30729; .NET CLR 3.0.30729; Media Center PC 6.0; .NET4.0C; .NET4.0E; A7F; BRI/2; Tablet PC 2.0; wbx 1.0.0; Microsoft Outlook 14.0.7233; ms-office;", - }, - ], - 
"registry": [{"key": "MWnkl"}], - } diff --git a/tests_parsers/test_qakbot.py b/tests_parsers/test_qakbot.py deleted file mode 100644 index fe5765baf78..00000000000 --- a/tests_parsers/test_qakbot.py +++ /dev/null @@ -1,10 +0,0 @@ -import pytest - -from modules.processing.parsers.CAPE.QakBot import extract_config - - -@pytest.mark.skip(reason="Missed file") -def test_qakbot(): - with open("tests/data/malware/0cb0d77ac38df36fff891e072dea96401a8c1e8ff40d6ac741d5a2942aaeddbb", "rb") as data: - conf = extract_config(data.read()) - assert conf == {"C2": "anscowerbrut.com", "Campaign": 2738000827} diff --git a/tests_parsers/test_quickbind.py b/tests_parsers/test_quickbind.py deleted file mode 100644 index 094790ff831..00000000000 --- a/tests_parsers/test_quickbind.py +++ /dev/null @@ -1,35 +0,0 @@ -# Copyright (C) 2010-2015 Cuckoo Foundation. -# This file is part of Cuckoo Sandbox - http://www.cuckoosandbox.org -# See the file 'docs/LICENSE' for copying permission. - -from contextlib import suppress - -from modules.processing.parsers.CAPE.Quickbind import extract_config - -HAVE_MACO = False -with suppress(ImportError): - from modules.processing.parsers.MACO.Quickbind import convert_to_MACO - - HAVE_MACO = True - - -def test_quickbind(): - with open("tests/data/malware/bfcb215f86fc4f8b4829f6ddd5acb118e80fb5bd977453fc7e8ef10a52fc83b7", "rb") as data: - conf = extract_config(data.read()) - assert conf == { - "Encryption Key": "24de21a8dc08434c", - "Mutex": ["15432a4d-34ca-4d0d-a4ac-04df9a373862"], - "C2": ["185.49.69.41"], - } - if HAVE_MACO: - assert convert_to_MACO(conf).model_dump(exclude_defaults=True, exclude_none=True) == { - "family": "Quickbind", - "mutex": ["15432a4d-34ca-4d0d-a4ac-04df9a373862"], - "other": { - "Encryption Key": "24de21a8dc08434c", - "Mutex": ["15432a4d-34ca-4d0d-a4ac-04df9a373862"], - "C2": ["185.49.69.41"], - }, - "http": [{"hostname": "185.49.69.41", "usage": "c2"}], - "encryption": [{"key": "24de21a8dc08434c"}], - } diff --git a/tests_parsers/test_redline.py b/tests_parsers/test_redline.py deleted file mode 100644 index 8c455d06bac..00000000000 --- a/tests_parsers/test_redline.py +++ /dev/null @@ -1,35 +0,0 @@ -# Copyright (C) 2010-2015 Cuckoo Foundation. -# This file is part of Cuckoo Sandbox - http://www.cuckoosandbox.org -# See the file "docs/LICENSE" for copying permission. 
- -from contextlib import suppress - -from modules.processing.parsers.CAPE.RedLine import extract_config - -HAVE_MACO = False -with suppress(ImportError): - from modules.processing.parsers.MACO.RedLine import convert_to_MACO - - HAVE_MACO = True - - -def test_redline(): - with open("tests/data/malware/000608d875638ba7d6c467ece976c1496e6a6ec8ce3e7f79e0fd195ae3045078", "rb") as data: - conf = extract_config(data.read()) - assert conf == { - "Authorization": "9059ea331e4599de3746df73ccb24514", - "C2": "77.91.68.68:19071", - "Botnet": "krast", - "Key": "Formative", - } - if HAVE_MACO: - assert convert_to_MACO(conf).model_dump(exclude_defaults=True, exclude_none=True) == { - "family": "RedLine", - "other": { - "Authorization": "9059ea331e4599de3746df73ccb24514", - "C2": "77.91.68.68:19071", - "Botnet": "krast", - "Key": "Formative", - }, - "http": [{"hostname": "77.91.68.68", "port": 19071, "usage": "c2"}], - } diff --git a/tests_parsers/test_smokeloader.py b/tests_parsers/test_smokeloader.py deleted file mode 100644 index 216829dcd94..00000000000 --- a/tests_parsers/test_smokeloader.py +++ /dev/null @@ -1,24 +0,0 @@ -from contextlib import suppress - -from modules.processing.parsers.CAPE.SmokeLoader import extract_config - -HAVE_MACO = False -with suppress(ImportError): - from modules.processing.parsers.MACO.SmokeLoader import convert_to_MACO - - HAVE_MACO = True - - -def test_smokeloader(): - with open("tests/data/malware/6929fff132c05ae7d348867f4ea77ba18f84fb8fae17d45dde3571c9e33f01f8", "rb") as data: - conf = extract_config(data.read()) - assert conf == {"C2s": ["http://host-file-host6.com/", "http://host-host-file8.com/"]} - if HAVE_MACO: - assert convert_to_MACO(conf).model_dump(exclude_defaults=True, exclude_none=True) == { - "family": "SmokeLoader", - "other": {"C2s": ["http://host-file-host6.com/", "http://host-host-file8.com/"]}, - "http": [ - {"uri": "http://host-file-host6.com/", "usage": "c2"}, - {"uri": "http://host-host-file8.com/", "usage": "c2"}, - ], - } diff --git a/tests_parsers/test_snake.py b/tests_parsers/test_snake.py deleted file mode 100644 index 489418eca86..00000000000 --- a/tests_parsers/test_snake.py +++ /dev/null @@ -1,14 +0,0 @@ -# Copyright (C) 2010-2015 Cuckoo Foundation. -# This file is part of Cuckoo Sandbox - http://www.cuckoosandbox.org -# See the file 'docs/LICENSE' for copying permission. 
- -from modules.processing.parsers.CAPE.Snake import extract_config - - -def test_snake(): - with open("tests/data/malware/7b81c12fb7db9f0c317f36022ecac9faa45f5efefe24085c339c43db8b963ae2", "rb") as data: - conf = extract_config(data.read()) - assert conf == { - "Type": "Telegram", - "C2": "https://api.telegram.org/bot7952998151:AAFh98iY7kaOlHAR0qftD3ZcqGbQm0TXbBY/sendMessage?chat_id=5692813672", - } diff --git a/tests_parsers/test_sparkrat.py b/tests_parsers/test_sparkrat.py deleted file mode 100644 index 9e681b8efab..00000000000 --- a/tests_parsers/test_sparkrat.py +++ /dev/null @@ -1,36 +0,0 @@ -from contextlib import suppress - -from modules.processing.parsers.CAPE.SparkRAT import extract_config - -HAVE_MACO = False -with suppress(ImportError): - from modules.processing.parsers.MACO.SparkRAT import convert_to_MACO - - HAVE_MACO = True - - -def test_sparkrat(): - with open("tests/data/malware/ec349cfacc7658eed3640f1c475eb958c5f05bae7c2ed74d4cdb7493176daeba", "rb") as data: - conf = extract_config(data.read()) - assert conf == { - "secure": False, - "host": "67.217.62.106", - "port": 4443, - "path": "/", - "uuid": "8dc7e7d8f8576f3e55a00850b72887db", - "key": "a1348fb8969ad7a9f85ac173c2027622135e52e0e6d94d10e6a81916a29648ac", - } - if HAVE_MACO: - assert convert_to_MACO(conf).model_dump(exclude_defaults=True, exclude_none=True) == { - "family": "SparkRAT", - "identifier": ["8dc7e7d8f8576f3e55a00850b72887db"], - "other": { - "secure": False, - "host": "67.217.62.106", - "port": 4443, - "path": "/", - "uuid": "8dc7e7d8f8576f3e55a00850b72887db", - "key": "a1348fb8969ad7a9f85ac173c2027622135e52e0e6d94d10e6a81916a29648ac", - }, - "http": [{"uri": "http://67.217.62.106:4443/", "hostname": "67.217.62.106", "port": 4443, "path": "/"}], - } diff --git a/tests_parsers/test_stealc.py b/tests_parsers/test_stealc.py deleted file mode 100644 index 79b238dd3e8..00000000000 --- a/tests_parsers/test_stealc.py +++ /dev/null @@ -1,13 +0,0 @@ -# Copyright (C) 2010-2015 Cuckoo Foundation. -# This file is part of Cuckoo Sandbox - http://www.cuckoosandbox.org -# See the file 'docs/LICENSE' for copying permission. - -from modules.processing.parsers.CAPE.Stealc import extract_config - - -def test_stealc(): - with open("tests/data/malware/619751f5ed0a9716318092998f2e4561f27f7f429fe6103406ecf16e33837470", "rb") as data: - conf = extract_config(data.read()) - assert conf == { - "C2": ["http://95.217.125.57"], - } diff --git a/tests_parsers/test_zloader.py b/tests_parsers/test_zloader.py deleted file mode 100644 index dda237b9ef0..00000000000 --- a/tests_parsers/test_zloader.py +++ /dev/null @@ -1,36 +0,0 @@ -# Copyright (C) 2010-2015 Cuckoo Foundation. -# This file is part of Cuckoo Sandbox - http://www.cuckoosandbox.org -# See the file 'docs/LICENSE' for copying permission. 
- -from contextlib import suppress - -from modules.processing.parsers.CAPE.Zloader import extract_config - -HAVE_MACO = False -with suppress(ImportError): - from modules.processing.parsers.MACO.AgentTesla import convert_to_MACO - - HAVE_MACO = True - - -def test_zloader(): - with open("tests/data/malware/adbd0c7096a7373be82dd03df1aae61cb39e0a155c00bbb9c67abc01d48718aa", "rb") as data: - conf = extract_config(data.read()) - assert conf == { - "Botnet name": "Bing_Mod5", - "Campaign ID": "M1", - "address": ["https://dem.businessdeep.com"], - "Public key": "-----BEGIN PUBLIC KEY-----MIGfMA0GCSqGSIb3DQEBAQUAA4GNADCBiQKBgQDKGAOWVkikqE7TyKIMtWI8dFsaleTaJNXMJNIPnRE/fGCzqrV+rtY3+ex4MCHEtq2Vwppthf0Rglv8OiWgKlerIN5P6NEyCfIsFYUMDfldQTF03VES8GBIvHq5SjlIz7lawuwfdjdEkaHfOmmu9srraftkI9gZO8WRQgY1uNdsXwIDAQAB-----END PUBLIC KEY-----", - } - if HAVE_MACO: - assert convert_to_MACO(conf).model_dump(exclude_defaults=True, exclude_none=True) == { - "family": "Zloader", - "campaign_id": ["M1"], - "other": { - "Botnet name": "Bing_Mod5", - "Campaign ID": "M1", - "address": ["https://dem.businessdeep.com"], - "Public key": "-----BEGIN PUBLIC KEY-----MIGfMA0GCSqGSIb3DQEBAQUAA4GNADCBiQKBgQDKGAOWVkikqE7TyKIMtWI8dFsaleTaJNXMJNIPnRE/fGCzqrV+rtY3+ex4MCHEtq2Vwppthf0Rglv8OiWgKlerIN5P6NEyCfIsFYUMDfldQTF03VES8GBIvHq5SjlIz7lawuwfdjdEkaHfOmmu9srraftkI9gZO8WRQgY1uNdsXwIDAQAB-----END PUBLIC KEY-----", - }, - "http": [{"uri": "https://dem.businessdeep.com"}], - } diff --git a/utils/cleaners.py b/utils/cleaners.py index a456edce1ae..4c927169e5c 100644 --- a/utils/cleaners.py +++ b/utils/cleaners.py @@ -13,6 +13,9 @@ if __name__ == "__main__": parser = argparse.ArgumentParser() + parser.add_argument( + "-tr", "--time-range", help="Time range can be specified as: 1d, 22h, 55m, etc", action="store", required=False + ) parser.add_argument( "--clean", help="Remove all tasks and samples and their associated data", action="store_true", required=False ) @@ -23,8 +26,8 @@ action="store_true", required=False, ) - parser.add_argument("--delete-older-than-days", help="Remove all tasks older than X number of days", type=int, required=False) - parser.add_argument("--pcap-sorted-clean", help="remove sorted pcap from jobs", action="store_true", required=False) + parser.add_argument("--delete-older-than", help="Remove all tasks older than time range.", required=False) + parser.add_argument("--pcap-sorted-clean", help="Remove sorted pcap from jobs", action="store_true", required=False) parser.add_argument( "--suricata-zero-alert-filter", help="only remove events with zero suri alerts DELETE AFTER ONLY", @@ -47,15 +50,13 @@ parser.add_argument("--malscore", help="Remove all tasks with malscore <= X", required=False, action="store", type=int) parser.add_argument("--tlp", help="Remove all tasks with TLP", required=False, default=False, action="store_true") parser.add_argument( - "--delete-tmp-items-older-than-days", - help="Remove all items in tmp folder older than X days", - type=int, + "--delete-tmp-items-older-than", + help="Remove all items in tmp folder older than time range", required=False, ) parser.add_argument( - "--delete-binaries-items-older-than-days", - help="Remove all items in binaries folder older than X days", - type=int, + "--delete-binaries-items-older-than", + help="Remove all items in binaries folder older than time range", required=False, ) parser.add_argument( @@ -69,18 +70,9 @@ ) parser.add_argument( "-drs", - "--delete-range-start", - help="First job in range to delete, should be used with --delete-range-end", - 
action="store", - type=int, - required=False, - ) - parser.add_argument( - "-dre", - "--delete-range-end", - help="Last job in range to delete, should be used with --delete-range-start", + "--delete-range", + help="Delete jobs in range. Ex 1-5", action="store", - type=int, required=False, ) parser.add_argument( @@ -91,8 +83,22 @@ required=False, ) parser.add_argument( - "-bt", "--before-time", help="Manage all pending jobs before N hours.", action="store", required=False, type=int + "-cmc", + "--cleanup-mongo-calls", + help="Manage all pending jobs before time range", + action="store", + required=False, ) + + parser.add_argument( + "-cfcbi", + "--cleanup-files-collection-by-id", + help="Pull out task(s) id lower than X from files collection", + action="store", + required=False, + type=int, + ) + args = parser.parse_args() init_database() execute_cleanup(vars(args)) diff --git a/utils/community.py b/utils/community.py index 80bf41d50cc..e6902f95a85 100644 --- a/utils/community.py +++ b/utils/community.py @@ -57,9 +57,10 @@ def flare_capa(proxy=None): path_mkdir(capa_sigs_path) for url in signature_urls: signature_name = url.rsplit("/", 1)[-1] - with http.request("GET", url, preload_content=False) as sig, open( - os.path.join(capa_sigs_path, signature_name), "wb" - ) as out_sig: + with ( + http.request("GET", url, preload_content=False) as sig, + open(os.path.join(capa_sigs_path, signature_name), "wb") as out_sig, + ): shutil.copyfileobj(sig, out_sig) print("[+] FLARE CAPA rules/signatures installed") diff --git a/utils/db_migration/versions/2_3_1_square_hammer.py b/utils/db_migration/versions/2_3_1_square_hammer.py index 10c6d2d2efa..f0f3cb81f70 100644 --- a/utils/db_migration/versions/2_3_1_square_hammer.py +++ b/utils/db_migration/versions/2_3_1_square_hammer.py @@ -9,6 +9,7 @@ Create Date: 2021-05-02 18:24:43.075702 """ + from contextlib import suppress # revision identifiers, used by Alembic. diff --git a/utils/db_migration/versions/add_on_delete_cascade_to_task_tags_.py b/utils/db_migration/versions/add_on_delete_cascade_to_task_tags_.py new file mode 100644 index 00000000000..e1c43d1bb9d --- /dev/null +++ b/utils/db_migration/versions/add_on_delete_cascade_to_task_tags_.py @@ -0,0 +1,33 @@ +# Copyright (C) 2010-2015 Cuckoo Foundation. +# This file is part of Cuckoo Sandbox - http://www.cuckoosandbox.org +# See the file 'docs/LICENSE' for copying permission. + +"""Add ON DELETE CASCADE to tasks_tags foreign keys + +Revision ID: 4e000e02a409 +Revises: c2bd0eb5e69d +Create Date: 2025-04-11 09:58:42.957359 + +""" + +# revision identifiers, used by Alembic. 
+revision = '4e000e02a409' +down_revision = 'c2bd0eb5e69d' + +from alembic import op + + +def upgrade(): + op.drop_constraint('tasks_tags_task_id_fkey', 'tasks_tags', type_='foreignkey') + op.create_foreign_key('tasks_tags_task_id_fkey', 'tasks_tags', 'tasks', ['task_id'], ['id'], ondelete='CASCADE') + + op.drop_constraint('tasks_tags_tag_id_fkey', 'tasks_tags', type_='foreignkey') + op.create_foreign_key('tasks_tags_tag_id_fkey', 'tasks_tags', 'tags', ['tag_id'], ['id'], ondelete='CASCADE') + + +def downgrade(): + op.drop_constraint('tasks_tags_task_id_fkey', 'tasks_tags', type_='foreignkey') + op.create_foreign_key('tasks_tags_task_id_fkey', 'tasks_tags', 'tasks', ['task_id'], ['id']) + + op.drop_constraint('tasks_tags_tag_id_fkey', 'tasks_tags', type_='foreignkey') + op.create_foreign_key('tasks_tags_tag_id_fkey', 'tasks_tags', 'tags', ['tag_id'], ['id']) diff --git a/utils/db_migration/versions/add_shrike_and_parent_id_columns.py b/utils/db_migration/versions/add_shrike_and_parent_id_columns.py index e33fbb91bbf..63acf17615c 100644 --- a/utils/db_migration/versions/add_shrike_and_parent_id_columns.py +++ b/utils/db_migration/versions/add_shrike_and_parent_id_columns.py @@ -9,6 +9,7 @@ Create Date: 2015-03-29 08:43:11.468664 """ + # revision identifiers, used by Alembic. revision = "f111620bb8" down_revision = "4b09c454108c" diff --git a/utils/db_migration/versions/add_task_tlp.py b/utils/db_migration/versions/add_task_tlp.py index 85ab54d9bd8..76108b3e1cd 100644 --- a/utils/db_migration/versions/add_task_tlp.py +++ b/utils/db_migration/versions/add_task_tlp.py @@ -9,6 +9,7 @@ Create Date: 2020-04-10 12:17:18.530901 """ + # revision identifiers, used by Alembic. revision = "7331c4d994fd" down_revision = "30d0230de7cd" diff --git a/utils/db_migration/versions/from_0_6_to_1_1.py b/utils/db_migration/versions/from_0_6_to_1_1.py index 8efe01457dc..b7a401e3dfe 100644 --- a/utils/db_migration/versions/from_0_6_to_1_1.py +++ b/utils/db_migration/versions/from_0_6_to_1_1.py @@ -34,7 +34,7 @@ print("Unable to import alembic (install with `poetry run pip install alembic`)") sys.exit() -sys.path.append(os.path.join("..", "..")) +sys.path.append(os.path.join("..", "..", "..")) import lib.cuckoo.core.database as db from lib.cuckoo.common.config import Config diff --git a/utils/db_migration/versions/from_1_1_to_1_2-added_states.py b/utils/db_migration/versions/from_1_1_to_1_2-added_states.py index 0b9241494c2..a68e1d6e875 100644 --- a/utils/db_migration/versions/from_1_1_to_1_2-added_states.py +++ b/utils/db_migration/versions/from_1_1_to_1_2-added_states.py @@ -34,7 +34,7 @@ sys.exit() curdir = os.path.abspath(os.path.dirname(__file__)) -sys.path.append(os.path.join(curdir, "..", "..")) +sys.path.append(os.path.join(curdir, "..", "..", "..")) import lib.cuckoo.core.database as db diff --git a/utils/db_migration/versions/from_1_1_to_1_2-extend_file_type.py b/utils/db_migration/versions/from_1_1_to_1_2-extend_file_type.py index 7f5577cdce7..534ea6bbc36 100644 --- a/utils/db_migration/versions/from_1_1_to_1_2-extend_file_type.py +++ b/utils/db_migration/versions/from_1_1_to_1_2-extend_file_type.py @@ -22,7 +22,7 @@ from alembic import op curdir = os.path.abspath(os.path.dirname(__file__)) -sys.path.append(os.path.join(curdir, "..", "..")) +sys.path.append(os.path.join(curdir, "..", "..", "..")) import lib.cuckoo.core.database as db diff --git a/utils/db_migration/versions/from_1_2_to_1_2-accuvant-add_statistics.py b/utils/db_migration/versions/from_1_2_to_1_2-accuvant-add_statistics.py index 
d45219daf23..e045bae53cc 100644 --- a/utils/db_migration/versions/from_1_2_to_1_2-accuvant-add_statistics.py +++ b/utils/db_migration/versions/from_1_2_to_1_2-accuvant-add_statistics.py @@ -34,7 +34,7 @@ sys.exit() curdir = os.path.abspath(os.path.dirname(__file__)) -sys.path.append(os.path.join(curdir, "..", "..")) +sys.path.append(os.path.join(curdir, "..", "..", "..")) import lib.cuckoo.core.database as db diff --git a/utils/dist.py b/utils/dist.py index 365115f5387..504cfe580b9 100644 --- a/utils/dist.py +++ b/utils/dist.py @@ -51,9 +51,10 @@ TASK_REPORTED, TASK_RUNNING, Database, + _Database, + init_database, ) from lib.cuckoo.core.database import Task as MD_Task -from lib.cuckoo.core.database import _Database, init_database dist_conf = Config("distributed") main_server_name = dist_conf.distributed.get("main_server_name", "master") @@ -141,6 +142,21 @@ def required(package): def node_status(url: str, name: str, apikey: str) -> dict: + """ + Retrieve the status of a CAPE node. + + This function sends a GET request to the specified CAPE node URL to retrieve its status. + It uses the provided API key for authorization. + + Args: + url (str): The base URL of the CAPE node. + name (str): The name of the CAPE node. + apikey (str): The API key for authorization. + + Returns: + dict: A dictionary containing the status data of the CAPE node. If an error occurs, + an empty dictionary is returned. + """ try: r = requests.get( os.path.join(url, "cuckoo", "status/"), headers={"Authorization": f"Token {apikey}"}, verify=False, timeout=300 @@ -152,6 +168,19 @@ def node_status(url: str, name: str, apikey: str) -> dict: def node_fetch_tasks(status, url, apikey, action="fetch", since=0): + """ + Fetches tasks from a remote server based on the given status and other parameters. + + Args: + status (str): The status of the tasks to fetch (e.g., "completed", "pending"). + url (str): The base URL of the remote server. + apikey (str): The API key for authentication. + action (str, optional): The action to perform. Defaults to "fetch". + since (int, optional): The timestamp to fetch tasks completed after. Defaults to 0. + + Returns: + list: A list of tasks fetched from the remote server. Returns an empty list if an error occurs. + """ try: url = os.path.join(url, "tasks", "list/") params = dict(status=status, ids=True) @@ -159,8 +188,7 @@ def node_fetch_tasks(status, url, apikey, action="fetch", since=0): params["completed_after"] = since r = requests.get(url, params=params, headers={"Authorization": f"Token {apikey}"}, verify=False) if not r.ok: - log.error(f"Error fetching task list. Status code: {r.status_code} - {r.url}") - log.info("Saving error to /tmp/dist_error.html") + log.error("Error fetching task list. Status code: %d - %s. Saving error to /tmp/dist_error.html", r.status_code, r.url) _ = path_write_file("/tmp/dist_error.html", r.content) return [] return r.json().get("data", []) @@ -171,6 +199,19 @@ def node_fetch_tasks(status, url, apikey, action="fetch", since=0): def node_list_machines(url, apikey): + """ + Retrieves a list of machines from a CAPE node and yields Machine objects. + + Args: + url (str): The base URL of the CAPE node. + apikey (str): The API key for authentication. + + Yields: + Machine: An instance of the Machine class with the machine's details. + + Raises: + HTTPException: If the request to the CAPE node fails or returns an error. 
+ """ try: r = requests.get(os.path.join(url, "machines", "list/"), headers={"Authorization": f"Token {apikey}"}, verify=False) for machine in r.json()["data"]: @@ -180,6 +221,19 @@ def node_list_machines(url, apikey): def node_list_exitnodes(url, apikey): + """ + Fetches a list of exit nodes from a given URL using the provided API key. + + Args: + url (str): The base URL of the CAPE node. + apikey (str): The API key for authorization. + + Yields: + dict: Each exit node data as a dictionary. + + Raises: + HTTPException: If the request fails or the response is invalid. + """ try: r = requests.get(os.path.join(url, "exitnodes/"), headers={"Authorization": f"Token {apikey}"}, verify=False) for exitnode in r.json()["data"]: @@ -189,6 +243,22 @@ def node_list_exitnodes(url, apikey): def node_get_report(task_id, fmt, url, apikey, stream=False): + """ + Fetches a report for a given task from a specified URL. + + Args: + task_id (int): The ID of the task for which the report is to be fetched. + fmt (str): The format of the report (e.g., 'json', 'html'). + url (str): The base URL of the server from which to fetch the report. + apikey (str): The API key for authorization. + stream (bool, optional): Whether to stream the response. Defaults to False. + + Returns: + requests.Response: The response object containing the report. + + Raises: + Exception: If there is an error fetching the report. + """ try: url = os.path.join(url, "tasks", "get", "report", "%d/" % task_id, fmt) return requests.get(url, stream=stream, headers={"Authorization": f"Token {apikey}"}, verify=False, timeout=800) @@ -197,16 +267,33 @@ def node_get_report(task_id, fmt, url, apikey, stream=False): def node_get_report_nfs(task_id, worker_name, main_task_id) -> bool: + """ + Retrieves a report from a worker node via NFS and copies it to the main task's analysis directory. + + Args: + task_id (int): The ID of the task on the worker node. + worker_name (str): The name of the worker node. + main_task_id (int): The ID of the main task on the main node. + + Returns: + bool: True if the operation was successful, False otherwise. + + Raises: + Exception: If there is an error during the copying process. + + Logs: + Error messages if the worker node is not mounted, the file does not exist, or if there is an exception during copying. + """ worker_path = os.path.join(CUCKOO_ROOT, dist_conf.NFS.mount_folder, str(worker_name)) if not path_mount_point(worker_path): - log.error(f"[-] Worker: {worker_name} is not mounted to: {worker_path}!") + log.error("[-] Worker: %s is not mounted to: %s!", worker_name, worker_path) return True worker_path = os.path.join(worker_path, "storage", "analyses", str(task_id)) if not path_exists(worker_path): - log.error(f"File on destiny doesn't exist: {worker_path}") + log.error("File on destiny doesn't exist: %s", worker_path) return True analyses_path = os.path.join(CUCKOO_ROOT, "storage", "analyses", str(main_task_id)) @@ -215,6 +302,8 @@ def node_get_report_nfs(task_id, worker_name, main_task_id) -> bool: try: shutil.copytree(worker_path, analyses_path, ignore=dist_ignore_patterns, ignore_dangling_symlinks=True, dirs_exist_ok=True) + except shutil.Error: + log.error("Files doens't exist on worker") except Exception as e: log.exception(e) return False @@ -223,12 +312,32 @@ def node_get_report_nfs(task_id, worker_name, main_task_id) -> bool: def _delete_many(node, ids, nodes, db): + """ + Deletes multiple tasks from a specified node if the node is not the main server. 
+ + Args: + node (str): The identifier of the node from which tasks are to be deleted. + ids (list): A list of task IDs to be deleted. + nodes (dict): A dictionary containing node information, where keys are node identifiers and values are node details. + db (object): The database connection object to perform rollback in case of failure. + + Returns: + None + + Raises: + Exception: If there is an error during the deletion process. + + Logs: + Debug: Logs the task IDs and node name from which tasks are being deleted. + Info: Logs the status code and content if the response status code is not 200. + Critical: Logs the error message if an exception occurs during the deletion process. + """ if nodes[node].name == main_server_name: return try: url = os.path.join(nodes[node].url, "tasks", "delete_many/") apikey = nodes[node].apikey - log.debug("Removing task id(s): {0} - from node: {1}".format(ids, nodes[node].name)) + log.debug("Removing task id(s): %s - from node: %s", ids, nodes[node].name) res = requests.post( url, headers={"Authorization": f"Token {apikey}"}, @@ -236,7 +345,7 @@ def _delete_many(node, ids, nodes, db): verify=False, ) if res and res.status_code != 200: - log.info("{} - {}".format(res.status_code, res.content)) + log.info("%d - %s", res.status_code, res.content) db.rollback() except Exception as e: @@ -245,6 +354,29 @@ def _delete_many(node, ids, nodes, db): def node_submit_task(task_id, node_id, main_task_id): + """ + Submits a task to a specified node for processing. + + Args: + task_id (int): The ID of the task to be submitted. + node_id (int): The ID of the node to which the task will be submitted. + main_task_id (int): The ID of the main task associated with this task. + + Returns: + bool: True if the task was successfully submitted, False otherwise. + + Raises: + Exception: If there is an error during the task submission process. + + The function performs the following steps: + 1. Retrieves the node and task information from the database. + 2. Checks if the node is the main server and returns if it is. + 3. Prepares the task data for submission based on the task category. + 4. Submits the task to the node using an HTTP POST request. + 5. Handles different response statuses from the node. + 6. Updates the task status in the database based on the submission result. + 7. Logs relevant information and errors during the process. 
+ """ db = session() node = db.query(Node).with_entities(Node.id, Node.name, Node.url, Node.apikey).filter_by(id=node_id).first() task = db.query(Task).filter_by(id=task_id).first() @@ -314,7 +446,7 @@ def node_submit_task(task_id, node_id, main_task_id): files = dict(file=open(task.path, "rb")) r = requests.post(url, data=data, files=files, headers={"Authorization": f"Token {apikey}"}, verify=False) else: - log.debug("Target category is: {}".format(task.category)) + log.debug("Target category is: %s", task.category) db.close() return @@ -336,13 +468,13 @@ def node_submit_task(task_id, node_id, main_task_id): check = True else: log.debug( - "Failed to submit: main_task_id: {} task {} to node: {}, code: {}, msg: {}".format( + "Failed to submit: main_task_id: %d task %d to node: %s, code: %d, msg: %s", task.main_task_id, task_id, node.name, r.status_code, r.content - ) ) - + if b"File too big, enable" in r.content: + main_db.set_status(task.main_task_id, TASK_BANNED) if task.task_id: - log.debug("Submitted task to worker: {} - {} - {}".format(node.name, task.task_id, task.main_task_id)) + log.debug("Submitted task to worker: %s - %d - %d", node.name, task.task_id, task.main_task_id) elif r.status_code == 500: log.info("Saving error to /tmp/dist_error.html") @@ -353,7 +485,7 @@ def node_submit_task(task_id, node_id, main_task_id): log.info((r.status_code, "see api auth for more details")) else: - log.info("Node: {} - Task submit to worker failed: {} - {}".format(node.id, r.status_code, r.content)) + log.info("Node: %d - Task submit to worker failed: %d - %s", node.id, r.status_code, r.text) if check: task.node_id = node.id @@ -380,6 +512,38 @@ def node_submit_task(task_id, node_id, main_task_id): # class Retriever(): class Retriever(threading.Thread): + """ + A class that retrieves and processes tasks from distributed nodes. + + Methods + ------- + run(): + Initializes and starts various threads for fetching and processing tasks. + + free_space_mon(): + Monitors free disk space and logs an error if space is insufficient. + + notification_loop(): + Sends notifications for completed tasks to configured callback URLs. + + failed_cleaner(): + Cleans up failed tasks from nodes and updates their status in the database. + + fetcher(): + Continuously fetches tasks from enabled nodes and processes them. + + delete_target_file(task_id: int, sample_sha256: str, target: str): + Deletes the original file and its binary copy if configured to do so. + + fetch_latest_reports_nfs(): + Fetches the latest reports from nodes using NFS and processes them. + + fetch_latest_reports(): + Fetches the latest reports from nodes using REST API and processes them. + + remove_from_worker(): + Removes tasks from worker nodes and updates their status in the database. + """ def run(self): self.cleaner_queue = queue.Queue() self.fetcher_queue = queue.Queue() @@ -443,12 +607,24 @@ def run(self): for thr in self.threads: try: thr.join(timeout=0.0) - log.info(f"Thread: {thr.name} - Alive: {thr.is_alive()}") + log.info("Thread: %s - Alive: %s", thr.name, str(thr.is_alive())) except Exception as e: log.exception(e) time.sleep(60) def free_space_mon(self): + """ + Monitors the free disk space in the analysis folder and logs an error + message if the available space is below the configured threshold. This + check is performed periodically every 10 minutes. The check is ignored + if the 'freespace' configuration variable is set to zero. 
+ + The analysis folder path is resolved to its full base path to handle + cases where it might be a symbolic link. + + Returns: + None + """ # If not enough free disk space is available, then we print an # error message and wait another round (this check is ignored # when the freespace configuration variable is set to zero). @@ -461,6 +637,22 @@ def free_space_mon(self): time.sleep(600) def notification_loop(self): + """ + Continuously checks for completed tasks that have not been notified and sends notifications to specified URLs. + + This method runs an infinite loop that: + 1. Queries the database for tasks that are finished, retrieved, but not yet notified. + 2. For each task, updates the main task status to `TASK_REPORTED`. + 3. Sends a POST request to each URL specified in the configuration with the task ID in the payload. + 4. Marks the task as notified if the POST request is successful. + 5. Logs the status of each notification attempt. + + The loop sleeps for 20 seconds before repeating the process. + + Raises: + requests.exceptions.ConnectionError: If there is a connection error while sending the POST request. + Exception: For any other exceptions that occur during the notification process. + """ urls = reporting_conf.callback.url.split(",") headers = {"x-api-key": reporting_conf.callback.key} @@ -471,30 +663,54 @@ def notification_loop(self): for task in tasks: with main_db.session.begin(): main_db.set_status(task.main_task_id, TASK_REPORTED) - log.debug("reporting main_task_id: {}".format(task.main_task_id)) + log.debug("reporting main_task_id: %d", task.main_task_id) for url in urls: try: res = requests.post(url, headers=headers, data=json.dumps({"task_id": int(task.main_task_id)})) if res and res.ok: task.notificated = True else: - log.info("failed to report: {} - {}".format(task.main_task_id, res.status_code)) + log.info("failed to report: %d - %d", task.main_task_id, res.status_code) except requests.exceptions.ConnectionError: log.info("Can't report to callback") except Exception as e: - log.info("failed to report: {} - {}".format(task.main_task_id, e)) + log.info("failed to report: %d - %s", task.main_task_id, str(e)) db.commit() time.sleep(20) def failed_cleaner(self): + """ + Periodically checks for failed tasks on enabled nodes and cleans them up. + + This method continuously queries the database for nodes that are enabled and + checks for tasks that have failed either during analysis or processing. If a + failed task is found, it updates the task status to indicate failure, marks + the task as finished, retrieved, and notified, and then adds the task to the + cleaner queue for further processing. + + The method runs indefinitely, sleeping for 600 seconds between each iteration. + + Attributes: + self.cleaner_queue (Queue): A queue to hold tasks that need to be cleaned. + + Notes: + - This method acquires and releases a lock (`lock_retriever`) to ensure + thread-safe operations when adding tasks to the cleaner queue. + - The method commits changes to the database after processing each node. + - The method closes the database session before exiting. + + Raises: + Any exceptions raised during database operations or task processing are + not explicitly handled within this method. 
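`notification_loop()` above posts finished task ids to the configured callback URLs; a minimal sketch with placeholder URL and key (the real values come from reporting.conf `[callback]`):

```python
import json

import requests

urls = ["http://127.0.0.1:8000/callback/"]  # placeholder; reporting.conf callback.url (comma-separated)
headers = {"x-api-key": "REPLACE_ME"}       # placeholder; reporting.conf callback.key
main_task_id = 1234                         # placeholder task id

for url in urls:
    try:
        res = requests.post(url, headers=headers, data=json.dumps({"task_id": int(main_task_id)}), timeout=10)
        if res.ok:
            print("notified", main_task_id)
        else:
            print("failed to report: %d - %d" % (main_task_id, res.status_code))
    except requests.exceptions.ConnectionError:
        print("Can't report to callback")
```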
+ """ db = session() while True: for node in db.query(Node).with_entities(Node.id, Node.name, Node.url, Node.apikey).filter_by(enabled=True).all(): - log.info("Checking for failed tasks on: {}".format(node.name)) + log.info("Checking for failed tasks on: %s", node.name) for task in node_fetch_tasks("failed_analysis|failed_processing", node.url, node.apikey, action="delete"): t = db.query(Task).filter_by(task_id=task["id"], node_id=node.id).order_by(Task.id.desc()).first() if t is not None: - log.info("Cleaning failed for id:{}, node:{}: main_task_id: {}".format(t.id, t.node_id, t.main_task_id)) + log.info("Cleaning failed for id: %d, node: %s: main_task_id: %d", t.id, t.node_id, t.main_task_id) with main_db.session.begin(): main_db.set_status(t.main_task_id, TASK_FAILED_REPORTING) t.finished = True @@ -505,7 +721,7 @@ def failed_cleaner(self): self.cleaner_queue.put((t.node_id, t.task_id)) lock_retriever.release() else: - log.debug("failed_cleaner t is None for: {} - node_id: {}".format(task["id"], node.id)) + log.debug("failed_cleaner t is None for: %s - node_id: %d", str(task["id"]), node.id) lock_retriever.acquire() if (node.id, task["id"]) not in self.cleaner_queue.queue: self.cleaner_queue.put((node.id, task["id"])) @@ -515,7 +731,25 @@ def failed_cleaner(self): db.close() def fetcher(self): - """Method that runs forever""" + """ + Method that runs indefinitely to fetch tasks from nodes and process them. + + This method continuously checks for tasks from enabled nodes and processes them. + It maintains a status count and last check time for each node. If a node's tasks + are fetched successfully, they are added to the fetcher queue. If a node is deemed + dead after a certain number of failures, it is logged. + + Attributes: + last_checks (dict): Dictionary to keep track of the last check time for each node. + status_count (dict): Dictionary to keep track of the status count for each node. + stop_dist (threading.Event): Event to signal stopping the distribution. + cleaner_queue (queue.Queue): Queue to hold tasks that need cleaning. + fetcher_queue (queue.Queue): Queue to hold tasks that need fetching. + current_queue (dict): Dictionary to keep track of the current queue for each node. + + Raises: + Exception: If an error occurs during task processing, it is logged and the status count is incremented + """ last_checks = {} # to not exit till cleaner works with session() as db: @@ -564,7 +798,7 @@ def fetcher(self): ): limit += 1 self.fetcher_queue.put(({"id": task.task_id}, node.id)) - # log.debug("{} - {}".format(task, node.id)) + # log.debug("%s - %d", task, node.id) """ completed_on = datetime.strptime(task["completed_on"], "%Y-%m-%d %H:%M:%S") if node.last_check is None or completed_on > node.last_check: @@ -576,9 +810,9 @@ def fetcher(self): """ except Exception as e: self.status_count[node.name] += 1 - log.error(e, exc_info=True) + log.exception(e) if self.status_count[node.name] == dead_count: - log.info("[-] {} dead".format(node.name)) + log.info("[-] %s dead", node.name) # node_data = db.query(Node).filter_by(name=node.name).first() # node_data.enabled = False # db.commit() @@ -586,6 +820,22 @@ def fetcher(self): # time.sleep(5) def delete_target_file(self, task_id: int, sample_sha256: str, target: str): + """ + Deletes the target file and its binary copy if certain conditions are met. + + Args: + task_id (int): The ID of the task associated with the file. + sample_sha256 (str): The SHA-256 hash of the sample file. + target (str): The path to the target file. 
+ + Behavior: + - Deletes the target file if `cfg.cuckoo.delete_original` is True and the target file exists. + - Deletes the binary copy of the file if `cfg.cuckoo.delete_bin_copy` is True and no other tasks are using the sample. + + Note: + - The function checks if the target file exists before attempting to delete it. + - The function checks if the binary copy is still in use by other tasks before deleting it. + """ # Is ok to delete original file, but we need to lookup on delete_bin_copy if no more pendings tasks if cfg.cuckoo.delete_original and target and path_exists(target): path_delete(target) @@ -600,6 +850,32 @@ def delete_target_file(self, task_id: int, sample_sha256: str, target: str): # This should be executed as external thread as it generates bottle neck def fetch_latest_reports_nfs(self): + """ + Fetches the latest reports from NFS (Network File System) for distributed tasks. + + This method continuously checks for new tasks in the fetcher queue and processes them. + It retrieves the task details from the database, fetches the corresponding report from + the specified node, and updates the task status in the main database. + + The method performs the following steps: + 1. Continuously checks for new tasks in the fetcher queue. + 2. Retrieves task details from the database. + 3. Fetches the report from the specified node. + 4. Updates the task status in the main database. + 5. Moves the report to the appropriate location. + 6. Creates a symbolic link to the analysis folder. + 7. Deletes the target file if necessary. + 8. Marks the task as retrieved and finished in the database. + + The method handles various exceptions and logs relevant information for debugging purposes. + + Note: + This method runs indefinitely until the `stop_dist` event is set. + + Raises: + Exception: If any error occurs during the processing of tasks. + + """ # db = session() with session() as db: # to not exit till cleaner works @@ -632,9 +908,8 @@ def fetch_latest_reports_nfs(self): continue log.debug( - "Fetching dist report for: id: {}, task_id: {}, main_task_id: {} from node: {}".format( + "Fetching dist report for: id: %d, task_id: %d, main_task_id: %d from node: %s", t.id, t.task_id, t.main_task_id, ID2NAME[t.node_id] if t.node_id in ID2NAME else t.node_id - ) ) with main_db.session.begin(): # set completed_on time @@ -643,18 +918,18 @@ def fetch_latest_reports_nfs(self): main_db.set_status(t.main_task_id, TASK_REPORTED) # Fetch each requested report. 
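`delete_target_file()` documented above applies two independent cleanup rules; a sketch with hypothetical paths and flags (CAPE reads `delete_original` / `delete_bin_copy` from cuckoo.conf and checks the database before removing the binary copy):

```python
import os

delete_original, delete_bin_copy = True, True            # hypothetical flags (cuckoo.conf)
target = "/tmp/submitted_sample.bin"                      # hypothetical original submission path
binary_copy = "/opt/CAPEv2/storage/binaries/deadbeef"     # hypothetical sha256-named copy
sample_still_in_use = False                               # in CAPE this is a database lookup

# rule 1: the original upload can always go once the task is done
if delete_original and target and os.path.exists(target):
    os.remove(target)

# rule 2: the binary copy goes only if no other pending task references the same sample
if delete_bin_copy and not sample_still_in_use and os.path.exists(binary_copy):
    os.remove(binary_copy)
```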
- report_path = os.path.join(CUCKOO_ROOT, "storage", "analyses", f"{t.main_task_id}") + report_path = os.path.join(CUCKOO_ROOT, "storage", "analyses", str(t.main_task_id)) # ToDo option node = db.query(Node).with_entities(Node.id, Node.name, Node.url, Node.apikey).filter_by(id=node_id).first() start_copy = timeit.default_timer() copied = node_get_report_nfs(t.task_id, node.name, t.main_task_id) timediff = timeit.default_timer() - start_copy log.info( - f"It took {timediff:.2f} seconds to copy report {t.task_id} from node: {node.name} for task: {t.main_task_id}" + "It took %s seconds to copy report %d from node: %s for task: %d", f"{timediff:.2f}", t.task_id, node.name, t.main_task_id ) if not copied: - log.error(f"Can't copy report {t.task_id} from node: {node.name} for task: {t.main_task_id}") + log.error("Can't copy report %d from node: %s for task: %d", t.task_id, node.name, t.main_task_id) continue # this doesn't exist for some reason @@ -674,9 +949,7 @@ def fetch_latest_reports_nfs(self): try: shutil.move(t.path, destination) except FileNotFoundError as e: - print(f"Failed to move: {t.path} - {e}") - pass - + log.error("Failed to move: %s - %s", t.path, str(e)) # creating link to analysis folder if path_exists(destination): try: @@ -698,6 +971,24 @@ def fetch_latest_reports_nfs(self): # This should be executed as external thread as it generates bottle neck def fetch_latest_reports(self): + """ + Continuously fetches the latest reports from distributed nodes and processes them. + + This method runs in an infinite loop until `self.stop_dist` is set. It retrieves tasks from the `fetcher_queue`, + fetches the corresponding reports from the nodes, and processes them. The reports are saved to the local storage + and the task status is updated in the database. + + The method handles various scenarios such as: + - Task not found or already processed. + - Report retrieval failures. + - Report extraction and saving. + - Handling of sample binaries associated with the tasks. + + The method also manages a cleaner queue to handle tasks that need to be cleaned up. + + Raises: + Exception: If any unexpected error occurs during the report fetching and processing. 
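The copy-timing hunk above pre-formats the float before handing it to the logger; an equivalent sketch that lets `%`-formatting do the rounding lazily (ids and names are placeholders, and the sleep stands in for the NFS copy):

```python
import logging
import time
import timeit

logging.basicConfig(level=logging.INFO)
log = logging.getLogger("dist")

start_copy = timeit.default_timer()
time.sleep(0.1)  # stands in for node_get_report_nfs(task_id, node_name, main_task_id)
timediff = timeit.default_timer() - start_copy

task_id, node_name, main_task_id = 7, "worker1", 1234  # placeholders
log.info("It took %.2f seconds to copy report %d from node: %s for task: %d",
         timediff, task_id, node_name, main_task_id)
```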
+ """ db = session() # to not exit till cleaner works while True: @@ -730,9 +1021,8 @@ def fetch_latest_reports(self): continue log.debug( - "Fetching dist report for: id: {}, task_id: {}, main_task_id:{} from node: {}".format( + "Fetching dist report for: id: %d, task_id: %d, main_task_id: %d from node: %s", t.id, t.task_id, t.main_task_id, ID2NAME[t.node_id] if t.node_id in ID2NAME else t.node_id - ) ) with main_db.session.begin(): # set completed_on time @@ -745,23 +1035,21 @@ def fetch_latest_reports(self): report = node_get_report(t.task_id, "dist/", node.url, node.apikey, stream=True) if report is None: - log.info("dist report retrieve failed NONE: task_id: {} from node: {}".format(t.task_id, node_id)) + log.info("dist report retrieve failed NONE: task_id: %d from node: %d", t.task_id, node_id) continue if report.status_code != 200: log.info( - "dist report retrieve failed - status_code {}: task_id: {} from node: {}".format( - report.status_code, t.task_id, node_id - ) + "dist report retrieve failed - status_code %d: task_id: %d from node: %s", report.status_code, t.task_id, node_id ) if report.status_code == 400 and (node_id, task.get("id")) not in self.cleaner_queue.queue: self.cleaner_queue.put((node_id, task.get("id"))) - log.info(f"Status code: {report.status_code} - MSG: {report.text}") + log.info("Status code: %d - MSG: %s", report.status_code, report.text) continue - log.info(f"Report size for task {t.task_id} is: {int(report.headers.get('Content-length', 1))/int(1<<20):,.0f} MB") + log.info("Report size for task %s is: %s MB", t.task_id, f"{int(report.headers.get('Content-length', 1))/int(1<<20):,.0f}") - report_path = os.path.join(CUCKOO_ROOT, "storage", "analyses", "{}".format(t.main_task_id)) + report_path = os.path.join(CUCKOO_ROOT, "storage", "analyses", str(t.main_task_id)) if not path_exists(report_path): path_mkdir(report_path, mode=0o755) try: @@ -774,7 +1062,7 @@ def fetch_latest_reports(self): if (node_id, task.get("id")) not in self.cleaner_queue.queue: self.cleaner_queue.put((node_id, task.get("id"))) except OSError: - log.error("Permission denied: {}".format(report_path)) + log.error("Permission denied: %s", report_path) if path_exists(t.path): sample_sha256 = None @@ -803,7 +1091,7 @@ def fetch_latest_reports(self): self.delete_target_file(t.main_task_id, sample_sha256, t.path) else: - log.debug(f"{t.path} doesn't exist") + log.debug("%s doesn't exist", t.path) t.retrieved = True t.finished = True @@ -814,7 +1102,7 @@ def fetch_latest_reports(self): except pyzipper.zipfile.BadZipFile: log.error("File is not a zip file") except Exception as e: - log.exception("Exception: %s" % e) + log.exception("Exception: %s", str(e)) if path_exists(os.path.join(report_path, "reports", "report.json")): path_delete(os.path.join(report_path, "reports", "report.json")) except Exception as e: @@ -824,6 +1112,30 @@ def fetch_latest_reports(self): db.close() def remove_from_worker(self): + """ + Removes tasks from worker nodes. + + This method continuously processes tasks from the cleaner queue and removes them from the worker nodes. + It retrieves the list of nodes from the database and processes tasks in the cleaner queue. + If a task is found in the `t_is_none` dictionary for a node, it is removed from the list. + The method then sends a request to delete the tasks from the worker node. + + The method performs the following steps: + 1. Retrieves the list of nodes from the database. + 2. Continuously processes tasks from the cleaner queue. + 3. Groups tasks by node ID. + 4. 
Removes tasks from the `t_is_none` dictionary if present. + 5. Sends a request to delete tasks from the worker node. + 6. Commits the changes to the database. + 7. Sleeps for 20 seconds before processing the next batch of tasks. + + Note: + The method runs indefinitely until manually stopped. + + ToDo: + Determine if additional actions are needed when the length of `t_is_none[node_id]` exceeds 50. + + """ nodes = {} with session() as db: for node in db.query(Node).with_entities(Node.id, Node.name, Node.url, Node.apikey).all(): @@ -855,12 +1167,43 @@ def remove_from_worker(self): class StatusThread(threading.Thread): + """ + A thread that handles the submission of tasks to nodes and manages the status of nodes. + + Methods + ------- + submit_tasks(node_id, pend_tasks_num, options_like=False, force_push_push=False, db=None) + Submits tasks to a specified node. + + load_vm_tags(db, node_id, node_name) + Loads the tags for virtual machines associated with a node. + + run() + The main loop that continuously checks the status of nodes and submits tasks. + """ def submit_tasks(self, node_id, pend_tasks_num, options_like=False, force_push_push=False, db=None): + """ + Submits tasks to a specified node. + + Args: + node_id (str): The identifier of the node to which tasks will be submitted. + pend_tasks_num (int): The number of pending tasks to be submitted. + options_like (bool, optional): Flag to filter tasks based on options. Defaults to False. + force_push_push (bool, optional): Flag to forcefully push tasks to the node. Defaults to False. + db (Session, optional): The database session to use. Defaults to None. + + Returns: + bool: True if tasks were successfully submitted, False otherwise. + + Raises: + OperationalError: If there is an operational error when querying the database. + SQLAlchemyError: If there is a SQLAlchemy error when querying the database. + """ # HACK do not create a new session if the current one (passed as parameter) is still valid. try: node = db.query(Node).with_entities(Node.id, Node.name, Node.url, Node.apikey).filter_by(name=node_id).first() except (OperationalError, SQLAlchemyError) as e: - log.warning(f"Got an operational Exception when trying to submit tasks: {e}") + log.warning("Got an operational Exception when trying to submit tasks: %s", str(e)) return False if node.name not in SERVER_TAGS: @@ -897,7 +1240,7 @@ def submit_tasks(self, node_id, pend_tasks_num, options_like=False, force_push_p # Check if file exist, if no wipe from db and continue, rare cases if t.category in ("file", "pcap", "static"): if not path_exists(t.target): - log.info(f"Task id: {t.id} - File doesn't exist: {t.target}") + log.info("Task id: %d - File doesn't exist: %s", t.id, t.target) main_db.set_status(t.id, TASK_BANNED) continue @@ -906,7 +1249,7 @@ def submit_tasks(self, node_id, pend_tasks_num, options_like=False, force_push_p file_size = path_get_size(t.target) if file_size > web_conf.general.max_sample_size: log.warning( - f"File size: {file_size} is bigger than allowed: {web_conf.general.max_sample_size}" + "File size: %d is bigger than allowed: %d", file_size, web_conf.general.max_sample_size ) main_db.set_status(t.id, TASK_BANNED) continue @@ -925,12 +1268,12 @@ def submit_tasks(self, node_id, pend_tasks_num, options_like=False, force_push_p if "timeout=" in t.options: t.timeout = options.get("timeout", 0) except Exception as e: - log.error(e, exc_info=True) + log.exception(e) # wtf are you doing in pendings? 
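`submit_tasks()` above bans oversized samples before pushing them to a worker; a sketch of that guard with an assumed limit (the real value is `web_conf.general.max_sample_size`):

```python
import os

max_sample_size = 64 * 1024 * 1024  # assumed 64 MB limit; web.conf [general] max_sample_size
target = "/tmp/sample.bin"          # hypothetical task target

if not os.path.exists(target):
    print("Task target doesn't exist: %s" % target)  # the task would be marked TASK_BANNED
else:
    file_size = os.path.getsize(target)
    if file_size > max_sample_size:
        print("File size: %d is bigger than allowed: %d" % (file_size, max_sample_size))
```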
tasks = db.query(Task).filter_by(main_task_id=t.id).all() if tasks: for task in tasks: - # log.info("Deleting incorrectly uploaded file from dist db, main_task_id: {}".format(t.id)) + # log.info("Deleting incorrectly uploaded file from dist db, main_task_id: %s", t.id) if node.name == main_server_name: main_db.set_status(t.id, TASK_RUNNING) else: @@ -960,7 +1303,7 @@ def submit_tasks(self, node_id, pend_tasks_num, options_like=False, force_push_p if t.options: t.options += "," - t.options += "main_task_id={}".format(t.id) + t.options += f"main_task_id={t.id}" args = dict( package=t.package, category=t.category, @@ -1033,7 +1376,7 @@ def submit_tasks(self, node_id, pend_tasks_num, options_like=False, force_push_p log.info("nothing to upload? How? o_O") return False # Submit appropriate tasks to node - log.debug("going to upload {} tasks to node {}".format(pend_tasks_num, node.name)) + log.debug("going to upload %d tasks to node %s", pend_tasks_num, node.name) for task in to_upload: submitted = node_submit_task(task.id, node.id, task.main_task_id) if submitted: @@ -1042,7 +1385,7 @@ def submit_tasks(self, node_id, pend_tasks_num, options_like=False, force_push_p else: main_db.set_status(task.main_task_id, TASK_DISTRIBUTED) else: - log.info("something is wrong with submission of task: {}".format(task.id)) + log.info("something is wrong with submission of task: %d", task.id) db.delete(task) db.commit() limit += 1 @@ -1053,6 +1396,17 @@ def submit_tasks(self, node_id, pend_tasks_num, options_like=False, force_push_p return True def load_vm_tags(self, db, node_id, node_name): + """ + Load virtual machine tags for a specific node and store them in the global SERVER_TAGS dictionary. + + Args: + db (Session): The database session to query the machines. + node_id (int): The ID of the node to load tags for. + node_name (str): The name of the node to load tags for. + + Returns: + None + """ global SERVER_TAGS # Get available node tags machines = db.query(Machine).filter_by(node_id=node_id).all() @@ -1124,7 +1478,7 @@ def run(self): failed_count[node.name] += 1 # This will declare worker as dead after X failed connections checks if failed_count[node.name] == dead_count: - log.info("[-] {} dead".format(node.name)) + log.info("[-] %s dead", node.name) # node.enabled = False db.commit() if node.name in STATUSES: @@ -1140,7 +1494,7 @@ def run(self): res = self.submit_tasks( node.name, MINIMUMQUEUE[node.name], - options_like="node={}".format(node.name), + options_like=f"node={node.name}", force_push_push=True, db=db, ) @@ -1188,7 +1542,7 @@ def run(self): continue db.commit() except Exception as e: - log.error("Got an exception when trying to check nodes status and submit tasks: {}.".format(e), exc_info=True) + log.error("Got an exception when trying to check nodes status and submit tasks: %s.", str(e)) # ToDo hard test this rollback, this normally only happens on db restart and similar db.rollback() @@ -1198,6 +1552,17 @@ def run(self): def output_json(data, code, headers=None): + """ + Create a JSON response with the given data, HTTP status code, and optional headers. + + Args: + data (dict): The data to be serialized to JSON. + code (int): The HTTP status code for the response. + headers (dict, optional): Additional headers to include in the response. Defaults to None. + + Returns: + Response: A Flask response object with the JSON data and specified headers. 
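`output_json()` documented below is a tiny Flask helper; a self-contained usage sketch (the app instance here exists only for the example):

```python
import json

from flask import Flask, make_response

app = Flask(__name__)  # stand-in app; dist.py has its own Flask application


def output_json(data, code, headers=None):
    resp = make_response(json.dumps(data), code)
    resp.headers.extend(headers or {})
    return resp


with app.test_request_context():
    resp = output_json({"error": False, "data": []}, 200, {"Content-Type": "application/json"})
    print(resp.status_code, resp.get_data(as_text=True))
```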
+ """ resp = make_response(json.dumps(data), code) resp.headers.extend(headers or {}) return resp @@ -1399,16 +1764,16 @@ def update_machine_table(node_name): # delete all old vms _ = db.query(Machine).filter_by(node_id=node.id).delete() - log.info("Available VM's on %s:" % node_name) + log.info("Available VM's on %s:", node_name) # replace with new vms for machine in new_machines: - log.info("-->\t%s" % machine.name) + log.info("-->\t%s", machine.name) node.machines.append(machine) db.add(machine) db.commit() - log.info("Updated the machine table for node: %s" % node_name) + log.info("Updated the machine table for node: %s", node_name) def delete_vm_on_node(node_name, vm_name): @@ -1438,6 +1803,23 @@ def node_enabled(node_name, status): def cron_cleaner(clean_x_hours=False): + """ + Method that runs forever to clean up tasks. + + Args: + clean_x_hours (bool or int, optional): If provided, only clean up tasks that were + notified and created within the last `clean_x_hours` hours. + + The method performs the following steps: + 1. Checks if the cleaner is already running by looking for a PID file at "/tmp/dist_cleaner.pid". + 2. If the cleaner is not running, it creates a PID file to indicate that it is running. + 3. Connects to the database and retrieves all nodes. + 4. Depending on the `clean_x_hours` argument, it retrieves tasks that need to be cleaned up. + 5. Marks the retrieved tasks as deleted and groups them by node. + 6. Deletes the tasks from the nodes. + 7. Commits the changes to the database and closes the connection. + 8. Deletes the PID file to indicate that the cleaner has finished running. + """ """Method that runs forever""" # Check if we are not runned diff --git a/utils/fstab.py b/utils/fstab.py index a4758b44a9f..9daa139bb61 100644 --- a/utils/fstab.py +++ b/utils/fstab.py @@ -69,7 +69,6 @@ def add_nfs_entry(hostname: str, worker_folder: str): def remove_nfs_entry(hostname: str): - worker_path = os.path.join(CUCKOO_ROOT, dist_conf.NFS.mount_folder, hostname) with lock: @@ -193,14 +192,6 @@ def handle_sigterm(sig, f): try: output = handlers[command](*args, **kwargs) except Exception as e: - log.exception("Error executing command: {}".format(command)) + log.exception("Error executing command: %s", command) error = str(e) - server.sendto( - json.dumps( - { - "output": output, - "exception": error, - } - ).encode(), - addr, - ) + server.sendto(json.dumps({"output": output, "exception": error}).encode(), addr) diff --git a/utils/process.py b/utils/process.py index fc5879485de..722273452ea 100644 --- a/utils/process.py +++ b/utils/process.py @@ -38,7 +38,15 @@ from lib.cuckoo.common.constants import CUCKOO_ROOT from lib.cuckoo.common.path_utils import path_delete, path_exists, path_mkdir from lib.cuckoo.common.utils import get_options -from lib.cuckoo.core.database import TASK_COMPLETED, TASK_FAILED_PROCESSING, TASK_REPORTED, Database, Task, init_database +from lib.cuckoo.core.database import ( + TASK_COMPLETED, + TASK_FAILED_PROCESSING, + TASK_FAILED_REPORTING, + TASK_REPORTED, + Database, + Task, + init_database, +) from lib.cuckoo.core.plugins import RunProcessing, RunReporting, RunSignatures from lib.cuckoo.core.startup import ConsoleHandler, check_linux_dist, init_modules @@ -68,6 +76,15 @@ # https://stackoverflow.com/questions/41105733/limit-ram-usage-to-python-program def memory_limit(percentage: float = 0.8): + """ + Sets a memory limit for the current process on Linux systems. 
+ + Args: + percentage (float): Percentage of the total system memory that is allowed to be used. Defaults to 0.8 (80%). + + Returns: + None + """ if platform.system() != "Linux": print("Only works on linux!") return @@ -137,9 +154,10 @@ def process( else: reprocess = report - RunReporting(task=task.to_dict(), results=results, reprocess=reprocess).run() + error_count = RunReporting(task=task.to_dict(), results=results, reprocess=reprocess).run() + status = TASK_REPORTED if error_count == 0 else TASK_FAILED_REPORTING with db.session.begin(): - db.set_status(task_id, TASK_REPORTED) + db.set_status(task_id, status) if auto: # Is ok to delete original file, but we need to lookup on delete_bin_copy if no more pendings tasks @@ -162,6 +180,12 @@ def process( log.removeHandler(per_analysis_handler) + # Remove the SQLAlchemy session to ensure the next task pulls objects from + # the database, instead of relying on a potentially outdated object cache. + # Stale data can prevent SQLAlchemy from querying the database or issuing + # statements, resulting in unexpected errors and inconsistencies. + db.session.remove() + def init_worker(): signal.signal(signal.SIGINT, signal.SIG_IGN) @@ -170,6 +194,16 @@ def init_worker(): def get_formatter_fmt(task_id=None, main_task_id=None): + """ + Generates a logging format string with optional task identifiers. + + Args: + task_id (int, optional): The ID of the task. Defaults to None. + main_task_id (int, optional): The ID of the main task. Defaults to None. + + Returns: + str: A formatted string for logging that includes the task information if provided. + """ task_info = f"[Task {task_id}" if task_id is not None else "" if main_task_id: task_info += f" ({main_task_id})" @@ -185,7 +219,35 @@ def set_formatter_fmt(task_id=None, main_task_id=None): FORMATTER._style._fmt = get_formatter_fmt(task_id, main_task_id) +class ForceClosingTimedRotatingFileHandler(logging.handlers.TimedRotatingFileHandler): + def doRollover(self): + """ + Override doRollover to force close the old handler before creating a new one. + """ + if self.stream: + logging.debug("Flushing log stream...") + self.stream.flush() + logging.debug("Closing log stream...") + self.stream.close() + logging.debug("Log stream closed.") + logging.handlers.TimedRotatingFileHandler.doRollover(self) + + def init_logging(debug=False): + """ + Initializes logging for the application. + + This function sets up logging handlers for console output, syslog, and file output. + It also configures log rotation if enabled in the configuration. + + Args: + debug (bool): If True, sets the logging level to DEBUG. Otherwise, sets it to INFO. + + Returns: + tuple: A tuple containing the console handler, file handler, and syslog handler (if configured). + + Raises: + PermissionError: If there is an issue creating or accessing the log file, typically due to incorrect user permissions. # Pyattck creates root logger which we don't want. So we must use this dirty hack to remove it # If basicConfig was already called by something and had a StreamHandler added, # replace it with a ConsoleHandler. 
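The `ForceClosingTimedRotatingFileHandler` added above plugs into `init_logging()` exactly like the stock handler; a wiring sketch with a throwaway log path:

```python
import logging
import logging.handlers


class ForceClosingTimedRotatingFileHandler(logging.handlers.TimedRotatingFileHandler):
    """Mirror of the handler above: flush and close the old stream before rolling over."""

    def doRollover(self):
        if self.stream:
            self.stream.flush()
            self.stream.close()
        super().doRollover()


log = logging.getLogger("example")
fh = ForceClosingTimedRotatingFileHandler("/tmp/process-example.log", when="midnight", backupCount=7)
fh.setFormatter(logging.Formatter("%(asctime)s [%(name)s] %(levelname)s: %(message)s"))
log.addHandler(fh)
log.warning("rotation-safe file logging is configured")
```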
@@ -193,7 +255,7 @@ def init_logging(debug=False): if isinstance(h, logging.StreamHandler) and h.stream == sys.stderr: log.removeHandler(h) h.close() - + """ """ Handlers: - ch - console handler @@ -219,7 +281,7 @@ def init_logging(debug=False): path = os.path.join(CUCKOO_ROOT, "log", "process.log") if logconf.log_rotation.enabled: days = logconf.log_rotation.backup_count or 7 - fh = logging.handlers.TimedRotatingFileHandler(path, when="midnight", backupCount=int(days)) + fh = ForceClosingTimedRotatingFileHandler(path, when="midnight", backupCount=int(days)) else: fh = logging.handlers.WatchedFileHandler(path) @@ -271,20 +333,32 @@ def init_per_analysis_logging(tid=0, debug=False): def processing_finished(future): + """ + Callback function to handle the completion of a processing task. + + This function is called when a future task is completed. It retrieves the task ID from the + pending_future_map, logs the result, and updates the task status in the database. If an + exception occurs during processing, it logs the error and sets the task status to failed. + + Args: + future (concurrent.futures.Future): The future object representing the asynchronous task. + + Raises: + TimeoutError: If the processing task times out. + pebble.ProcessExpired: If the processing task expires. + Exception: For any other exceptions that occur during processing. + """ task_id = pending_future_map.get(future) with db.session.begin(): try: _ = future.result() log.info("Reports generation completed for Task #%d", task_id) except TimeoutError as error: - log.error("[%d] Processing Timeout %s. Function: %s", task_id, error, error.args[1]) - Database().set_status(task_id, TASK_FAILED_PROCESSING) - except pebble.ProcessExpired as error: - log.error("[%d] Exception when processing task: %s", task_id, error, exc_info=True) - Database().set_status(task_id, TASK_FAILED_PROCESSING) - except Exception as error: - log.error("[%d] Exception when processing task: %s", task_id, error, exc_info=True) - Database().set_status(task_id, TASK_FAILED_PROCESSING) + log.error("[%d] Processing timeout: %s. Function: %s", task_id, error, error.args[1]) + db.set_status(task_id, TASK_FAILED_PROCESSING) + except (pebble.ProcessExpired, Exception) as error: + log.exception("[%d] Exception when processing task: %s", task_id, error) + db.set_status(task_id, TASK_FAILED_PROCESSING) pending_future_map.pop(future) pending_task_id_map.pop(task_id) @@ -295,6 +369,24 @@ def processing_finished(future): def autoprocess( parallel=1, failed_processing=False, maxtasksperchild=7, memory_debugging=False, processing_timeout=300, debug: bool = False ): + """ + Automatically processes analysis data using a process pool. + + Args: + parallel (int): Number of parallel processes to use. Default is 1. + failed_processing (bool): Whether to process failed tasks. Default is False. + maxtasksperchild (int): Maximum number of tasks per child process. Default is 7. + memory_debugging (bool): Whether to enable memory debugging. Default is False. + processing_timeout (int): Timeout for processing each task in seconds. Default is 300. + debug (bool): Whether to enable debug mode. Default is False. + + Raises: + KeyboardInterrupt: If the process is interrupted by the user. + MemoryError: If there is not enough free RAM to run processing. + OSError: If an OS-related error occurs. + Exception: If any other exception occurs during processing. 
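`processing_finished()` is wired as a done-callback on pebble futures; a minimal sketch of the pattern, assuming the `pebble` package is installed (ids are placeholders):

```python
from concurrent.futures import TimeoutError

import pebble

pending_future_map = {}


def processing_finished(future):
    task_id = pending_future_map.pop(future)
    try:
        future.result()
        print(f"Reports generation completed for Task #{task_id}")
    except TimeoutError as error:
        print(f"[{task_id}] Processing timeout: {error}")
    except Exception as error:  # pebble.ProcessExpired is also caught here
        print(f"[{task_id}] Exception when processing task: {error}")


def work(task_id):
    return task_id


if __name__ == "__main__":
    with pebble.ProcessPool(max_workers=1) as pool:
        future = pool.schedule(work, args=(1234,), timeout=300)
        pending_future_map[future] = 1234
        future.add_done_callback(processing_finished)
```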
+ + """ maxcount = cfg.cuckoo.max_analysis_count count = 0 # pool = multiprocessing.Pool(parallel, init_worker) @@ -308,7 +400,7 @@ def autoprocess( # If not enough free disk space is available, then we print an # error message and wait another round (this check is ignored # when the freespace configuration variable is set to zero). - if cfg.cuckoo.freespace: + if cfg.cuckoo.freespace_processing: # Resolve the full base path to the analysis folder, just in # case somebody decides to make a symbolic link out of it. dir_path = os.path.join(CUCKOO_ROOT, "storage", "analyses") @@ -382,6 +474,18 @@ def autoprocess( def _load_report(task_id: int): + """ + Load the analysis report for a given task ID from the configured database. + + This function attempts to load the analysis report from MongoDB if it is enabled. + If MongoDB is not enabled, it tries to load the report from Elasticsearch if it is enabled and not in search-only mode. + + Args: + task_id (int): The ID of the task for which to load the analysis report. + + Returns: + dict or bool: The analysis report as a dictionary if found, otherwise False. + """ if repconf.mongodb.enabled: analysis = mongo_find_one("analysis", {"info.id": task_id}, sort=[("_id", -1)]) for process in analysis.get("behavior", {}).get("processes", []): @@ -407,6 +511,20 @@ def _load_report(task_id: int): def parse_id(id_string: str): + """ + Parses a string representing a range or list of ranges of IDs and returns a list of tuples. + + Args: + id_string (str): A string representing IDs. It can be "auto" or a string of comma-separated + ranges (e.g., "1-3,5,7-9"). + + Returns: + list: A list of tuples where each tuple represents a range of IDs. If the input is "auto", + it returns the string "auto". + + Raises: + TypeError: If the input string is not in the correct format or if a range is invalid. + """ if id_string == "auto": return id_string id_string = id_string.replace(" ", "") diff --git a/utils/rooter.py b/utils/rooter.py index 717bdbdc762..cda44e56eb4 100644 --- a/utils/rooter.py +++ b/utils/rooter.py @@ -6,6 +6,7 @@ import argparse import errno import grp +import ipaddress import json import logging.handlers import os @@ -15,8 +16,8 @@ import subprocess import sys -if sys.version_info[:2] < (3, 8): - sys.exit("You are running an incompatible version of Python, please use >= 3.8") +if sys.version_info[:2] < (3, 10): + sys.exit("You are running an incompatible version of Python, please use >= 3.10") CUCKOO_ROOT = os.path.join(os.path.abspath(os.path.dirname(__file__)), "..") sys.path.append(CUCKOO_ROOT) @@ -31,7 +32,6 @@ log.addHandler(ch) log.setLevel(logging.INFO) - class s: iptables = None iptables_save = None @@ -47,20 +47,75 @@ def run(*args): return stdout, stderr +def get_tun_peer_address(interface_name): + """Gets the peer address of a tun interface. + + Args: + interface_name: The name of the tun interface (e.g., "tun0"). + Format similar to: + inet 172.30.1.5 peer 172.30.1.6/32 scope global + + Returns: + The peer IP address as a string, or None if an error occurs. Returns None if the interface does not exist, or does not have a peer. 
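The peer token parsed by `get_tun_peer_address()` can carry CIDR notation; `ipaddress.ip_interface` strips it cleanly:

```python
import ipaddress

# example token taken from a line like: "inet 172.30.1.5 peer 172.30.1.6/32 scope global"
peer_with_cidr = "172.30.1.6/32"
peer_ip = ipaddress.ip_interface(peer_with_cidr).ip.exploded
print(peer_ip)  # -> 172.30.1.6
```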
+ """ + try: + result = subprocess.run(["ip", "addr", "show", interface_name], capture_output=True, text=True, check=True) + output = result.stdout + + for line in output.splitlines(): + if "peer" in line: + parts = line.split() + if len(parts) > 1: # Check if there's a second element to avoid IndexError + peer_with_cidr = parts[3] + try: + # Handle CIDR notation using ipaddress library + peer_ip = ipaddress.ip_interface(peer_with_cidr).ip.exploded + return peer_ip + except ValueError: # Handle invalid CIDR notations + try: + peer_ip = peer_with_cidr.split("/")[0] # Try just splitting by / + return peer_ip + except IndexError: + return None # Invalid format - give up. + else: + return None # No peer address found on the line. + return None # "peer" not found in the output + + except subprocess.CalledProcessError as e: + if e.returncode == 1: # Interface not found + return None + else: + print(f"Error executing ip command: {e}") + return None + except FileNotFoundError: + print("ip command not found. Is iproute2 installed?") + return None + + +def enable_ip_forwarding(sysctl="/usr/sbin/sysctl"): + log.debug("Enabling IPv4 forwarding") + run(sysctl, "-w" "net.ipv4.ip_forward=1") + + def check_tuntap(vm_name, main_iface): """Create tuntap device for qemu vms""" try: - run([s.ip, "tuntap", "add", "dev", f"tap_{vm_name}", "mode", "tap", "user", username]) - run([s.ip, "link", "set", "tap_{vm_name}", "master", main_iface]) - run([s.ip, "link", "set", "dev", "tap_{vm_name}", "up"]) - run([s.ip, "link", "set", "dev", main_iface, "up"]) + run(s.ip, "tuntap", "add", "dev", f"tap_{vm_name}", "mode", "tap", "user", username) + run(s.ip, "link", "set", "tap_{vm_name}", "master", main_iface) + run(s.ip, "link", "set", "dev", "tap_{vm_name}", "up") + run(s.ip, "link", "set", "dev", main_iface, "up") return True except subprocess.CalledProcessError: return False -def run_iptables(*args): - iptables_args = [s.iptables] +def run_iptables(*args, **kwargs): + if kwargs and kwargs.get('netns'): + netns = kwargs.get('netns') + iptables_args = ["/usr/sbin/ip", "netns", "exec", netns, s.iptables] + else: + iptables_args = [s.iptables] + iptables_args.extend(list(args)) iptables_args.extend(["-m", "comment", "--comment", "CAPE-rooter"]) return run(*iptables_args) @@ -148,7 +203,7 @@ def delete_dev_from_vrf(dev): def vpn_status(name): """Gets current VPN status.""" ret = {} - for line in run(settings.systemctl, "status", "openvpn@{}.service".format(name))[0].split("\n"): + for line in run(settings.systemctl, "status", f"openvpn@{name}.service")[0].split("\n"): if "running" in line: ret[name] = "running" break @@ -188,48 +243,194 @@ def disable_nat(interface): run_iptables("-t", "nat", "-D", "POSTROUTING", "-o", interface, "-j", "MASQUERADE") -def enable_mitmdump(interface, client, port): +def enable_mitmdump(interface, client, port, netns): """Enable mitmdump on this interface.""" + + log.info("enable_mitmdump client: %s port: %s netns: %s", client, port, netns) + + if netns: + # assume all traffic in network namespace can be captured + run_iptables( + "-t", + "nat", + "-I", + "PREROUTING", + "-p", + "tcp", + "--dport", + "443", + "-j", + "REDIRECT", + "--to-port", + port, + netns=netns, + ) + run_iptables( + "-t", + "nat", + "-I", + "PREROUTING", + "-p", + "tcp", + "--dport", + "80", + "-j", + "REDIRECT", + "--to-port", + port, + netns=netns, + ) + else: + run_iptables( + "-t", + "nat", + "-I", + "PREROUTING", + "-i", + interface, + "-s", + client, + "-p", + "tcp", + "--dport", + "443", + "-j", + "REDIRECT", + 
"--to-port", + port, + ) + run_iptables( + "-t", + "nat", + "-I", + "PREROUTING", + "-i", + interface, + "-s", + client, + "-p", + "tcp", + "--dport", + "80", + "-j", + "REDIRECT", + "--to-port", + port + ) + + +def disable_mitmdump(interface, client, port, netns): + """Disable mitmdump on this interface.""" + + if netns: + run_iptables( + "-t", + "nat", + "-D", + "PREROUTING", + "-p", + "tcp", + "--dport", + "443", + "-j", + "REDIRECT", + "--to-port", + port, + netns=netns, + ) + run_iptables( + "-t", + "nat", + "-D", + "PREROUTING", + "-p", + "tcp", + "--dport", + "80", + "-j", + "REDIRECT", + "--to-port", + port, + netns=netns, + ) + else: + run_iptables( + "-t", + "nat", + "-D", + "PREROUTING", + "-i", + interface, + "-s", + client, + "-p", + "tcp", + "--dport", + "443", + "-j", + "REDIRECT", + "--to-port", + port, + ) + run_iptables( + "-t", + "nat", + "-D", + "PREROUTING", + "-i", + interface, + "-s", + client, + "-p", + "tcp", + "--dport", + "80", + "-j", + "REDIRECT", + "--to-port", + port, + ) + +def polarproxy_enable(interface, client, tls_port, proxy_port): + log.info("Enabling polarproxy route.") run_iptables( "-t", "nat", "-I", "PREROUTING", + "1", "-i", interface, - "-s", + "--source", client, "-p", "tcp", "--dport", - "443", + tls_port, "-j", "REDIRECT", - "--to-port", - port, + "--to", + proxy_port ) run_iptables( - "-t", - "nat", - "-I", - "PREROUTING", + "-A", + "INPUT", "-i", interface, - "-s", - client, "-p", "tcp", "--dport", - "80", + proxy_port, + "-m", + "state", + "--state", + "NEW", "-j", - "REDIRECT", - "--to-port", - port, + "ACCEPT" ) - -def disable_mitmdump(interface, client, port): - """Disable mitmdump on this interface.""" +def polarproxy_disable(interface, client, tls_port, proxy_port): + log.info("Disabling polarproxy route.") run_iptables( "-t", "nat", @@ -237,37 +438,34 @@ def disable_mitmdump(interface, client, port): "PREROUTING", "-i", interface, - "-s", + "--source", client, "-p", "tcp", "--dport", - "443", + tls_port, "-j", "REDIRECT", - "--to-port", - port, + "--to", + proxy_port ) run_iptables( - "-t", - "nat", "-D", - "PREROUTING", + "INPUT", "-i", interface, - "-s", - client, "-p", "tcp", "--dport", - "80", + proxy_port, + "-m", + "state", + "--state", + "NEW", "-j", - "REDIRECT", - "--to-port", - port, + "ACCEPT" ) - def init_rttable(rt_table, interface): """Initialise routing table for this interface using routes from main table.""" @@ -636,6 +834,50 @@ def inetsim_disable(ipaddr, inetsim_ip, dns_port, resultserver_port, ports): run_iptables("-D", "OUTPUT", "--source", ipaddr, "-j", "DROP") +def interface_route_tun_enable(ipaddr: str, out_interface: str, task_id: str): + """Enable routing and NAT via tun output_interface.""" + log.info("Enabling interface routing via: %s for task: %s", out_interface, task_id) + + # mark packets from analysis VM + run_iptables("-t", "mangle", "-I", "PREROUTING", "--source", ipaddr, "-j", "MARK", "--set-mark", task_id) + + run_iptables("-t", "nat", "-I", "POSTROUTING", "--source", ipaddr, "-o", out_interface, "-j", "MASQUERADE") + # ACCEPT forward + run_iptables("-t", "filter", "-I", "FORWARD", "--source", ipaddr, "-o", out_interface, "-j", "ACCEPT") + + # in routing table add route table task_id + run(s.ip, "rule", "add", "fwmark", task_id, "lookup", task_id) + + peer_ip = get_tun_peer_address(out_interface) + if peer_ip: + log.info("interface_route_enable %s has peer: %s ", out_interface, peer_ip) + run(s.ip, "route", "add", "default", "via", peer_ip, "table", task_id) + else: + 
log.error("interface_route_enable missing peer IP ") + + +def interface_route_tun_disable(ipaddr: str, out_interface: str, task_id: str): + """Disable routing and NAT via tun output_interface.""" + log.info("Disable interface routing via: %s for task: %s", out_interface, task_id) + + # mark packets from analysis VM + run_iptables("-t", "mangle", "-D", "PREROUTING", "--source", ipaddr, "-j", "MARK", "--set-mark", task_id) + + run_iptables("-t", "nat", "-D", "POSTROUTING", "--source", ipaddr, "-o", out_interface, "-j", "MASQUERADE") + # ACCEPT forward + run_iptables("-t", "filter", "-D", "FORWARD", "--source", ipaddr, "-o", out_interface, "-j", "ACCEPT") + + # in routing table add route table task_id + run(s.ip, "rule", "del", "fwmark", task_id, "lookup", task_id) + + peer_ip = get_tun_peer_address(out_interface) + if peer_ip: + log.info("interface_route_disable %s has peer %s", out_interface, peer_ip) + run(s.ip, "route", "del", "default", "via", peer_ip, "table", task_id) + else: + log.error("interface_route_disable missing peer IP ") + + def socks5_enable(ipaddr, resultserver_port, dns_port, proxy_port): """Enable hijacking of all traffic and send it to socks5.""" log.info("Enabling socks route.") @@ -745,6 +987,8 @@ def drop_disable(ipaddr, resultserver_port): "srcroute_disable": srcroute_disable, "inetsim_enable": inetsim_enable, "inetsim_disable": inetsim_disable, + "interface_route_tun_enable": interface_route_tun_enable, + "interface_route_tun_disable": interface_route_tun_disable, "socks5_enable": socks5_enable, "socks5_disable": socks5_disable, "drop_enable": drop_enable, @@ -756,6 +1000,8 @@ def drop_disable(ipaddr, resultserver_port): "delete_dev_from_vrf": delete_dev_from_vrf, "enable_mitmdump": enable_mitmdump, "disable_mitmdump": disable_mitmdump, + "polarproxy_enable": polarproxy_enable, + "polarproxy_disable": polarproxy_disable, } if __name__ == "__main__": @@ -763,6 +1009,7 @@ def drop_disable(ipaddr, resultserver_port): parser.add_argument("socket", nargs="?", default="/tmp/cuckoo-rooter", help="Unix socket path") parser.add_argument("-g", "--group", default="cape", help="Unix socket group") parser.add_argument("--systemctl", default="/bin/systemctl", help="Systemctl wrapper script for invoking OpenVPN") + parser.add_argument("--sysctl", default="/usr/sbin/sysctl", help="Path to sysctl") parser.add_argument("--iptables", default="/sbin/iptables", help="Path to iptables") parser.add_argument("--iptables-save", default="/sbin/iptables-save", help="Path to iptables-save") parser.add_argument("--iptables-restore", default="/sbin/iptables-restore", help="Path to iptables-restore") @@ -786,9 +1033,14 @@ def drop_disable(ipaddr, resultserver_port): if not settings.iptables or not path_exists(settings.iptables): sys.exit("The `iptables` binary is not available, eh?!") + if not settings.sysctl or not path_exists(settings.sysctl): + sys.exit("The `sysctrl` binary is not available, eh?!") + if os.getuid(): sys.exit("This utility is supposed to be ran as root.") + enable_ip_forwarding(settings.sysctl) + if path_exists(settings.socket): path_delete(settings.socket) @@ -882,14 +1134,6 @@ def handle_sigterm(sig, f): try: output = handlers[command](*args, **kwargs) except Exception as e: - log.exception("Error executing command: {}".format(command)) + log.exception("Error executing command: %s", command) error = str(e) - server.sendto( - json.dumps( - { - "output": output, - "exception": error, - } - ).encode(), - addr, - ) + server.sendto(json.dumps({"output": output, "exception": 
error}).encode(), addr) diff --git a/utils/route.py b/utils/route.py index 16517d47d3e..5bbf62726ff 100755 --- a/utils/route.py +++ b/utils/route.py @@ -1,21 +1,21 @@ #!/usr/bin/python """ - Aux script for VPN setup - - Get a look on utils/vpn2cape.py - Example: - /etc/iproute2/rt_tables - 5 host1 - 6 host2 - 7 host3 - - conf/routing.conf - [vpn5] - name = X.ovpn - description = X - interface = tunX - rt_table = host1 +Aux script for VPN setup + +Get a look on utils/vpn2cape.py +Example: + /etc/iproute2/rt_tables + 5 host1 + 6 host2 + 7 host3 + + conf/routing.conf + [vpn5] + name = X.ovpn + description = X + interface = tunX + rt_table = host1 """ import os diff --git a/utils/router_manager.py b/utils/router_manager.py index 75eca81722f..dfff28c5c75 100644 --- a/utils/router_manager.py +++ b/utils/router_manager.py @@ -28,6 +28,11 @@ print("Missing dependency: poetry run pip install psutil") +def check_privileges(): + if not os.environ.get("SUDO_UID") and os.geteuid() != 0: + raise PermissionError("You need to run this script with sudo or as root.") + + def _rooter_response_check(rooter_response): if rooter_response and rooter_response["exception"] is not None: raise CuckooCriticalError(f"Error execution rooter command: {rooter_response['exception']}") @@ -149,7 +154,7 @@ def route_disable(route, interface, rt_table, machine, reject_segments, reject_h ) parser.add_argument("-v", "--verbose", action="store_true", help="Enable verbose logging") args = parser.parse_args() - + check_privileges() route = args.route rt_table = None reject_segments = None diff --git a/utils/test_suricata_signature.py b/utils/test_suricata_signature.py deleted file mode 100644 index aa2a35ea455..00000000000 --- a/utils/test_suricata_signature.py +++ /dev/null @@ -1,17 +0,0 @@ -# Copyright (C) 2010-2015 Cuckoo Foundation. -# This file is part of Cuckoo Sandbox - http://www.cuckoosandbox.org -# See the file 'docs/LICENSE' for copying permission. 
- -import os -import sys - -if sys.version_info[:2] < (3, 8): - sys.exit("You are running an incompatible version of Python, please use >= 3.8") - -CUCKOO_ROOT = os.path.join(os.path.abspath(os.path.dirname(__file__)), "..") -sys.path.append(CUCKOO_ROOT) - -from lib.cuckoo.core.plugins import get_suricata_family - -if __name__ == "__main__": - print("Suricata detects as:", get_suricata_family(sys.argv[1])) diff --git a/utils/tls.py b/utils/tls.py index 23fe7feb4e8..b8919a0b7de 100644 --- a/utils/tls.py +++ b/utils/tls.py @@ -1,12 +1,8 @@ import os +import re from dataclasses import dataclass from typing import ClassVar -try: - import re2 as re -except ImportError: - import re - @dataclass() class TLS12KeyLog: diff --git a/utils/vpn2cape.py b/utils/vpn2cape.py index ee037345385..911efdd0ea1 100644 --- a/utils/vpn2cape.py +++ b/utils/vpn2cape.py @@ -30,7 +30,7 @@ def main(folder, port): # rt_table rt = "" - rt = re.findall(f"remote\s(.*)\s{port}", tmp) + rt = re.findall(fr"remote\s(.*)\s{port}", tmp) if rt: # start from id idx_start rt_table.setdefault(str(index + idx_start), rt[0]) @@ -70,7 +70,7 @@ def main(folder, port): ) vpns.append(f"vpn_{index + idx_start}") - file = file.replace(" ", "\ ") + file = file.replace(" ", r"\ ") paths.append(f"sudo openvpn --config {file} &") if write: diff --git a/uwsgi/cape.conf b/uwsgi/cape.conf index ca06515952c..ee5429f84c4 100644 --- a/uwsgi/cape.conf +++ b/uwsgi/cape.conf @@ -11,28 +11,27 @@ server { } server { - listen 80; - listen [::]:80; - server_name www.; + listen 80; + listen [::]:80; + server_name www.; return 301 https://$host$request_uri; } server { - ;if ($bad_referer){ - ; return ; - ;} - if ($block_ua) { - return 444; - } - - if ($http_user_agent = "") { - return 444; - } + #if ($bad_referer){ + # return ; + #} + #if ($block_ua) { + # return 444; + #} + #if ($http_user_agent = "") { + # return 444; + #} modsecurity on; modsecurity_rules_file /etc/nginx/modsec/main.conf; - listen 443 ssl http2 default_server; + listen 443 ssl default_server; server_name www.; ssl_certificate /etc/letsencrypt/live/-0001/fullchain.pem; ssl_certificate_key /etc/letsencrypt/live/-0001/privkey.pem; @@ -43,26 +42,22 @@ server { ssl_prefer_server_ciphers on; ssl_ciphers ECDH+AESGCM:ECDH+AES256:ECDH+AES128:DHE+AES128:!ADH:!AECDH:!MD5; - deny 248.119.234.85; - ## # Nginx Bad Bot Blocker Includes # REPO: https://github.com/mitchellkrogza/nginx-ultimate-bad-bot-blocker ## - include /etc/nginx/bots.d/ddos.conf; - include /etc/nginx/bots.d/blockbots.conf; + include /etc/nginx/bots.d/ddos.conf; + include /etc/nginx/bots.d/blockbots.conf; ssl_dhparam /etc/nginx/cert/dhparam.pem; - ssl_protocols TLSv1 TLSv1.1 TLSv1.2 TLSv1.3; + ssl_protocols TLSv1.2 TLSv1.3; add_header Strict-Transport-Security "max-age=31536000" always; - location /static/ { - alias /opt/CAPEv2/web/static/; - } + location / { proxy_pass http://127.0.0.1:8000; - client_max_body_size 32M; + client_max_body_size 32M; client_body_buffer_size 512k; proxy_send_timeout 90; proxy_read_timeout 300; @@ -72,9 +67,13 @@ server { proxy_set_header X-Real-IP $remote_addr; proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for; proxy_set_header X-Forwarded-Proto $scheme; + include uwsgi_params; # the uwsgi_params in nginx + } + location /static/ { + alias /opt/CAPEv2/web/static/; + } location '/.well-known/acme-challenge' { default_type "text/plain"; root /var/www/; } - } } diff --git a/uwsgi/cape.ini b/uwsgi/cape.ini index a1a08dc3167..0945feb4b2c 100644 --- a/uwsgi/cape.ini +++ b/uwsgi/cape.ini @@ -2,14 +2,14 
@@ lazy-apps = True vacuum = True ; if using with NGINX - ;http-socket = 127.0.0.1:8000 + http-socket = 127.0.0.1:8000 ; if standalone - http-socket = 0.0.0.0:8000 + ;http-socket = 0.0.0.0:8000 static-map = /static=/opt/CAPEv2/web/static # to get venv path run: cd /opt/CAPEv2 && poetry env list --full-path # virtualenv = # You might need to change plugin version, depending on your python version - plugins = python310 + plugins = python312 callable = application chdir = /opt/CAPEv2/web file = web/wsgi.py diff --git a/web/analysis/templatetags/analysis_tags.py b/web/analysis/templatetags/analysis_tags.py index bf75af0fe6d..791dfffcd2b 100644 --- a/web/analysis/templatetags/analysis_tags.py +++ b/web/analysis/templatetags/analysis_tags.py @@ -71,7 +71,6 @@ def get_detection_by_pid(dictionary, key): return detections = dictionary.get(str(key), "") if detections: - if len(detections) > 1: output = " -> ".join([malware_name_url_pattern.format(malware_name=name) for name in detections]) else: diff --git a/web/analysis/urls.py b/web/analysis/urls.py index 46f5ed47c96..540ab467f82 100644 --- a/web/analysis/urls.py +++ b/web/analysis/urls.py @@ -2,9 +2,10 @@ # This file is part of Cuckoo Sandbox - http://www.cuckoosandbox.org # See the file "docs/LICENSE" for copying permission. -from analysis import views from django.urls import re_path +from analysis import views + urlpatterns = [ re_path(r"^$", views.index, name="analysis"), re_path(r"^page/(?P\d+)/$", views.index, name="index"), diff --git a/web/analysis/views.py b/web/analysis/views.py index fd3c7158516..ceb262e12d9 100644 --- a/web/analysis/views.py +++ b/web/analysis/views.py @@ -71,12 +71,13 @@ processing_cfg = Config("processing") reporting_cfg = Config("reporting") +integrations_cfg = Config("integrations") web_cfg = Config("web") try: # On demand features HAVE_FLARE_CAPA = False - if processing_cfg.flare_capa.on_demand: + if integrations_cfg.flare_capa.on_demand: from lib.cuckoo.common.integrations.capa import HAVE_FLARE_CAPA, flare_capa_details except (NameError, ImportError): print("Can't import FLARE-CAPA") @@ -114,7 +115,7 @@ HAVE_BINGRAPH = False HAVE_FLOSS = False -if processing_cfg.floss.on_demand: +if integrations_cfg.floss.on_demand: from lib.cuckoo.common.integrations.floss import HAVE_FLOSS, Floss USE_SEVENZIP = False @@ -125,7 +126,7 @@ # Used for displaying enabled config options in Django UI enabledconf = {} on_demand_conf = {} -for cfile in ("reporting", "processing", "auxiliary", "web", "distributed"): +for cfile in ("integrations", "reporting", "processing", "auxiliary", "web", "distributed"): curconf = Config(cfile) confdata = curconf.get_config() for item in confdata: @@ -757,6 +758,9 @@ def load_files(request, task_id, category): tls_path = os.path.join(ANALYSIS_BASE_PATH, "analyses", str(task_id), "tlsdump", "tlsdump.log") if _path_safe(tls_path): ajax_response["tlskeys_exists"] = _path_safe(tls_path) + mitmdump_path = os.path.join(ANALYSIS_BASE_PATH, "analyses", str(task_id), "mitmdump", "dump.har") + if _path_safe(mitmdump_path): + ajax_response["mitmdump_exists"] = _path_safe(mitmdump_path) elif category == "behavior": ajax_response["detections2pid"] = data.get("detections2pid", {}) return render(request, page, ajax_response) @@ -907,7 +911,7 @@ def chunk(request, task_id, pid, pagenum): else: chunk = dict(calls=[]) - if record["info"]["machine"].get("platform", "") == "linux": + if record["info"].get("machine", {}).get("platform", "") == "linux": return render(request, "analysis/strace/_chunk.html", {"chunk": 
chunk}) else: return render(request, "analysis/behavior/_chunk.html", {"chunk": chunk}) @@ -968,12 +972,12 @@ def filtered_chunk(request, task_id, pid, category, apilist, caller, tid): apis[:] = [s.strip().lower() for s in apis if len(s.strip())] # Populate dict, fetching data from all calls and selecting only appropriate category/APIs. - for call in process["calls"]: + for call in process.get("calls", []): if enabledconf["mongodb"]: chunk = mongo_find_one("calls", {"_id": call}) if es_as_db: chunk = es.search(index=get_calls_index(), body={"query": {"match": {"_id": call}}})["hits"]["hits"][0]["_source"] - for call in chunk["calls"]: + for call in chunk.get("calls", []): # filter by call or tid if caller != "null" or tid != "0": if caller in ("null", call["caller"]) and tid in ("0", call["thread_id"]): @@ -1458,7 +1462,7 @@ def search_behavior(request, task_id): for argument in call["arguments"]: if search_argname and argument["name"] != search_argname: continue - if query.search(argument["value"]): + if isinstance(argument["value"], (str, bytes)) and query.search(argument["value"]): process_results.append(call) break @@ -1942,6 +1946,9 @@ def file(request, category, task_id, dlfile): path = [] for dfile in os.listdir(buf): path.append(os.path.join(buf, dfile)) + elif category == "mitmdump": + path = os.path.join(CUCKOO_ROOT, "storage", "analyses", task_id, "mitmdump", "dump.har") + cd = "text/plain" else: return render(request, "error.html", {"error": "Category not defined"}) @@ -2425,7 +2432,7 @@ def comments(request, task_id): @conditional_login_required(login_required, settings.WEB_AUTHENTICATION) def vtupload(request, category, task_id, filename, dlfile): - if enabledconf["vtupload"] and settings.VTDL_KEY: + if enabledconf["vtupload"] and integrations_cfg.virustotal.apikey: try: folder_name = False path = False @@ -2442,7 +2449,7 @@ def vtupload(request, category, task_id, filename, dlfile): if not path or not _path_safe(path): return render(request, "error.html", {"error": f"File not found: {os.path.basename(path)}"}) - headers = {"x-apikey": settings.VTDL_KEY} + headers = {"x-apikey": integrations_cfg.virustotal.apikey} files = {"file": (filename, open(path, "rb"))} response = requests.post("https://www.virustotal.com/api/v3/files", files=files, headers=headers) if response.ok: @@ -2480,11 +2487,11 @@ def statistics_data(request, days=7): on_demand_config_mapper = { "bingraph": reporting_cfg, - "flare_capa": processing_cfg, + "flare_capa": integrations_cfg, "vba2graph": processing_cfg, "xlsdeobf": processing_cfg, "strings": processing_cfg, - "floss": processing_cfg, + "floss": integrations_cfg, } @@ -2515,7 +2522,9 @@ def on_demand(request, service: str, task_id: str, category: str, sha256): "xlsdeobf", "strings", "floss", - ) and not on_demand_config_mapper.get(service, {}).get(service, {}).get("on_demand"): + ) and not getattr( + on_demand_config_mapper.get(service, {}), service + ).get("on_demand"): return render(request, "error.html", {"error": "Not supported/enabled service on demand"}) # Self Extracted support folder @@ -2539,6 +2548,7 @@ def on_demand(request, service: str, task_id: str, category: str, sha256): details = False if service == "flare_capa" and HAVE_FLARE_CAPA: + # ToDo check if PE details = flare_capa_details(path, category.lower(), on_demand=True) if not details: details = {"msg": "No results"} diff --git a/web/apiv2/urls.py b/web/apiv2/urls.py index aa512323add..11053b43a68 100644 --- a/web/apiv2/urls.py +++ b/web/apiv2/urls.py @@ -2,12 +2,12 @@ # This file 
is part of Cuckoo Sandbox - http://www.cuckoosandbox.org # See the file "docs/LICENSE" for copying permission. -from apiv2 import views - # from django.conf.urls import include from django.urls import path, re_path from rest_framework.authtoken.views import obtain_auth_token +from apiv2 import views + urlpatterns = [ re_path(r"^$", views.index, name="apiv2"), # disabled due to token auth @@ -17,7 +17,7 @@ re_path(r"^tasks/stats/$", views.task_x_hours), re_path(r"^tasks/create/url/$", views.tasks_create_url), re_path(r"^tasks/create/dlnexec/$", views.tasks_create_dlnexec), - re_path(r"^tasks/create/vtdl/$", views.tasks_vtdl), + re_path(r"^tasks/create/download_services/$", views.tasks_download_services), re_path(r"^tasks/create/static/$", views.tasks_create_static), re_path(r"^tasks/search/md5/(?P([a-fA-F\d]{32}))/$", views.tasks_search), re_path(r"^tasks/search/sha1/(?P([a-fA-F\d]{40}))/$", views.tasks_search), @@ -48,6 +48,7 @@ re_path(r"^tasks/get/procmemory/(?P\d+)/(?P\d{1,5})/$", views.tasks_procmemory), re_path(r"^tasks/get/fullmemory/(?P\d+)/$", views.tasks_fullmemory), re_path(r"^tasks/get/pcap/(?P\d+)/$", views.tasks_pcap), + re_path(r"^tasks/get/tlspcap/(?P\d+)/$", views.tasks_tlspcap), re_path(r"^tasks/get/evtx/(?P\d+)/$", views.tasks_evtx), re_path(r"^tasks/get/dropped/(?P\d+)/$", views.tasks_dropped), re_path(r"^tasks/get/surifile/(?P\d+)/$", views.tasks_surifile), @@ -72,4 +73,6 @@ # re_path(r"^tasks/add/(?P[A-Za-z0-9]+)/(?P\d+)/$", views.post_processing), re_path(r"^tasks/statistics/(?P\d+)/$", views.statistics_data), re_path(r"^exitnodes/$", views.exit_nodes_list), + # re_path(r"^dist/tasks_reported$", views.dist_tasks_reported), + # re_path(r"^dist/tasks_notification/(?P\d+)$", views.dist_tasks_notification), ] diff --git a/web/apiv2/views.py b/web/apiv2/views.py index 882f4005cd0..bc9e8727166 100644 --- a/web/apiv2/views.py +++ b/web/apiv2/views.py @@ -43,7 +43,7 @@ from lib.cuckoo.common.web_utils import ( apiconf, download_file, - download_from_vt, + download_from_3rdparty, force_int, parse_request_arguments, perform_search, @@ -53,7 +53,13 @@ statistics, validate_task, ) -from lib.cuckoo.core.database import TASK_RECOVERED, TASK_RUNNING, Database, Task, _Database +from lib.cuckoo.core.database import ( + TASK_RECOVERED, + TASK_RUNNING, + Database, + Task, + _Database, +) from lib.cuckoo.core.rooter import _load_socks5_operational, vpns try: @@ -83,6 +89,7 @@ web_conf = Config("web") routing_conf = Config("routing") reporting_conf = Config("reporting") +dist_conf = Config("distributed") zlib_compresion = False if repconf.compression.enabled: @@ -97,15 +104,36 @@ if repconf.mongodb.enabled: - from dev_utils.mongodb import mongo_delete_data, mongo_find, mongo_find_one, mongo_find_one_and_update + from dev_utils.mongodb import ( + mongo_delete_data, + mongo_find, + mongo_find_one, + mongo_find_one_and_update, + ) es_as_db = False if repconf.elasticsearchdb.enabled and not repconf.elasticsearchdb.searchonly: - from dev_utils.elasticsearchdb import elastic_handler, get_analysis_index, get_query_by_info_id + from dev_utils.elasticsearchdb import ( + elastic_handler, + get_analysis_index, + get_query_by_info_id, + ) es_as_db = True es = elastic_handler + +DIST_ENABLED = False +if dist_conf.distributed.enabled: + from lib.cuckoo.common.dist_db import create_session + from lib.cuckoo.common.dist_db import Task as DTask + + dist_session = create_session( + dist_conf.distributed.db, + echo=False, + ) + DIST_ENABLED = True + db: _Database = Database() @@ -213,6 +241,9 @@ def 
tasks_create_static(request): resp["data"]["task_ids"] = task_ids if extra_details and "config" in extra_details: resp["data"]["config"] = extra_details["config"] + if extra_details.get("errors"): + resp["errors"].extend(extra_details["errors"]) + callback = apiconf.filecreate.get("status") if task_ids: if len(task_ids) == 1: @@ -228,6 +259,7 @@ def tasks_create_static(request): resp["url"].append("{0}/submit/status/{1}".format(apiconf.api.get("url"), tid)) else: resp = {"error": True, "error_value": "Error adding task to database"} + return Response(resp) @@ -320,7 +352,7 @@ def tasks_create_file(request): for content, tmp_path, _ in list_of_tasks: if pcap: - if tmp_path.lower().endswith(".saz"): + if tmp_path.lower().endswith(b".saz"): saz = saz_to_pcap(tmp_path) if saz: try: @@ -341,6 +373,7 @@ def tasks_create_file(request): if tmp_path: details["path"] = tmp_path details["content"] = content + status, tasks_details = download_file(**details) if status == "error": details["errors"].append({os.path.basename(tmp_path).decode(): tasks_details}) @@ -697,6 +730,7 @@ def tasks_search(request, md5=None, sha1=None, sha256=None): return Response(resp) +# ToDo requires proper review and rewrite # Return Task ID's and data that match a hash. @csrf_exempt @api_view(["POST"]) @@ -710,6 +744,7 @@ def ext_tasks_search(request): return_data = [] term = request.data.get("option", "") value = request.data.get("argument", "") + search_limit = request.data.get("search_limit", 50) if term and value: records = False @@ -717,12 +752,15 @@ def ext_tasks_search(request): resp = {"error": True, "error_value": "Invalid Option. '%s' is not a valid option." % term} return Response(resp) - if term in ("ids", "options", "tags_tasks"): + if term == "tags_tasks": + value = [int(v.id) for v in db.list_tasks(tags_tasks_like=value, limit=int(search_limit))] + elif term == "options": + value = [int(v.id) for v in db.list_tasks(options_like=value, limit=search_limit)] + elif term == "ids": if all([v.strip().isdigit() for v in value.split(",")]): value = [int(v.strip()) for v in filter(None, value.split(","))] else: return Response({"error": True, "error_value": "Not all values are integers"}) - if term == "ids": tmp_value = [] for task in db.list_tasks(task_ids=value) or []: if task.status == "reported": @@ -1139,6 +1177,10 @@ def tasks_report(request, task_id, report_format="json", make_zip=False): if check["error"]: return Response(check) + if check.get("tlp", "") in ("red", "Red"): + return Response({"error": True, "error_value": "Task has a TLP of RED"}) + + rtid = check.get("rtid", 0) if rtid: task_id = rtid @@ -1163,6 +1205,7 @@ def tasks_report(request, task_id, report_format="json", make_zip=False): "maec5": "report.maec-5.0.json", "metadata": "report.metadata.xml", "litereport": "lite.json", + "parti": "report.parti", } report_formats = { @@ -1212,17 +1255,28 @@ def tasks_report(request, task_id, report_format="json", make_zip=False): elif report_format == "protobuf": content = "application/octet-stream" ext = "protobuf" + elif report_format == "parti": + ext = "parti" + content = "application/zip" fname = "%s_report.%s" % (task_id, ext) if make_zip: - mem_zip = create_zip(files=report_path) - if mem_zip is False: - resp = {"error": True, "error_value": "Can't create zip archive for report file"} - return Response(resp) - - resp = StreamingHttpResponse(mem_zip, content_type="application/zip") - resp["Content-Length"] = len(mem_zip.getvalue()) - resp["Content-Disposition"] = f"attachment; 
filename={report_format}.zip" + if os.path.exists(report_path + ".zip"): + report_path += ".zip" + resp = StreamingHttpResponse( + FileWrapper(open(report_path, "rb"), 8096), content_type="application/zip" + ) + resp["Content-Length"] = os.path.getsize(report_path) + resp["Content-Disposition"] = "attachment; filename=" + fname + else: + mem_zip = create_zip(files=report_path) + if mem_zip is False: + resp = {"error": True, "error_value": "Can't create zip archive for report file"} + return Response(resp) + + resp = StreamingHttpResponse(mem_zip, content_type="application/zip") + resp["Content-Length"] = len(mem_zip.getvalue()) + resp["Content-Disposition"] = f"attachment; filename={report_format}.zip" else: resp = StreamingHttpResponse( FileWrapper(open(report_path, "rb"), 8096), content_type=content or "application/octet-stream;" @@ -1265,7 +1319,7 @@ def tasks_report(request, task_id, report_format="json", make_zip=False): else: zf.write(filepath, filedir) except Exception as e: - log.error(e, exc_info=True) + log.exception(e) # exception for lite report that is under reports/lite.json if report_format.lower() == "lite": @@ -1297,6 +1351,10 @@ def tasks_iocs(request, task_id, detail=None): if check["error"]: return Response(check) + if check.get("tlp", "") in ("red", "Red"): + return Response({"error": True, "error_value": "Task has a TLP of RED"}) + + rtid = check.get("rtid", 0) if rtid: task_id = rtid @@ -1527,6 +1585,10 @@ def tasks_screenshot(request, task_id, screenshot="all"): if check["error"]: return Response(check) + if check.get("tlp", "") in ("red", "Red"): + return Response({"error": True, "error_value": "Task has a TLP of RED"}) + + rtid = check.get("rtid", 0) if rtid: task_id = rtid @@ -1576,6 +1638,10 @@ def tasks_pcap(request, task_id): if check["error"]: return Response(check) + if check.get("tlp", "") in ("red", "Red"): + return Response({"error": True, "error_value": "Task has a TLP of RED"}) + + rtid = check.get("rtid", 0) if rtid: task_id = rtid @@ -1595,6 +1661,36 @@ def tasks_pcap(request, task_id): return Response(resp) +@csrf_exempt +@api_view(["GET"]) +def tasks_tlspcap(request, task_id): + if not apiconf.tasktlspcap.get("enabled"): + resp = {"error": True, "error_value": "TLS PCAP download API is disabled"} + return Response(resp) + + check = validate_task(task_id) + if check["error"]: + return Response(check) + + rtid = check.get("rtid", 0) + if rtid: + task_id = rtid + + srcfile = os.path.join(CUCKOO_ROOT, "storage", "analyses", "%s" % task_id, "polarproxy", "tls.pcap") + if not os.path.normpath(srcfile).startswith(ANALYSIS_BASE_PATH): + return render(request, "error.html", {"error": f"File not found: {os.path.basename(srcfile)}"}) + if path_exists(srcfile): + fname = "%s_tls.pcap" % task_id + resp = StreamingHttpResponse(FileWrapper(open(srcfile, "rb"), 8096), content_type="application/vnd.tcpdump.pcap") + resp["Content-Length"] = os.path.getsize(srcfile) + resp["Content-Disposition"] = "attachment; filename=" + fname + return resp + + else: + resp = {"error": True, "error_value": "TLS PCAP does not exist"} + return Response(resp) + + @csrf_exempt @api_view(["GET"]) def tasks_evtx(request, task_id): @@ -1606,6 +1702,10 @@ def tasks_evtx(request, task_id): if check["error"]: return Response(check) + if check.get("tlp", "") in ("red", "Red"): + return Response({"error": True, "error_value": "Task has a TLP of RED"}) + + rtid = check.get("rtid", 0) if rtid: task_id = rtid @@ -1628,18 +1728,15 @@ def tasks_evtx(request, task_id): @csrf_exempt @api_view(["GET"]) 
def tasks_mitmdump(request, task_id): - if not apiconf.taskmitmdump.get("enabled"): + if not apiconf.mitmdump.get("enabled"): resp = {"error": True, "error_value": "Mitmdump HAR download API is disabled"} return Response(resp) - check = validate_task(task_id) if check["error"]: return Response(check) - rtid = check.get("rtid", 0) if rtid: task_id = rtid - harfile = os.path.join(CUCKOO_ROOT, "storage", "analyses", "%s" % task_id, "mitmdump", "dump.har") if not os.path.normpath(harfile).startswith(ANALYSIS_BASE_PATH): return render(request, "error.html", {"error": f"File not found: {os.path.basename(harfile)}"}) @@ -1649,7 +1746,6 @@ def tasks_mitmdump(request, task_id): resp["Content-Length"] = os.path.getsize(harfile) resp["Content-Disposition"] = "attachment; filename=" + fname return resp - else: resp = {"error": True, "error_value": "HAR file does not exist"} return Response(resp) @@ -1666,6 +1762,10 @@ def tasks_dropped(request, task_id): if check["error"]: return Response(check) + if check.get("tlp", "") in ("red", "Red"): + return Response({"error": True, "error_value": "Task has a TLP of RED"}) + + rtid = check.get("rtid", 0) if rtid: task_id = rtid @@ -1713,6 +1813,10 @@ def tasks_surifile(request, task_id): if check["error"]: return Response(check) + if check.get("tlp", "") in ("red", "Red"): + return Response({"error": True, "error_value": "Task has a TLP of RED"}) + + rtid = check.get("rtid", 0) if rtid: task_id = rtid @@ -1823,6 +1927,10 @@ def tasks_procmemory(request, task_id, pid="all"): if check["error"]: return Response(check) + if check.get("tlp", "") in ("red", "Red"): + return Response({"error": True, "error_value": "Task has a TLP of RED"}) + + rtid = check.get("rtid", 0) if rtid: task_id = rtid @@ -1897,6 +2005,10 @@ def tasks_fullmemory(request, task_id): if check["error"]: return Response(check) + if check.get("tlp", "") in ("red", "Red"): + return Response({"error": True, "error_value": "Task has a TLP of RED"}) + + rtid = check.get("rtid", 0) if rtid: task_id = rtid @@ -2141,6 +2253,10 @@ def tasks_payloadfiles(request, task_id): if check["error"]: return Response(check) + if check.get("tlp", "") in ("red", "Red"): + return Response({"error": True, "error_value": "Task has a TLP of RED"}) + + rtid = check.get("rtid", 0) if rtid: task_id = rtid @@ -2174,6 +2290,10 @@ def tasks_procdumpfiles(request, task_id): if check["error"]: return Response(check) + if check.get("tlp", "") in ("red", "Red"): + return Response({"error": True, "error_value": "Task has a TLP of RED"}) + + rtid = check.get("rtid", 0) if rtid: task_id = rtid @@ -2207,6 +2327,10 @@ def tasks_config(request, task_id, cape_name=False): if check["error"]: return Response(check) + if check.get("tlp", "") in ("red", "Red"): + return Response({"error": True, "error_value": "Task has a TLP of RED"}) + + rtid = check.get("rtid", 0) if rtid: task_id = rtid @@ -2310,13 +2434,16 @@ def limit_exceeded(request, exception): } -def common_download_func(service, request): +@csrf_exempt +@api_view(["POST"]) +def tasks_download_services(request): + # Check if this API function is enabled + if not apiconf.downloading_services.get("enabled"): + return Response({"error": True, "error_value": "Download sample API is Disabled"}) resp = {} - hashes = request.data.get(dl_service_map[service].strip()) - if not hashes: - hashes = request.POST.get("hashes".strip(), None) + hashes = request.POST.get("hashes").strip() if not hashes: - return Response({"error": True, "error_value": f"hashes (hash list) or {dl_service_map[service]} 
value is empty"}) + return Response({"error": True, "error_value": "hashes value is empty"}) resp["error"] = False # Parse potential POST options (see submission/views.py) options = request.POST.get("options", "") @@ -2328,18 +2455,9 @@ def common_download_func(service, request): task_machines = [] vm_list = [] opt_apikey = False - - if service == "VirusTotal": - opts = get_options(options) - if opts: - opt_apikey = opts.get("apikey", False) - - if not (settings.VTDL_KEY or opt_apikey): - resp = { - "error": True, - "error_value": ("You specified VirusTotal but must edit the file and specify your VTDL_KEY variable"), - } - return Response(resp) + opts = get_options(options) + if opts: + opt_apikey = opts.get("apikey", False) for vm in db.list_machines(): vm_list.append(vm.label) @@ -2373,13 +2491,14 @@ def common_download_func(service, request): "fhash": False, "options": options, "only_extraction": False, - "service": service, + "service": "", "user_id": request.user.id or 0, } - if service == "VirusTotal": - details["apikey"] = settings.VTDL_KEY or opt_apikey - details = download_from_vt(hashes, details, opt_filename, settings) + if opt_apikey: + details["apikey"] = opt_apikey + + details = download_from_3rdparty(hashes, details, opt_filename) if isinstance(details.get("task_ids"), list): tasks_count = len(details["task_ids"]) else: @@ -2438,7 +2557,7 @@ def _stream_iterator(fp, guest_name, chunk_size=1024): resp = {"error": True, "error_value": "filepath not set"} return Response(resp) if request.data.get("is_local", ""): - if filepath.startswith(("/", "\/")): + if filepath.startswith(("/", r"\/")): resp = {"error": True, "error_value": "Filepath mustn't start with /"} return Response(resp) filepath = os.path.join(CUCKOO_ROOT, "storage", "analyses", f"{task_id}", filepath) @@ -2455,15 +2574,52 @@ def _stream_iterator(fp, guest_name, chunk_size=1024): return Response(resp) return StreamingHttpResponse(streaming_content=r.iter_content(chunk_size=1024), content_type="application/octet-stream") except requests.exceptions.RequestException as ex: - log.error(ex, exc_info=True) + log.exception(ex) resp = {"error": True, "error_value": f"Requests exception: {ex}"} return Response(resp) @csrf_exempt -@api_view(["POST"]) -def tasks_vtdl(request): - # Check if this API function is enabled - if not apiconf.vtdl.get("enabled"): - return Response({"error": True, "error_value": "VTDL Create API is Disabled"}) - return common_download_func("VirusTotal", request) +@api_view(["GET"]) +def dist_tasks_reported(request): + # List finished tasks here + if not DIST_ENABLED: + return Response( + { + "Error": True, + "error_value": "Distributed CAPE is not enabled", + } + ) + """ + + Add new API endpoint in CAPE to query the tasks that are reported and ready to be retrieved + Add new API endpoint in CAPE to set "task.notificated = True" for a specific task + + yeah we could script that go and fetch reported tasks. + can you currently list tasks that are finished but waiting to be retrieved in the api? + e.g. 
in the notification_loop() in dist.py, where it queries tasks that need to be sent to the callback url it does this: + + if there was an API endpoint that exposed that, and another that allowed us to set notificated on the task when we'd finished processing it, then we wouldn't need the callback anymore + """ + # change to with session as + dist_db = dist_session() + ready = [] + tasks = dist_db.query(DTask).filter_by(finished=True, retrieved=True, notificated=False).order_by(DTask.id.desc()).all() + for task in tasks or []: + ready.append(task.main_task_id) + dist_db.close() + return Response({"Tasks": ready}) + + +@csrf_exempt +@api_view(["GET"]) +def dist_tasks_notification(request, task_id: int): + dist_db = dist_session() + tasks = dist_db.query(DTask).filter_by(main_task_id=task_id).order_by(DTask.id.desc()).all() + if not tasks: + return Response({"error": True, "error_value": f"No tasks found with main_task_id: {task_id}"}) + for task in tasks: + # main_db.set_status(task.main_task_id, TASK_REPORTED) + # log.debug("reporting main_task_id: {}".format(task.main_task_id)) + task.notificated = True + # persist the flag and acknowledge the request + dist_db.commit() + dist_db.close() + return Response({"error": False}) + diff --git a/web/compare/urls.py b/web/compare/urls.py index 3b32527f6f1..610c56b9c02 100644 --- a/web/compare/urls.py +++ b/web/compare/urls.py @@ -2,9 +2,10 @@ # This file is part of Cuckoo Sandbox - http://www.cuckoosandbox.org # See the file "docs/LICENSE" for copying permission. -from compare import views from django.urls import re_path +from compare import views + urlpatterns = [ re_path(r"^(?P\d+)/$", views.left, name="compare_left"), re_path(r"^(?P\d+)/(?P\d+)/$", views.both, name="compare_both"), diff --git a/web/dashboard/urls.py b/web/dashboard/urls.py index ff2e6b25406..de7ca558275 100644 --- a/web/dashboard/urls.py +++ b/web/dashboard/urls.py @@ -2,9 +2,10 @@ # This file is part of Cuckoo Sandbox - http://www.cuckoosandbox.org # See the file "docs/LICENSE" for copying permission. -from dashboard import views from django.urls import re_path +from dashboard import views + urlpatterns = [ re_path(r"^$", views.index), ] diff --git a/web/guac/consumers.py b/web/guac/consumers.py index 89b0bcd600c..fb966d16727 100644 --- a/web/guac/consumers.py +++ b/web/guac/consumers.py @@ -32,12 +32,15 @@ async def connect(self): params = urllib.parse.parse_qs(self.scope["query_string"].decode()) if "rdp" in guest_protocol: - guest_host = params.get("guest_ip", "") + hosts = params.get("guest_ip", "") + guest_host = hosts[0] guest_port = int(web_cfg.guacamole.guest_rdp_port) or 3389 + ignore_cert = "true" if web_cfg.guacamole.ignore_rdp_cert is True else "false" else: guest_host = web_cfg.guacamole.vnc_host or "localhost" ports = params.get("vncport", ["5900"]) guest_port = int(ports[0]) + ignore_cert = "false" guacd_recording_name = params.get("recording_name", ["task-recording"])[0] @@ -53,6 +56,7 @@ async def connect(self): password=guest_password, recording_path=guacd_recording_path, recording_name=guacd_recording_name, + ignore_cert=ignore_cert, ) if self.client.connected: diff --git a/web/guac/urls.py b/web/guac/urls.py index bf0bb539242..dbf41e6f560 100644 --- a/web/guac/urls.py +++ b/web/guac/urls.py @@ -1,4 +1,5 @@ from django.urls import re_path + from guac import views urlpatterns = [ diff --git a/web/submission/urls.py b/web/submission/urls.py index 756d8efc0e8..a2b2f8fde69 100644 --- a/web/submission/urls.py +++ b/web/submission/urls.py @@ -3,6 +3,7 @@ # See the file 'docs/LICENSE' for copying permission.
from django.urls import re_path + from submission import views urlpatterns = [ diff --git a/web/submission/views.py b/web/submission/views.py index a8c9e7efccd..a6b9b297442 100644 --- a/web/submission/views.py +++ b/web/submission/views.py @@ -26,8 +26,8 @@ from lib.cuckoo.common.utils import get_options, get_user_filename, sanitize_filename, store_temp_file from lib.cuckoo.common.web_utils import ( download_file, - download_from_bazaar, - download_from_vt, + download_from_3rdparty, + downloader_services, get_file_content, load_vms_exits, load_vms_tags, @@ -357,10 +357,6 @@ def index(request, task_id=None, resubmit_hash=None): if request.POST.get("job_category"): job_category = request.POST.get("job_category") - # amsidump is enabled by default in the monitor for Win10+ - if web_conf.amsidump.enabled and not request.POST.get("amsidump"): - options += "amsidump=0," - options = options[:-1] opt_apikey = False @@ -386,6 +382,8 @@ def index(request, task_id=None, resubmit_hash=None): "user_id": request.user.id or 0, "package": package, } + if opt_apikey: + details["apikey"] = opt_apikey task_category = False samples = [] if "hash" in request.POST and request.POST.get("hash", False) and request.POST.get("hash")[0] != "": @@ -406,24 +404,14 @@ def index(request, task_id=None, resubmit_hash=None): elif "dlnexec" in request.POST and request.POST.get("dlnexec").strip(): task_category = "dlnexec" samples = request.POST.get("dlnexec").strip() - elif ( - settings.VTDL_ENABLED - and "vtdl" in request.POST - and request.POST.get("vtdl", False) - and request.POST.get("vtdl")[0] != "" - ): - task_category = "vtdl" - samples = request.POST.get("vtdl").strip() - elif "bazaar" in request.POST and request.POST.get("bazaar").strip(): - task_category = "bazaar" - samples = request.POST.get("bazaar").strip() - + elif "hashes" in request.POST and request.POST.get("hashes", False) and request.POST.get("hashes")[0] != "": + task_category = "downloading_service" + samples = request.POST.get("hashes").strip() list_of_tasks = [] if task_category in ("url", "dlnexec"): if not samples: return render(request, "error.html", {"error": "You specified an invalid URL!"}) - - for url in samples.split(","): + for url in samples.split(web_conf.general.url_splitter): url = url.replace("hxxps://", "https://").replace("hxxp://", "http://").replace("[.]", ".") if task_category == "dlnexec": path, content, sha256 = process_new_dlnexec_task(url, route, options, custom) @@ -467,7 +455,7 @@ def index(request, task_id=None, resubmit_hash=None): paths.append(path) if not paths: - for folder_name in ("selfextracted", "files"): + for folder_name in ("selfextracted", "files", "procdump", "CAPE"): # Self Extracted support folder path = os.path.join(settings.CUCKOO_PATH, "storage", "analyses", str(task_id), folder_name, hash) if path_exists(path): @@ -493,7 +481,7 @@ def index(request, task_id=None, resubmit_hash=None): list_of_tasks.append((content, path, hash)) # Hack for resubmit first find all files and then put task as proper category - if job_category and job_category in ("resubmit", "sample", "static", "pcap", "dlnexec", "vtdl", "bazaar"): + if job_category and job_category in ("resubmit", "sample", "static", "pcap", "dlnexec", "downloading_service"): task_category = job_category if task_category == "resubmit": @@ -633,20 +621,8 @@ def index(request, task_id=None, resubmit_hash=None): if tasks_details.get("errors"): details["errors"].extend(tasks_details["errors"]) - elif task_category == "vtdl": - if not settings.VTDL_KEY: - return 
render( - request, - "error.html", - {"error": "You specified VirusTotal but must edit the file and specify your VTDL_KEY variable"}, - ) - else: - if opt_apikey: - details["apikey"] = opt_apikey - details = download_from_vt(samples, details, opt_filename, settings) - - elif task_category == "bazaar": - details = download_from_bazaar(samples, details, opt_filename, settings) + elif task_category == "downloading_service": + details = download_from_3rdparty(samples, opt_filename, details) if details.get("task_ids"): tasks_count = len(details["task_ids"]) @@ -671,8 +647,6 @@ def index(request, task_id=None, resubmit_hash=None): return render(request, "error.html", err_data) else: enabledconf = {} - enabledconf["vt"] = settings.VTDL_ENABLED - enabledconf["bazaar"] = settings.BAZAAR_ENABLED enabledconf["kernel"] = settings.OPT_ZER0M0N enabledconf["memory"] = processing.memory.get("enabled") enabledconf["procmemory"] = processing.procmemory.get("enabled") @@ -686,6 +660,7 @@ def index(request, task_id=None, resubmit_hash=None): enabledconf["amsidump"] = web_conf.amsidump.enabled enabledconf["pre_script"] = web_conf.pre_script.enabled enabledconf["during_script"] = web_conf.during_script.enabled + enabledconf["downloading_service"] = bool(downloader_services.downloaders) all_vms_tags = load_vms_tags() @@ -806,7 +781,7 @@ def status(request, task_id): "status": status, "task_id": task_id, "session_data": "", - "target": task.sample.sha256 if task.sample.sha256 else task.target, + "target": task.sample.sha256 if getattr(task, "sample") else task.target, } if settings.REMOTE_SESSION: machine = db.view_machine_by_label(task.machine) diff --git a/web/templates/admin/captcha_login.html b/web/templates/admin/captcha_login.html new file mode 100644 index 00000000000..c314764d571 --- /dev/null +++ b/web/templates/admin/captcha_login.html @@ -0,0 +1,70 @@ +{% extends "admin/base_site.html" %} +{% load i18n static %} + +{% block extrastyle %}{{ block.super }} +{{ form.media }} +{% endblock %} + +{% block bodyclass %}{{ block.super }} login{% endblock %} + +{% block usertools %}{% endblock %} + +{% block nav-global %}{% endblock %} + +{% block content_title %}{% endblock %} + +{% block breadcrumbs %}{% endblock %} + +{% block content %} +{% if form.errors and not form.non_field_errors %} +

+{% if form.errors.items|length == 1 %}{% trans "Please correct the error below." %}{% else %}{% trans "Please correct the errors below." %}{% endif %} +

+{% endif %} + +{% if form.non_field_errors %} +{% for error in form.non_field_errors %} +

+ {{ error }} +

+{% endfor %} +{% endif %} + +
+ +{% if user.is_authenticated %} +

+{% blocktrans trimmed %} + You are authenticated as {{ username }}, but are not authorized to + access this page. Would you like to login to a different account? +{% endblocktrans %} +

+{% endif %} + +
{% csrf_token %} +
+ {{ form.username.errors }} + {{ form.username.label_tag }} {{ form.username }} +
+
+ {{ form.password.errors }} + {{ form.password.label_tag }} {{ form.password }} + +
+
+ {{ form.captcha.errors }} + {{ form.captcha }} +
+ {% url 'admin_password_reset' as password_reset_url %} + {% if password_reset_url %} + + {% endif %} +
+ +
+
+ +
+{% endblock %} diff --git a/web/templates/analysis/behavior/_processes.html b/web/templates/analysis/behavior/_processes.html index 9a2f5668626..4230d983a2d 100644 --- a/web/templates/analysis/behavior/_processes.html +++ b/web/templates/analysis/behavior/_processes.html @@ -84,9 +84,9 @@ } function load_filtered_chunk(pid, category, caller, tid) { // Trim leading and trailing spaces - var inputValue = $("#apifilter_" + pid).val().trim(); + var inputValue = $("#apifilter_" + pid).val().trim(); // Split input by commas, trim spaces, and join back with commas - var apis = inputValue.split(',').map(api => api.trim()).join(','); + var apis = inputValue.split(',').map(api => api.trim()).join(','); var encodedlist = (apis === "") ? encodeURI("!null") : encodeURI(apis); $("#process_" + pid + " div.calltable").load("/analysis/filtered/{{id}}/" + pid + "/" + category + "/" + encodedlist + "/" + caller + "/" + tid + "/", function (data, status, xhr) { @@ -135,6 +135,9 @@ {% if process.environ.CommandLine %}
Command Line: {{ process.environ.CommandLine }} {% endif %} + {% if process.environ.DllBase %} +
DLL Image Base: {{ process.environ.DllBase }}, + {% endif %} {% if process.environ.MainExeBase %}
Image Base: {{ process.environ.MainExeBase }}, {% endif %} diff --git a/web/templates/analysis/generic/_file_info.html b/web/templates/analysis/generic/_file_info.html index 493d8d1c07b..85df4097d05 100644 --- a/web/templates/analysis/generic/_file_info.html +++ b/web/templates/analysis/generic/_file_info.html @@ -16,6 +16,12 @@ {% endif %} + {% if file.note %} + + Note + {{file.note}} + + {% endif %} {% if file.cape_type %} Type @@ -258,6 +264,7 @@ {% if file.dotnet %}{% endif %} {% if file.pdf %}{% endif %} {% if file.lnk %}{% endif %} + {% if file.rdp %}{% endif %} {% if file.java %}{% endif %} {% if file.office %}{% endif %} {% if file.office.XLMMacroDeobfuscator %}{% endif %} @@ -273,7 +280,7 @@ {% if not file.flare_capa and on_demand.flare_capa %} CAPA {% elif file.flare_capa %} - + {% endif %} {% endif %} {% if config.strings %} @@ -323,8 +330,10 @@ {% if file.decoded_files %} {% endif %} - {% if file.extracted_files %} - + {% if file.selfextract %} + {% for name, details in file.selfextract.items %} + + {% endfor %} {% endif %} @@ -337,14 +346,18 @@
- {% if file.flare_capa.CAPABILITY %} - {{file.flare_capa|flare_capa_capability}} - {% endif %} - {% if file.flare_capa.ATTCK %} - {{file.flare_capa|flare_capa_attck}} - {% endif %} - {% if file.flare_capa.MBC %} - {{file.flare_capa|flare_capa_mbc}} + {% if file.flare_capa.CAPABILITY or file.flare_capa.ATTCK or file.flare_capa.MBC %} + {% if file.flare_capa.CAPABILITY %} + {{ file.flare_capa|flare_capa_capability }} + {% endif %} + {% if file.flare_capa.ATTCK %} + {{ file.flare_capa|flare_capa_attck }} + {% endif %} + {% if file.flare_capa.MBC %} + {{ file.flare_capa|flare_capa_mbc }} + {% endif %} + {% else %} + No results {% endif %}
@@ -417,18 +430,26 @@ {% endif %} - {% if file.extracted_files %} - - -
-
- {% for sub_file in file.extracted_files %} + {% if file.selfextract %} + {% for name, details in file.selfextract.items %} + + +
+
+ {% if details.password %} + + Archive password: + {{details.password}} + + {% endif %} + {% for sub_file in details.extracted_files %} {% include "analysis/generic/_subfile_info.html" %} {% endfor %} +
-
- - + + + {% endfor %}
{% endif %} {% if config.yara_detail and file.yara %}
{% include "analysis/generic/_yara.html" %}

{% endif %} @@ -437,6 +458,7 @@ {% if file.dotnet %}
{% include "analysis/generic/_dotnet.html" %}

{% endif %} {% if file.pdf %}
{% include "analysis/generic/_pdf.html" %}

{% endif %} {% if file.lnk %}
{% include "analysis/generic/_lnk.html" %}

{% endif %} + {% if file.rdp %}
{% include "analysis/generic/_rdp.html" %}

{% endif %} {% if file.java %}
{% include "analysis/generic/_java.html" %}

{% endif %} {% if file.office %}
{% include "analysis/generic/_office.html" %}

{% endif %} {% if file.office.XLMMacroDeobfuscator %}
{% include "analysis/generic/_xlmmacro.html" %}

{% endif %} diff --git a/web/templates/analysis/generic/_office.html b/web/templates/analysis/generic/_office.html index 4a207611047..3cd8d976f37 100644 --- a/web/templates/analysis/generic/_office.html +++ b/web/templates/analysis/generic/_office.html @@ -119,12 +119,12 @@

Extracted Macros

{% endif %} {% endif %} {% endif %} - {% if file.office_rtf %} + {% if file.office.rtf %}

RTF document details

- {% for key, value in file.office_rtf.items %} + {% for key, value in file.office.rtf.items %}
Object ID: {{key}}
@@ -137,7 +137,7 @@
Object ID: {{key}}
{% for block in value %} {% if block.sha256 %} - + {% else %} {% endif %} @@ -154,7 +154,7 @@
Object ID: {{key}}

{% endif %} - {% if file.office_dde %} + {% if file.office.dde %}
Filename
{{block.filename}}{{block.filename}}{{block.filename}}
@@ -163,7 +163,7 @@
Object ID: {{key}}
- +

DDE Detected

{{file.office_dde}}{{file.dde}}
diff --git a/web/templates/analysis/generic/_rdp.html b/web/templates/analysis/generic/_rdp.html new file mode 100644 index 00000000000..076bb802ebf --- /dev/null +++ b/web/templates/analysis/generic/_rdp.html @@ -0,0 +1,70 @@ +
+{% if file.rdp %} +

RDP file details

+
+ + + + + + {% for key, value in file.rdp.items %} + {% if key != "certificates" %} + + + + + {% else %} +
NameValue
{{key}}{{value}}
+ {% for cert in value %} + + + + + + {% if cert.subject %} + + {% endif %} + {% if cert.issuer %} + + + + + {% endif %} + {% if cert.not_before %} + + + + + {% endif %} + {% if cert.not_after %} + + + + + {% endif %} + {% if cert.serial_number %} + + + + + {% endif %} + {% if cert.fingerprint_sha256 %} + + + + + {% endif %} + {% if cert.fingerprint_sha1 %} + + + + + {% endif %} +
CertificateValue
Subject Organization Name {{cert.subject}}
Issuer{{cert.issuer}}
Not valid before{{cert.not_before}}
Not valid after{{cert.not_after}}
Serial number{{cert.serial_number}}
Cert's sha256 fingerprint{{cert.fingerprint_sha256}}
Cert's sha1 fingerprint{{cert.fingerprint_sha1}}
+ {% endfor %} + {% endif %} + {% endfor %} +{% else %} + Nothing to display. +{% endif %} +
diff --git a/web/templates/analysis/generic/_subfile_capeyara.html b/web/templates/analysis/generic/_subfile_capeyara.html new file mode 100644 index 00000000000..4c65b4a087d --- /dev/null +++ b/web/templates/analysis/generic/_subfile_capeyara.html @@ -0,0 +1,50 @@ +
+ {% load key_tags %} +
+

CAPE Yara Details

+

+
+ {% for hit in sub_file.cape_yara %} +
+ +
+
+ + {% if hit.strings %} + + + + + {% endif %} + {% if hit.addresses %} + + + + + {% endif %} +
Strings +
    + {% for string in hit.strings %} +
  • {{string}}
  • + {% endfor %} +
+
String Name: Address +
    + {% for key, value in hit.addresses.items %} +
  • {{key}}: {{value}}
  • + {% endfor %} +
+
+
+
+
+ {% endfor %} +
+
+
diff --git a/web/templates/analysis/generic/_subfile_info.html b/web/templates/analysis/generic/_subfile_info.html index 7baf2f19288..2549ec14f26 100644 --- a/web/templates/analysis/generic/_subfile_info.html +++ b/web/templates/analysis/generic/_subfile_info.html @@ -2,6 +2,12 @@ {% load key_tags %}
+ {% if sub_file.note %} + + + + + {% endif %} {% if sub_file.cape_type %} @@ -241,12 +247,12 @@ {% else %} @@ -269,7 +275,7 @@ {% if not sub_file.flare_capa and on_demand.flare_capa %} CAPA {% elif sub_file.flare_capa %} - + FLARE CAPA {% endif %} {% endif %} {% if config.strings %} @@ -330,14 +336,18 @@
- {% if sub_file.flare_capa.CAPABILITY %} - {{sub_file.flare_capa|flare_capa_capability}} - {% endif %} - {% if sub_file.flare_capa.ATTCK %} - {{sub_file.flare_capa|flare_capa_attck}} - {% endif %} - {% if sub_file.flare_capa.MBC %} - {{sub_file.flare_capa|flare_capa_mbc}} + {% if file.flare_capa.CAPABILITY or file.flare_capa.ATTCK or file.flare_capa.MBC %} + {% if file.flare_capa.CAPABILITY %} + {{ file.flare_capa|flare_capa_capability }} + {% endif %} + {% if file.flare_capa.ATTCK %} + {{ file.flare_capa|flare_capa_attck }} + {% endif %} + {% if file.flare_capa.MBC %} + {{ file.flare_capa|flare_capa_mbc }} + {% endif %} + {% else %} + No results {% endif %}
@@ -416,14 +426,14 @@
{% endif %} - {% if config.yara_detail and subfile.yara %}
{% include "analysis/generic/_yara.html" %}

{% endif %} - {% if config.yara_detail and subfile.cape_yara %}
{% include "analysis/generic/_capeyara.html" %}

{% endif %} - {% if subfile.pe %}
{% include "analysis/generic/_pe.html" %}

{% endif %} - {% if subfile.dotnet %}
{% include "analysis/generic/_dotnet.html" %}

{% endif %} - {% if subfile.pdf %}
{% include "analysis/generic/_pdf.html" %}

{% endif %} - {% if subfile.lnk %}
{% include "analysis/generic/_lnk.html" %}

{% endif %} - {% if subfile.java %}
{% include "analysis/generic/_java.html" %}

{% endif %} - {% if subfile.office %}
{% include "analysis/generic/_office.html" %}

{% endif %} - {% if subfile.floss %}
{% include "analysis/generic/_floss.html" %}

{% endif %} + {% if config.yara_detail and sub_file.yara %}
{% include "analysis/generic/_subfile_yara.html" %}

{% endif %} + {% if config.yara_detail and sub_file.cape_yara %}
{% include "analysis/generic/_subfile_capeyara.html" %}

{% endif %} + {% if sub_file.pe %}
{% include "analysis/generic/_pe.html" %}

{% endif %} + {% if sub_file.dotnet %}
{% include "analysis/generic/_dotnet.html" %}

{% endif %} + {% if sub_file.pdf %}
{% include "analysis/generic/_pdf.html" %}

{% endif %} + {% if sub_file.lnk %}
{% include "analysis/generic/_lnk.html" %}

{% endif %} + {% if sub_file.java %}
{% include "analysis/generic/_java.html" %}

{% endif %} + {% if sub_file.office %}
{% include "analysis/generic/_office.html" %}

{% endif %} + {% if sub_file.floss %}
{% include "analysis/generic/_floss.html" %}

{% endif %} {% if graphs.bingraph.enabled and graphs.bingraph.content|getkey:sub_file.sha256 %}

{% endif %} diff --git a/web/templates/analysis/generic/_subfile_yara.html b/web/templates/analysis/generic/_subfile_yara.html new file mode 100644 index 00000000000..9dd301820b0 --- /dev/null +++ b/web/templates/analysis/generic/_subfile_yara.html @@ -0,0 +1,50 @@ +
+ {% load key_tags %} +
+

Yara Details

+

+
+ {% for hit in sub_file.yara %} +
+ +
+
+
Note{{sub_file.note}}
Type
- {% if file.pe %}{% endif %} - {% if file.dotnet %}{% endif %} - {% if file.pdf %}{% endif %} - {% if file.lnk %}{% endif %} - {% if file.java %}{% endif %} - {% if file.office %}{% endif %} + {% if sub_file.pe %}{% endif %} + {% if sub_file.dotnet %}{% endif %} + {% if sub_file.pdf %}{% endif %} + {% if sub_file.lnk %}{% endif %} + {% if sub_file.java %}{% endif %} + {% if sub_file.office %}{% endif %}
{{ graphs.bingraph.content|getkey:sub_file.sha256|safe }}
+ {% if hit.strings %} + + + + + {% endif %} + {% if hit.addresses %} + + + + + {% endif %} +
Strings +
    + {% for string in hit.strings %} +
  • {{string}}
  • + {% endfor %} +
+
String Name: Address +
    + {% for key, value in hit.addresses.items %} +
  • {{key}}: {{value}}
  • + {% endfor %} +
+
+
+
+
+ {% endfor %} +
+ +
diff --git a/web/templates/analysis/index.html b/web/templates/analysis/index.html index 6b7d847bcd1..a4f0a2cf73c 100644 --- a/web/templates/analysis/index.html +++ b/web/templates/analysis/index.html @@ -36,7 +36,7 @@
Recent Files
{% if files %} - +
diff --git a/web/templates/analysis/network/_hosts_not_ajax.html b/web/templates/analysis/network/_hosts_not_ajax.html index b859a9c537c..01381f3946d 100644 --- a/web/templates/analysis/network/_hosts_not_ajax.html +++ b/web/templates/analysis/network/_hosts_not_ajax.html @@ -28,7 +28,7 @@

Hosts

{% if host.asn %} - + {% endif %} {% endif %} diff --git a/web/templates/analysis/network/index.html b/web/templates/analysis/network/index.html index 200f5d5ea29..f32a3e1b3b4 100644 --- a/web/templates/analysis/network/index.html +++ b/web/templates/analysis/network/index.html @@ -3,12 +3,15 @@
PCAP {% if pcapng.sha256 %} - PCAP-NG + PCAP-NG {% endif %} - PCAP + PCAP {% if tlskeys_exists %} TLS keys {% endif %} + {% if mitmdump_exists %} + Mitmdump + {% endif %}
{% endif %}
ID {{host.country_name}}{{host.asn}}{{host.asn}}
+
diff --git a/web/templates/apiv2/index.html b/web/templates/apiv2/index.html index 381faf075c3..b03e159d43f 100644 --- a/web/templates/apiv2/index.html +++ b/web/templates/apiv2/index.html @@ -37,23 +37,24 @@

API -

- - {% if config.vtdl.enabled %} + + {% if config.downloading_services.enabled %} {% else %} {% endif %} - + @@ -136,7 +137,7 @@

API -
  • RPS: {{ config.fileview.rps }}
  • RPM: {{ config.fileview.rpm }}
-

+ @@ -703,7 +704,7 @@

API - RPM: {{ config.payloadfiles.rpm }} -

+ @@ -753,6 +754,28 @@

API - curl {{ config.api.url }}/apiv2/tasks/[days]/

+ + + {% if config.mitmdump.enabled %} + + {% else %} + + {% endif %} + + + + + + +
Prefix
curl -F file=@/path/to/file -F machine="VM-Name" -H "Authorization: Token YOU_TOKEN" {{ config.api.url }}/apiv2/tasks/create/file/
-  Note: machine is optional. Header depends of the config if Token auth is enabled
+In case of PCAP you need to add -F pcap=1 +Note: machine is optional. The header depends on the config, i.e. whether Token auth is enabled in api.conf
VirusTotal download and analyzeDownload from file service and analyzeYesNo
    -
  • RPS: {{ config.vtdl.rps }}
  • -
  • RPM: {{ config.vtdl.rpm }}
  • +
  • RPS: {{ config.downloading_services.rps }}
  • +
  • RPM: {{ config.downloading_services.rpm }}
Download a file from VT for analysis. Return object will be JSON.Download a file from VT or MalwareBazaar or other service for analysis. Return object will be JSON.
View information about a specific sample that it's CAPE's database. Return object will be JSON.View information about a specific sample that's in CAPE's database. Return object will be JSON.
Download the Cape payload files associated with a task by ID. Return object will be octet-stream. (.zip). Use 7zip or pyzipper to extractDownload the CAPE payload files associated with a task by ID. Return object will be octet-stream. (.zip). Use 7zip or pyzipper to extract
Mitmdump HAR DownloadYesNo +
    +
  • RPS: {{ config.mitmdump.rps }}
  • +
  • RPM: {{ config.mitmdump.rpm }}
  • +
+
Download the HAR file produced by mitmdump for a given Task ID. Return will be a HAR file. +
+
curl {{ config.api.url }}/apiv2/tasks/get/mitmdump/[task id]/
+
{% else %} diff --git a/web/templates/submission/index.html b/web/templates/submission/index.html index 91947c0aee2..8dc3f72eb10 100644 --- a/web/templates/submission/index.html +++ b/web/templates/submission/index.html @@ -61,7 +61,7 @@ $("#package_description").text(title); }); // tooltips - $('[data-toggle="tooltip"]').tooltip(); + $('[data-toggle="tooltip"]').tooltip(); });
@@ -74,11 +74,8 @@ {% else %} - {% if config.vt %} - - {% endif %} - {% if config.bazaar %} - + {% if config.downloading_service %} + {% endif %} {% if config.url_analysis %} @@ -107,22 +104,13 @@
- - {% if config.vt %} -
-
- - -
-
- {% endif %} - {% if config.bazaar %} -
-
- - + {% if config.downloading_service %} +
+
+ + +
-
{% endif %}
@@ -544,11 +532,22 @@ unhook-apis Capability to dynamically unhook previously hooked functions (unhook-apis option takes colon-separated list e.g. unhook-apis=NtSetInformationThread:NtDelayExecution) - + ttd ttd=1. TTD integration (Microsoft Time Travel Debugging). Requires binaries to be placed in correct folder - + + polarproxy + Run PolarProxy to generate PCAP with decrypted TLS streams. Ex: polarproxy=1 + + + tlsport + TLS port for PolarProxy to MITM (Default: 443). Ex: tlsport=10443 + + + mitmdump + Run mitmdump to generate HAR with decrypted TLS streams. Ex: mitmdump=1 +
@@ -629,7 +628,7 @@ {% if config.amsidump %}
{% endif %} diff --git a/web/users/migrations/0001_initial.py b/web/users/migrations/0001_initial.py index 04677ea1c82..e7631ace34b 100644 --- a/web/users/migrations/0001_initial.py +++ b/web/users/migrations/0001_initial.py @@ -4,7 +4,6 @@ class Migration(migrations.Migration): - initial = True dependencies = [ diff --git a/web/users/migrations/0002_reports.py b/web/users/migrations/0002_reports.py index 35bc9b28e74..bdffb93d232 100644 --- a/web/users/migrations/0002_reports.py +++ b/web/users/migrations/0002_reports.py @@ -5,7 +5,6 @@ class Migration(migrations.Migration): - dependencies = [ ("users", "0001_initial"), ] diff --git a/web/users/migrations/0003_rename_field_subscription.py b/web/users/migrations/0003_rename_field_subscription.py index 54ab307011f..11c8e6f8434 100644 --- a/web/users/migrations/0003_rename_field_subscription.py +++ b/web/users/migrations/0003_rename_field_subscription.py @@ -24,7 +24,6 @@ def reverse_migrate(apps, schema_editor): class Migration(migrations.Migration): - dependencies = [ ("users", "0002_reports"), ] diff --git a/web/web/allauth_adapters.py b/web/web/allauth_adapters.py index 9d29042b76a..12fdf93eda3 100644 --- a/web/web/allauth_adapters.py +++ b/web/web/allauth_adapters.py @@ -1,4 +1,5 @@ from allauth.account.adapter import DefaultAccountAdapter +from allauth.socialaccount.adapter import DefaultSocialAccountAdapter from allauth.account.signals import email_confirmed, user_signed_up from django import forms from django.conf import settings @@ -36,3 +37,24 @@ def email_confirmed_(request, email_address, **kwargs): user = User.objects.get(email=email_address.email) user.is_active = not settings.MANUAL_APPROVE user.save() + +class MySocialAccountAdapter(DefaultSocialAccountAdapter): + def pre_social_login(self, request, sociallogin): + """ + Invoked just before a social login is about to proceed. + """ + user_email = sociallogin.account.extra_data.get("email") + if user_email and settings.SOCIAL_AUTH_EMAIL_DOMAIN: + domain = user_email.split("@")[1] + if domain != settings.SOCIAL_AUTH_EMAIL_DOMAIN: + raise forms.ValidationError(f"Please use email with domain: {settings.SOCIAL_AUTH_EMAIL_DOMAIN}") + + def save_user(self, request, sociallogin, form=None): + """ + Saves a new User instance using information provided from social account provider. 
+ """ + user = super(MySocialAccountAdapter, self).save_user(request, sociallogin, form) + user.email = sociallogin.account.extra_data.get("email") + user.username = sociallogin.account.extra_data.get("email").split("@")[0] + user.save() + return user diff --git a/web/web/allauth_forms.py b/web/web/allauth_forms.py index 1c313fe7193..ed18efdcdde 100644 --- a/web/web/allauth_forms.py +++ b/web/web/allauth_forms.py @@ -1,6 +1,6 @@ -from captcha.fields import ReCaptchaField -from captcha.widgets import ReCaptchaV2Checkbox from django import forms +from django_recaptcha.fields import ReCaptchaField +from django_recaptcha.widgets import ReCaptchaV2Checkbox class CaptchedSignUpForm(forms.Form): diff --git a/web/web/headers.py b/web/web/headers.py index b0ff02479f4..8bb7b519bd7 100644 --- a/web/web/headers.py +++ b/web/web/headers.py @@ -21,6 +21,7 @@ class CuckooHeaders(MiddlewareMixin): def __init__(self, get_response): self.get_response = get_response self._is_coroutine = False + self.async_mode = False def process_response(self, request, response): response["Server"] = "Machete Server" diff --git a/web/web/middleware/db_transaction.py b/web/web/middleware/db_transaction.py index c4700ee9fe2..31f461ce154 100644 --- a/web/web/middleware/db_transaction.py +++ b/web/web/middleware/db_transaction.py @@ -6,5 +6,8 @@ def __init__(self, get_response): self.get_response = get_response def __call__(self, request): - with Database().session.begin(): - return self.get_response(request) + db = Database() + with db.session.begin(): + resp = self.get_response(request) + db.session.remove() + return resp diff --git a/web/web/settings.py b/web/web/settings.py index faa4dc0ab11..fad99699e22 100644 --- a/web/web/settings.py +++ b/web/web/settings.py @@ -28,7 +28,6 @@ init_rooter() init_routing() - cfg = Config("reporting") aux_cfg = Config("auxiliary") web_cfg = Config("web") @@ -88,12 +87,6 @@ MOLOCH_BASE = moloch_cfg.get("base") MOLOCH_NODE = moloch_cfg.get("node") MOLOCH_ENABLED = moloch_cfg.get("enabled", False) - -VTDL_ENABLED = web_cfg.download_services.get("virustotal", False) -VTDL_KEY = web_cfg.download_services.get("vtkey", False) - -BAZAAR_ENABLED = web_cfg.download_services.get("malwarebazaar", False) - TEMP_PATH = Config().cuckoo.get("tmppath", "/tmp") # DEPRECATED - Enabled/Disable Zer0m0n tickbox on the submission page @@ -118,8 +111,7 @@ USE_I18N = True # Deprecated in Django 5.0 -# If you set this to False, Django will not format dates, numbers and -# calendars according to the current locale. +# If you set this to False, Django will not format dates, numbers and calendars according to the current locale. USE_L10N = True # Disabling time zone support and using local time for web interface and storage. @@ -159,8 +151,10 @@ # Don't put anything in this directory yourself; store your static files # in apps' "static/" subdirectories and in STATICFILES_DIRS. # Example: "/home/media/media.lawrence.com/static/" +# When NGINX is as reverse proxy you need to put next line in local_settings.py STATIC_ROOT = "" + # URL prefix for static files. 
# Example: "http://media.lawrence.com/static/" STATIC_URL = "/static/" @@ -221,6 +215,7 @@ # in case you want custom auth, place logic in web/web/middleware/custom_auth.py # "web.middleware.CustomAuth", "web.middleware.DBTransactionMiddleware", + "allauth.account.middleware.AccountMiddleware", ] OTP_TOTP_ISSUER = "CAPE Sandbox" @@ -257,109 +252,14 @@ "allauth", "allauth.account", "allauth.socialaccount", - "allauth.socialaccount.providers.agave", - "allauth.socialaccount.providers.amazon", - "allauth.socialaccount.providers.amazon_cognito", - "allauth.socialaccount.providers.angellist", - "allauth.socialaccount.providers.apple", - "allauth.socialaccount.providers.asana", - "allauth.socialaccount.providers.auth0", - "allauth.socialaccount.providers.authentiq", - "allauth.socialaccount.providers.azure", - "allauth.socialaccount.providers.baidu", - "allauth.socialaccount.providers.basecamp", - "allauth.socialaccount.providers.battlenet", - "allauth.socialaccount.providers.bitbucket", - "allauth.socialaccount.providers.bitbucket_oauth2", - "allauth.socialaccount.providers.bitly", - "allauth.socialaccount.providers.box", - "allauth.socialaccount.providers.cern", - "allauth.socialaccount.providers.coinbase", - "allauth.socialaccount.providers.dataporten", - "allauth.socialaccount.providers.daum", - "allauth.socialaccount.providers.digitalocean", - "allauth.socialaccount.providers.discord", - "allauth.socialaccount.providers.disqus", - "allauth.socialaccount.providers.douban", - "allauth.socialaccount.providers.doximity", - "allauth.socialaccount.providers.draugiem", - "allauth.socialaccount.providers.dropbox", - "allauth.socialaccount.providers.dwolla", - "allauth.socialaccount.providers.edmodo", - "allauth.socialaccount.providers.edx", - "allauth.socialaccount.providers.eventbrite", - "allauth.socialaccount.providers.eveonline", - "allauth.socialaccount.providers.evernote", - "allauth.socialaccount.providers.exist", - "allauth.socialaccount.providers.facebook", - "allauth.socialaccount.providers.feedly", - "allauth.socialaccount.providers.figma", - "allauth.socialaccount.providers.fivehundredpx", - "allauth.socialaccount.providers.flickr", - "allauth.socialaccount.providers.foursquare", - "allauth.socialaccount.providers.fxa", - "allauth.socialaccount.providers.github", - "allauth.socialaccount.providers.gitlab", - "allauth.socialaccount.providers.globus", - "allauth.socialaccount.providers.google", - "allauth.socialaccount.providers.hubic", - "allauth.socialaccount.providers.instagram", - "allauth.socialaccount.providers.jupyterhub", - "allauth.socialaccount.providers.kakao", - "allauth.socialaccount.providers.keycloak", - "allauth.socialaccount.providers.line", - "allauth.socialaccount.providers.linkedin", - "allauth.socialaccount.providers.linkedin_oauth2", - "allauth.socialaccount.providers.mailchimp", - "allauth.socialaccount.providers.mailru", - "allauth.socialaccount.providers.meetup", - "allauth.socialaccount.providers.microsoft", - "allauth.socialaccount.providers.naver", - "allauth.socialaccount.providers.nextcloud", - "allauth.socialaccount.providers.odnoklassniki", - "allauth.socialaccount.providers.openid", - "allauth.socialaccount.providers.openstreetmap", - "allauth.socialaccount.providers.orcid", - "allauth.socialaccount.providers.patreon", - "allauth.socialaccount.providers.paypal", - "allauth.socialaccount.providers.persona", - "allauth.socialaccount.providers.pinterest", - "allauth.socialaccount.providers.quickbooks", - "allauth.socialaccount.providers.reddit", - 
"allauth.socialaccount.providers.robinhood", - "allauth.socialaccount.providers.salesforce", - "allauth.socialaccount.providers.sharefile", - "allauth.socialaccount.providers.shopify", - "allauth.socialaccount.providers.slack", - "allauth.socialaccount.providers.soundcloud", - "allauth.socialaccount.providers.spotify", - "allauth.socialaccount.providers.stackexchange", - "allauth.socialaccount.providers.steam", - "allauth.socialaccount.providers.stocktwits", - "allauth.socialaccount.providers.strava", - "allauth.socialaccount.providers.stripe", - "allauth.socialaccount.providers.telegram", - "allauth.socialaccount.providers.trello", - "allauth.socialaccount.providers.tumblr", - "allauth.socialaccount.providers.twentythreeandme", - "allauth.socialaccount.providers.twitch", - "allauth.socialaccount.providers.twitter", - "allauth.socialaccount.providers.untappd", - "allauth.socialaccount.providers.vimeo", - "allauth.socialaccount.providers.vimeo_oauth2", - "allauth.socialaccount.providers.vk", - "allauth.socialaccount.providers.weibo", - "allauth.socialaccount.providers.weixin", - "allauth.socialaccount.providers.windowslive", - "allauth.socialaccount.providers.xing", - "allauth.socialaccount.providers.yahoo", - "allauth.socialaccount.providers.yandex", - "allauth.socialaccount.providers.ynab", - "allauth.socialaccount.providers.zoho", - "allauth.socialaccount.providers.zoom", - "allauth.socialaccount.providers.okta", + # Keeping this as example but disabling as some of them has extra dependencies. Check official docs. + # "allauth.socialaccount.providers.github", + # "allauth.socialaccount.providers.gitlab", + # "allauth.socialaccount.providers.google", + # "allauth.socialaccount.providers.microsoft", "crispy_forms", - "captcha", # https://pypi.org/project/django-recaptcha/ + "crispy_bootstrap4", + "django_recaptcha", # https://pypi.org/project/django-recaptcha/ "rest_framework", "rest_framework.authtoken", ] @@ -388,6 +288,7 @@ NOCAPTCHA = web_cfg.web_auth.get("captcha", False) # create your keys here -> https://www.google.com/recaptcha/about/ +# Set those in local_settings.py RECAPTCHA_PRIVATE_KEY = "TEST_PUBLIC_KEY" RECAPTCHA_PUBLIC_KEY = "TEST_PRIVATE_KEY" RECAPTCHA_DEFAULT_ACTION = "generic" @@ -423,7 +324,7 @@ SITE_ID = 1 -# https://django-allauth.readthedocs.io/en/latest/configuration.html +# https://docs.allauth.org/en/dev/socialaccount/configuration.html if web_cfg.registration.get("email_confirmation", False): ACCOUNT_EMAIL_VERIFICATION = "mandatory" SOCIALACCOUNT_EMAIL_VERIFICATION = ACCOUNT_EMAIL_VERIFICATION @@ -433,13 +334,24 @@ ACCOUNT_EMAIL_REQUIRED = web_cfg.registration.get("email_required", False) ACCOUNT_EMAIL_SUBJECT_PREFIX = web_cfg.registration.get("email_prefix_subject", False) -ACCOUNT_LOGIN_ATTEMPTS_LIMIT = 3 +ACCOUNT_RATE_LIMITS = {"login_failed": "3/m"} LOGIN_REDIRECT_URL = "/" ACCOUNT_LOGOUT_REDIRECT_URL = "/accounts/login/" MANUAL_APPROVE = web_cfg.registration.get("manual_approve", False) REGISTRATION_ENABLED = web_cfg.registration.get("enabled", False) EMAIL_CONFIRMATION = web_cfg.registration.get("email_confirmation", False) -#### ALlauth end +SOCIAL_AUTH_EMAIL_DOMAIN = web_cfg.web_auth.get("social_auth_email_domain", False) + +# be careful with SOCIALACCOUNT_AUTO_SIGNUP, if True, it will bypass custom sighup functions, default is True +# SOCIALACCOUNT_AUTO_SIGNUP = True +# SOCIALACCOUNT_ONLY = True +# SOCIALACCOUNT_LOGIN_ON_GET=True +# ACCOUNT_SIGNUP_FORM_CLASS = None +# In case you want to verify domain of email + set the username +# SOCIALACCOUNT_ADAPTER = 
@@ -423,7 +324,7 @@
 SITE_ID = 1
 
-# https://django-allauth.readthedocs.io/en/latest/configuration.html
+# https://docs.allauth.org/en/dev/socialaccount/configuration.html
 if web_cfg.registration.get("email_confirmation", False):
     ACCOUNT_EMAIL_VERIFICATION = "mandatory"
     SOCIALACCOUNT_EMAIL_VERIFICATION = ACCOUNT_EMAIL_VERIFICATION
@@ -433,13 +334,24 @@
 ACCOUNT_EMAIL_REQUIRED = web_cfg.registration.get("email_required", False)
 ACCOUNT_EMAIL_SUBJECT_PREFIX = web_cfg.registration.get("email_prefix_subject", False)
-ACCOUNT_LOGIN_ATTEMPTS_LIMIT = 3
+ACCOUNT_RATE_LIMITS = {"login_failed": "3/m"}
 LOGIN_REDIRECT_URL = "/"
 ACCOUNT_LOGOUT_REDIRECT_URL = "/accounts/login/"
 MANUAL_APPROVE = web_cfg.registration.get("manual_approve", False)
 REGISTRATION_ENABLED = web_cfg.registration.get("enabled", False)
 EMAIL_CONFIRMATION = web_cfg.registration.get("email_confirmation", False)
-#### ALlauth end
+SOCIAL_AUTH_EMAIL_DOMAIN = web_cfg.web_auth.get("social_auth_email_domain", False)
+
+# Be careful with SOCIALACCOUNT_AUTO_SIGNUP: if True, it bypasses custom signup functions (the default is True).
+# SOCIALACCOUNT_AUTO_SIGNUP = True
+# SOCIALACCOUNT_ONLY = True
+# SOCIALACCOUNT_LOGIN_ON_GET = True
+# ACCOUNT_SIGNUP_FORM_CLASS = None
+# In case you want to verify the e-mail domain and set the username:
+# SOCIALACCOUNT_ADAPTER = 'web.allauth_adapters.MySocialAccountAdapter'
+# ACCOUNT_DEFAULT_HTTP_PROTOCOL = "https"
+
+#### AllAuth end
 
 if web_cfg.registration.get("disposable_email_disable", False):
     DISPOSABLE_DOMAIN_LIST = os.path.join(CUCKOO_PATH, web_cfg.registration.disposable_domain_list)
 
@@ -484,7 +396,7 @@
 
 # Max size
 MAX_UPLOAD_SIZE = web_cfg.general.max_sample_size
-
+# Google's OAuth might need: "strict-origin-when-cross-origin"
 SECURE_REFERRER_POLICY = "same-origin"  # "no-referrer-when-downgrade"
 
 # https://django-csp.readthedocs.io/en/latest/configuration.html
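Tying the allauth options above together, a hypothetical snippet of what enabling a single social provider with the custom adapter could look like; the provider choice, client ID, and secret are placeholders, and some providers pull in extra dependencies (check the django-allauth docs):

```python
# Hypothetical settings fragment, not part of this diff.
INSTALLED_APPS += ["allauth.socialaccount.providers.github"]

# Route social signups through the adapter from web/web/allauth_adapters.py,
# which derives the username from the account e-mail and, per the comments above,
# can be used to restrict the allowed e-mail domain.
SOCIALACCOUNT_ADAPTER = "web.allauth_adapters.MySocialAccountAdapter"

SOCIALACCOUNT_PROVIDERS = {
    "github": {
        "APP": {"client_id": "placeholder-id", "secret": "placeholder-secret"},
        "SCOPE": ["user:email"],
    },
}
```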