diff --git a/.dockerignore b/.dockerignore index dab4c870..9cc6705c 100644 --- a/.dockerignore +++ b/.dockerignore @@ -39,7 +39,7 @@ data/ logs/ *.log -# Documentation (not needed in container, except README.md for setup.py) +# Documentation (not needed in container) docs/ # Tests (not needed in production) diff --git a/.env.example b/.env.example index 2b44f202..1853148d 100644 --- a/.env.example +++ b/.env.example @@ -15,16 +15,11 @@ HOTKEY_NAME=default # Weights & Biases API key for logging # Signup https://wandb.ai/site for a key WANDB_API_KEY= -# for issue bounties api calls +# GitHub PAT for validator API calls GITTENSOR_VALIDATOR_PAT= # Optional custom name for wandb logging WANDB_VALIDATOR_NAME=vali -# ******* MINER VARIABLES ******* -# GitHub Personal Access Token -# https://github.com/settings/personal-access-tokens -GITTENSOR_MINER_PAT= - # validator database settings (for gittensor validator/dashboard) STORE_DB_RESULTS=false # DB_HOST= diff --git a/.github/PULL_REQUEST_TEMPLATE/weight_adjustment.md b/.github/PULL_REQUEST_TEMPLATE/weight_adjustment.md index 4dbb726f..58b25c91 100644 --- a/.github/PULL_REQUEST_TEMPLATE/weight_adjustment.md +++ b/.github/PULL_REQUEST_TEMPLATE/weight_adjustment.md @@ -2,28 +2,28 @@ ### Changes Summary -| Metric | Gold | Silver | Bronze | Total | -| -------------------- | ---- | ------ | ------ | ----- | -| Repositories Added | 0 | 0 | 0 | 0 | -| Repositories Removed | 0 | 0 | 0 | 0 | -| Weights Modified | 0 | 0 | 0 | 0 | -| Net Weight Change | 0 | 0 | 0 | 0 | +| Metric | Total | +| -------------------- | ----- | +| Repositories Added | 0 | +| Repositories Removed | 0 | +| Weights Modified | 0 | +| Net Weight Change | 0 | ### Added Repositories -| Repository | Tier | Branch | Weight | -| ---------- | ------ | ------ | ------ | -| owner/repo | silver | main | 20.00 | +| Repository | Branch | Weight | +| ---------- | ------ | ------ | +| owner/repo | main | 20.00 | ### Removed Repositories -| Repository | Tier | Reason | -| 
---------- | ------ | ------ | -| owner/repo | silver | — | +| Repository | Reason | +| ---------- | ------ | +| owner/repo | — | ### Justification @@ -49,4 +49,4 @@ Example: - [ ] Changes summary table is filled in accurately - [ ] Net weight changes are justified in the Justification section -- [ ] Added repositories have correct tier, branch, and initial weight +- [ ] Added repositories have correct branch and initial weight diff --git a/.github/workflows/tests.yml b/.github/workflows/tests.yml index 7a55e69b..f401a950 100644 --- a/.github/workflows/tests.yml +++ b/.github/workflows/tests.yml @@ -12,16 +12,14 @@ jobs: - name: Checkout code uses: actions/checkout@v4 + - name: Install uv + uses: astral-sh/setup-uv@v4 + - name: Set up Python - uses: actions/setup-python@v5 - with: - python-version: '3.12' - cache: 'pip' + run: uv python install 3.12 - name: Install dependencies - run: | - python -m pip install --upgrade pip - pip install -r requirements.txt + run: uv sync --extra dev - name: Run tests - run: pytest tests/ -v + run: uv run pytest tests/ -v diff --git a/.github/workflows/typecheck.yml b/.github/workflows/typecheck.yml new file mode 100644 index 00000000..975a30c2 --- /dev/null +++ b/.github/workflows/typecheck.yml @@ -0,0 +1,23 @@ +name: Type Check + +on: + pull_request: + branches: [main, test] + +jobs: + pyright: + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v4 + + - name: Install uv + uses: astral-sh/setup-uv@v4 + + - name: Set up Python + run: uv python install 3.12 + + - name: Install dependencies + run: uv sync --extra dev + + - name: Run pyright + run: uv run pyright diff --git a/.gitignore b/.gitignore index 7992e30f..c8df1b52 100644 --- a/.gitignore +++ b/.gitignore @@ -16,11 +16,8 @@ wandb *.log -# Merge predictions local DB -merge-prediction-data/ -gt-merge-preds.db -gt-merge-preds.db-wal -gt-merge-preds.db-shm +# Validator data directory (contains sensitive miner PATs) +data/ CLAUDE.md .claude/ @@ -30,3 +27,6 @@ CLAUDE.md 
target/ **/*.rs.bk *.lock +!uv.lock + +**/.venv/ \ No newline at end of file diff --git a/Dockerfile b/Dockerfile index 01cc5b90..5c06d06a 100644 --- a/Dockerfile +++ b/Dockerfile @@ -12,15 +12,13 @@ RUN pip install --break-system-packages uv WORKDIR /app -# Copy dependency files -COPY pyproject.toml uv.lock ./ +# Copy dependency files first (for Docker layer caching) +COPY pyproject.toml uv.lock README.md ./ -# Create venv and sync dependencies -ENV VENV_DIR=/opt/venv -ENV VIRTUAL_ENV=$VENV_DIR -ENV PATH="$VENV_DIR/bin:$PATH" -RUN uv venv --python python3 $VENV_DIR && uv sync +# Install dependencies only (no project install yet — source code not copied) +ENV PATH="/app/.venv/bin:$PATH" +RUN uv sync --no-install-project -# Copy application code and install +# Copy application code and install the project COPY . . -RUN uv pip install -e . +RUN uv sync diff --git a/README.md b/README.md index 67e52323..bfba94f3 100644 --- a/README.md +++ b/README.md @@ -34,20 +34,22 @@ The result: a sustainable incentive layer that channels resources toward buildin ## Miners -**Recommended: Deploy with Docker** - -> [!NOTE] -> The port in .env must be accessible by the public. +No miner neuron required — just register your GitHub PAT with validators using the CLI. 
```bash -# Quick start +# Install git clone https://github.com/entrius/gittensor.git cd gittensor -cp .env.example .env -# Edit .env with proper values -nano .env +uv sync + +# Set your GitHub PAT +export GITTENSOR_MINER_PAT=ghp_your_token_here + +# Broadcast PAT to validators +gitt miner post --wallet --hotkey -docker-compose -f docker-compose.miner.yml up -d +# Check which validators have your PAT stored +gitt miner check --wallet --hotkey ``` See full guide **[here](https://docs.gittensor.io/miner.html)** diff --git a/docker-compose.miner.yml b/docker-compose.miner.yml deleted file mode 100644 index e879726f..00000000 --- a/docker-compose.miner.yml +++ /dev/null @@ -1,15 +0,0 @@ -services: - miner: - image: entrius/gittensor:latest - container_name: gt-miner - restart: unless-stopped - entrypoint: /app/scripts/miner-entrypoint.sh - env_file: - - .env - ports: - - "${PORT}:${PORT}" - volumes: - # 'ro' = readonly - - ${WALLET_PATH}:/root/.bittensor/wallets:ro - labels: - - "com.centurylinklabs.watchtower.enable=true" diff --git a/docker-compose.vali.yml b/docker-compose.vali.yml index a663f87d..bc6d602e 100644 --- a/docker-compose.vali.yml +++ b/docker-compose.vali.yml @@ -12,7 +12,7 @@ services: volumes: # 'ro' = readonly - ${WALLET_PATH}:/root/.bittensor/wallets:ro - - ./merge-prediction-data:/app/data + - ./data:/app/data # optional: uncomment this if you are running validator database # networks: # - gittensor_network diff --git a/gittensor/classes.py b/gittensor/classes.py index 8790a1bd..31e092e4 100644 --- a/gittensor/classes.py +++ b/gittensor/classes.py @@ -8,9 +8,8 @@ import bittensor as bt -from gittensor.constants import MIN_TOKEN_SCORE_FOR_BASE_SCORE +from gittensor.constants import MERGED_PR_BASE_SCORE, MIN_TOKEN_SCORE_FOR_BASE_SCORE from gittensor.utils.utils import parse_repo_name -from gittensor.validator.oss_contributions.tier_config import Tier, TierConfig, TierStats GITHUB_DOMAIN = 'https://github.com/' @@ -73,7 +72,8 @@ def 
__post_init__(self): self.file_extension = self._calculate_file_extension() def _calculate_file_extension(self) -> str: - return self.filename.split('.')[-1].lower() if '.' in self.filename else '' + basename = self.filename.split('/')[-1] + return basename.split('.')[-1].lower() if '.' in basename else '' def is_test_file(self) -> bool: filename_lower = self.filename.lower() @@ -130,6 +130,18 @@ class Issue: state: Optional[str] = None # "OPEN" or "CLOSED" author_association: Optional[str] = None # e.g., "OWNER", "MEMBER", "COLLABORATOR", "CONTRIBUTOR", "NONE" + # Issue discovery fields + author_github_id: Optional[str] = None # Issue author's GitHub user ID (for miner matching) + is_transferred: bool = False + updated_at: Optional[datetime] = None + discovery_base_score: float = 0.0 + discovery_earned_score: float = 0.0 + discovery_review_quality_multiplier: float = 1.0 + discovery_repo_weight_multiplier: float = 1.0 + discovery_time_decay_multiplier: float = 1.0 + discovery_credibility_multiplier: float = 1.0 + discovery_open_issue_spam_multiplier: float = 1.0 + @dataclass class PullRequest: @@ -142,7 +154,7 @@ class PullRequest: repository_full_name: str uid: int hotkey: str - github_id: str + github_id: Optional[str] title: str author_login: str merged_at: Optional[datetime] # None for OPEN PRs @@ -150,7 +162,6 @@ class PullRequest: # PR state based fields pr_state: PRState - repository_tier_configuration: Optional[TierConfig] = None # assigned when scoring PR # Score fields repo_weight_multiplier: float = 1.0 @@ -163,8 +174,6 @@ class PullRequest: credibility_multiplier: float = 1.0 review_quality_multiplier: float = 1.0 # Penalty for CHANGES_REQUESTED reviews from maintainers changes_requested_count: int = 0 # Number of maintainer CHANGES_REQUESTED reviews - raw_credibility: float = 1.0 # Before applying ^k scalar - credibility_scalar: int = 1 # The k value from tier config earned_score: float = 0.0 collateral_score: float = 0.0 # For OPEN PRs: 
potential_score * collateral_percent @@ -195,14 +204,9 @@ def set_file_changes(self, file_changes: List[FileChange]) -> None: def is_pioneer_eligible(self) -> bool: """Check if this PR qualifies for pioneer consideration. - A PR is eligible if it is merged, has a tier configuration, - and meets the minimum token score quality gate. + A PR is eligible if it is merged and meets the minimum token score quality gate. """ - return ( - self.repository_tier_configuration is not None - and self.merged_at is not None - and self.token_score >= MIN_TOKEN_SCORE_FOR_BASE_SCORE - ) + return self.merged_at is not None and self.token_score >= MIN_TOKEN_SCORE_FOR_BASE_SCORE def calculate_final_earned_score(self) -> float: """Combine base score with all multipliers. Pioneer dividend is added separately after.""" @@ -217,13 +221,7 @@ def calculate_final_earned_score(self) -> float: self.earned_score = self.base_score * prod(multipliers.values()) - # Log all multipliers (credibility shows ^k format) - def _format_multiplier(k: str, v: float) -> str: - if k == 'cred': - return f'cred={self.raw_credibility:.2f}^{self.credibility_scalar}' - return f'{k}={v:.2f}' - - mult_str = ' × '.join(_format_multiplier(k, v) for k, v in multipliers.items()) + mult_str = ' × '.join(f'{k}={v:.2f}' for k, v in multipliers.items()) bt.logging.info( f'├─ {self.pr_state.value} PR #{self.number} ({self.repository_full_name}) → {self.earned_score:.2f}' ) @@ -232,7 +230,7 @@ def _format_multiplier(k: str, v: float) -> str: return self.earned_score @classmethod - def from_graphql_response(cls, pr_data: dict, uid: int, hotkey: str, github_id: str) -> 'PullRequest': + def from_graphql_response(cls, pr_data: dict, uid: int, hotkey: str, github_id: Optional[str]) -> 'PullRequest': """Create PullRequest from GraphQL API response for any PR state.""" from gittensor.validator.utils.datetime_utils import parse_github_timestamp_to_cst @@ -246,6 +244,8 @@ def from_graphql_response(cls, pr_data: dict, uid: int, hotkey: 
str, github_id: for issue in raw_issues: if is_merged and not (issue.get('closedAt') and issue.get('state') == 'CLOSED'): continue + issue_author = issue.get('author') or {} + author_db_id = issue_author.get('databaseId') issues.append( Issue( number=issue['number'], @@ -254,16 +254,17 @@ def from_graphql_response(cls, pr_data: dict, uid: int, hotkey: str, github_id: title=issue['title'], created_at=parse_github_timestamp_to_cst(issue['createdAt']) if issue.get('createdAt') else None, closed_at=parse_github_timestamp_to_cst(issue['closedAt']) if issue.get('closedAt') else None, - author_login=issue.get('author', {}).get('login') if issue.get('author') else None, + author_login=issue_author.get('login'), state=issue.get('state'), author_association=issue.get('authorAssociation'), + author_github_id=str(author_db_id) if author_db_id else None, + updated_at=parse_github_timestamp_to_cst(issue['updatedAt']) if issue.get('updatedAt') else None, ) ) description: str = pr_data.get('bodyText', '') - last_edited_at = ( - parse_github_timestamp_to_cst(pr_data.get('lastEditedAt')) if pr_data.get('lastEditedAt') else None - ) + raw_edited_at = pr_data.get('lastEditedAt') + last_edited_at = parse_github_timestamp_to_cst(raw_edited_at) if isinstance(raw_edited_at, str) else None merged_at = parse_github_timestamp_to_cst(pr_data['mergedAt']) if is_merged else None return cls( @@ -300,7 +301,6 @@ class MinerEvaluation: total_collateral_score: float = 0.0 # Collateral from open PRs total_nodes_scored: int = 0 # Total AST nodes scored across all PRs unique_repos_count: int = 0 - qualified_unique_repos_count: int = 0 # Repos meeting min token score threshold # Overall token scoring breakdown (aggregated across all PRs) total_token_score: float = 0.0 @@ -315,10 +315,19 @@ class MinerEvaluation: closed_pull_requests: List[PullRequest] = field(default_factory=list) unique_repos_contributed_to: Set[str] = field(default_factory=set) - # Tier level details (None = no tier unlocked yet) - 
current_tier: Optional[Tier] = None - credibility_by_tier: Dict[Tier, float] = field(default_factory=dict) - stats_by_tier: Dict[Tier, TierStats] = field(default_factory=lambda: {tier: TierStats() for tier in Tier}) + # Eligibility and credibility + is_eligible: bool = False + credibility: float = 0.0 + + # Issue discovery scoring + issue_discovery_score: float = 0.0 + issue_token_score: float = 0.0 # sum of solving PR token_scores for scored issues + issue_credibility: float = 0.0 + is_issue_eligible: bool = False + total_solved_issues: int = 0 + total_valid_solved_issues: int = 0 # solved issues where solving PR has token_score >= 5 + total_closed_issues: int = 0 + total_open_issues: int = 0 @property def total_prs(self) -> int: @@ -479,6 +488,14 @@ def __add__(self, other: 'ScoreBreakdown') -> 'ScoreBreakdown': ) +class ScoringCategory(Enum): + """Category of a scored file""" + + SOURCE = 'source' # Non-test code files scored via tree-diff + TEST = 'test' # Test files (any scoring method) + NON_CODE = 'non_code' # Everything else (line-count, skipped, binary, etc.) 
+ + @dataclass class FileScoreResult: """Result of scoring a single file.""" @@ -491,6 +508,14 @@ class FileScoreResult: scoring_method: str # 'tree-diff', 'line-count', 'skipped-*' breakdown: Optional[ScoreBreakdown] = None # Only populated for tree-diff scoring + @property + def category(self) -> ScoringCategory: + if self.is_test_file: + return ScoringCategory.TEST + if self.scoring_method == 'tree-diff': + return ScoringCategory.SOURCE + return ScoringCategory.NON_CODE + @dataclass class PrScoringResult: @@ -501,9 +526,111 @@ class PrScoringResult: total_score: float total_nodes_scored: int # Total AST nodes scored across all files + total_lines: int # Total lines changed across all files file_results: List[FileScoreResult] score_breakdown: Optional[ScoreBreakdown] = None # Aggregated breakdown across all files + @property + def density(self) -> float: + """Code density (total_score / total_lines), capped at MAX_CODE_DENSITY_MULTIPLIER""" + from gittensor.constants import MAX_CODE_DENSITY_MULTIPLIER + + if self.total_lines <= 0: + return 0.0 + return min(self.total_score / self.total_lines, MAX_CODE_DENSITY_MULTIPLIER) + + +_EMPTY_SCORING_RESULT = PrScoringResult( + total_score=0.0, + total_nodes_scored=0, + total_lines=0, + file_results=[], +) + + +@dataclass +class PrScoringResultCategorized: + """Scoring results split by category, with aggregate totals""" + + total_score: float + total_nodes_scored: int + score_breakdown: Optional[ScoreBreakdown] + by_category: Dict[ScoringCategory, PrScoringResult] + + @property + def file_results(self) -> List[FileScoreResult]: + results = [] + for pr_result in self.by_category.values(): + results.extend(pr_result.file_results) + return results + + def get(self, category: ScoringCategory) -> PrScoringResult: + """Get results for a specific category, returns empty result if none""" + return self.by_category.get(category, _EMPTY_SCORING_RESULT) + + def calculate_initial_base_score(self) -> float: + """Sum of per-category 
density-scaled base scores, 0 if below threshold""" + token_score = self.score_breakdown.total_score if self.score_breakdown else 0.0 + if token_score < MIN_TOKEN_SCORE_FOR_BASE_SCORE: + return 0.0 + return sum(MERGED_PR_BASE_SCORE * self.get(cat).density for cat in ScoringCategory) + + def calculate_contribution_bonus(self) -> float: + """Contribution bonus from SOURCE category score only""" + from gittensor.constants import CONTRIBUTION_SCORE_FOR_FULL_BONUS, MAX_CONTRIBUTION_BONUS + + source_score = self.get(ScoringCategory.SOURCE).total_score + bonus_percent = min(1.0, source_score / CONTRIBUTION_SCORE_FOR_FULL_BONUS) + return round(bonus_percent * MAX_CONTRIBUTION_BONUS, 2) + + @classmethod + def from_file_results( + cls, + file_results: List[FileScoreResult], + ) -> 'PrScoringResultCategorized': + """Build a categorized result from file results in a single pass""" + # Per-category accumulators + cat_files: Dict[ScoringCategory, List[FileScoreResult]] = {} + cat_score: Dict[ScoringCategory, float] = {} + cat_nodes: Dict[ScoringCategory, int] = {} + cat_lines: Dict[ScoringCategory, int] = {} + cat_breakdowns: Dict[ScoringCategory, List[ScoreBreakdown]] = {} + + # Totals + total_score = 0.0 + total_nodes = 0 + all_breakdowns: List[ScoreBreakdown] = [] + + for f in file_results: + cat = f.category + cat_files.setdefault(cat, []).append(f) + cat_score[cat] = cat_score.get(cat, 0.0) + f.score + cat_nodes[cat] = cat_nodes.get(cat, 0) + f.nodes_scored + cat_lines[cat] = cat_lines.get(cat, 0) + f.total_lines + total_score += f.score + total_nodes += f.nodes_scored + if f.breakdown is not None: + cat_breakdowns.setdefault(cat, []).append(f.breakdown) + all_breakdowns.append(f.breakdown) + + by_category: Dict[ScoringCategory, PrScoringResult] = {} + for cat, results in cat_files.items(): + bd = cat_breakdowns.get(cat) + by_category[cat] = PrScoringResult( + total_score=cat_score[cat], + total_nodes_scored=cat_nodes[cat], + total_lines=cat_lines[cat], + 
file_results=results, + score_breakdown=sum(bd, start=ScoreBreakdown()) if bd else None, + ) + + return cls( + total_score=total_score, + total_nodes_scored=total_nodes, + score_breakdown=sum(all_breakdowns, start=ScoreBreakdown()) if all_breakdowns else None, + by_category=by_category, + ) + @dataclass class CachedEvaluation: diff --git a/gittensor/cli/issue_commands/__init__.py b/gittensor/cli/issue_commands/__init__.py index 1b10388b..a95c3cbc 100644 --- a/gittensor/cli/issue_commands/__init__.py +++ b/gittensor/cli/issue_commands/__init__.py @@ -8,7 +8,6 @@ gitt issues (alias: i) - Issue management commands list List issues or view a specific issue submissions List open PR submissions for an issue - predict Predict merge probabilities for PR submissions register Register a new issue bounty bounty-pool View total bounty pool pending-harvest View pending emissions @@ -25,6 +24,7 @@ import click from .admin import admin +from .help import StyledGroup # Re-export helpers from .helpers import ( @@ -41,31 +41,19 @@ issue_harvest, issue_register, ) -from .predict import issues_predict from .submissions import issues_submissions from .view import admin_info, issues_bounty_pool, issues_list, issues_pending_harvest from .vote import vote -@click.group(name='issues') +@click.group(name='issues', cls=StyledGroup) def issues_group(): - """Issue management commands. 
- - \b - Commands: - list List issues or view a specific issue - submissions List open PR submissions for an issue - predict Predict merge probabilities for PR submissions - register Register a new issue bounty - bounty-pool View total bounty pool - pending-harvest View pending emissions - """ + """Manage issue bounties, submissions, and predictions.""" pass issues_group.add_command(issues_list, name='list') issues_group.add_command(issues_submissions, name='submissions') -issues_group.add_command(issues_predict, name='predict') issues_group.add_command(issue_register, name='register') issues_group.add_command(issues_bounty_pool, name='bounty-pool') issues_group.add_command(issues_pending_harvest, name='pending-harvest') @@ -97,7 +85,6 @@ def register_commands(cli): 'vote', 'admin', 'issues_submissions', - 'issues_predict', 'issue_register', 'issue_harvest', # Helpers diff --git a/gittensor/cli/issue_commands/admin.py b/gittensor/cli/issue_commands/admin.py index b08f418d..983b8260 100644 --- a/gittensor/cli/issue_commands/admin.py +++ b/gittensor/cli/issue_commands/admin.py @@ -16,6 +16,7 @@ import click from rich.panel import Panel +from .help import StyledGroup from .helpers import ( console, format_alpha, @@ -26,75 +27,37 @@ resolve_network, validate_issue_id, validate_ss58_address, + with_network_contract_options, + with_wallet_options, ) -@click.group(name='admin') +@click.group(name='admin', cls=StyledGroup) def admin(): """Owner-only administrative commands. These commands require the contract owner wallet. 
- - \b - Commands: - info View contract configuration - cancel-issue Cancel an issue - payout-issue Manual payout fallback - set-owner Transfer ownership - set-treasury Change treasury hotkey - add-vali Add a validator to the whitelist - remove-vali Remove a validator from the whitelist """ pass @admin.command('cancel-issue') @click.argument('issue_id', type=int) -@click.option( - '--network', - '-n', - default=None, - type=click.Choice(['finney', 'test', 'local'], case_sensitive=False), - help='Network (finney/test/local)', -) -@click.option( - '--rpc-url', - default=None, - help='Subtensor RPC endpoint (overrides --network)', -) -@click.option( - '--contract', - default='', - help='Contract address (uses config if empty)', -) -@click.option( - '--wallet-name', - '--wallet.name', - '--wallet', - default='default', - help='Wallet name', -) -@click.option( - '--wallet-hotkey', - '--wallet.hotkey', - '--hotkey', - default='default', - help='Hotkey name', -) +@with_wallet_options() +@with_network_contract_options('Contract address (uses config if empty)') def admin_cancel(issue_id: int, network: str, rpc_url: str, contract: str, wallet_name: str, wallet_hotkey: str): """Cancel an issue (owner only). - Immediately cancels an issue without requiring validator consensus. - Bounty funds are returned to the alpha pool. + [dim]Immediately cancels an issue without validator consensus. 
Bounty funds are returned to the alpha pool.[/dim] - \b - Arguments: + [dim]Arguments: ISSUE_ID: On-chain issue ID to cancel + [/dim] - \b - Examples: - gitt admin cancel-issue 1 - gitt a cancel-issue 5 --network test + [dim]Examples: + $ gitt admin cancel-issue 1 + $ gitt a cancel-issue 5 --network test + [/dim] """ contract_addr = get_contract_address(contract) ws_endpoint, network_name = resolve_network(network, rpc_url) @@ -154,51 +117,22 @@ def admin_cancel(issue_id: int, network: str, rpc_url: str, contract: str, walle @admin.command('payout-issue') @click.argument('issue_id', type=int) -@click.option( - '--network', - '-n', - default=None, - type=click.Choice(['finney', 'test', 'local'], case_sensitive=False), - help='Network (finney/test/local)', -) -@click.option( - '--rpc-url', - default=None, - help='Subtensor RPC endpoint (overrides --network)', -) -@click.option( - '--contract', - default='', - help='Contract address (uses config if empty)', -) -@click.option( - '--wallet-name', - '--wallet.name', - '--wallet', - default='default', - help='Wallet name', -) -@click.option( - '--wallet-hotkey', - '--wallet.hotkey', - '--hotkey', - default='default', - help='Hotkey name', -) +@with_wallet_options() +@with_network_contract_options('Contract address (uses config if empty)') def admin_payout(issue_id: int, network: str, rpc_url: str, contract: str, wallet_name: str, wallet_hotkey: str): """Manual payout fallback (owner only). - Pays out a completed issue bounty to the solver. The solver address - is determined by validator consensus and stored in the contract. + [dim]Pays out a completed issue bounty to the solver. 
+ The solver address is determined by validator consensus and stored in the contract.[/dim] - \b - Arguments: + [dim]Arguments: ISSUE_ID: On-chain ID of a completed issue + [/dim] - \b - Examples: - gitt admin payout-issue 1 - gitt a payout-issue 3 --network test + [dim]Examples: + $ gitt admin payout-issue 1 + $ gitt a payout-issue 3 --network test + [/dim] """ contract_addr = get_contract_address(contract) ws_endpoint, network_name = resolve_network(network, rpc_url) @@ -258,47 +192,18 @@ def admin_payout(issue_id: int, network: str, rpc_url: str, contract: str, walle @admin.command('set-owner') @click.argument('new_owner', type=str) -@click.option( - '--network', - '-n', - default=None, - type=click.Choice(['finney', 'test', 'local'], case_sensitive=False), - help='Network (finney/test/local)', -) -@click.option( - '--rpc-url', - default=None, - help='Subtensor RPC endpoint (overrides --network)', -) -@click.option( - '--contract', - default='', - help='Contract address', -) -@click.option( - '--wallet-name', - '--wallet.name', - '--wallet', - default='default', - help='Wallet name', -) -@click.option( - '--wallet-hotkey', - '--wallet.hotkey', - '--hotkey', - default='default', - help='Hotkey name', -) +@with_wallet_options() +@with_network_contract_options('Contract address') def admin_set_owner(new_owner: str, network: str, rpc_url: str, contract: str, wallet_name: str, wallet_hotkey: str): """Transfer contract ownership (owner only). - \b - Arguments: + [dim]Arguments: NEW_OWNER: SS58 address of the new owner + [/dim] - \b - Examples: - gitt admin set-owner 5Hxxx... + [dim]Examples: + $ gitt admin set-owner 5Hxxx... 
+ [/dim] """ contract_addr = get_contract_address(contract) ws_endpoint, network_name = resolve_network(network, rpc_url) @@ -349,53 +254,23 @@ def admin_set_owner(new_owner: str, network: str, rpc_url: str, contract: str, w @admin.command('set-treasury') @click.argument('new_treasury', type=str) -@click.option( - '--network', - '-n', - default=None, - type=click.Choice(['finney', 'test', 'local'], case_sensitive=False), - help='Network (finney/test/local)', -) -@click.option( - '--rpc-url', - default=None, - help='Subtensor RPC endpoint (overrides --network)', -) -@click.option( - '--contract', - default='', - help='Contract address', -) -@click.option( - '--wallet-name', - '--wallet.name', - '--wallet', - default='default', - help='Wallet name', -) -@click.option( - '--wallet-hotkey', - '--wallet.hotkey', - '--hotkey', - default='default', - help='Hotkey name', -) +@with_wallet_options() +@with_network_contract_options('Contract address') def admin_set_treasury( new_treasury: str, network: str, rpc_url: str, contract: str, wallet_name: str, wallet_hotkey: str ): """Change treasury hotkey (owner only). - The treasury hotkey receives staking emissions that fund bounty payouts. - Changing the treasury resets all Active/Registered issue bounty amounts - to 0 (they will be re-funded on next harvest from the new treasury). + [dim]The treasury hotkey receives staking emissions that fund bounty payouts. Changing the treasury resets all + Active/Registered issue bounty amounts to 0 (they will be re-funded on the next harvest from the new treasury).[/dim] - \b - Arguments: + [dim]Arguments: NEW_TREASURY: SS58 address of the new treasury hotkey + [/dim] - \b - Examples: - gitt admin set-treasury 5Hxxx... + [dim]Examples: + $ gitt admin set-treasury 5Hxxx... 
+ [/dim] """ contract_addr = get_contract_address(contract) ws_endpoint, network_name = resolve_network(network, rpc_url) @@ -449,51 +324,21 @@ def admin_set_treasury( @admin.command('add-vali') @click.argument('hotkey', type=str) -@click.option( - '--network', - '-n', - default=None, - type=click.Choice(['finney', 'test', 'local'], case_sensitive=False), - help='Network (finney/test/local)', -) -@click.option( - '--rpc-url', - default=None, - help='Subtensor RPC endpoint (overrides --network)', -) -@click.option( - '--contract', - default='', - help='Contract address', -) -@click.option( - '--wallet-name', - '--wallet.name', - '--wallet', - default='default', - help='Wallet name', -) -@click.option( - '--wallet-hotkey', - '--wallet.hotkey', - '--hotkey', - default='default', - help='Hotkey name', -) +@with_wallet_options() +@with_network_contract_options('Contract address') def admin_add_validator(hotkey: str, network: str, rpc_url: str, contract: str, wallet_name: str, wallet_hotkey: str): """Add a validator to the voting whitelist (owner only). - Whitelisted validators can vote on solutions and issue cancellations. - The consensus threshold adjusts automatically: simple majority after - 3 validators are added. + [dim]Whitelisted validators can vote on solutions and issue cancellations. + The consensus threshold adjusts automatically to a simple majority after 3 validators are added.[/dim] - \b - Arguments: + [dim]Arguments: HOTKEY: SS58 address of the validator hotkey to whitelist + [/dim] - \b - Examples: - gitt admin add-vali 5Hxxx... + [dim]Examples: + $ gitt admin add-vali 5Hxxx... 
+ [/dim] """ contract_addr = get_contract_address(contract) ws_endpoint, network_name = resolve_network(network, rpc_url) @@ -547,51 +392,22 @@ def admin_add_validator(hotkey: str, network: str, rpc_url: str, contract: str, @admin.command('remove-vali') @click.argument('hotkey', type=str) -@click.option( - '--network', - '-n', - default=None, - type=click.Choice(['finney', 'test', 'local'], case_sensitive=False), - help='Network (finney/test/local)', -) -@click.option( - '--rpc-url', - default=None, - help='Subtensor RPC endpoint (overrides --network)', -) -@click.option( - '--contract', - default='', - help='Contract address', -) -@click.option( - '--wallet-name', - '--wallet.name', - '--wallet', - default='default', - help='Wallet name', -) -@click.option( - '--wallet-hotkey', - '--wallet.hotkey', - '--hotkey', - default='default', - help='Hotkey name', -) +@with_wallet_options() +@with_network_contract_options('Contract address') def admin_remove_validator( hotkey: str, network: str, rpc_url: str, contract: str, wallet_name: str, wallet_hotkey: str ): """Remove a validator from the voting whitelist (owner only). - The consensus threshold adjusts automatically after removal. + [dim]The consensus threshold adjusts automatically after removal.[/dim] - \b - Arguments: + [dim]Arguments: HOTKEY: SS58 address of the validator hotkey to remove + [/dim] - \b - Examples: - gitt admin remove-vali 5Hxxx... + [dim]Examples: + $ gitt admin remove-vali 5Hxxx... 
+ [/dim] """ contract_addr = get_contract_address(contract) ws_endpoint, network_name = resolve_network(network, rpc_url) diff --git a/gittensor/cli/issue_commands/helpers.py b/gittensor/cli/issue_commands/helpers.py index 18608aa5..33cb42d4 100644 --- a/gittensor/cli/issue_commands/helpers.py +++ b/gittensor/cli/issue_commands/helpers.py @@ -16,12 +16,11 @@ from contextlib import nullcontext from decimal import Decimal, InvalidOperation from pathlib import Path -from typing import Any, ContextManager, Dict, List, Optional, Tuple +from typing import Any, Callable, ContextManager, Dict, List, Optional, Tuple, TypeVar import click from rich.console import Console from rich.panel import Panel -from substrateinterface import SubstrateInterface from gittensor.cli.issue_commands.tables import build_pr_table from gittensor.constants import CONTRACT_ADDRESS @@ -51,6 +50,110 @@ console = Console() +CommandFunc = TypeVar('CommandFunc', bound=Callable[..., Any]) +NETWORK_CHOICE = click.Choice(['finney', 'test', 'local'], case_sensitive=False) + + +def apply_click_options(*decorators: Callable[[CommandFunc], CommandFunc]) -> Callable[[CommandFunc], CommandFunc]: + """Apply Click decorators in the declared display order.""" + + def wrapper(func: CommandFunc) -> CommandFunc: + for decorator in reversed(decorators): + func = decorator(func) + return func + + return wrapper + + +def with_wallet_options( + wallet_default: str = 'default', hotkey_default: str = 'default' +) -> Callable[[CommandFunc], CommandFunc]: + """Add the standard wallet name/hotkey options.""" + return apply_click_options( + click.option( + '--wallet-name', + '--wallet.name', + '--wallet', + default=wallet_default, + help='Wallet name', + ), + click.option( + '--wallet-hotkey', + '--wallet.hotkey', + '--hotkey', + default=hotkey_default, + help='Hotkey name', + ), + ) + + +def with_network_contract_options( + contract_help: str, +) -> Callable[[CommandFunc], CommandFunc]: + """Add the standard network / rpc / 
contract option bundle.""" + return apply_click_options( + click.option( + '--network', + '-n', + default=None, + type=NETWORK_CHOICE, + help='Network (finney/test/local)', + ), + click.option( + '--rpc-url', + default=None, + help='Subtensor RPC endpoint (overrides --network)', + ), + click.option( + '--contract', + default='', + help=contract_help, + ), + ) + + +def with_cli_behavior_options( + *, + include_verbose: bool = False, + include_json: bool = False, + include_yes: bool = False, + verbose_help: str = 'Show debug output', + json_help: str = 'Output as JSON for scripting', + yes_help: str = 'Skip confirmation prompt (non-interactive/CI)', +) -> Callable[[CommandFunc], CommandFunc]: + """Add common CLI behavior options such as verbose, JSON, and confirmation controls.""" + decorators: list[Callable[[CommandFunc], CommandFunc]] = [] + + if include_verbose: + decorators.append( + click.option( + '--verbose', + '-v', + is_flag=True, + help=verbose_help, + ) + ) + if include_json: + decorators.append( + click.option( + '--json', + 'as_json', + is_flag=True, + help=json_help, + ) + ) + if include_yes: + decorators.append( + click.option( + '--yes', + '-y', + is_flag=True, + help=yes_help, + ) + ) + + return apply_click_options(*decorators) + def format_alpha(raw_amount: int, decimals: int = 2) -> str: """Format raw token amount (9-decimal) as human-readable ALPHA string. 
@@ -157,7 +260,7 @@ def fetch_open_issue_pull_requests( repository_full_name: str, issue_number: int, as_json: bool, -) -> List[Dict[str, Any]]: +) -> list: """Fetch open PR submissions for a GitHub issue.""" token = get_github_pat() or '' if not token and not as_json: @@ -167,7 +270,7 @@ def fetch_open_issue_pull_requests( from gittensor.utils.github_api_tools import find_prs_for_issue with loading_context('Fetching open pull request submissions from GitHub...', as_json): - prs: List[Dict[str, Any]] = find_prs_for_issue( + prs = find_prs_for_issue( repository_full_name, issue_number, token=token or None, @@ -195,6 +298,8 @@ def print_issue_submission_table( def resolve_netuid_from_contract(ws_endpoint: str, contract_addr: str) -> Optional[int]: """Read the subnet netuid stored in the on-chain contract.""" + # Keep this import local so CLI help can render without optional chain deps installed. + from substrateinterface import SubstrateInterface substrate = SubstrateInterface(url=ws_endpoint) packed = _read_contract_packed_storage(substrate, contract_addr) @@ -216,7 +321,7 @@ def verify_miner_registration(ws_endpoint: str, contract_addr: str, hotkey_ss58: return bool(subtensor.is_hotkey_registered(netuid=netuid, hotkey_ss58=hotkey_ss58)) except TypeError: # API compatibility fallback across bittensor versions. 
- return bool(subtensor.is_hotkey_registered(netuid, hotkey_ss58)) + return bool(subtensor.is_hotkey_registered(hotkey_ss58, netuid)) # --------------------------------------------------------------------------- @@ -256,7 +361,7 @@ def validate_bounty_amount(bounty: str) -> int: ) sign, digits, exponent = d.as_tuple() - decimal_places = max(0, -exponent) + decimal_places = max(0, -int(exponent)) if decimal_places > ALPHA_DECIMALS: raise click.BadParameter( f'Maximum {ALPHA_DECIMALS} decimal places allowed (got {decimal_places})', @@ -788,7 +893,7 @@ def read_issues_from_contract(ws_endpoint: str, contract_addr: str, verbose: boo except ImportError as e: console.print(f'[yellow]Cannot read from contract: {e}[/yellow]') - console.print('[dim]Install with: pip install substrate-interface[/dim]') + console.print('[dim]Install with: uv sync[/dim]') return [] except Exception as e: if verbose: diff --git a/gittensor/cli/issue_commands/mutations.py b/gittensor/cli/issue_commands/mutations.py index 205f32fc..f1e16977 100644 --- a/gittensor/cli/issue_commands/mutations.py +++ b/gittensor/cli/issue_commands/mutations.py @@ -14,6 +14,7 @@ import click from rich.panel import Panel +from .help import StyledCommand from .helpers import ( MAX_ISSUE_NUMBER, _is_interactive, @@ -31,11 +32,11 @@ ) -@click.command('register') +@click.command('register', cls=StyledCommand) @click.option( '--repo', required=True, - help='Repository in owner/repo format (e.g., opentensor/btcli)', + help='Repository in owner/repo format (e.g., latent-to/btcli)', ) @click.option( '--issue', @@ -101,21 +102,20 @@ def issue_register( """ Register a new issue with a bounty (OWNER ONLY). - This command registers a GitHub issue on the smart contract - with a target bounty amount. Only the contract owner can - register new issues. + [dim]This command registers a GitHub issue on the smart contract with a target bounty amount. 
+ Only the contract owner can register new issues.[/dim] - \b - Arguments: + [dim]Arguments: --repo: Repository in owner/repo format --issue: GitHub issue number --bounty: Target bounty amount in ALPHA + [/dim] - \b - Examples: - gitt issues register --repo opentensor/btcli --issue 144 --bounty 100 - gitt i reg --repo tensorflow/tensorflow --issue 12345 --bounty 50 - gitt i reg --repo owner/repo --issue 1 --bounty 10 -y + [dim]Examples: + $ gitt issues register --repo latent-to/btcli --issue 144 --bounty 100 + $ gitt i reg --repo tensorflow/tensorflow --issue 12345 --bounty 50 + $ gitt i reg --repo owner/repo --issue 1 --bounty 10 -y + [/dim] """ console.print('\n[bold cyan]Register Issue for Bounty[/bold cyan]\n') @@ -209,7 +209,7 @@ def issue_register( console.print('[dim]Submitting transaction...[/dim]') result = contract_instance.exec( - keypair, + keypair, # type: ignore[arg-type] 'register_issue', args={ 'github_url': github_url, @@ -243,7 +243,7 @@ def issue_register( except ImportError as e: print_error(f'Missing dependency - {e}') - console.print('[dim]Install with: pip install substrate-interface bittensor[/dim]') + console.print('[dim]Install with: uv sync[/dim]') except Exception as e: error_msg = str(e) if 'ContractReverted' in error_msg: @@ -256,7 +256,7 @@ def issue_register( print_error(f'Error registering issue: {e}') -@click.command('harvest') +@click.command('harvest', cls=StyledCommand) @click.option( '--wallet-name', '--wallet.name', @@ -293,14 +293,14 @@ def issue_harvest(wallet_name: str, wallet_hotkey: str, network: str, rpc_url: s """ Manually trigger emission harvest from contract treasury. - This command is permissionless - any wallet can trigger it. - The contract handles emission collection and distribution internally. + [dim]This command is permissionless - any wallet can trigger it. 
+ The contract handles emission collection and distribution internally.[/dim] - \b - Examples: - gitt harvest - gitt harvest --verbose - gitt harvest --wallet-name mywallet --wallet-hotkey mykey + [dim]Examples: + $ gitt harvest + $ gitt harvest --verbose + $ gitt harvest --wallet-name mywallet --wallet-hotkey mykey + [/dim] """ console.print('\n[bold cyan]Manual Emission Harvest[/bold cyan]\n') @@ -385,7 +385,7 @@ def issue_harvest(wallet_name: str, wallet_hotkey: str, network: str, rpc_url: s except ImportError as e: print_error(f'Missing dependency — {e}') - console.print('[dim]Install with: pip install bittensor substrate-interface[/dim]') + console.print('[dim]Install with: uv sync[/dim]') except Exception as e: import traceback diff --git a/gittensor/cli/issue_commands/predict.py b/gittensor/cli/issue_commands/predict.py deleted file mode 100644 index 0e9c8f83..00000000 --- a/gittensor/cli/issue_commands/predict.py +++ /dev/null @@ -1,481 +0,0 @@ -# The MIT License (MIT) -# Copyright © 2025 Entrius - -"""Predict command (`gitt issues predict`).""" - -import json as json_mod - -import click - -from gittensor.miner.broadcast import broadcast_predictions - -from .help import StyledCommand -from .helpers import ( - _is_interactive, - confirm_panel, - console, - emit_json, - fetch_issue_from_contract, - fetch_open_issue_pull_requests, - get_contract_address, - handle_exception, - load_config, - loading_context, - print_error, - print_issue_submission_table, - print_network_header, - print_success, - print_warning, - resolve_netuid_from_contract, - resolve_network, - success_panel, - validate_issue_id, - verify_miner_registration, -) - - -@click.command('predict', cls=StyledCommand) -@click.option( - '--id', - 'issue_id', - required=True, - type=int, - help='On-chain issue ID', -) -@click.option('--pr', 'pr_number', default=None, type=int, help='PR number to predict (use with --probability)') -@click.option('--probability', default=None, type=float, 
help='Probability for --pr in [0.0, 1.0]') -@click.option('--json-input', default=None, type=str, help='Batch predictions JSON: {"101": 0.85, "103": 0.10}') -@click.option('--yes', '-y', is_flag=True, help='Skip confirmation prompt') -@click.option( - '--wallet-name', - '--wallet.name', - '--wallet', - default='default', - help='Wallet name', -) -@click.option( - '--wallet-hotkey', - '--wallet.hotkey', - '--hotkey', - default='default', - help='Hotkey name', -) -@click.option( - '--network', - '-n', - default=None, - type=click.Choice(['finney', 'test', 'local'], case_sensitive=False), - help='Network (finney/test/local)', -) -@click.option( - '--rpc-url', - default=None, - help='Subtensor RPC endpoint (overrides --network)', -) -@click.option( - '--contract', - default='', - help='Contract address (uses default if empty)', -) -@click.option('--verbose', '-v', is_flag=True, help='Show debug output') -@click.option('--json', 'as_json', is_flag=True, help='Output as JSON for scripting') -def issues_predict( - issue_id: int, - pr_number: int | None, - probability: float | None, - json_input: str | None, - yes: bool, - wallet_name: str, - wallet_hotkey: str, - network: str | None, - rpc_url: str | None, - contract: str, - verbose: bool, - as_json: bool, -): - """Submit miner predictions for PRs on a bountied issue. - - [dim]This command validates active issue state, miner registration, - and probability bounds (each in [0.0, 1.0], total <= 1.0).[/dim] - - [dim]Input modes: - 1. `--pr --probability <0.0-1.0>` for a single prediction - 2. `--json-input '{"101": 0.85, "103": 0.10}'` for batch predictions - 3. Interactive prompt (default when neither `--pr` nor `--json-input` is provided)[/dim] - - [dim]Notes: - - `--yes/-y` skips confirmation prompts. 
- - `--pr/--probability` and `--json-input` are mutually exclusive.[/dim] - - [dim]Examples: - $ gitt i predict --id 42 --pr 101 --probability 0.85 -y - $ gitt i predict --id 42 --json-input '{"101": 0.5, "103": 0.3}' -y - $ gitt i predict --id 42 - $ gitt i predict --id 42 --pr 101 --probability 0.7 -y --json - [/dim] - """ - # 1) Validate on-chain issue ID. - try: - validate_issue_id(issue_id, 'id') - except click.BadParameter as e: - handle_exception(as_json, str(e), 'bad_parameter') - - # 2) Validate prediction mode and parse JSON batch input (if provided). - try: - parsed_json_predictions = _prevalidate_prediction_inputs(pr_number, probability, json_input) - except (click.BadParameter, click.ClickException) as e: - handle_exception(as_json, str(e)) - - # 3) Determine execution mode from validated inputs. - is_batch_mode = parsed_json_predictions is not None - is_single_pr_mode = pr_number is not None - is_interactive_mode = not is_batch_mode and not is_single_pr_mode - - if is_interactive_mode and as_json: - handle_exception(as_json, '--json mode requires --pr/--probability or --json-input.') - - # 4) Resolve network/contract context. - contract_addr = get_contract_address(contract) - _require_contract_address(contract_addr, as_json) - ws_endpoint, network_name = resolve_network(network, rpc_url) - effective_wallet, effective_hotkey = _resolve_wallet_identity(wallet_name, wallet_hotkey) - - netuid = resolve_netuid_from_contract(ws_endpoint, contract_addr) - if netuid is None: - handle_exception(as_json, 'Could not resolve netuid from contract.') - - if not as_json: - print_network_header(network_name, contract_addr) - console.print(f'Wallet: {effective_wallet}/{effective_hotkey}\n') - - # 5) Resolve issue + fetch eligible open PR submissions. 
- repo_full_name, issue_number = _resolve_issue_context( - ws_endpoint=ws_endpoint, - contract_addr=contract_addr, - issue_id=issue_id, - verbose=verbose, - as_json=as_json, - ) - - pull_requests = fetch_open_issue_pull_requests( - repository_full_name=repo_full_name, - issue_number=issue_number, - as_json=as_json, - ) - - if not pull_requests: - handle_exception(as_json, 'No open pull request submissions found for this issue') - - # 6) Show submissions table only for interactive mode. - if is_interactive_mode: - print_issue_submission_table( - repository_full_name=repo_full_name, - issue_number=issue_number, - pull_requests=pull_requests, - trailing_newline=True, - ) - - skip_continue_prompt = yes or not _is_interactive() - if not skip_continue_prompt and not click.confirm('Ready to start prediction?', default=True): - print_warning('Prediction cancelled') - return - - # 7) Collect predictions by mode; validate PR membership for non-interactive modes. - try: - if is_interactive_mode: - predictions = _collect_predictions_interactive(pull_requests) - else: - predictions = {pr_number: float(probability)} if is_single_pr_mode else parsed_json_predictions - _validate_predictions_against_open_prs(predictions, pull_requests) - except (click.ClickException, click.BadParameter) as e: - handle_exception(as_json, str(e)) - - payload = { - 'issue_id': issue_id, - 'repository': repo_full_name, - 'predictions': dict(predictions), - 'github_access_token': '***', - } - - # 8) Confirmation prompt (interactive only). - if not as_json and is_interactive_mode: - lines = format_prediction_lines(predictions) - confirm_panel(lines, title='Prediction Confirmation') - skip_confirm = yes or not _is_interactive() - if not skip_confirm and not click.confirm('Proceed?', default=True): - print_warning('Prediction cancelled') - return - - # 9) Verify miner registration before broadcasting. 
- _resolve_registered_miner_hotkey( - wallet_name=effective_wallet, - wallet_hotkey=effective_hotkey, - ws_endpoint=ws_endpoint, - contract_addr=contract_addr, - as_json=as_json, - ) - - # 10) Show payload and broadcast to validators. - if as_json: - emit_json(payload, pretty=True) - - if not as_json: - success_panel(json_mod.dumps(payload, indent=2), title='Prediction Synapse') - - with loading_context('Broadcasting predictions to validators...', as_json): - results = broadcast_predictions( - payload=payload, - wallet_name=effective_wallet, - wallet_hotkey=effective_hotkey, - ws_endpoint=ws_endpoint, - netuid=netuid, - ) - - if as_json: - emit_json(results, pretty=True) - else: - _print_broadcast_results(results) - - -def validate_probability(value: float, param_hint: str = 'probability') -> float: - """Validate probability is in the inclusive [0.0, 1.0] range.""" - if not (0.0 <= value <= 1.0): - raise click.BadParameter( - f'Probability must be between 0.0 and 1.0 (got {value})', - param_hint=param_hint, - ) - return value - - -def _validate_prediction_mode( - pr_number: int | None, - probability: float | None, - json_input: str | None, -) -> tuple[bool, bool, bool]: - """Validate mutually exclusive prediction input modes.""" - has_pr = pr_number is not None - has_probability = probability is not None - has_json_input = json_input is not None - - if has_json_input and (has_pr or has_probability): - raise click.ClickException('Use either --pr/--probability or --json-input, not both.') - if not has_pr and has_probability: - raise click.ClickException('--probability requires --pr.') - if has_pr and not has_probability: - raise click.ClickException('--probability is required when --pr is set.') - - return has_pr, has_probability, has_json_input - - -def _require_contract_address(contract_addr: str, as_json: bool) -> None: - """Require a configured contract address before network work.""" - if not contract_addr: - if as_json: - handle_exception(as_json, 'Contract 
address not configured') - print_error('Contract address not configured') - raise SystemExit(1) - - -def _resolve_wallet_identity(wallet_name: str, wallet_hotkey: str) -> tuple[str, str]: - """Resolve effective wallet/hotkey names from CLI args and config defaults.""" - config = load_config() - effective_wallet = wallet_name if wallet_name != 'default' else config.get('wallet', wallet_name) - effective_hotkey = wallet_hotkey if wallet_hotkey != 'default' else config.get('hotkey', wallet_hotkey) - return effective_wallet, effective_hotkey - - -def _resolve_issue_context( - ws_endpoint: str, - contract_addr: str, - issue_id: int, - verbose: bool, - as_json: bool, -) -> tuple[str, int]: - """Load and validate on-chain issue context for prediction.""" - try: - with loading_context('Reading issues from contract...', as_json): - issue = fetch_issue_from_contract(ws_endpoint, contract_addr, issue_id, verbose=verbose) - except click.ClickException as e: - handle_exception(as_json, str(e)) - - repo_full_name = str(issue.get('repository_full_name', '')) - issue_number = int(issue.get('issue_number', 0)) - return repo_full_name, issue_number - - -def _resolve_registered_miner_hotkey( - wallet_name: str, - wallet_hotkey: str, - ws_endpoint: str, - contract_addr: str, - as_json: bool, -) -> str: - """Load wallet hotkey and ensure it is registered on the contract subnet.""" - try: - import bittensor as bt - - with loading_context('Validating miner identity and registration...', as_json): - wallet = bt.Wallet(name=wallet_name, hotkey=wallet_hotkey) - miner_hotkey = wallet.hotkey.ss58_address - is_registered = verify_miner_registration(ws_endpoint, contract_addr, miner_hotkey) - except Exception as e: - handle_exception(as_json, f'Failed to validate wallet/miner registration: {e}') - - if not is_registered: - handle_exception(as_json, f'Wallet hotkey is not registered miner on subnet: {miner_hotkey}') - return miner_hotkey - - -def _parse_json_predictions(json_input: str) -> 
dict[int, float]: - """Parse and validate JSON batch predictions payload.""" - try: - raw = json_mod.loads(json_input) - except json_mod.JSONDecodeError as e: - raise click.BadParameter(f'Invalid JSON: {e}', param_hint='--json-input') - - if not isinstance(raw, dict): - raise click.BadParameter( - 'JSON input must be an object: {"pr_number": probability, ...}', - param_hint='--json-input', - ) - - parsed_predictions: dict[int, float] = {} - for key, value in raw.items(): - try: - pr_num = int(key) - except (TypeError, ValueError): - raise click.BadParameter(f'Invalid PR number in JSON: {key}', param_hint='--json-input') - try: - parsed_predictions[pr_num] = validate_probability(float(value), '--json-input') - except (TypeError, ValueError): - raise click.BadParameter( - f'Invalid probability value for PR #{key} in JSON: {value}', - param_hint='--json-input', - ) - - if len(parsed_predictions) == 0: - raise click.BadParameter( - 'JSON input must include at least one PR prediction.', - param_hint='--json-input', - ) - return parsed_predictions - - -def format_prediction_lines(predictions: dict[int, float]) -> str: - """Format sorted prediction lines with running total.""" - lines = [f'PR #{pr_num}: {prob:.4f}' for pr_num, prob in sorted(predictions.items())] - lines.append(f'Total: {sum(predictions.values()):.4f}') - return '\n'.join(lines) - - -def _print_broadcast_results(results: dict[str, object]) -> None: - """Print broadcast results in human-readable format.""" - if results.get('error'): - print_error(str(results['error'])) - return - if results.get('success'): - print_success(f'Prediction accepted by {results["accepted"]}/{results["total_validators"]} validator(s)') - else: - print_error( - f'Prediction rejected or unreachable: {results["rejected"]}/{results["total_validators"]} validator(s)' - ) - - for r in results.get('results', []): - status = 'accepted' if r['accepted'] else 'rejected' - reason = f' ({r["rejection_reason"]})' if r.get('rejection_reason') 
else '' - console.print(f' {r["validator"]}... {status}{reason}') - - -def _collect_predictions_interactive(prs: list[dict]) -> dict[int, float]: - """Prompt for per-PR probabilities in interactive mode.""" - predictions: dict[int, float] = {} - running_total = 0.0 - - for pr in prs: - number = pr.get('number') - if not isinstance(number, int): - continue - - while True: - raw = click.prompt( - f'Probability for PR #{number} (0.0-1.0, blank to skip)', - default='', - show_default=False, - ).strip() - if raw == '': - break - - try: - value = validate_probability(float(raw), f'PR #{number}') - except ValueError: - print_error(f'Invalid number: {raw}') - continue - except click.BadParameter as e: - print_error(str(e)) - continue - - proposed_total = running_total + value - if proposed_total > 1.0: - print_error( - f'Total probability cannot exceed 1.0 (current {running_total:.4f}, proposed {proposed_total:.4f})' - ) - continue - - predictions[number] = value - running_total = proposed_total - if running_total >= 0.99: - console.print(f'[yellow]Running total: {running_total:.4f} (approaching 1.0)[/yellow]') - else: - console.print(f'[dim]Running total: {running_total:.4f}[/dim]') - break - - if not predictions: - raise click.ClickException('No predictions entered.') - - return predictions - - -def _validate_predictions_against_open_prs( - predictions: dict[int, float], - prs: list[dict], - param_hint: str = 'predictions', -) -> None: - """Validate PR IDs exist in open PRs for this issue and total is <= 1.0.""" - valid_pr_numbers = {int(p.get('number')) for p in prs if isinstance(p.get('number'), int)} - for number in predictions: - if number not in valid_pr_numbers: - available = sorted(valid_pr_numbers) - raise click.BadParameter( - f'PR #{number} is not an open PR for this issue. 
Open PRs: {available}', - param_hint=param_hint, - ) - _validate_prediction_total(predictions, param_hint) - - -def _validate_prediction_total(predictions: dict[int, float], param_hint: str) -> None: - """Validate that prediction probability total does not exceed 1.0.""" - total = sum(predictions.values()) - if total > 1.0: - raise click.BadParameter( - f'Sum of probabilities must be <= 1.0 (got {total:.4f})', - param_hint=param_hint, - ) - - -def _prevalidate_prediction_inputs( - pr_number: int | None, - probability: float | None, - json_input: str | None, -) -> dict[int, float] | None: - """Validate CLI prediction inputs before any network I/O.""" - _, has_probability, has_json_input = _validate_prediction_mode(pr_number, probability, json_input) - - if has_probability: - validate_probability(float(probability), '--probability') - - if not has_json_input: - return None - - parsed_predictions = _parse_json_predictions(str(json_input)) - _validate_prediction_total(parsed_predictions, '--json-input') - return parsed_predictions diff --git a/gittensor/cli/issue_commands/submissions.py b/gittensor/cli/issue_commands/submissions.py index 2671f834..71c6d071 100644 --- a/gittensor/cli/issue_commands/submissions.py +++ b/gittensor/cli/issue_commands/submissions.py @@ -3,6 +3,8 @@ """Issue submissions command (`gitt issues submissions`).""" +from __future__ import annotations + import click from .help import StyledCommand @@ -18,6 +20,8 @@ print_warning, resolve_network, validate_issue_id, + with_cli_behavior_options, + with_network_contract_options, ) @@ -29,25 +33,8 @@ type=int, help='On-chain issue ID', ) -@click.option( - '--network', - '-n', - default=None, - type=click.Choice(['finney', 'test', 'local'], case_sensitive=False), - help='Network (finney/test/local)', -) -@click.option( - '--rpc-url', - default=None, - help='Subtensor RPC endpoint (overrides --network)', -) -@click.option( - '--contract', - default='', - help='Contract address (uses default if empty)', 
-) -@click.option('--verbose', '-v', is_flag=True, help='Show debug output') -@click.option('--json', 'as_json', is_flag=True, help='Output as JSON for scripting') +@with_cli_behavior_options(include_verbose=True, include_json=True) +@with_network_contract_options('Contract address (uses default if empty)') def issues_submissions( issue_id: int, network: str | None, diff --git a/gittensor/cli/issue_commands/view.py b/gittensor/cli/issue_commands/view.py index a4ac442c..d09a2e6f 100644 --- a/gittensor/cli/issue_commands/view.py +++ b/gittensor/cli/issue_commands/view.py @@ -18,21 +18,25 @@ from rich.panel import Panel from rich.table import Table +from .help import StyledCommand from .helpers import ( _read_contract_packed_storage, _read_issues_from_child_storage, colorize_status, console, + emit_error_json, format_alpha, get_contract_address, print_error, print_network_header, read_issues_from_contract, resolve_network, + with_cli_behavior_options, + with_network_contract_options, ) -@click.command('list') +@click.command('list', cls=StyledCommand) @click.option( '--id', 'issue_id', @@ -40,34 +44,21 @@ type=int, help='View a specific issue by ID', ) -@click.option( - '--network', - '-n', - default=None, - type=click.Choice(['finney', 'test', 'local'], case_sensitive=False), - help='Network (finney/test/local)', -) -@click.option( - '--rpc-url', - default=None, - help='Subtensor RPC endpoint (overrides --network)', -) -@click.option( - '--contract', - default='', - help='Contract address (uses default if empty)', +@with_cli_behavior_options( + include_verbose=True, + include_json=True, + verbose_help='Show debug output for contract reads', ) -@click.option('--verbose', '-v', is_flag=True, help='Show debug output for contract reads') -@click.option('--json', 'as_json', is_flag=True, help='Output as JSON for scripting') +@with_network_contract_options('Contract address (uses default if empty)') def issues_list(issue_id: int, network: str, rpc_url: str, contract: str, 
verbose: bool, as_json: bool): """List issues or view a specific issue. - \b - Examples: - gitt issues list - gitt i list --network test - gitt i list --id 1 - gitt i list --json + [dim]Examples: + $ gitt issues list + $ gitt i list --network test + $ gitt i list --id 1 + $ gitt i list --json + [/dim] """ contract_addr = get_contract_address(contract) ws_endpoint, network_name = resolve_network(network, rpc_url) @@ -88,6 +79,9 @@ def issues_list(issue_id: int, network: str, rpc_url: str, contract: str, verbos issue['target_alpha'] = format_alpha(issue.get('target_bounty', 0), 4) if issue_id is not None: issue = next((i for i in issues if i['id'] == issue_id), None) + if issue is None: + emit_error_json(f'Issue {issue_id} not found on-chain.', error_type='not_found') + raise SystemExit(1) console.print(json_mod.dumps(issue, indent=2, default=str)) else: console.print(json_mod.dumps(issues, indent=2, default=str)) @@ -183,33 +177,16 @@ def issues_list(issue_id: int, network: str, rpc_url: str, contract: str, verbos console.print('[dim]Register an issue: gitt issues register --repo owner/repo --issue 1 --bounty 100[/dim]') -@click.command('bounty-pool') -@click.option( - '--network', - '-n', - default=None, - type=click.Choice(['finney', 'test', 'local'], case_sensitive=False), - help='Network (finney/test/local)', -) -@click.option( - '--rpc-url', - default=None, - help='Subtensor RPC endpoint (overrides --network)', -) -@click.option( - '--contract', - default='', - help='Contract address (uses config if empty)', -) -@click.option('--verbose', '-v', is_flag=True, help='Show debug output') -@click.option('--json', 'as_json', is_flag=True, help='Output as JSON for scripting') +@click.command('bounty-pool', cls=StyledCommand) +@with_cli_behavior_options(include_verbose=True, include_json=True) +@with_network_contract_options('Contract address (uses config if empty)') def issues_bounty_pool(network: str, rpc_url: str, contract: str, verbose: bool, as_json: bool): 
"""View total bounty pool (sum of all issue bounty amounts). - \b - Examples: - gitt issues bounty-pool - gitt i bounty-pool --json + [dim]Examples: + $ gitt issues bounty-pool + $ gitt i bounty-pool --json + [/dim] """ contract_addr = get_contract_address(contract) ws_endpoint, network_name = resolve_network(network, rpc_url) @@ -250,33 +227,16 @@ def issues_bounty_pool(network: str, rpc_url: str, contract: str, verbose: bool, print_error(str(e)) -@click.command('pending-harvest') -@click.option( - '--network', - '-n', - default=None, - type=click.Choice(['finney', 'test', 'local'], case_sensitive=False), - help='Network (finney/test/local)', -) -@click.option( - '--rpc-url', - default=None, - help='Subtensor RPC endpoint (overrides --network)', -) -@click.option( - '--contract', - default='', - help='Contract address (uses config if empty)', -) -@click.option('--verbose', '-v', is_flag=True, help='Show debug output') -@click.option('--json', 'as_json', is_flag=True, help='Output as JSON for scripting') +@click.command('pending-harvest', cls=StyledCommand) +@with_cli_behavior_options(include_verbose=True, include_json=True) +@with_network_contract_options('Contract address (uses config if empty)') def issues_pending_harvest(network: str, rpc_url: str, contract: str, verbose: bool, as_json: bool): """View pending harvest (treasury stake minus allocated bounties). 
- \b - Examples: - gitt issues pending-harvest - gitt i pending-harvest --json + [dim]Examples: + $ gitt issues pending-harvest + $ gitt i pending-harvest --json + [/dim] """ contract_addr = get_contract_address(contract) ws_endpoint, network_name = resolve_network(network, rpc_url) @@ -334,33 +294,16 @@ def issues_pending_harvest(network: str, rpc_url: str, contract: str, verbose: b print_error(str(e)) -@click.command('info') -@click.option( - '--network', - '-n', - default=None, - type=click.Choice(['finney', 'test', 'local'], case_sensitive=False), - help='Network (finney/test/local)', -) -@click.option( - '--rpc-url', - default=None, - help='Subtensor RPC endpoint (overrides --network)', -) -@click.option( - '--contract', - default='', - help='Contract address (uses config if empty)', -) -@click.option('--verbose', '-v', is_flag=True, help='Show debug output') -@click.option('--json', 'as_json', is_flag=True, help='Output as JSON for scripting') +@click.command('info', cls=StyledCommand) +@with_cli_behavior_options(include_verbose=True, include_json=True) +@with_network_contract_options('Contract address (uses config if empty)') def admin_info(network: str, rpc_url: str, contract: str, verbose: bool, as_json: bool): """View contract configuration. 
- \b - Examples: - gitt admin info - gitt a info --json + [dim]Examples: + $ gitt admin info + $ gitt a info --json + [/dim] """ contract_addr = get_contract_address(contract) ws_endpoint, network_name = resolve_network(network, rpc_url) diff --git a/gittensor/cli/issue_commands/vote.py b/gittensor/cli/issue_commands/vote.py index 87af762b..4c0c78e2 100644 --- a/gittensor/cli/issue_commands/vote.py +++ b/gittensor/cli/issue_commands/vote.py @@ -17,6 +17,7 @@ from rich.panel import Panel from rich.table import Table +from .help import StyledGroup from .helpers import ( console, get_contract_address, @@ -26,6 +27,9 @@ resolve_network, validate_issue_id, validate_ss58_address, + with_cli_behavior_options, + with_network_contract_options, + with_wallet_options, ) @@ -56,17 +60,11 @@ def parse_pr_number(pr_input: str) -> int: raise ValueError(f'Cannot parse PR number from: {pr_input}') -@click.group(name='vote') +@click.group(name='vote', cls=StyledGroup) def vote(): """Validator consensus operations. These commands are used by validators to manage issue bounty payouts. 
- - \b - Commands: - solution Vote for a solver on an active issue - cancel Vote to cancel an issue - list List whitelisted validators """ pass @@ -76,37 +74,8 @@ def vote(): @click.argument('solver_hotkey', type=str) @click.argument('solver_coldkey', type=str) @click.argument('pr_number_or_url', type=str) -@click.option( - '--wallet-name', - '--wallet.name', - '--wallet', - default='default', - help='Wallet name', -) -@click.option( - '--wallet-hotkey', - '--wallet.hotkey', - '--hotkey', - default='default', - help='Hotkey name', -) -@click.option( - '--network', - '-n', - default=None, - type=click.Choice(['finney', 'test', 'local'], case_sensitive=False), - help='Network (finney/test/local)', -) -@click.option( - '--rpc-url', - default=None, - help='Subtensor RPC endpoint (overrides --network)', -) -@click.option( - '--contract', - default='', - help='Contract address (uses config if empty)', -) +@with_wallet_options() +@with_network_contract_options('Contract address (uses config if empty)') def val_vote_solution( issue_id: int, solver_hotkey: str, @@ -120,17 +89,17 @@ def val_vote_solution( ): """Vote for a solution on an active issue (triggers auto-payout on consensus). - \b - Arguments: + [dim]Arguments: ISSUE_ID: On-chain issue ID to vote on SOLVER_HOTKEY: SS58 address of the solver's hotkey SOLVER_COLDKEY: SS58 address of the solver's coldkey (payout destination) PR_NUMBER_OR_URL: PR number or full GitHub PR URL + [/dim] - \b - Examples: - gitt vote solution 1 5Hxxx... 5Hyyy... 123 - gitt vote solution 1 5Hxxx... 5Hyyy... https://github.com/.../pull/123 + [dim]Examples: + $ gitt vote solution 1 5Hxxx... 5Hyyy... 123 + $ gitt vote solution 1 5Hxxx... 5Hyyy... 
https://github.com/.../pull/123 + [/dim] """ contract_addr = get_contract_address(contract) ws_endpoint, network_name = resolve_network(network, rpc_url) @@ -195,37 +164,8 @@ def val_vote_solution( @vote.command('cancel') @click.argument('issue_id', type=int) @click.argument('reason', type=str) -@click.option( - '--wallet-name', - '--wallet.name', - '--wallet', - default='default', - help='Wallet name', -) -@click.option( - '--wallet-hotkey', - '--wallet.hotkey', - '--hotkey', - default='default', - help='Hotkey name', -) -@click.option( - '--network', - '-n', - default=None, - type=click.Choice(['finney', 'test', 'local'], case_sensitive=False), - help='Network (finney/test/local)', -) -@click.option( - '--rpc-url', - default=None, - help='Subtensor RPC endpoint (overrides --network)', -) -@click.option( - '--contract', - default='', - help='Contract address (uses config if empty)', -) +@with_wallet_options() +@with_network_contract_options('Contract address (uses config if empty)') def val_vote_cancel_issue( issue_id: int, reason: str, @@ -237,15 +177,15 @@ def val_vote_cancel_issue( ): """Vote to cancel an issue (works on Registered or Active). 
- \b - Arguments: + [dim]Arguments: ISSUE_ID: On-chain issue ID to cancel REASON: Reason for cancellation + [/dim] - \b - Examples: - gitt vote cancel 1 "External solution found" - gitt vote cancel 42 "Issue invalid" + [dim]Examples: + $ gitt vote cancel 1 "External solution found" + $ gitt vote cancel 42 "Issue invalid" + [/dim] """ contract_addr = get_contract_address(contract) ws_endpoint, network_name = resolve_network(network, rpc_url) @@ -295,32 +235,16 @@ def val_vote_cancel_issue( @vote.command('list') -@click.option( - '--network', - '-n', - default=None, - type=click.Choice(['finney', 'test', 'local'], case_sensitive=False), - help='Network (finney/test/local)', -) -@click.option( - '--rpc-url', - default=None, - help='Subtensor RPC endpoint (overrides --network)', -) -@click.option( - '--contract', - default='', - help='Contract address (uses config if empty)', -) -@click.option('--json', 'as_json', is_flag=True, help='Output as JSON for scripting') +@with_cli_behavior_options(include_json=True) +@with_network_contract_options('Contract address (uses config if empty)') def vote_list_validators(network: str, rpc_url: str, contract: str, as_json: bool): """List whitelisted validators and consensus threshold. 
- \b - Examples: - gitt vote list - gitt vote list --network test - gitt vote list --json + [dim]Examples: + $ gitt vote list + $ gitt vote list --network test + $ gitt vote list --json + [/dim] """ contract_addr = get_contract_address(contract) ws_endpoint, network_name = resolve_network(network, rpc_url) diff --git a/gittensor/cli/main.py b/gittensor/cli/main.py index 931b9c50..0b38d7ed 100644 --- a/gittensor/cli/main.py +++ b/gittensor/cli/main.py @@ -20,6 +20,7 @@ from rich.table import Table from gittensor.cli.issue_commands import register_commands +from gittensor.cli.issue_commands.help import StyledAliasGroup, StyledGroup console = Console() @@ -28,64 +29,17 @@ CONFIG_FILE = GITTENSOR_DIR / 'config.json' -class AliasGroup(click.Group): - """Click Group that supports command aliases without duplicate help entries.""" - - def __init__(self, *args, **kwargs): - super().__init__(*args, **kwargs) - self._aliases = {} # alias -> canonical name - - def add_alias(self, name, alias): - """Register an alias for an existing command.""" - self._aliases[alias] = name - - def get_command(self, ctx, cmd_name): - # Resolve alias to canonical name - canonical = self._aliases.get(cmd_name, cmd_name) - return super().get_command(ctx, canonical) - - def format_commands(self, ctx, formatter): - """Write the help text, appending aliases to command descriptions.""" - # Build reverse map: canonical -> list of aliases - alias_map = {} - for alias, canonical in self._aliases.items(): - alias_map.setdefault(canonical, []).append(alias) - - commands = [] - for subcommand in self.list_commands(ctx): - cmd = self.commands.get(subcommand) - if cmd is None or cmd.hidden: - continue - help_text = cmd.get_short_help_str(limit=150) - aliases = alias_map.get(subcommand) - if aliases: - alias_str = ', '.join(sorted(aliases)) - subcommand = f'{subcommand}, {alias_str}' - commands.append((subcommand, help_text)) - - if commands: - with formatter.section('Commands'): - 
formatter.write_dl(commands) - - -@click.group(cls=AliasGroup) +@click.group(cls=StyledAliasGroup) @click.version_option(version='3.2.0', prog_name='gittensor') def cli(): """Gittensor CLI - Manage issue bounties and validator operations""" pass -@click.group(name='config', invoke_without_command=True) +@click.group(name='config', cls=StyledGroup, invoke_without_command=True) @click.pass_context def config_group(ctx): - """CLI configuration management. - - Show current configuration (default) or set config values. - - \b - Subcommands: - set Set a config value - """ + """Show current configuration (default) or set configuration values.""" # If no subcommand, show config if ctx.invoked_subcommand is None: show_config() @@ -129,19 +83,21 @@ def show_config(): def config_set(key: str, value: str): """Set a configuration value. - \b - Common keys: + [dim]Use this command to override values stored in `~/.gittensor/config.json`.[/dim] + + [dim]Common keys: wallet Wallet name hotkey Hotkey name contract_address Contract address ws_endpoint WebSocket endpoint network Network (local, test, finney) + [/dim] - \b - Examples: - gitt config set wallet alice - gitt config set contract_address 5Cxxx... - gitt config set network local + [dim]Examples: + $ gitt config set wallet alice + $ gitt config set contract_address 5Cxxx... 
+ $ gitt config set network local + [/dim] """ # Ensure config directory exists GITTENSOR_DIR.mkdir(parents=True, exist_ok=True) @@ -170,6 +126,11 @@ def config_set(key: str, value: str): # Register config group cli.add_command(config_group) +# Register miner commands +from gittensor.cli.miner_commands import register_miner_commands # noqa: E402 + +register_miner_commands(cli) + # Register issue commands with new flat structure register_commands(cli) diff --git a/gittensor/cli/miner_commands/__init__.py b/gittensor/cli/miner_commands/__init__.py new file mode 100644 index 00000000..6d57eb9e --- /dev/null +++ b/gittensor/cli/miner_commands/__init__.py @@ -0,0 +1,37 @@ +# Entrius 2025 + +""" +CLI commands for miner PAT management. + +Command structure: + gitt miner (alias: m) - Miner management commands + post Broadcast GitHub PAT to validators + check Check how many validators have your PAT +""" + +import click + +from .check import miner_check +from .post import miner_post + + +@click.group(name='miner') +def miner_group(): + """Miner management commands. 
+ + \b + Commands: + post Broadcast your GitHub PAT to validators + check Check how many validators have your PAT stored + """ + pass + + +miner_group.add_command(miner_post, name='post') +miner_group.add_command(miner_check, name='check') + + +def register_miner_commands(cli): + """Register miner commands with the root CLI group.""" + cli.add_command(miner_group, name='miner') + cli.add_alias('miner', 'm') diff --git a/gittensor/cli/miner_commands/check.py b/gittensor/cli/miner_commands/check.py new file mode 100644 index 00000000..9e6993a6 --- /dev/null +++ b/gittensor/cli/miner_commands/check.py @@ -0,0 +1,168 @@ +# Entrius 2025 + +"""gitt miner check — Check how many validators have your PAT stored.""" + +import asyncio +import json +import sys + +import click +from rich.console import Console +from rich.table import Table + +from .post import NETUID_DEFAULT, _load_config_value, _resolve_endpoint + +console = Console() + + +@click.command() +@click.option('--wallet', 'wallet_name', default=None, help='Bittensor wallet name.') +@click.option('--hotkey', 'wallet_hotkey', default=None, help='Bittensor hotkey name.') +@click.option('--netuid', type=int, default=NETUID_DEFAULT, help='Subnet UID.', show_default=True) +@click.option('--network', default=None, help='Network name (local, test, finney).') +@click.option('--rpc-url', default=None, help='Subtensor RPC endpoint URL (overrides --network).') +@click.option('--json-output', 'json_mode', is_flag=True, default=False, help='Output results as JSON.') +def miner_check(wallet_name, wallet_hotkey, netuid, network, rpc_url, json_mode): + """Check how many validators have your PAT stored. + + Sends a lightweight probe to each validator — no PAT is transmitted. + + \b + Examples: + gitt miner check --wallet alice --hotkey default + gitt miner check --wallet alice --hotkey default --network test + """ + import bittensor as bt + + from gittensor.synapses import PatCheckSynapse + + # 1. 
Resolve wallet and network + wallet_name = wallet_name or _load_config_value('wallet') or 'default' + wallet_hotkey = wallet_hotkey or _load_config_value('hotkey') or 'default' + ws_endpoint = _resolve_endpoint(network, rpc_url) + + if not json_mode: + console.print(f'[dim]Wallet: {wallet_name}/{wallet_hotkey} | Network: {ws_endpoint} | Netuid: {netuid}[/dim]') + + # 2. Set up bittensor objects + if not json_mode: + with console.status('[bold]Connecting to network...'): + try: + wallet = bt.Wallet(name=wallet_name, hotkey=wallet_hotkey) + subtensor = bt.Subtensor(network=ws_endpoint) + metagraph = subtensor.metagraph(netuid=netuid) + dendrite = bt.Dendrite(wallet=wallet) + except Exception as e: + _error(f'Failed to initialize bittensor: {e}', json_mode) + sys.exit(1) + else: + try: + wallet = bt.Wallet(name=wallet_name, hotkey=wallet_hotkey) + subtensor = bt.Subtensor(network=ws_endpoint) + metagraph = subtensor.metagraph(netuid=netuid) + dendrite = bt.Dendrite(wallet=wallet) + except Exception as e: + _error(f'Failed to initialize bittensor: {e}', json_mode) + sys.exit(1) + + # Verify miner is registered + if wallet.hotkey.ss58_address not in metagraph.hotkeys: + _error(f'Hotkey {wallet.hotkey.ss58_address[:16]}... is not registered on subnet {netuid}.', json_mode) + sys.exit(1) + + # 3. Find active validator axons (vtrust > 0.1 = actively participating in consensus) + validator_axons = [] + validator_uids = [] + for uid in range(metagraph.n): + if metagraph.validator_trust[uid] > 0.1 and metagraph.axons[uid].is_serving: + validator_axons.append(metagraph.axons[uid]) + validator_uids.append(uid) + + if not validator_axons: + _error('No reachable validator axons found on the network.', json_mode) + sys.exit(1) + + # 4. 
Send check probes + synapse = PatCheckSynapse() + + if not json_mode: + with console.status(f'[bold]Checking {len(validator_axons)} validators...'): + responses = asyncio.get_event_loop().run_until_complete( + dendrite( + axons=validator_axons, + synapse=synapse, + deserialize=False, + timeout=15.0, + ) + ) + else: + responses = asyncio.get_event_loop().run_until_complete( + dendrite( + axons=validator_axons, + synapse=synapse, + deserialize=False, + timeout=15.0, + ) + ) + + # 5. Collect results + results = [] + for uid, axon, resp in zip(validator_uids, validator_axons, responses): + has_pat = getattr(resp, 'has_pat', None) + pat_valid = getattr(resp, 'pat_valid', None) + reason = getattr(resp, 'rejection_reason', None) + results.append( + { + 'uid': uid, + 'hotkey': axon.hotkey[:16] + '...', + 'has_pat': has_pat, + 'pat_valid': pat_valid, + 'rejection_reason': reason, + } + ) + + valid_count = sum(1 for r in results if r['pat_valid'] is True) + no_response_count = sum(1 for r in results if r['has_pat'] is None) + + # 6. 
Display results + if json_mode: + click.echo( + json.dumps( + { + 'total_validators': len(results), + 'valid': valid_count, + 'invalid': len(results) - valid_count - no_response_count, + 'no_response': no_response_count, + 'results': results, + }, + indent=2, + ) + ) + else: + table = Table(title='PAT Check Results') + table.add_column('UID', style='cyan', justify='right') + table.add_column('Validator', style='dim') + table.add_column('Status', justify='center') + table.add_column('Reason', style='dim') + + for r in results: + if r['pat_valid'] is True: + status = '[green]✓ valid[/green]' + elif r['has_pat'] is False: + status = '[red]✗ no PAT[/red]' + elif r['pat_valid'] is False: + status = '[red]✗ invalid[/red]' + else: + status = '[yellow]— no response[/yellow]' + table.add_row(str(r['uid']), r['hotkey'], status, r.get('rejection_reason') or '') + + console.print(table) + console.print(f'\n[bold]{valid_count}/{len(results)} validators have a valid PAT stored.[/bold]') + + +def _error(msg: str, json_mode: bool): + """Print an error message in the appropriate format.""" + if json_mode: + click.echo(json.dumps({'success': False, 'error': msg})) + else: + console.print(f'[red]Error: {msg}[/red]') diff --git a/gittensor/cli/miner_commands/post.py b/gittensor/cli/miner_commands/post.py new file mode 100644 index 00000000..9d0a0eb4 --- /dev/null +++ b/gittensor/cli/miner_commands/post.py @@ -0,0 +1,272 @@ +# Entrius 2025 + +"""gitt miner post — Broadcast GitHub PAT to validators.""" + +from __future__ import annotations + +import asyncio +import json +import os +import sys + +import click +import requests +from rich.console import Console +from rich.table import Table + +from gittensor.constants import BASE_GITHUB_API_URL + +console = Console() + +# Shared CLI options for wallet/network configuration +NETUID_DEFAULT = 2 + + +@click.command() +@click.option('--wallet', 'wallet_name', default=None, help='Bittensor wallet name.') +@click.option('--hotkey', 
'wallet_hotkey', default=None, help='Bittensor hotkey name.') +@click.option('--netuid', type=int, default=NETUID_DEFAULT, help='Subnet UID.', show_default=True) +@click.option('--network', default=None, help='Network name (local, test, finney).') +@click.option('--rpc-url', default=None, help='Subtensor RPC endpoint URL (overrides --network).') +@click.option( + '--pat', + default=None, + help='GitHub Personal Access Token. If not provided, falls back to GITTENSOR_MINER_PAT env var or interactive prompt.', +) +@click.option('--json-output', 'json_mode', is_flag=True, default=False, help='Output results as JSON.') +def miner_post(wallet_name, wallet_hotkey, netuid, network, rpc_url, pat, json_mode): + """Broadcast your GitHub PAT to all validators on the network. + + Validators will validate your PAT (test GitHub API access, check account age), + then store it locally for use during scoring rounds. + + \b + PAT resolution order: + 1. --pat flag + 2. GITTENSOR_MINER_PAT environment variable + 3. Interactive prompt (non-JSON mode only) + + \b + Examples: + gitt miner post --wallet alice --hotkey default --pat ghp_xxxx + gitt miner post --wallet alice --hotkey default + gitt miner post --wallet alice --hotkey default --network test + """ + import bittensor as bt + + from gittensor.synapses import PatBroadcastSynapse + + # 1. Load and validate PAT locally (flag > env var > interactive prompt) + pat = pat or os.environ.get('GITTENSOR_MINER_PAT') + if not pat: + if json_mode: + _error('--pat flag or GITTENSOR_MINER_PAT environment variable is required for JSON mode.', json_mode) + sys.exit(1) + pat = click.prompt('Enter your GitHub Personal Access Token', hide_input=True) + + # 1b. Validate PAT locally + if not json_mode: + with console.status('[bold]Validating PAT...'): + pat_valid = _validate_pat_locally(pat) + else: + pat_valid = _validate_pat_locally(pat) + + if not pat_valid: + _error('GitHub PAT is invalid or expired. 
Check your GITTENSOR_MINER_PAT.', json_mode) + sys.exit(1) + + if not json_mode: + console.print('[green]PAT is valid.[/green]') + + # 2. Resolve wallet and network + wallet_name = wallet_name or _load_config_value('wallet') or 'default' + wallet_hotkey = wallet_hotkey or _load_config_value('hotkey') or 'default' + ws_endpoint = _resolve_endpoint(network, rpc_url) + + if not json_mode: + console.print(f'[dim]Wallet: {wallet_name}/{wallet_hotkey} | Network: {ws_endpoint} | Netuid: {netuid}[/dim]') + + # 3. Set up bittensor objects + if not json_mode: + with console.status('[bold]Connecting to network...'): + try: + wallet = bt.Wallet(name=wallet_name, hotkey=wallet_hotkey) + subtensor = bt.Subtensor(network=ws_endpoint) + metagraph = subtensor.metagraph(netuid=netuid) + dendrite = bt.Dendrite(wallet=wallet) + except Exception as e: + _error(f'Failed to initialize bittensor: {e}', json_mode) + sys.exit(1) + else: + try: + wallet = bt.Wallet(name=wallet_name, hotkey=wallet_hotkey) + subtensor = bt.Subtensor(network=ws_endpoint) + metagraph = subtensor.metagraph(netuid=netuid) + dendrite = bt.Dendrite(wallet=wallet) + except Exception as e: + _error(f'Failed to initialize bittensor: {e}', json_mode) + sys.exit(1) + + # Verify miner is registered + if wallet.hotkey.ss58_address not in metagraph.hotkeys: + _error(f'Hotkey {wallet.hotkey.ss58_address[:16]}... is not registered on subnet {netuid}.', json_mode) + sys.exit(1) + + # 4. Find active validator axons (vtrust > 0.1 = actively participating in consensus) + validator_axons = [] + validator_uids = [] + for uid in range(metagraph.n): + if metagraph.validator_trust[uid] > 0.1 and metagraph.axons[uid].is_serving: + validator_axons.append(metagraph.axons[uid]) + validator_uids.append(uid) + + if not validator_axons: + _error('No reachable validator axons found on the network.', json_mode) + sys.exit(1) + + # 5. 
Broadcast + synapse = PatBroadcastSynapse(github_access_token=pat) + + if not json_mode: + with console.status(f'[bold]Broadcasting to {len(validator_axons)} validators...'): + responses = asyncio.get_event_loop().run_until_complete( + dendrite( + axons=validator_axons, + synapse=synapse, + deserialize=False, + timeout=30.0, + ) + ) + else: + responses = asyncio.get_event_loop().run_until_complete( + dendrite( + axons=validator_axons, + synapse=synapse, + deserialize=False, + timeout=30.0, + ) + ) + + # 6. Collect results + results = [] + for uid, axon, resp in zip(validator_uids, validator_axons, responses): + accepted = getattr(resp, 'accepted', None) + reason = getattr(resp, 'rejection_reason', None) + status_code = getattr(resp.dendrite, 'status_code', None) if hasattr(resp, 'dendrite') else None + results.append( + { + 'uid': uid, + 'hotkey': axon.hotkey[:16] + '...', + 'accepted': accepted, + 'rejection_reason': reason, + 'status_code': status_code, + } + ) + + accepted_count = sum(1 for r in results if r['accepted'] is True) + + # 7. 
Display results + if json_mode: + click.echo( + json.dumps( + { + 'success': accepted_count > 0, + 'total_validators': len(results), + 'accepted': accepted_count, + 'rejected': len(results) - accepted_count, + 'results': results, + }, + indent=2, + ) + ) + else: + table = Table(title='PAT Broadcast Results') + table.add_column('UID', style='cyan', justify='right') + table.add_column('Validator', style='dim') + table.add_column('Status', justify='center') + table.add_column('Reason', style='dim') + + for r in results: + if r['accepted'] is True: + status = '[green]✓[/green]' + elif r['accepted'] is False: + status = '[red]✗[/red]' + else: + status = '[yellow]—[/yellow]' + table.add_row(str(r['uid']), r['hotkey'], status, r.get('rejection_reason') or '') + + console.print(table) + console.print(f'\n[bold]{accepted_count}/{len(results)} validators accepted your PAT.[/bold]') + + +def _validate_pat_locally(pat: str) -> bool: + """Validate PAT mirrors the validator-side checks: user identity + GraphQL access.""" + headers = {'Authorization': f'token {pat}', 'Accept': 'application/vnd.github.v3+json'} + try: + # Check basic auth + user_resp = requests.get(f'{BASE_GITHUB_API_URL}/user', headers=headers, timeout=15) + if user_resp.status_code != 200: + return False + + # Check GraphQL access (same test the validator runs during PAT broadcast) + gql_headers = {'Authorization': f'bearer {pat}', 'Accept': 'application/json'} + gql_resp = requests.post( + f'{BASE_GITHUB_API_URL}/graphql', + json={'query': '{ viewer { login } }'}, + headers=gql_headers, + timeout=15, + ) + if gql_resp.status_code != 200: + console.print( + '[red]PAT lacks GraphQL API access. 
Fine-grained PATs need "Public Repositories (read-only)" permission.[/red]' + ) + return False + + return True + except requests.RequestException: + return False + + +def _load_config_value(key: str): + """Load a value from ~/.gittensor/config.json, or None.""" + from pathlib import Path + + config_file = Path.home() / '.gittensor' / 'config.json' + if not config_file.exists(): + return None + try: + config = json.loads(config_file.read_text()) + return config.get(key) + except (json.JSONDecodeError, OSError): + return None + + +NETWORK_MAP = { + 'local': 'ws://127.0.0.1:9944', + 'test': 'wss://test.finney.opentensor.ai:443/', + 'finney': 'wss://entrypoint-finney.opentensor.ai:443/', +} + + +def _resolve_endpoint(network: str | None, rpc_url: str | None) -> str: + """Resolve the subtensor endpoint from CLI args or config.""" + if rpc_url: + return rpc_url + if network: + return NETWORK_MAP.get(network, network) + # Try config file + config_network = _load_config_value('network') + config_endpoint = _load_config_value('ws_endpoint') + if config_endpoint: + return config_endpoint + if config_network: + return NETWORK_MAP.get(config_network) or config_network + return NETWORK_MAP['finney'] + + +def _error(msg: str, json_mode: bool): + """Print an error message in the appropriate format.""" + if json_mode: + click.echo(json.dumps({'success': False, 'error': msg})) + else: + console.print(f'[red]Error: {msg}[/red]') diff --git a/gittensor/constants.py b/gittensor/constants.py index 3e38697c..add85e04 100644 --- a/gittensor/constants.py +++ b/gittensor/constants.py @@ -1,5 +1,6 @@ # Entrius 2025 -from datetime import datetime, timezone +import re +from typing import Dict # ============================================================================= # General @@ -11,9 +12,10 @@ # GitHub API # ============================================================================= BASE_GITHUB_API_URL = 'https://api.github.com' -MIN_GITHUB_ACCOUNT_AGE = 180 # days # 1MB max file size 
for github api file fetches. Files exceeding this get no score. MAX_FILE_SIZE_BYTES = 1_000_000 +# Too many object lookups in one GraphQL query can trigger 502 errors and lose all results. +MAX_FILES_PER_GRAPHQL_BATCH = 50 # ============================================================================= # Language & File Scoring @@ -52,11 +54,11 @@ # ============================================================================= # Repository & PR Scoring # ============================================================================= -PR_LOOKBACK_DAYS = 90 # how many days a merged pr will count for scoring -DEFAULT_MERGED_PR_BASE_SCORE = 30 +PR_LOOKBACK_DAYS = 35 # rolling window for scoring +MERGED_PR_BASE_SCORE = 30 MIN_TOKEN_SCORE_FOR_BASE_SCORE = 5 # PRs below this get 0 base score (can still earn contribution bonus) MAX_CONTRIBUTION_BONUS = 30 -DEFAULT_MAX_CONTRIBUTION_SCORE_FOR_FULL_BONUS = 2000 +CONTRIBUTION_SCORE_FOR_FULL_BONUS = 2000 # Boosts MAX_CODE_DENSITY_MULTIPLIER = 3.0 @@ -71,13 +73,12 @@ # Issue boosts MAX_ISSUE_CLOSE_WINDOW_DAYS = 1 -MAX_ISSUE_AGE_FOR_MAX_SCORE = 40 # days # Time decay (sigmoid curve) TIME_DECAY_GRACE_PERIOD_HOURS = 12 # hours before time decay begins TIME_DECAY_SIGMOID_MIDPOINT = 10 # days until 50% score loss TIME_DECAY_SIGMOID_STEEPNESS_SCALAR = 0.4 -TIME_DECAY_MIN_MULTIPLIER = 0.05 # 5% of score will retain through lookback days (90D) +TIME_DECAY_MIN_MULTIPLIER = 0.05 # 5% of score will retain through lookback window # comment nodes for token scoring COMMENT_NODE_TYPES = frozenset( @@ -90,31 +91,58 @@ } ) +# Extensions where source files may contain inline test blocks (e.g. 
Rust #[cfg(test)], Zig test declarations) +INLINE_TEST_EXTENSIONS = frozenset({'rs', 'zig', 'd'}) + +INLINE_TEST_PATTERNS: Dict[str, re.Pattern] = { + 'rs': re.compile(r'^\s*(?:#\[(?:cfg\()?test\b|#!\[cfg\(test\)\]|#\[\w+::test\b)', re.MULTILINE), + 'zig': re.compile(r'^\s*test\b\s*[{"]', re.MULTILINE), + 'd': re.compile(r'^\s*unittest\b', re.MULTILINE), +} + # ============================================================================= -# Tiers & Collateral System +# Eligibility Gate (OSS Contributions) # ============================================================================= -TIER_BASED_INCENTIVE_MECHANISM_START_DATE = datetime(2025, 12, 31, 3, 45, 00, tzinfo=timezone.utc) -DEFAULT_COLLATERAL_PERCENT = 0.20 +MIN_VALID_MERGED_PRS = 5 # minimum "valid" merged PRs (token_score >= MIN_TOKEN_SCORE_FOR_BASE_SCORE) to receive score +MIN_CREDIBILITY = 0.80 # minimum credibility ratio to receive score +CREDIBILITY_MULLIGAN_COUNT = 1 # number of closed PRs forgiven (erased from merged+closed counts entirely) -# Tier-based emission allocation splits -TIER_EMISSION_SPLITS = { - 'Bronze': 0.15, # 15% of emissions - 'Silver': 0.35, # 35% of emissions - 'Gold': 0.50, # 50% of emissions -} +# ============================================================================= +# Issue Discovery +# ============================================================================= +# Eligibility gate (stricter than OSS contributions) +MIN_VALID_SOLVED_ISSUES = 7 # minimum solved issues where solving PR has token_score >= MIN_TOKEN_SCORE_FOR_BASE_SCORE +MIN_ISSUE_CREDIBILITY = 0.80 # minimum issue credibility ratio + +# Review quality cliff model (different from OSS: has clean bonus + steeper penalty) +ISSUE_REVIEW_CLEAN_BONUS = 1.1 # multiplier when 0 CHANGES_REQUESTED rounds +ISSUE_REVIEW_PENALTY_RATE = 0.15 # per CHANGES_REQUESTED round after cliff + +# Open issue spam threshold +OPEN_ISSUE_SPAM_BASE_THRESHOLD = 5 # half the PR base of 10 +OPEN_ISSUE_SPAM_TOKEN_SCORE_PER_SLOT = 300.0 
# +1 allowed open issue per this much token score +MAX_OPEN_ISSUE_THRESHOLD = 30 + +# Repo-centric closed issue scan caps (validator PAT budget) +REPO_SCAN_PER_REPO_CAP = 300 # max solver lookups per repo +REPO_SCAN_GLOBAL_CAP = 1500 # max solver lookups per round +REPO_SCAN_CONCURRENCY = 2 # concurrent solver lookup threads + +# ============================================================================= +# Collateral +# ============================================================================= +OPEN_PR_COLLATERAL_PERCENT = 0.20 # ============================================================================= # Rewards & Emissions # ============================================================================= RECYCLE_UID = 0 -# Network emission scaling (unique repos) -UNIQUE_REPOS_MAX_RECYCLE = 0.8 -UNIQUE_REPOS_RECYCLE_DECAY_RATE = 0.005 - -# Network emission scaling (total token score from tiered miners) -TOKEN_SCORE_MAX_RECYCLE = 0.8 -TOKEN_SCORE_RECYCLE_DECAY_RATE = 0.000012 +# Hardcoded emission splits per competition (replaces dynamic emissions) +OSS_EMISSION_SHARE = 0.30 # 30% to OSS contributions (PR scoring) +ISSUE_DISCOVERY_EMISSION_SHARE = 0.30 # 30% to issue discovery +RECYCLE_EMISSION_SHARE = 0.25 # 25% to recycle UID 0 +# ISSUES_TREASURY_EMISSION_SHARE = 0.15 defined below (15% to smart contract treasury) # ============================================================================= # Spam & Gaming Mitigation @@ -124,17 +152,17 @@ # PR Review Quality Multiplier REVIEW_PENALTY_RATE = 0.12 # 12% deduction per CHANGES_REQUESTED review from a maintainer -# Issue multiplier bonuses -MAX_ISSUE_AGE_BONUS = 0.75 # Max bonus for issue age (scales with sqrt of days open) -MAINTAINER_ISSUE_BONUS = 0.25 # Extra bonus when issue was created by a maintainer +# Issue multiplier (flat values, no age scaling) +STANDARD_ISSUE_MULTIPLIER = 1.33 # Non-maintainer issue author +MAINTAINER_ISSUE_MULTIPLIER = 1.66 # Issue author is OWNER/MEMBER/COLLABORATOR # Excessive 
open PRs penalty # Multiplier = 1.0 if open PRs <= threshold, 0.0 otherwise EXCESSIVE_PR_PENALTY_BASE_THRESHOLD = 10 # Dynamic open PR threshold bonus for top contributors -# Bonus = floor(total_unlocked_token_score / 500) -# Example: 1500 token score across unlocked tiers / 500 = +3 bonus -OPEN_PR_THRESHOLD_TOKEN_SCORE = 500.0 # Token score per +1 bonus (sum of all unlocked tiers) +# Bonus = floor(total_token_score / 300) +# Example: 900 total token score / 300 = +3 bonus +OPEN_PR_THRESHOLD_TOKEN_SCORE = 300.0 # Token score per +1 bonus MAX_OPEN_PR_THRESHOLD = 30 # Maximum open PR threshold (base + bonus capped at this value) # ============================================================================= @@ -143,26 +171,3 @@ CONTRACT_ADDRESS = '5FWNdk8YNtNcHKrAx2krqenFrFAZG7vmsd2XN2isJSew3MrD' ISSUES_TREASURY_UID = 111 # UID of the smart contract neuron, if set to RECYCLE_UID then it's disabled ISSUES_TREASURY_EMISSION_SHARE = 0.15 # % of emissions allocated to funding issues treasury - -# ============================================================================= -# Merge Predictions -# ============================================================================= -PREDICTIONS_EMISSIONS_SHARE = 0.15 # % of emissions allocated to prediction competition -PREDICTIONS_TOP_K = 3 # only top-K miners by EMA receive prediction rewards -PREDICTIONS_TOP_K_SHARES = [0.50, 0.35, 0.15] # fixed reward split for top-K miners (must sum to 1.0) - -PREDICTIONS_EMA_BETA = 0.1 # EMA decay rate for predictions record -PREDICTIONS_CORRECTNESS_EXPONENT = 3 # exponent on correctness to harshly punish incorrect predictions -PREDICTIONS_TIMELINESS_EXPONENT = 1.8 # curve for early prediction bonus. higher = sharper curve. 
1.0 = linear -PREDICTIONS_MAX_TIMELINESS_BONUS = 0.75 # max bonus for earliest predictions -PREDICTIONS_MAX_CONSENSUS_BONUS = 0.25 # max bonus for pre-convergence predictions -PREDICTIONS_MAX_ORDER_BONUS = 0.75 # max bonus for first correct predictor (applies to merged PR only) -PREDICTIONS_ORDER_CORRECTNESS_THRESHOLD = 0.66 # min raw correctness to qualify for order bonus -# variance threshold for full rewards -# if variance across predictions never exceeds this threshold, the solution must be 'obvious' -PREDICTIONS_CONSENSUS_VARIANCE_TARGET = 0.2 - -# Cooldown & Limits -PREDICTIONS_COOLDOWN_SECONDS = 900 # 15 min cooldown per miner per PR re-prediction -PREDICTIONS_MIN_VALUE = 0.0 -PREDICTIONS_MAX_VALUE = 1.0 diff --git a/gittensor/miner/__init__.py b/gittensor/miner/__init__.py deleted file mode 100644 index 488d5209..00000000 --- a/gittensor/miner/__init__.py +++ /dev/null @@ -1 +0,0 @@ -# GitTensor Miner Module diff --git a/gittensor/miner/broadcast.py b/gittensor/miner/broadcast.py deleted file mode 100644 index cdcf0bce..00000000 --- a/gittensor/miner/broadcast.py +++ /dev/null @@ -1,85 +0,0 @@ -# Entrius 2025 - -"""Broadcast PredictionSynapse from miner to all validator axons.""" - -import asyncio - -import bittensor as bt - -from gittensor.miner.token_mgmt import load_token -from gittensor.synapses import PredictionSynapse - - -def broadcast_predictions( - payload: dict[str, object], - wallet_name: str, - wallet_hotkey: str, - ws_endpoint: str, - netuid: int, -) -> dict[str, object]: - """Broadcast PredictionSynapse to all validator axons via dendrite. - - Args: - payload: Dict with issue_id, repository, predictions. - wallet_name: Bittensor wallet name. - wallet_hotkey: Bittensor hotkey name. - ws_endpoint: Subtensor RPC endpoint. - netuid: Subnet UID to broadcast on. - - Returns: - Dict with success, total_validators, accepted, rejected, results. 
- """ - github_pat = load_token(quiet=True) - if not github_pat: - return {'success': False, 'error': 'GITTENSOR_MINER_PAT not set or invalid.', 'results': []} - - wallet = bt.Wallet(name=wallet_name, hotkey=wallet_hotkey) - subtensor = bt.Subtensor(network=ws_endpoint) - metagraph = subtensor.metagraph(netuid=netuid) - dendrite = bt.Dendrite(wallet=wallet) - - synapse = PredictionSynapse( - github_access_token=github_pat, - issue_id=int(payload['issue_id']), - repository=str(payload['repository']), - predictions={int(k): float(v) for k, v in payload['predictions'].items()}, - ) - - # Get axons for high-trust validators with permit that are actively serving. - validator_axons = [ - axon - for uid, axon in enumerate(metagraph.axons) - if metagraph.validator_permit[uid] and axon.is_serving and float(metagraph.Tv[uid]) > 0.5 - ] - - if not validator_axons: - return {'success': False, 'error': 'No reachable validator axons found on the network.', 'results': []} - - responses = asyncio.get_event_loop().run_until_complete( - dendrite( - axons=validator_axons, - synapse=synapse, - deserialize=False, - timeout=12.0, - ) - ) - - results = [] - for axon, resp in zip(validator_axons, responses): - results.append( - { - 'validator': axon.hotkey[:16], - 'accepted': resp.accepted if hasattr(resp, 'accepted') else None, - 'rejection_reason': resp.rejection_reason if hasattr(resp, 'rejection_reason') else None, - 'status_code': resp.dendrite.status_code if hasattr(resp, 'dendrite') else None, - } - ) - - accepted_count = sum(1 for r in results if r['accepted'] is True) - return { - 'success': accepted_count > 0, - 'total_validators': len(validator_axons), - 'accepted': accepted_count, - 'rejected': len(results) - accepted_count, - 'results': results, - } diff --git a/gittensor/miner/token_mgmt.py b/gittensor/miner/token_mgmt.py deleted file mode 100644 index 29ba0780..00000000 --- a/gittensor/miner/token_mgmt.py +++ /dev/null @@ -1,81 +0,0 @@ -# Copyright © 2025 Entrius -import os 
-import sys -import time -from typing import Optional - -import bittensor as bt -import requests - -from gittensor.constants import BASE_GITHUB_API_URL - - -def init() -> bool: - """Initialize and check if GitHub token exists in environment - - Returns: - bool: Always returns True if token exists, otherwise exits - - Raises: - SystemExit: If GITTENSOR_MINER_PAT environment variable is not set - """ - token = os.getenv('GITTENSOR_MINER_PAT') - if not token: - bt.logging.error('GitHub Token NOT FOUND. Please set GITTENSOR_MINER_PAT environment variable.') - bt.logging.error('Refer to README.md and the miner setup for more information.') - sys.exit(1) - - bt.logging.success('Found GITTENSOR_MINER_PAT in environment') - return True - - -def load_token(quiet: bool = False) -> Optional[str]: - """ - Load GitHub token from environment variable - - Returns: - Optional[str]: The GitHub access token string if valid, None otherwise - """ - if not quiet: - bt.logging.info('Loading GitHub token from environment.') - - access_token = os.getenv('GITTENSOR_MINER_PAT') - - if not access_token: - if not quiet: - bt.logging.error('No GitHub token found in GITTENSOR_MINER_PAT environment variable!') - return None - - # Test if token is still valid - if is_token_valid(access_token): - if not quiet: - bt.logging.info('GitHub token loaded successfully and is valid.') - return access_token - - if not quiet: - bt.logging.error('GitHub token is invalid or expired.') - return None - - -def is_token_valid(token: str) -> bool: - """ - Test if a GitHub token is valid by making a simple API call. 
- - Args: - token (str): GitHub personal access token to validate - - Returns: - bool: True if valid token, False otherwise - """ - headers = {'Authorization': f'token {token}', 'Accept': 'application/vnd.github.v3+json'} - - for attempt in range(3): - try: - response = requests.get(f'{BASE_GITHUB_API_URL}/user', headers=headers, timeout=15) - return response.status_code == 200 - except Exception as e: - bt.logging.warning(f'Error validating GitHub token (attempt {attempt + 1}/3): {e}') - if attempt < 2: # Don't sleep on last attempt - time.sleep(3) - - return False diff --git a/gittensor/mock.py b/gittensor/mock.py deleted file mode 100644 index 8ac7f6dc..00000000 --- a/gittensor/mock.py +++ /dev/null @@ -1,115 +0,0 @@ -import asyncio -import random -import time -from typing import List - -import bittensor as bt - - -class MockSubtensor(bt.MockSubtensor): - def __init__(self, netuid, n=16, wallet=None, network='mock'): - super().__init__(network=network) - - if not self.subnet_exists(netuid): - self.create_subnet(netuid) - - # Register ourself (the validator) as a neuron at uid=0 - if wallet is not None: - self.force_register_neuron( - netuid=netuid, - hotkey=wallet.hotkey.ss58_address, - coldkey=wallet.coldkey.ss58_address, - balance=100000, - stake=100000, - ) - - # Register n mock neurons who will be miners - for i in range(1, n + 1): - self.force_register_neuron( - netuid=netuid, - hotkey=f'miner-hotkey-{i}', - coldkey='mock-coldkey', - balance=100000, - stake=100000, - ) - - -class MockMetagraph(bt.Metagraph): - def __init__(self, netuid=1, network='mock', subtensor=None): - super().__init__(netuid=netuid, network=network, sync=False) - - if subtensor is not None: - self.subtensor = subtensor - self.sync(subtensor=subtensor) - - for axon in self.axons: - axon.ip = '127.0.0.0' - axon.port = 8091 - - bt.logging.info(f'Metagraph: {self}') - bt.logging.info(f'Axons: {self.axons}') - - -class MockDendrite(bt.Dendrite): - """ - Replaces a real bittensor network 
request with a mock request that just returns some static response for all axons that are passed and adds some random delay. - """ - - def __init__(self, wallet): - super().__init__(wallet) - - async def forward( - self, - axons: List[bt.Axon], - synapse: bt.Synapse = bt.Synapse(), - timeout: float = 12, - deserialize: bool = True, - run_async: bool = True, - streaming: bool = False, - ): - if streaming: - raise NotImplementedError('Streaming not implemented yet.') - - async def query_all_axons(streaming: bool): - """Queries all axons for responses.""" - - async def single_axon_response(i, axon): - """Queries a single axon for a response.""" - - start_time = time.time() - s = synapse.copy() - # Attach some more required data so it looks real - s = self.preprocess_synapse_for_request(axon, s, timeout) - # We just want to mock the response, so we'll just fill in some data - process_time = random.random() - if process_time < timeout: - s.dendrite.process_time = str(time.time() - start_time) - # Update the status code and status message of the dendrite to match the axon - s.dummy_output = s.dummy_input * 2 - s.dendrite.status_code = 200 - s.dendrite.status_message = 'OK' - synapse.dendrite.process_time = str(process_time) - else: - s.dummy_output = 0 - s.dendrite.status_code = 408 - s.dendrite.status_message = 'Timeout' - synapse.dendrite.process_time = str(timeout) - - # Return the updated synapse object after deserializing if requested - if deserialize: - return s.deserialize() - else: - return s - - return await asyncio.gather(*(single_axon_response(i, target_axon) for i, target_axon in enumerate(axons))) - - return await query_all_axons(streaming) - - def __str__(self) -> str: - """ - Returns a string representation of the Dendrite object. - - Returns: - str: The string representation of the Dendrite object in the format "dendrite()". 
- """ - return 'MockDendrite({})'.format(self.keypair.ss58_address) diff --git a/gittensor/synapses.py b/gittensor/synapses.py index f46249f3..47eb98af 100644 --- a/gittensor/synapses.py +++ b/gittensor/synapses.py @@ -4,41 +4,30 @@ import bittensor as bt -class GitPatSynapse(bt.Synapse): - """ - This synapse is used to request GitHub access tokens from a miner and receive the response. +class PatBroadcastSynapse(bt.Synapse): + """Miner-initiated push synapse to broadcast their GitHub PAT to validators. - Attributes: - - github_access_token: A string value representing the GitHub access token. - Initially None for requests, and set to the actual token for responses. + The miner sets github_access_token on the request. The validator validates the PAT + (checks it works, extracts GitHub ID, verifies account age, runs a test query) + and responds with accepted/rejection_reason. """ - github_access_token: Optional[str] = None + # Miner request + github_access_token: str + # Validator response + accepted: Optional[bool] = None + rejection_reason: Optional[str] = None -class PredictionSynapse(bt.Synapse): - """Miner-initiated push synapse for merge predictions. - Request fields (set by miner): - - github_access_token: Miner's GitHub PAT for identity verification and account age check. - - issue_id: On-chain issue ID (NOT GitHub issue number). - - repository: Full repo name, e.g. "entrius/gittensor". - - predictions: Mapping of PR number -> probability (0.0-1.0). - Sum across all of a miner's predictions for an issue must be <= 1.0. - Each submission can contain one or many PR predictions. - Submitting a prediction for a PR that already has one overwrites it. +class PatCheckSynapse(bt.Synapse): + """Probe for miners to check if a validator has their PAT stored and valid. - Response fields (set by validator): - - accepted: Whether the prediction was stored. - - rejection_reason: Human-readable reason if rejected. 
+ No PAT is sent — the validator identifies the miner by their dendrite hotkey, + looks up the stored PAT, and re-validates it (GitHub API check + test query). """ - # Miner Request - github_access_token: str - issue_id: int - repository: str - predictions: dict[int, float] - - # Validator Response - accepted: Optional[bool] = None + # Validator response + has_pat: Optional[bool] = None + pat_valid: Optional[bool] = None rejection_reason: Optional[str] = None diff --git a/gittensor/utils/config.py b/gittensor/utils/config.py index 2de04c0e..8afe763d 100644 --- a/gittensor/utils/config.py +++ b/gittensor/utils/config.py @@ -19,6 +19,7 @@ import argparse import os import subprocess +from typing import Any import bittensor as bt @@ -41,7 +42,7 @@ def is_cuda_available(): return 'cpu' -def check_config(cls, config: 'bt.Config'): +def check_config(cls, config: Any): r"""Checks/validates the config namespace object.""" bt.logging.check_config(config) @@ -115,45 +116,6 @@ def add_args(cls, parser): ) -def add_miner_args(cls, parser): - """Add miner specific arguments to the parser.""" - - parser.add_argument( - '--neuron.name', - type=str, - help='Trials for this neuron go in neuron.root / (wallet_cold - wallet_hot) / neuron.name. ', - default='miner', - ) - - parser.add_argument( - '--blacklist.force_validator_permit', - action='store_true', - help='If set, we will force incoming requests to have a permit.', - default=True, - ) - - parser.add_argument( - '--blacklist.allow_non_registered', - action='store_true', - help='If set, miners will accept queries from non registered entities. 
(Dangerous!)', - default=False, - ) - - parser.add_argument( - '--blacklist.min_stake', - type=int, - help='Minimum stake required for a validator to query this miner.', - default=12500, - ) - - parser.add_argument( - '--dev_mode', - action='store_true', - help='Enable development mode with additional logging and features.', - default=False, - ) - - def add_validator_args(cls, parser): """Add validator specific arguments to the parser.""" diff --git a/gittensor/utils/github_api_tools.py b/gittensor/utils/github_api_tools.py index 8f089318..dedeadc6 100644 --- a/gittensor/utils/github_api_tools.py +++ b/gittensor/utils/github_api_tools.py @@ -1,6 +1,7 @@ # Entrius 2025 import base64 import fnmatch +import os import re import time from dataclasses import dataclass @@ -24,8 +25,8 @@ BASE_GITHUB_API_URL, MAINTAINER_ASSOCIATIONS, MAX_FILE_SIZE_BYTES, + MAX_FILES_PER_GRAPHQL_BATCH, PR_LOOKBACK_DAYS, - TIER_BASED_INCENTIVE_MECHANISM_START_DATE, ) from gittensor.utils.models import PRInfo from gittensor.validator.utils.load_weights import RepositoryConfig @@ -35,6 +36,7 @@ query($userId: ID!, $limit: Int!, $cursor: String) { node(id: $userId) { ... on User { + issues(states: [OPEN]) { totalCount } pullRequests(first: $limit, states: [MERGED, OPEN, CLOSED], orderBy: {field: CREATED_AT, direction: DESC}, after: $cursor) { pageInfo { hasNextPage @@ -87,8 +89,10 @@ state createdAt closedAt + updatedAt author { login + ... on User { databaseId } } authorAssociation } @@ -138,12 +142,8 @@ def make_headers(token: str) -> Dict[str, str]: } -# In-process cache for GitHub /user responses, keyed by PAT. -_GITHUB_USER_CACHE: Dict[str, Dict[str, Any]] = {} - - def get_github_user(token: str) -> Optional[Dict[str, Any]]: - """Fetch GitHub user data for a PAT with retry and in-process cache. + """Fetch GitHub user data for a PAT with retry. 
Args: token (str): Github pat @@ -153,11 +153,6 @@ def get_github_user(token: str) -> Optional[Dict[str, Any]]: if not token: return None - # Check cache first to avoid duplicate /user calls for the same PAT. - cached = _GITHUB_USER_CACHE.get(token) - if cached is not None: - return cached - headers = make_headers(token) # Retry logic for timeout issues @@ -171,7 +166,6 @@ def get_github_user(token: str) -> Optional[Dict[str, Any]]: bt.logging.warning(f'Failed to parse GitHub /user JSON response: {e}') return None - _GITHUB_USER_CACHE[token] = user_data return user_data bt.logging.warning( @@ -249,6 +243,62 @@ def get_github_account_age_days(token: str) -> Optional[int]: return None +def get_merge_base_sha(repository: str, base_sha: str, head_sha: str, token: str) -> Optional[str]: + """ + Get the merge-base commit SHA between two refs using GitHub's compare API. + + The merge-base is the common ancestor commit — the correct "before" state + for computing a PR's own changes via tree-diff scoring. + + Args: + repository: Repository in format 'owner/repo' + base_sha: Base branch ref OID + head_sha: Head branch ref OID + token: GitHub PAT + + Returns: + Merge-base commit SHA, or None if the request fails + """ + headers = make_headers(token) + max_attempts = 3 + + for attempt in range(max_attempts): + try: + response = requests.get( + f'{BASE_GITHUB_API_URL}/repos/{repository}/compare/{base_sha}...{head_sha}', + headers=headers, + timeout=15, + ) + + if response.status_code == 200: + data = response.json() + merge_base = (data.get('merge_base_commit') or {}).get('sha') + if merge_base: + return merge_base + bt.logging.warning(f'Compare API returned 200 but no merge_base_commit for {repository}') + return None + + if attempt < max_attempts - 1: + backoff_delay = min(5 * (2 ** (attempt)), 30) + bt.logging.warning( + f'Compare API for {repository} failed with status {response.status_code} ' + f'(attempt {attempt + 1}/{max_attempts}), retrying in {backoff_delay}s...' 
+ ) + time.sleep(backoff_delay) + + except requests.exceptions.RequestException as e: + if attempt < max_attempts - 1: + backoff_delay = min(5 * (2 ** (attempt)), 30) + bt.logging.warning( + f'Compare API error for {repository} (attempt {attempt + 1}/{max_attempts}): {e}, ' + f'retrying in {backoff_delay}s...' + ) + time.sleep(backoff_delay) + + bt.logging.warning(f'Compare API for {repository} failed after {max_attempts} attempts. Will use base_ref_oid.') + return None + + def get_pull_request_file_changes(repository: str, pr_number: int, token: str) -> Optional[List[FileChange]]: """ Get the diff for a specific PR by repository name and PR number. @@ -819,7 +869,7 @@ def try_add_open_or_closed_pr( lookback_date_filter: Date filter for lookback period """ # Ignore all maintainer contributions - if pr_raw.get('authorAssociation') in MAINTAINER_ASSOCIATIONS: + if not os.environ.get('DEV_MODE') and pr_raw.get('authorAssociation') in MAINTAINER_ASSOCIATIONS: return if pr_state == PRState.OPEN.value: @@ -869,7 +919,7 @@ def should_skip_merged_pr( # Skip if PR author is a maintainer author_association = pr_raw.get('authorAssociation') - if author_association in MAINTAINER_ASSOCIATIONS: + if not os.environ.get('DEV_MODE') and author_association in MAINTAINER_ASSOCIATIONS: return ( True, f'Skipping PR #{pr_raw["number"]} in {repository_full_name} - author is {author_association} (has direct merge capabilities)', @@ -935,6 +985,10 @@ def load_miners_prs( """ bt.logging.info('*****Fetching PRs*****') + if not miner_eval.github_pat: + bt.logging.warning(f'UID {miner_eval.uid} has no github_pat, skipping PR fetch') + return + lookback_date_filter = datetime.now(timezone.utc) - timedelta(days=PR_LOOKBACK_DAYS) global_user_id = base64.b64encode(f'04:User{miner_eval.github_id}'.encode()).decode() @@ -973,6 +1027,10 @@ def load_miners_prs( bt.logging.warning('User not found or no pull requests') break + # Extract open issue count from first page (User-level field, not paginated) 
+ if cursor is None: + miner_eval.total_open_issues = user_data.get('issues', {}).get('totalCount', 0) + pr_data: Dict = user_data.get('pullRequests', {}) prs: List = pr_data.get('nodes', []) page_info: Dict = pr_data.get('pageInfo', {}) @@ -982,19 +1040,6 @@ def load_miners_prs( repository_full_name = parse_repo_name(pr_raw['repository']) pr_state = pr_raw['state'] - # Stop querying once we hit PRs older than the tier incentive start date - pr_creation_time = datetime.fromisoformat(pr_raw['createdAt'].rstrip('Z')).replace( - tzinfo=timezone.utc - ) - - if pr_creation_time < TIER_BASED_INCENTIVE_MECHANISM_START_DATE: - bt.logging.info( - f'Reached PR #{pr_raw["number"]} in {repository_full_name} created at {pr_creation_time}, ' - f'before tier incentive start date ({TIER_BASED_INCENTIVE_MECHANISM_START_DATE}). ' - f'Stopping PR fetch.' - ) - return - if repository_full_name not in master_repositories: bt.logging.info(f'Skipping PR #{pr_raw["number"]} in {repository_full_name} - ineligible repo') continue @@ -1006,6 +1051,9 @@ def load_miners_prs( inactive_dt = datetime.fromisoformat(repo_config.inactive_at.rstrip('Z')).replace( tzinfo=timezone.utc ) + pr_creation_time = datetime.fromisoformat(pr_raw['createdAt'].rstrip('Z')).replace( + tzinfo=timezone.utc + ) # Skip PR if it was created after the repo became inactive if pr_creation_time >= inactive_dt: bt.logging.info( @@ -1022,7 +1070,7 @@ def load_miners_prs( ) if should_skip: - bt.logging.debug(skip_reason) + bt.logging.debug(skip_reason or '') continue miner_eval.add_merged_pull_request(pr_raw) @@ -1192,34 +1240,27 @@ def check_github_issue_closed(repo: str, issue_number: int, token: str) -> Optio return None -def fetch_file_contents_batch( +def _fetch_file_contents_batch( repo_owner: str, repo_name: str, head_sha: str, - file_paths: List[str], + batch_paths: List[str], token: str, ) -> Dict[str, Optional[str]]: - """ - Fetch multiple file contents from a repository in a single GraphQL request. 
- - Uses retry logic with exponential backoff for reliability. + """Fetch file contents for a single batch of paths in one GraphQL request. Args: repo_owner: Repository owner repo_name: Repository name head_sha: The commit SHA to fetch files at - file_paths: List of file paths to fetch + batch_paths: File paths for this batch token: GitHub PAT for authentication Returns: - Dict mapping file paths to their contents (None if file is binary, deleted, or too large) + Dict mapping file paths to their contents (None if binary, deleted, or too large) """ - if not file_paths: - return {} - - # Build GraphQL query with aliased file fields file_fields = [] - for i, path in enumerate(file_paths): + for i, path in enumerate(batch_paths): expression = f'{head_sha}:{path}' file_fields.append( f'file{i}: object(expression: "{expression}") {{ ... on Blob {{ text byteSize isBinary }} }}' @@ -1235,19 +1276,18 @@ def fetch_file_contents_batch( variables = {'owner': repo_owner, 'name': repo_name} - # Execute with retry logic data = execute_graphql_query(query, variables, token) if data is None: bt.logging.warning(f'Failed to fetch file contents for {repo_owner}/{repo_name}') - return {path: None for path in file_paths} + return {path: None for path in batch_paths} if 'errors' in data: bt.logging.warning(f'GraphQL errors fetching files: {data["errors"]}') repo_data = data.get('data', {}).get('repository', {}) - results = {} + results: Dict[str, Optional[str]] = {} - for i, path in enumerate(file_paths): + for i, path in enumerate(batch_paths): file_data = repo_data.get(f'file{i}') if file_data is None: @@ -1262,6 +1302,38 @@ def fetch_file_contents_batch( return results +def fetch_file_contents_batch( + repo_owner: str, + repo_name: str, + head_sha: str, + file_paths: List[str], + token: str, +) -> Dict[str, Optional[str]]: + """Fetch file contents in batched GraphQL requests so large PRs don't hit complexity limits. 
+ + Args: + repo_owner: Repository owner + repo_name: Repository name + head_sha: The commit SHA to fetch files at + file_paths: List of file paths to fetch + token: GitHub PAT for authentication + + Returns: + Dict mapping file paths to their contents (None if binary, deleted, or too large) + """ + if not file_paths: + return {} + + results: Dict[str, Optional[str]] = {} + + for batch_start in range(0, len(file_paths), MAX_FILES_PER_GRAPHQL_BATCH): + batch_paths = file_paths[batch_start : batch_start + MAX_FILES_PER_GRAPHQL_BATCH] + batch_results = _fetch_file_contents_batch(repo_owner, repo_name, head_sha, batch_paths, token) + results.update(batch_results) + + return results + + @dataclass class FileContentPair: """Holds both old (base) and new (head) content for a file.""" @@ -1270,50 +1342,41 @@ class FileContentPair: new_content: Optional[str] # None for deleted files -def fetch_file_contents_with_base( +def _fetch_file_contents_with_base_batch( repo_owner: str, repo_name: str, base_sha: str, head_sha: str, - file_changes: List['FileChangeType'], + batch_changes: List['FileChangeType'], token: str, ) -> Dict[str, FileContentPair]: - """ - Fetch both base and head (old and new) versions of files in a single GraphQL request. + """Fetch base and head file contents for a single batch of file changes. 
Args: repo_owner: Repository owner repo_name: Repository name base_sha: The base branch SHA (before PR changes) head_sha: The head/merge commit SHA (after PR changes) - file_changes: List of FileChange objects (needed for status and previous_filename) + batch_changes: File changes for this batch token: GitHub PAT for authentication Returns: Dict mapping file paths to FileContentPair (old_content, new_content) - - For new files: old_content is None - - For deleted files: new_content is None - - For renamed files: old_content fetched from previous_filename """ - if not file_changes: - return {} - - # Build GraphQL query with both base and head versions file_fields = [] - for i, fc in enumerate(file_changes): - # Determine the path to fetch for base version - # For renames, use previous_filename; otherwise use current filename + for i, fc in enumerate(batch_changes): + # Renames need the old path for the base version base_path = fc.previous_filename if fc.previous_filename else fc.filename head_path = fc.filename - # Only fetch base version if file wasn't newly added + # New files have no base version to fetch if fc.status != 'added': base_expr = f'{base_sha}:{base_path}' file_fields.append( f'base{i}: object(expression: "{base_expr}") {{ ... 
on Blob {{ text byteSize isBinary }} }}' ) - # Only fetch head version if file wasn't deleted + # Deleted files have no head version to fetch if fc.status != 'removed': head_expr = f'{head_sha}:{head_path}' file_fields.append( @@ -1333,11 +1396,10 @@ def fetch_file_contents_with_base( variables = {'owner': repo_owner, 'name': repo_name} - # Execute with retry logic data = execute_graphql_query(query, variables, token) if data is None: bt.logging.warning(f'Failed to fetch file contents for {repo_owner}/{repo_name}') - return {fc.filename: FileContentPair(None, None) for fc in file_changes} + return {fc.filename: FileContentPair(None, None) for fc in batch_changes} if 'errors' in data: bt.logging.warning(f'GraphQL errors fetching files: {data["errors"]}') @@ -1345,17 +1407,17 @@ def fetch_file_contents_with_base( repo_data = data.get('data', {}).get('repository', {}) results: Dict[str, FileContentPair] = {} - for i, fc in enumerate(file_changes): + for i, fc in enumerate(batch_changes): old_content = None new_content = None - # Extract base (old) content if applicable + # Pull the old content unless this file was just added if fc.status != 'added': base_data = repo_data.get(f'base{i}') if base_data and not base_data.get('isBinary') and base_data.get('byteSize', 0) <= MAX_FILE_SIZE_BYTES: old_content = base_data.get('text') - # Extract head (new) content if applicable + # Pull the new content unless this file was removed if fc.status != 'removed': head_data = repo_data.get(f'head{i}') if head_data and not head_data.get('isBinary') and head_data.get('byteSize', 0) <= MAX_FILE_SIZE_BYTES: @@ -1364,3 +1426,37 @@ def fetch_file_contents_with_base( results[fc.filename] = FileContentPair(old_content=old_content, new_content=new_content) return results + + +def fetch_file_contents_with_base( + repo_owner: str, + repo_name: str, + base_sha: str, + head_sha: str, + file_changes: List['FileChangeType'], + token: str, +) -> Dict[str, FileContentPair]: + """Fetch old and new 
versions of files in batches so large PRs don't hit complexity limits. + + Args: + repo_owner: Repository owner + repo_name: Repository name + base_sha: The base branch SHA (before PR changes) + head_sha: The head/merge commit SHA (after PR changes) + file_changes: List of FileChange objects (needed for status and previous_filename) + token: GitHub PAT for authentication + + Returns: + Dict mapping file paths to FileContentPair (old_content, new_content) + """ + if not file_changes: + return {} + + results: Dict[str, FileContentPair] = {} + + for batch_start in range(0, len(file_changes), MAX_FILES_PER_GRAPHQL_BATCH): + batch = file_changes[batch_start : batch_start + MAX_FILES_PER_GRAPHQL_BATCH] + batch_results = _fetch_file_contents_with_base_batch(repo_owner, repo_name, base_sha, head_sha, batch, token) + results.update(batch_results) + + return results diff --git a/gittensor/utils/logging.py b/gittensor/utils/logging.py index c0b7ecfc..0967b4c9 100644 --- a/gittensor/utils/logging.py +++ b/gittensor/utils/logging.py @@ -22,7 +22,7 @@ def event(self, message, *args, **kws): if self.isEnabledFor(EVENTS_LEVEL_NUM): self._log(EVENTS_LEVEL_NUM, message, args, **kws) - logging.Logger.event = event + logging.Logger.event = event # type: ignore[attr-defined] formatter = logging.Formatter( '%(asctime)s | %(levelname)s | %(message)s', diff --git a/gittensor/utils/uids.py b/gittensor/utils/uids.py index b5eecd9c..bd0f409f 100644 --- a/gittensor/utils/uids.py +++ b/gittensor/utils/uids.py @@ -3,7 +3,7 @@ import bittensor as bt -def check_uid_availability(metagraph: 'bt.metagraph.Metagraph', uid: int, vpermit_tao_limit: int) -> bool: +def check_uid_availability(metagraph: 'bt.Metagraph', uid: int, vpermit_tao_limit: int) -> bool: """Return whether a UID is eligible for querying. 
Args: @@ -41,4 +41,4 @@ def get_all_uids(self, exclude: List[int] = []) -> set[int]: # Ensure miner UID 0 is always included (subnet requirement) available_miner_uids.add(0) - return sorted(available_miner_uids) + return set(available_miner_uids) diff --git a/gittensor/validator/forward.py b/gittensor/validator/forward.py index 771e89fe..0e456042 100644 --- a/gittensor/validator/forward.py +++ b/gittensor/validator/forward.py @@ -2,25 +2,35 @@ # Copyright © 2025 Entrius import asyncio -from typing import TYPE_CHECKING, Dict, Tuple +from typing import TYPE_CHECKING, Dict, Set, Tuple import bittensor as bt import numpy as np from gittensor.classes import MinerEvaluation from gittensor.constants import ( + ISSUE_DISCOVERY_EMISSION_SHARE, ISSUES_TREASURY_EMISSION_SHARE, ISSUES_TREASURY_UID, - PREDICTIONS_EMISSIONS_SHARE, - PREDICTIONS_TOP_K, - PREDICTIONS_TOP_K_SHARES, + OSS_EMISSION_SHARE, + RECYCLE_EMISSION_SHARE, + RECYCLE_UID, ) from gittensor.utils.uids import get_all_uids from gittensor.validator.issue_competitions.forward import issue_competitions -from gittensor.validator.merge_predictions.settlement import merge_predictions +from gittensor.validator.issue_discovery.normalize import ( + normalize_issue_discovery_rewards, +) +from gittensor.validator.issue_discovery.repo_scan import scan_closed_issues +from gittensor.validator.issue_discovery.scoring import score_discovered_issues from gittensor.validator.oss_contributions.reward import get_rewards -from gittensor.validator.utils.config import VALIDATOR_STEPS_INTERVAL, VALIDATOR_WAIT +from gittensor.validator.utils.config import ( + GITTENSOR_VALIDATOR_PAT, + VALIDATOR_STEPS_INTERVAL, + VALIDATOR_WAIT, +) from gittensor.validator.utils.load_weights import ( + RepositoryConfig, load_master_repo_weights, load_programming_language_weights, load_token_config, @@ -34,128 +44,52 @@ async def forward(self: 'Validator') -> None: """Execute the validator's forward pass. 
Performs the core validation cycle every VALIDATOR_STEPS_INTERVAL steps: - 1. Score OSS contributions (pure scoring, no side effects) - 2. Run issue bounties verification (needs tier data from scoring) - 3. Settle merge predictions (score + update EMAs) - 4. Build blended rewards array across all emission sources - 5. Update scores with blended rewards - - Emission blending: - - OSS contributions: 70% (1.0 - treasury - predictions) - - Issue bounties treasury: 15% flat to treasury UID - - Merge predictions: 15% distributed by EMA scores + 1. Score OSS contributions (PR scoring) + 2. Run issue bounties verification + 3. Score issue discovery (repo scan + scoring) + 4. Store all evaluations to DB + 5. Blend emission pools and update scores + + Emission blending (hardcoded per-competition): + - OSS contributions: 30% + - Issue discovery: 30% + - Issue treasury: 15% (flat to UID 111) + - Recycle: 25% (flat to UID 0) """ if self.step % VALIDATOR_STEPS_INTERVAL == 0: miner_uids = get_all_uids(self) + master_repositories = load_master_repo_weights() - rewards, miner_evaluations = await oss_contributions(self, miner_uids) + # 1. Score OSS contributions + oss_rewards, miner_evaluations, cached_uids = await oss_contributions(self, miner_uids, master_repositories) + # 2. Issue bounties verification await issue_competitions(self, miner_evaluations) - await merge_predictions(self, miner_evaluations) - - # Build blended rewards array across all emission sources - oss_share = 1.0 - ISSUES_TREASURY_EMISSION_SHARE - PREDICTIONS_EMISSIONS_SHARE - rewards *= oss_share - - if ISSUES_TREASURY_UID > 0 and ISSUES_TREASURY_UID in miner_uids: - sorted_uids = sorted(miner_uids) - treasury_idx = sorted_uids.index(ISSUES_TREASURY_UID) - rewards[treasury_idx] = ISSUES_TREASURY_EMISSION_SHARE - - bt.logging.info( - f'Treasury allocation: Smart Contract UID {ISSUES_TREASURY_UID} receives ' - f'{ISSUES_TREASURY_EMISSION_SHARE * 100:.0f}% of emissions' - ) + # 3. 
Score issue discovery + issue_rewards = await issue_discovery(miner_evaluations, master_repositories, miner_uids) - prediction_rewards = build_prediction_ema_rewards(self, miner_uids, miner_evaluations) - rewards += prediction_rewards + # 4. Store all evaluations to DB (includes issue discovery fields) + await self.bulk_store_evaluation(miner_evaluations, skip_uids=cached_uids) - bt.logging.info( - f'Blended rewards: OSS {oss_share * 100:.0f}% + treasury {ISSUES_TREASURY_EMISSION_SHARE * 100:.0f}% ' - f'+ predictions {PREDICTIONS_EMISSIONS_SHARE * 100:.0f}% ' - f'(prediction sum={prediction_rewards.sum():.4f})' - ) + # 5. Blend 4 emission pools into final rewards + rewards = blend_emission_pools(oss_rewards, issue_rewards, miner_uids) self.update_scores(rewards, miner_uids) await asyncio.sleep(VALIDATOR_WAIT) -def build_prediction_ema_rewards( +async def oss_contributions( self: 'Validator', miner_uids: set[int], - miner_evaluations: Dict[int, MinerEvaluation], -) -> np.ndarray: - """Build rewards array from prediction EMA scores using top-K winner-takes-most. - - Only the top PREDICTIONS_TOP_K miners by EMA score receive rewards, - split according to PREDICTIONS_TOP_K_SHARES (50%/35%/15%). - Ties are broken by rounds (more settled issues = higher rank). + master_repositories: Dict[str, RepositoryConfig], +) -> Tuple[np.ndarray, Dict[int, MinerEvaluation], Set[int]]: + """Score OSS contributions and return normalized rewards + miner evaluations + cached UIDs. - Maps github_id-keyed EMAs back to UIDs via miner_evaluations. + Pure scoring — no DB storage or emission blending. Those are handled by forward(). 
""" - sorted_uids = sorted(miner_uids) - prediction_rewards = np.zeros(len(sorted_uids), dtype=np.float64) - - all_emas = self.mp_storage.get_all_emas() - if not all_emas: - return prediction_rewards - - # Build github_id -> uid mapping from current miner evaluations - # NOTE: detect_and_penalize_miners_sharing_github() already zeroes github_id - # for duplicate accounts before this runs, so the '!= 0' filter handles them. - github_id_to_uid: Dict[str, int] = {} - for uid, evaluation in miner_evaluations.items(): - if evaluation and evaluation.github_id and evaluation.github_id != '0': - github_id_to_uid[evaluation.github_id] = uid - - # Collect eligible miners: (ema_score, rounds, uid) - eligible: list[tuple[float, int, int]] = [] - for mp_record in all_emas: - github_id = mp_record['github_id'] - ema_score = mp_record['ema_score'] - - if ema_score <= 0: - continue - - uid = github_id_to_uid.get(github_id) - if uid is None or uid not in miner_uids: - continue - - rounds = mp_record.get('rounds', 0) or 0 - eligible.append((ema_score, rounds, uid)) - - if not eligible: - return prediction_rewards - - # Rank by EMA descending, then by rounds descending (tiebreaker) - eligible.sort(key=lambda x: (x[0], x[1]), reverse=True) - - # Award top-K miners their fixed shares - top_k = min(PREDICTIONS_TOP_K, len(eligible)) - for rank in range(top_k): - _, _, uid = eligible[rank] - idx = sorted_uids.index(uid) - prediction_rewards[idx] = PREDICTIONS_TOP_K_SHARES[rank] * PREDICTIONS_EMISSIONS_SHARE - - top_miners_log = ', '.join( - f'UID {uid} (ema={ema:.4f}, rounds={rounds}, share={PREDICTIONS_TOP_K_SHARES[i] * 100:.0f}%)' - for i, (ema, rounds, uid) in enumerate(eligible[:top_k]) - ) - bt.logging.info(f'Merge prediction top-{top_k} rewards: {top_miners_log}') - - return prediction_rewards - - -async def oss_contributions(self: 'Validator', miner_uids: set[int]) -> Tuple[np.ndarray, Dict[int, MinerEvaluation]]: - """Score OSS contributions and return raw rewards + miner 
evaluations. - - Pure scoring — no treasury allocation or weight updates. Those are - handled by the caller (forward()). - """ - master_repositories = load_master_repo_weights() programming_languages = load_programming_language_weights() token_config = load_token_config() @@ -167,8 +101,85 @@ async def oss_contributions(self: 'Validator', miner_uids: set[int]) -> Tuple[np bt.logging.info(f'Token config: {tree_sitter_count} tree-sitter languages') bt.logging.info(f'Neurons to evaluate: {len(miner_uids)}') - rewards, miner_evaluations = await get_rewards( + rewards, miner_evaluations, cached_uids = await get_rewards( self, miner_uids, master_repositories, programming_languages, token_config ) - return rewards, miner_evaluations + return rewards, miner_evaluations, cached_uids + + +async def issue_discovery( + miner_evaluations: Dict[int, MinerEvaluation], + master_repositories: Dict[str, RepositoryConfig], + miner_uids: set[int], +) -> np.ndarray: + """Score issue discovery and return normalized rewards array. + + 1. Scan tracked repos for miner-authored closed issues (validator PAT) + 2. Score issue discovery using PR-linked issues + scan results + 3. Normalize into independent pool + + Returns numpy array of normalized issue discovery rewards (sorted by UID). 
+ """ + # Scan tracked repos for closed issues not linked to miner PRs + scan_issues: Dict[str, list] = {} + if GITTENSOR_VALIDATOR_PAT: + scan_issues = await scan_closed_issues(miner_evaluations, master_repositories, GITTENSOR_VALIDATOR_PAT) + + # Score issue discovery + score_discovered_issues(miner_evaluations, master_repositories, scan_issues) + + # Normalize into independent pool + issue_rewards_dict = normalize_issue_discovery_rewards(miner_evaluations) + + sorted_uids = sorted(miner_uids) + return np.array([issue_rewards_dict.get(uid, 0.0) for uid in sorted_uids]) + + +def blend_emission_pools( + oss_rewards: np.ndarray, + issue_rewards: np.ndarray, + miner_uids: set[int], +) -> np.ndarray: + """Blend 4 emission pools into a single rewards array. + + - OSS contributions: 30% + - Issue discovery: 30% + - Issue treasury: 15% (flat to UID 111) + - Recycle: 25% (flat to UID 0) + """ + sorted_uids = sorted(miner_uids) + rewards = np.zeros(len(sorted_uids)) + recycle_extra = 0.0 + + # Pool 1: OSS contributions (30%) + oss_total = float(oss_rewards.sum()) + if oss_total > 0: + rewards += oss_rewards * OSS_EMISSION_SHARE + else: + recycle_extra += OSS_EMISSION_SHARE + + # Pool 2: Issue discovery (30%) + issue_total = float(issue_rewards.sum()) + if issue_total > 0: + rewards += issue_rewards * ISSUE_DISCOVERY_EMISSION_SHARE + else: + recycle_extra += ISSUE_DISCOVERY_EMISSION_SHARE + + # Pool 3: Issue treasury (15% flat to UID 111) + if ISSUES_TREASURY_UID > 0 and ISSUES_TREASURY_UID in miner_uids: + treasury_idx = sorted_uids.index(ISSUES_TREASURY_UID) + rewards[treasury_idx] += ISSUES_TREASURY_EMISSION_SHARE + bt.logging.info( + f'Treasury allocation: UID {ISSUES_TREASURY_UID} receives ' + f'{ISSUES_TREASURY_EMISSION_SHARE * 100:.0f}% of emissions' + ) + + # Pool 4: Recycle (25% + unclaimed from empty pools) + if RECYCLE_UID in miner_uids: + recycle_idx = sorted_uids.index(RECYCLE_UID) + rewards[recycle_idx] += RECYCLE_EMISSION_SHARE + recycle_extra + if 
recycle_extra > 0: + bt.logging.info(f'Recycling {recycle_extra * 100:.0f}% unclaimed emissions from empty pools') + + return rewards diff --git a/gittensor/validator/issue_competitions/contract_client.py b/gittensor/validator/issue_competitions/contract_client.py index 8d2bc29c..796e22e2 100644 --- a/gittensor/validator/issue_competitions/contract_client.py +++ b/gittensor/validator/issue_competitions/contract_client.py @@ -137,7 +137,7 @@ def _get_child_storage_key(self) -> Optional[str]: if not info or 'trie_id' not in info: return None - trie_id = info['trie_id'] + trie_id = info['trie_id'] # type: ignore[call-overload] if isinstance(trie_id, str): trie_id_hex = trie_id.replace('0x', '') @@ -342,7 +342,7 @@ def _read_contract_u32(self, method_name: str) -> int: value = self._extract_u32_from_response(response) return value if value is not None else 0 - def _raw_contract_read(self, method_name: str, args: dict = None) -> Optional[bytes]: + def _raw_contract_read(self, method_name: str, args: dict = None) -> Optional[bytes]: # type: ignore[assignment] """Read from contract using raw RPC call. Returns the ink! 
return payload (after stripping the ContractExecResult @@ -556,7 +556,7 @@ def _exec_contract_raw( method_name: str, args: dict, keypair, - gas_limit: dict = None, + gas_limit: dict = None, # type: ignore[assignment] value: int = 0, ) -> Optional[str]: """Execute a contract method using raw extrinsic submission.""" @@ -586,10 +586,10 @@ def _exec_contract_raw( signer_address = keypair.ss58_address account_info = self.subtensor.substrate.query('System', 'Account', [signer_address]) if hasattr(account_info, 'value'): - account_data = account_info.value + account_data = account_info.value # type: ignore[union-attr] else: account_data = account_info - free_balance = account_data.get('data', {}).get('free', 0) + free_balance = account_data.get('data', {}).get('free', 0) # type: ignore[union-attr] if free_balance < 100_000_000: bt.logging.error(f'{method_name}: insufficient balance for fees') return None @@ -725,7 +725,7 @@ def get_treasury_stake(self) -> int: # Alpha returns U64F64 fixed-point: bits field contains raw value # Upper 64 bits are integer part (the stake amount in raw units) if hasattr(alpha_result, 'value') and alpha_result.value: - bits = alpha_result.value.get('bits', 0) + bits = alpha_result.value.get('bits', 0) # type: ignore[union-attr] elif isinstance(alpha_result, dict): bits = alpha_result.get('bits', 0) else: diff --git a/gittensor/validator/issue_competitions/forward.py b/gittensor/validator/issue_competitions/forward.py index bd8e1656..ace775fb 100644 --- a/gittensor/validator/issue_competitions/forward.py +++ b/gittensor/validator/issue_competitions/forward.py @@ -30,7 +30,7 @@ async def issue_competitions( 1. Harvest emissions into the bounty pool 2. Get active issues from the smart contract 3. 
For each active issue, check GitHub: - - If solved by bronze+ miner -> vote_solution + - If solved by eligible miner -> vote_solution - If closed but not by eligible miner -> vote_cancel_issue Args: @@ -63,15 +63,13 @@ async def issue_competitions( if harvest_result and harvest_result.get('status') == 'success': bt.logging.success(f'Harvested emissions! Extrinsic: {harvest_result.get("tx_hash", "")}') - # Build mapping of github_id->hotkey for bronze+ miners only (eligible for payouts) + # Build mapping of github_id->hotkey for eligible miners only eligible_miners = { eval.github_id: eval.hotkey for eval in miner_evaluations.values() - if eval.github_id and eval.github_id != '0' and eval.current_tier is not None + if eval.github_id and eval.github_id != '0' and eval.is_eligible } - bt.logging.info( - f'Issue bounties: {len(eligible_miners)} eligible miners (bronze+) out of {len(miner_evaluations)} total' - ) + bt.logging.info(f'Issue bounties: {len(eligible_miners)} eligible miners out of {len(miner_evaluations)} total') for github_id, hotkey in eligible_miners.items(): bt.logging.info(f' Eligible miner: github_id={github_id}, hotkey={hotkey[:12]}...') @@ -134,7 +132,7 @@ async def issue_competitions( bt.logging.info(f'Voted cancel (solver {solver_github_id} not eligible): {issue_label}') continue - miner_coldkey = get_miner_coldkey(miner_hotkey, self.subtensor, self.config.netuid) + miner_coldkey = get_miner_coldkey(miner_hotkey, self.subtensor, self.config.netuid) # type: ignore[attr-defined] if not miner_coldkey: bt.logging.warning( f'Could not get coldkey for hotkey {miner_hotkey} (solver {solver_github_id}): {issue_label}' @@ -174,7 +172,7 @@ async def issue_competitions( ) else: bt.logging.info( - '***** Issue Bounties Complete: processed {len(active_issues)} issues (no state changes) *****' + f'***** Issue Bounties Complete: processed {len(active_issues)} issues (no state changes) *****' ) except Exception as e: diff --git 
a/tests/validator/merge_predictions/__init__.py b/gittensor/validator/issue_discovery/__init__.py similarity index 100% rename from tests/validator/merge_predictions/__init__.py rename to gittensor/validator/issue_discovery/__init__.py diff --git a/gittensor/validator/issue_discovery/normalize.py b/gittensor/validator/issue_discovery/normalize.py new file mode 100644 index 00000000..cf47dd3c --- /dev/null +++ b/gittensor/validator/issue_discovery/normalize.py @@ -0,0 +1,34 @@ +# The MIT License (MIT) +# Copyright © 2025 Entrius + +from typing import Dict + +import bittensor as bt + +from gittensor.classes import MinerEvaluation + + +def normalize_issue_discovery_rewards(miner_evaluations: Dict[int, MinerEvaluation]) -> Dict[int, float]: + """Normalize issue discovery scores to sum to 1.0, preserving ratios.""" + + if not miner_evaluations: + return {} + + rewards: Dict[int, float] = {} + nonzero_count = 0 + + for uid, evaluation in miner_evaluations.items(): + rewards[uid] = evaluation.issue_discovery_score + if rewards[uid] > 0: + nonzero_count += 1 + + total = sum(rewards.values()) + if total <= 0: + bt.logging.info('Issue discovery: all scores are zero, returning empty rewards') + return rewards + + normalized = {uid: score / total for uid, score in rewards.items()} + + bt.logging.info(f'Issue discovery: normalized {nonzero_count} miners with scores > 0') + + return normalized diff --git a/gittensor/validator/issue_discovery/repo_scan.py b/gittensor/validator/issue_discovery/repo_scan.py new file mode 100644 index 00000000..2ac232de --- /dev/null +++ b/gittensor/validator/issue_discovery/repo_scan.py @@ -0,0 +1,250 @@ +# The MIT License (MIT) +# Copyright © 2025 Entrius + +"""Repo-centric closed issue scan for issue discovery. 
+ +Detects miner-authored closed issues that aren't linked to any miner's merged PR: +- Case 2: Solved by a non-miner PR → positive credibility (no score) +- Case 3: Closed without any PR → negative credibility + +Uses the validator PAT for all API calls. Rate-limited by per-repo and global caps. +""" + +import asyncio +from datetime import datetime, timedelta, timezone +from typing import Dict, List, Optional, Set, Tuple + +import bittensor as bt +import requests + +from gittensor.classes import Issue, MinerEvaluation +from gittensor.constants import ( + BASE_GITHUB_API_URL, + PR_LOOKBACK_DAYS, + REPO_SCAN_CONCURRENCY, + REPO_SCAN_GLOBAL_CAP, + REPO_SCAN_PER_REPO_CAP, +) +from gittensor.utils.github_api_tools import find_solver_from_cross_references +from gittensor.validator.utils.load_weights import RepositoryConfig + + +async def scan_closed_issues( + miner_evaluations: Dict[int, MinerEvaluation], + master_repositories: Dict[str, RepositoryConfig], + validator_pat: str, +) -> Dict[str, List[Issue]]: + """Scan tracked repos for miner-authored closed issues not linked to miner PRs. + + Args: + miner_evaluations: All miner evaluations (post-OSS scoring) + master_repositories: Tracked repositories + validator_pat: Validator's GitHub PAT for API calls + + Returns: + Dict[github_id → List[Issue]] with issues classified for credibility counting. + Issues with closed_at set = solved by non-miner PR (case 2, positive credibility). + Issues without closed_at = closed without PR (case 3, negative credibility). 
+ """ + if not validator_pat: + bt.logging.info('Issue discovery scan: no validator PAT, skipping') + return {} + + # Build miner github_id set + miner_github_ids: Set[str] = set() + for evaluation in miner_evaluations.values(): + if evaluation.github_id and evaluation.github_id != '0': + miner_github_ids.add(evaluation.github_id) + + if not miner_github_ids: + return {} + + # Build set of already-known issues from PR data (skip these in scan) + known_issues: Set[Tuple[str, int]] = set() # (repo, issue_number) + for evaluation in miner_evaluations.values(): + for pr in evaluation.merged_pull_requests + evaluation.open_pull_requests + evaluation.closed_pull_requests: + if pr.issues: + for issue in pr.issues: + known_issues.add((issue.repository_full_name, issue.number)) + + bt.logging.info( + f'Issue discovery scan: {len(miner_github_ids)} miners, ' + f'{len(known_issues)} known issues, {len(master_repositories)} repos to scan' + ) + + lookback_date = (datetime.now(timezone.utc) - timedelta(days=PR_LOOKBACK_DAYS)).strftime('%Y-%m-%dT%H:%M:%SZ') + + # Sort repos by weight descending (high-value repos first) + sorted_repos = sorted(master_repositories.items(), key=lambda x: x[1].weight, reverse=True) + + # Filter out inactive repos + active_repos = [(name, config) for name, config in sorted_repos if config.inactive_at is None] + + result: Dict[str, List[Issue]] = {} + global_lookup_count = 0 + + for i, (repo_name, repo_config) in enumerate(active_repos, 1): + if global_lookup_count >= REPO_SCAN_GLOBAL_CAP: + bt.logging.info(f'Issue discovery scan: global cap ({REPO_SCAN_GLOBAL_CAP}) reached, stopping') + break + + remaining_global = REPO_SCAN_GLOBAL_CAP - global_lookup_count + lookups_done = await _scan_repo( + repo_name, + lookback_date, + validator_pat, + miner_github_ids, + known_issues, + result, + min(REPO_SCAN_PER_REPO_CAP, remaining_global), + ) + global_lookup_count += lookups_done + + if i % 25 == 0: + bt.logging.info( + f'Issue discovery scan: 
{i}/{len(active_repos)} repos scanned, {global_lookup_count} lookups' + ) + + total_issues = sum(len(issues) for issues in result.values()) + bt.logging.info( + f'Issue discovery scan complete: {total_issues} issues found, {global_lookup_count} solver lookups used' + ) + + return result + + +async def _scan_repo( + repo_name: str, + lookback_date: str, + validator_pat: str, + miner_github_ids: Set[str], + known_issues: Set[Tuple[str, int]], + result: Dict[str, List[Issue]], + lookup_cap: int, +) -> int: + """Scan a single repo's closed issues. Returns number of solver lookups performed.""" + + closed_issues = _fetch_closed_issues(repo_name, lookback_date, validator_pat) + if not closed_issues: + return 0 + + # Filter to miner-authored issues not already known + unmatched: List[dict] = [] + for issue_raw in closed_issues: + user = issue_raw.get('user') or {} + author_id = str(user.get('id', '')) + issue_number = issue_raw.get('number') + + if not author_id or author_id not in miner_github_ids: + continue + if (repo_name, issue_number) in known_issues: + continue + # Skip pull requests (GitHub REST /issues endpoint includes PRs) + if 'pull_request' in issue_raw: + continue + + unmatched.append(issue_raw) + + if not unmatched: + return 0 + + bt.logging.info(f'{repo_name}: {len(unmatched)} unmatched miner-authored closed issues') + + # Resolve unmatched issues with solver lookups (capped) + capped = unmatched[:lookup_cap] + semaphore = asyncio.Semaphore(REPO_SCAN_CONCURRENCY) + + async def _lookup(issue_raw: dict) -> Tuple[dict, Optional[int], Optional[int]]: + async with semaphore: + solver_id, pr_number = await asyncio.to_thread( + find_solver_from_cross_references, + repo_name, + issue_raw['number'], + validator_pat, + ) + return issue_raw, solver_id, pr_number + + tasks = [_lookup(issue_raw) for issue_raw in capped] + resolved = await asyncio.gather(*tasks, return_exceptions=True) + + for item in resolved: + if isinstance(item, BaseException): + 
bt.logging.warning(f'Solver lookup error in {repo_name}: {item}') + continue + + assert isinstance(item, tuple) + issue_raw, solver_id, pr_number = item + user = issue_raw.get('user') or {} + author_github_id = str(user.get('id', '')) + + issue = Issue( + number=issue_raw['number'], + pr_number=pr_number or 0, + repository_full_name=repo_name, + title=issue_raw.get('title', ''), + created_at=_parse_iso(issue_raw.get('created_at')), + author_login=user.get('login'), + author_github_id=author_github_id, + state='CLOSED', + ) + + if solver_id is not None: + # Case 2: solved by non-miner PR → positive credibility + issue.closed_at = _parse_iso(issue_raw.get('closed_at')) + else: + # Case 3: closed without PR → negative credibility + issue.closed_at = None + + result.setdefault(author_github_id, []).append(issue) + + return len(capped) + + +def _fetch_closed_issues(repo_name: str, since: str, token: str) -> List[dict]: + """Fetch closed issues from a repo via REST API with pagination.""" + headers = {'Authorization': f'token {token}', 'Accept': 'application/vnd.github.v3+json'} + all_issues: List[dict] = [] + page = 1 + + while True: + try: + response = requests.get( + f'{BASE_GITHUB_API_URL}/repos/{repo_name}/issues', + params={'state': 'closed', 'since': since, 'per_page': 100, 'page': page}, + headers=headers, + timeout=30, + ) + if response.status_code in (404, 422): + bt.logging.debug(f'Issue scan {repo_name} page {page}: HTTP {response.status_code}') + break + if response.status_code != 200: + bt.logging.warning(f'Issue scan {repo_name} page {page}: HTTP {response.status_code}') + break + + issues = response.json() + if not issues: + break + + all_issues.extend(issues) + page += 1 + + # Safety: don't paginate forever + if page > 100: + bt.logging.warning(f'Issue scan {repo_name}: hit 100-page limit') + break + + except requests.RequestException as e: + bt.logging.warning(f'Issue scan {repo_name} page {page}: {e}') + break + + return all_issues + + +def 
_parse_iso(value: Optional[str]) -> Optional[datetime]: + """Parse an ISO 8601 timestamp string to datetime.""" + if not value: + return None + try: + return datetime.fromisoformat(value.replace('Z', '+00:00')) + except (ValueError, AttributeError): + return None diff --git a/gittensor/validator/issue_discovery/scoring.py b/gittensor/validator/issue_discovery/scoring.py new file mode 100644 index 00000000..c981397d --- /dev/null +++ b/gittensor/validator/issue_discovery/scoring.py @@ -0,0 +1,302 @@ +# The MIT License (MIT) +# Copyright © 2025 Entrius + +import math +from collections import defaultdict +from datetime import datetime, timezone +from typing import Dict, List, Optional, Tuple + +import bittensor as bt + +from gittensor.classes import Issue, MinerEvaluation +from gittensor.constants import ( + CREDIBILITY_MULLIGAN_COUNT, + ISSUE_REVIEW_CLEAN_BONUS, + ISSUE_REVIEW_PENALTY_RATE, + MAX_OPEN_ISSUE_THRESHOLD, + MIN_ISSUE_CREDIBILITY, + MIN_TOKEN_SCORE_FOR_BASE_SCORE, + MIN_VALID_SOLVED_ISSUES, + OPEN_ISSUE_SPAM_BASE_THRESHOLD, + OPEN_ISSUE_SPAM_TOKEN_SCORE_PER_SLOT, + SECONDS_PER_HOUR, + TIME_DECAY_GRACE_PERIOD_HOURS, + TIME_DECAY_MIN_MULTIPLIER, + TIME_DECAY_SIGMOID_MIDPOINT, + TIME_DECAY_SIGMOID_STEEPNESS_SCALAR, +) +from gittensor.validator.utils.load_weights import RepositoryConfig + + +def calculate_issue_review_quality_multiplier(changes_requested_count: int) -> float: + """Cliff model: clean bonus when 0 changes requested, then linear penalty. + + 0 rounds → 1.1 (clean bonus) + 1 round → 0.85 + 2 rounds → 0.70 + 7+ rounds → 0.0 + """ + if changes_requested_count == 0: + return ISSUE_REVIEW_CLEAN_BONUS + return max(0.0, 1.0 - ISSUE_REVIEW_PENALTY_RATE * changes_requested_count) + + +def calculate_open_issue_spam_multiplier(total_open_issues: int, solved_token_score: float) -> float: + """Binary penalty for excessive open issues. + + threshold = min(BASE + floor(token_score / PER_SLOT), MAX) + Returns 1.0 if under threshold, 0.0 otherwise. 
+ """ + bonus = int(solved_token_score // OPEN_ISSUE_SPAM_TOKEN_SCORE_PER_SLOT) + threshold = min(OPEN_ISSUE_SPAM_BASE_THRESHOLD + bonus, MAX_OPEN_ISSUE_THRESHOLD) + return 1.0 if total_open_issues <= threshold else 0.0 + + +def calculate_issue_credibility(solved_count: int, closed_count: int) -> float: + """Calculate issue credibility with mulligan. + + credibility = solved / (solved + max(0, closed - mulligan)) + """ + adjusted_closed = max(0, closed_count - CREDIBILITY_MULLIGAN_COUNT) + total = solved_count + adjusted_closed + if total == 0: + return 0.0 + return solved_count / total + + +def check_issue_eligibility(solved_count: int, closed_count: int) -> Tuple[bool, float, str]: + """Check if a miner passes the issue discovery eligibility gate. + + Returns (is_eligible, issue_credibility, reason). + """ + credibility = calculate_issue_credibility(solved_count, closed_count) + + if solved_count < MIN_VALID_SOLVED_ISSUES: + return False, credibility, f'{solved_count}/{MIN_VALID_SOLVED_ISSUES} valid solved issues' + + if credibility < MIN_ISSUE_CREDIBILITY: + return False, credibility, f'Issue credibility {credibility:.2f} < {MIN_ISSUE_CREDIBILITY}' + + return True, credibility, '' + + +def _calculate_time_decay_from_merge(merged_at: datetime) -> float: + """Time decay anchored to a PR's merge date. 
Same sigmoid as OSS contributions.""" + now = datetime.now(timezone.utc) + hours_since_merge = (now - merged_at).total_seconds() / SECONDS_PER_HOUR + + if hours_since_merge < TIME_DECAY_GRACE_PERIOD_HOURS: + return 1.0 + + days_since_merge = hours_since_merge / 24 + sigmoid = 1 / (1 + math.exp(TIME_DECAY_SIGMOID_STEEPNESS_SCALAR * (days_since_merge - TIME_DECAY_SIGMOID_MIDPOINT))) + return max(sigmoid, TIME_DECAY_MIN_MULTIPLIER) + + +def score_discovered_issues( + miner_evaluations: Dict[int, MinerEvaluation], + master_repositories: Dict[str, RepositoryConfig], + scan_issues: Optional[Dict[str, List[Issue]]] = None, +) -> None: + """Score issue discovery for all miners. Mutates miner_evaluations in place. + + Args: + miner_evaluations: All miner evaluations (from OSS scoring phase) + master_repositories: Repository configs for repo_weight lookup + scan_issues: Issues from repo-centric scan, keyed by github_id + """ + bt.logging.info('**Scoring issue discovery**') + + # Build github_id → uid mapping for all valid miners + github_id_to_uid: Dict[str, int] = {} + for uid, evaluation in miner_evaluations.items(): + if evaluation.github_id and evaluation.github_id != '0': + github_id_to_uid[evaluation.github_id] = uid + + if not github_id_to_uid: + bt.logging.info('No valid miners for issue discovery') + return + + # Collect all discoverer data: github_id → {solved_issues, closed_count, scored_issues} + discoverer_data: Dict[str, _DiscovererData] = defaultdict(lambda: _DiscovererData()) + + # Phase 1: Collect issues from all miners' merged PRs + _collect_issues_from_prs(miner_evaluations, github_id_to_uid, discoverer_data, master_repositories) + + # Phase 2: Merge in scan issues (from repo-centric closed scan) + if scan_issues: + _merge_scan_issues(scan_issues, github_id_to_uid, discoverer_data) + + if not discoverer_data: + bt.logging.info('No issue discovery data found') + return + + # Phase 3: For each discoverer, check eligibility and compute scores + for 
discoverer_github_id, data in discoverer_data.items(): + uid = github_id_to_uid.get(discoverer_github_id) + if uid is None: + continue + + evaluation = miner_evaluations.get(uid) + if not evaluation: + continue + + evaluation.total_solved_issues = data.solved_count + evaluation.total_valid_solved_issues = data.valid_solved_count + evaluation.total_closed_issues = data.closed_count + evaluation.issue_token_score = round(data.issue_token_score, 2) + + is_eligible, credibility, reason = check_issue_eligibility(data.valid_solved_count, data.closed_count) + evaluation.is_issue_eligible = is_eligible + evaluation.issue_credibility = credibility + + if not is_eligible: + bt.logging.info(f'UID {uid} issue discovery ineligible: {reason}') + continue + + # Calculate spam multiplier once per miner (uses sum of solving PR token_scores) + spam_mult = calculate_open_issue_spam_multiplier(evaluation.total_open_issues, data.issue_token_score) + + # Score each eligible issue + total_discovery_score = 0.0 + for issue in data.scored_issues: + issue.discovery_credibility_multiplier = round(credibility, 2) + issue.discovery_open_issue_spam_multiplier = spam_mult + issue.discovery_earned_score = round( + issue.discovery_base_score + * issue.discovery_repo_weight_multiplier + * issue.discovery_time_decay_multiplier + * issue.discovery_review_quality_multiplier + * issue.discovery_credibility_multiplier + * issue.discovery_open_issue_spam_multiplier, + 2, + ) + total_discovery_score += issue.discovery_earned_score + + evaluation.issue_discovery_score = round(total_discovery_score, 2) + + bt.logging.info( + f'UID {uid} issue discovery: {data.solved_count} solved, {data.closed_count} closed, ' + f'credibility={credibility:.2f}, score={evaluation.issue_discovery_score:.2f}' + ) + + bt.logging.info('Issue discovery scoring complete.') + + +class _DiscovererData: + """Accumulator for a single discoverer's issue data.""" + + __slots__ = ('solved_count', 'valid_solved_count', 'closed_count', 
'scored_issues', 'issue_token_score') + + def __init__(self): + self.solved_count: int = 0 + self.valid_solved_count: int = 0 # solved where solving PR has token_score >= 5 + self.closed_count: int = 0 + self.scored_issues: List[Issue] = [] + self.issue_token_score: float = 0.0 # sum of solving PR token_scores + + +def _collect_issues_from_prs( + miner_evaluations: Dict[int, MinerEvaluation], + github_id_to_uid: Dict[str, int], + discoverer_data: Dict[str, _DiscovererData], + master_repositories: Dict[str, RepositoryConfig], +) -> None: + """Collect issues from all miners' merged PRs and attribute to discoverers. + + Enforces one-issue-per-PR rule: earliest-created issue gets score, others credibility only. + """ + # Track which PRs have already awarded a discovery score (one-issue-per-PR rule) + pr_scored: set = set() # (repo, pr_number) + + for uid, evaluation in miner_evaluations.items(): + for pr in evaluation.merged_pull_requests: + if not pr.issues or not pr.merged_at: + continue + + # Sort issues by creation date (earliest first) for one-issue-per-PR selection + sorted_issues = sorted( + [i for i in pr.issues if i.author_github_id], + key=lambda i: i.created_at or datetime.max.replace(tzinfo=timezone.utc), + ) + + for issue in sorted_issues: + discoverer_id = issue.author_github_id + if not discoverer_id or discoverer_id not in github_id_to_uid: + continue + + data = discoverer_data[discoverer_id] + + # Classify: is this issue solved (merged PR closed it)? 
+ is_solved = issue.state == 'CLOSED' and pr.merged_at is not None + + if is_solved: + data.solved_count += 1 + else: + data.closed_count += 1 + continue # No score for unsolved issues + + # Anti-gaming: post-merge edit detection + if issue.updated_at and pr.merged_at and issue.updated_at > pr.merged_at: + bt.logging.info( + f'Issue #{issue.number} edited after PR #{pr.number} merge — 0 score, counts as closed' + ) + data.solved_count -= 1 + data.closed_count += 1 + continue + + # Count valid solved (PR quality gate only — independent of same-account/one-per-PR) + if pr.token_score >= MIN_TOKEN_SCORE_FOR_BASE_SCORE: + data.valid_solved_count += 1 + + # Same-account: discoverer == solver → 0 score but credibility counts + if discoverer_id == pr.github_id: + continue + + # One-issue-per-PR: only the first (earliest-created) issue gets scored + pr_key = (pr.repository_full_name, pr.number) + if pr_key in pr_scored: + continue # Credibility already counted above, skip scoring + pr_scored.add(pr_key) + + # Check solving PR quality gate for scoring + if pr.token_score < MIN_TOKEN_SCORE_FOR_BASE_SCORE: + continue + + # Populate discovery scoring fields + repo_config = master_repositories.get(pr.repository_full_name) + issue.discovery_base_score = pr.base_score + issue.discovery_repo_weight_multiplier = round(repo_config.weight if repo_config else 0.01, 2) + issue.discovery_time_decay_multiplier = round(_calculate_time_decay_from_merge(pr.merged_at), 2) + issue.discovery_review_quality_multiplier = round( + calculate_issue_review_quality_multiplier(pr.changes_requested_count), 2 + ) + # credibility and spam multipliers applied in the main loop after eligibility check + + data.scored_issues.append(issue) + data.issue_token_score += pr.token_score + + +def _merge_scan_issues( + scan_issues: Dict[str, List[Issue]], + github_id_to_uid: Dict[str, int], + discoverer_data: Dict[str, _DiscovererData], +) -> None: + """Merge repo-scan results into discoverer data. 
+ + Scan issues are pre-classified as solved (case 2) or closed (case 3). + They only contribute to credibility — no discovery score since the solving PR + was by a non-miner (or no solver found). + """ + for github_id, issues in scan_issues.items(): + if github_id not in github_id_to_uid: + continue + + data = discoverer_data[github_id] + for issue in issues: + if issue.state == 'CLOSED' and issue.closed_at: + # Case 2: solved by non-miner PR → positive credibility + data.solved_count += 1 + else: + # Case 3: closed without PR → negative credibility + data.closed_count += 1 diff --git a/gittensor/validator/merge_predictions/__init__.py b/gittensor/validator/merge_predictions/__init__.py deleted file mode 100644 index 11d1b6b6..00000000 --- a/gittensor/validator/merge_predictions/__init__.py +++ /dev/null @@ -1 +0,0 @@ -# Entrius 2025 diff --git a/gittensor/validator/merge_predictions/checks.py b/gittensor/validator/merge_predictions/checks.py deleted file mode 100644 index 3ae283fa..00000000 --- a/gittensor/validator/merge_predictions/checks.py +++ /dev/null @@ -1,69 +0,0 @@ -# Entrius 2025 - -"""External state checks for predictions (on-chain + GitHub).""" - -from typing import TYPE_CHECKING - -import bittensor as bt - -from gittensor.validator.issue_competitions.contract_client import ( - ContractIssue, - IssueCompetitionContractClient, - IssueStatus, -) -from gittensor.validator.utils.config import GITTENSOR_VALIDATOR_PAT - -if TYPE_CHECKING: - from neurons.validator import Validator - - -def check_issue_active(validator: 'Validator', issue_id: int) -> tuple[str | None, ContractIssue | None]: - """Verify issue is in a predictable state on-chain. 
Returns (error, issue).""" - try: - from gittensor.validator.utils.issue_competitions import get_contract_address - - contract_addr = get_contract_address() - if not contract_addr: - return 'Issue bounties not configured on this validator', None - - client = IssueCompetitionContractClient( - contract_address=contract_addr, - subtensor=validator.subtensor, - ) - issue = client.get_issue(issue_id) - if issue is None: - return f'Issue {issue_id} not found on-chain', None - if issue.status not in (IssueStatus.REGISTERED, IssueStatus.ACTIVE): - return f'Issue {issue_id} is not in a predictable state (status: {issue.status.name})', None - except Exception as e: - bt.logging.warning(f'Failed to check issue state for {issue_id}: {e}') - return f'Could not verify issue state: {e}', None - - return None, issue - - -def check_prs_open(repository: str, issue_number: int, predictions: dict[int, float]) -> tuple[str | None, set[int]]: - """Verify all predicted PRs are still open on GitHub. - - Returns (error, open_pr_numbers). open_pr_numbers is the full set of open PRs - for this issue, used downstream to exclude closed-PR predictions from probability totals. 
- """ - if not GITTENSOR_VALIDATOR_PAT: - bt.logging.warning('No GITTENSOR_VALIDATOR_PAT, skipping PR open check') - return None, set() - - try: - from gittensor.utils.github_api_tools import find_prs_for_issue - - open_prs = find_prs_for_issue(repository, issue_number, open_only=True, token=GITTENSOR_VALIDATOR_PAT) - open_pr_numbers = {pr.get('number') if isinstance(pr, dict) else getattr(pr, 'number', None) for pr in open_prs} - - for pr_number in predictions: - if pr_number not in open_pr_numbers: - return f'PR #{pr_number} is not open on {repository}', open_pr_numbers - - except Exception as e: - bt.logging.warning(f'Failed to check PR state for {repository}: {e}') - return None, set() - - return None, open_pr_numbers diff --git a/gittensor/validator/merge_predictions/handler.py b/gittensor/validator/merge_predictions/handler.py deleted file mode 100644 index a9da3536..00000000 --- a/gittensor/validator/merge_predictions/handler.py +++ /dev/null @@ -1,145 +0,0 @@ -# Entrius 2025 - -"""Axon handler for PredictionSynapse. - -Attached to the validator's axon via functools.partial in Validator.__init__(). -Runs in the axon's FastAPI thread pool — fully parallel to the main scoring loop. -""" - -from datetime import datetime, timezone -from typing import TYPE_CHECKING, Tuple - -import bittensor as bt - -from gittensor.synapses import PredictionSynapse -from gittensor.validator.merge_predictions.checks import check_issue_active, check_prs_open -from gittensor.validator.merge_predictions.validation import validate_prediction_values -from gittensor.validator.utils.github_validation import validate_github_credentials - -if TYPE_CHECKING: - from neurons.validator import Validator - - -async def handle_prediction(validator: 'Validator', synapse: PredictionSynapse) -> PredictionSynapse: - """Validate and store a miner's prediction. 
Runs in axon thread pool.""" - - mp_storage = validator.mp_storage - miner_hotkey = synapse.dendrite.hotkey - uid = validator.metagraph.hotkeys.index(miner_hotkey) - - def _reject(reason: str) -> PredictionSynapse: - synapse.accepted = False - synapse.rejection_reason = reason - bt.logging.warning( - f'Merge prediction rejected — UID: {uid}, ID: {synapse.issue_id}, ' - f'repo: {synapse.repository}, PRs: {list(synapse.predictions.keys())}, ' - f'reason: {reason}' - ) - return synapse - - # 1) Verify issue is in a predictable state on-chain - error, issue = check_issue_active(validator, synapse.issue_id) - if error: - return _reject(error) - - # 2) Verify predicted PRs are still open on GitHub - error, open_pr_numbers = check_prs_open(synapse.repository, issue.issue_number, synapse.predictions) - if error: - return _reject(error) - - # 3) Validate GitHub identity + account age - github_id, error = validate_github_credentials(uid, synapse.github_access_token) - if error: - return _reject(error) - - # 4) Validate prediction values - error = validate_prediction_values(synapse.predictions) - if error: - return _reject(error) - - # 5) Per-PR cooldown check - submitted_prs = list(synapse.predictions.items()) - for pr_number, pred_value in submitted_prs: - cooldown_remaining = mp_storage.check_cooldown(uid, miner_hotkey, synapse.issue_id, pr_number) - if cooldown_remaining is not None: - return _reject(f'PR #{pr_number} on cooldown ({cooldown_remaining:.0f}s remaining)') - - # 6) Total probability check — exclude all PRs in this submission (they will be overwritten) - submitted_pr_numbers = set(synapse.predictions.keys()) - existing_total = mp_storage.get_miner_total_for_issue( - uid, - miner_hotkey, - synapse.issue_id, - exclude_prs=submitted_pr_numbers, - only_prs=open_pr_numbers, - ) - new_total = sum(synapse.predictions.values()) - if existing_total + new_total > 1.0: - return _reject( - f'Total probability exceeds 1.0 — other open PRs: {existing_total:.4f} + 
submission: {new_total:.4f} = {existing_total + new_total:.4f}' - ) - - # 7) Compute variance at time of submission and store all predictions - variance = mp_storage.compute_current_variance(synapse.issue_id) - - now = datetime.now(timezone.utc).isoformat() - - for pr_number, pred_value in submitted_prs: - mp_storage.store_prediction( - uid=uid, - hotkey=miner_hotkey, - github_id=github_id, - issue_id=synapse.issue_id, - repository=synapse.repository, - issue_number=issue.issue_number, - pr_number=pr_number, - prediction=pred_value, - variance_at_prediction=variance, - ) - - # Save to Postgres if enabled - if validator.db_storage: - validator.db_storage.store_prediction( - uid=uid, - hotkey=miner_hotkey, - github_id=github_id, - issue_id=synapse.issue_id, - repository=synapse.repository, - issue_number=issue.issue_number, - pr_number=pr_number, - prediction=pred_value, - variance_at_prediction=variance, - timestamp=now, - ) - - bt.logging.success( - f'Merge prediction stored — UID: {uid}, ID: {synapse.issue_id}, ' - f'issue: #{issue.issue_number}, repo: {synapse.repository}, ' - f'PRs: {[pr for pr, _ in submitted_prs]}, github_id: {github_id}' - ) - - synapse.accepted = True - return synapse - - -async def blacklist_prediction(validator: 'Validator', synapse: PredictionSynapse) -> Tuple[bool, str]: - """Reject synapses from unregistered hotkeys.""" - if synapse.dendrite is None or synapse.dendrite.hotkey is None: - return True, 'Missing dendrite or hotkey' - - if synapse.dendrite.hotkey not in validator.metagraph.hotkeys: - return True, 'Unregistered hotkey' - - return False, 'Hotkey recognized' - - -async def priority_prediction(validator: 'Validator', synapse: PredictionSynapse) -> float: - """Priority by stake — higher stake = processed first.""" - if synapse.dendrite is None or synapse.dendrite.hotkey is None: - return 0.0 - - try: - uid = validator.metagraph.hotkeys.index(synapse.dendrite.hotkey) - return float(validator.metagraph.S[uid]) - except ValueError: 
- return 0.0 diff --git a/gittensor/validator/merge_predictions/mp_storage.py b/gittensor/validator/merge_predictions/mp_storage.py deleted file mode 100644 index 446a18ac..00000000 --- a/gittensor/validator/merge_predictions/mp_storage.py +++ /dev/null @@ -1,293 +0,0 @@ -# Entrius 2025 - -"""SQLite storage for merge predictions. - -Each validator stores predictions independently. One row per miner per PR. -Thread-safe via WAL mode — the axon handler writes while the scoring loop reads. -""" - -import sqlite3 -import threading -from datetime import datetime, timezone -from pathlib import Path -from typing import Optional - -import bittensor as bt - -from gittensor.constants import PREDICTIONS_COOLDOWN_SECONDS -from gittensor.validator.utils.config import MP_DB_PATH - - -class PredictionStorage: - """Thread-safe SQLite storage for merge predictions.""" - - def __init__(self, db_path: Optional[str] = None): - self._db_path = db_path or MP_DB_PATH - Path(self._db_path).parent.mkdir(parents=True, exist_ok=True) - self._lock = threading.Lock() - self._init_db() - - def _get_connection(self) -> sqlite3.Connection: - conn = sqlite3.connect(self._db_path) - conn.execute('PRAGMA journal_mode=WAL') - conn.execute('PRAGMA busy_timeout=5000') - conn.row_factory = sqlite3.Row - return conn - - def _init_db(self): - with self._get_connection() as conn: - conn.execute(""" - CREATE TABLE IF NOT EXISTS predictions ( - uid INTEGER NOT NULL, - hotkey TEXT NOT NULL, - github_id TEXT NOT NULL, - issue_id INTEGER NOT NULL, - repository TEXT NOT NULL, - issue_number INTEGER NOT NULL, - pr_number INTEGER NOT NULL, - prediction REAL NOT NULL, - timestamp TEXT NOT NULL, - variance_at_prediction REAL, - PRIMARY KEY (uid, hotkey, github_id, issue_id, pr_number) - ) - """) - conn.execute(""" - CREATE TABLE IF NOT EXISTS prediction_emas ( - github_id TEXT NOT NULL, - ema_score REAL NOT NULL DEFAULT 0.0, - rounds INTEGER NOT NULL DEFAULT 0, - updated_at TEXT NOT NULL, - PRIMARY KEY (github_id) - 
) - """) - conn.execute(""" - CREATE TABLE IF NOT EXISTS settled_issues ( - issue_id INTEGER NOT NULL PRIMARY KEY, - outcome TEXT NOT NULL, - merged_pr_number INTEGER, - settled_at TEXT NOT NULL - ) - """) - - # Migrations - try: - conn.execute('ALTER TABLE predictions ADD COLUMN issue_number INTEGER NOT NULL DEFAULT 0') - except sqlite3.OperationalError: - pass - - conn.execute(""" - CREATE INDEX IF NOT EXISTS idx_predictions_issue - ON predictions (issue_id) - """) - conn.execute(""" - CREATE INDEX IF NOT EXISTS idx_predictions_miner_issue - ON predictions (uid, hotkey, issue_id) - """) - conn.commit() - bt.logging.info(f'Prediction storage initialized at {self._db_path}') - - def check_cooldown(self, uid: int, hotkey: str, issue_id: int, pr_number: int) -> Optional[float]: - """Return seconds remaining on cooldown, or None if no cooldown active.""" - with self._get_connection() as conn: - row = conn.execute( - 'SELECT timestamp FROM predictions WHERE uid = ? AND hotkey = ? AND issue_id = ? AND pr_number = ?', - (uid, hotkey, issue_id, pr_number), - ).fetchone() - - if row is None: - return None - - last_ts = datetime.fromisoformat(row['timestamp']) - elapsed = (datetime.now(timezone.utc) - last_ts).total_seconds() - remaining = PREDICTIONS_COOLDOWN_SECONDS - elapsed - return remaining if remaining > 0 else None - - def get_miner_total_for_issue( - self, - uid: int, - hotkey: str, - issue_id: int, - exclude_prs: Optional[set[int]] = None, - only_prs: Optional[set[int]] = None, - ) -> float: - """Get sum of a miner's existing predictions for an issue. - - Args: - exclude_prs: Exclude these PRs from the sum (for batch updates). - only_prs: If provided, only count predictions on these PRs (open PRs). - Predictions on closed PRs are excluded from the total, - freeing that probability for reallocation. - """ - with self._get_connection() as conn: - query = 'SELECT COALESCE(SUM(prediction), 0.0) as total FROM predictions WHERE uid = ? AND hotkey = ? AND issue_id = ?' 
- params: list = [uid, hotkey, issue_id] - - if exclude_prs: - placeholders = ','.join('?' for _ in exclude_prs) - query += f' AND pr_number NOT IN ({placeholders})' - params.extend(exclude_prs) - - if only_prs: - placeholders = ','.join('?' for _ in only_prs) - query += f' AND pr_number IN ({placeholders})' - params.extend(only_prs) - - row = conn.execute(query, params).fetchone() - return float(row['total']) - - def compute_current_variance(self, issue_id: int) -> float: - """Compute avg variance across all PRs for an issue (used for consensus bonus).""" - with self._get_connection() as conn: - rows = conn.execute( - """ - SELECT pr_number, AVG(prediction) as mean_pred, - AVG(prediction * prediction) - AVG(prediction) * AVG(prediction) as var_pred - FROM predictions - WHERE issue_id = ? - GROUP BY pr_number - """, - (issue_id,), - ).fetchall() - - if not rows: - return 0.0 - - variances = [max(0.0, float(r['var_pred'])) for r in rows] - return sum(variances) / len(variances) - - def store_prediction( - self, - uid: int, - hotkey: str, - github_id: str, - issue_id: int, - repository: str, - issue_number: int, - pr_number: int, - prediction: float, - variance_at_prediction: float, - ) -> None: - """Insert or replace a single PR prediction. Resets timestamp on that PR only.""" - now = datetime.now(timezone.utc).isoformat() - - with self._lock: - with self._get_connection() as conn: - conn.execute( - """ - INSERT INTO predictions (uid, hotkey, github_id, issue_id, repository, issue_number, pr_number, prediction, timestamp, variance_at_prediction) - VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?) 
- ON CONFLICT (uid, hotkey, github_id, issue_id, pr_number) - DO UPDATE SET prediction = excluded.prediction, - timestamp = excluded.timestamp, - variance_at_prediction = excluded.variance_at_prediction - """, - ( - uid, - hotkey, - github_id, - issue_id, - repository, - issue_number, - pr_number, - prediction, - now, - variance_at_prediction, - ), - ) - conn.commit() - - def get_peak_variance_time(self, issue_id: int) -> Optional[datetime]: - """Get the timestamp when variance was highest for an issue. - - Returns the prediction timestamp with the max variance_at_prediction, - or None if no predictions exist. - """ - with self._get_connection() as conn: - row = conn.execute( - 'SELECT timestamp FROM predictions WHERE issue_id = ? ORDER BY variance_at_prediction DESC LIMIT 1', - (issue_id,), - ).fetchone() - if row is None: - return None - return datetime.fromisoformat(row['timestamp']) - - def get_predictions_for_issue(self, issue_id: int) -> list[dict]: - """Get all predictions for an issue (used at settlement).""" - with self._get_connection() as conn: - rows = conn.execute( - 'SELECT * FROM predictions WHERE issue_id = ? ORDER BY uid, pr_number', - (issue_id,), - ).fetchall() - return [dict(r) for r in rows] - - def delete_predictions_for_issue(self, issue_id: int) -> int: - """Delete all predictions for a settled/cancelled issue. 
Returns rows deleted.""" - with self._lock: - with self._get_connection() as conn: - cursor = conn.execute('DELETE FROM predictions WHERE issue_id = ?', (issue_id,)) - conn.commit() - return cursor.rowcount - - # ========================================================================= - # Settlement tracking - # ========================================================================= - - def is_issue_settled(self, issue_id: int) -> bool: - """Check if an issue has already been settled.""" - with self._get_connection() as conn: - row = conn.execute( - 'SELECT 1 FROM settled_issues WHERE issue_id = ?', - (issue_id,), - ).fetchone() - return row is not None - - def mark_issue_settled(self, issue_id: int, outcome: str, merged_pr_number: int | None = None) -> None: - """Record that an issue has been settled. Idempotent (INSERT OR IGNORE).""" - now = datetime.now(timezone.utc).isoformat() - with self._lock: - with self._get_connection() as conn: - conn.execute( - 'INSERT OR IGNORE INTO settled_issues (issue_id, outcome, merged_pr_number, settled_at) ' - 'VALUES (?, ?, ?, ?)', - (issue_id, outcome, merged_pr_number, now), - ) - conn.commit() - - # ========================================================================= - # EMA tracking - # ========================================================================= - - def get_ema(self, github_id: str) -> float: - """Get a miner's current prediction EMA score. 
Returns 0.0 if no record.""" - with self._get_connection() as conn: - row = conn.execute( - 'SELECT ema_score FROM prediction_emas WHERE github_id = ?', - (github_id,), - ).fetchone() - return float(row['ema_score']) if row else 0.0 - - def update_ema(self, github_id: str, new_ema: float) -> None: - """Upsert a miner's prediction EMA score, keyed by github_id.""" - now = datetime.now(timezone.utc).isoformat() - with self._lock: - with self._get_connection() as conn: - conn.execute( - """ - INSERT INTO prediction_emas (github_id, ema_score, rounds, updated_at) - VALUES (?, ?, 1, ?) - ON CONFLICT (github_id) - DO UPDATE SET ema_score = excluded.ema_score, - rounds = prediction_emas.rounds + 1, - updated_at = excluded.updated_at - """, - (github_id, new_ema, now), - ) - conn.commit() - - def get_all_emas(self) -> list[dict]: - """Get all miner EMA scores. Used at weight-setting time for blending.""" - with self._get_connection() as conn: - rows = conn.execute( - 'SELECT github_id, ema_score, rounds, updated_at FROM prediction_emas ORDER BY github_id', - ).fetchall() - return [dict(r) for r in rows] diff --git a/gittensor/validator/merge_predictions/scoring.py b/gittensor/validator/merge_predictions/scoring.py deleted file mode 100644 index eb8a7c7e..00000000 --- a/gittensor/validator/merge_predictions/scoring.py +++ /dev/null @@ -1,245 +0,0 @@ -# Entrius 2025 - -"""Pure scoring functions for merge predictions. - -All functions are stateless — data in, scores out. No DB queries or side effects. 
- -Formula per PR: - pr_score = correctness³ * (1 + timeliness_bonus + consensus_bonus + order_bonus) - -Where: - - correctness: log-loss derived (prediction for merged, 1-prediction for non-merged), cubed - - timeliness_bonus: 0.0-0.75, rewards early predictions (gated: raw correctness >= 0.66) - - consensus_bonus: 0.0-0.25, rewards pre-convergence predictions (gated: raw correctness >= 0.66) - - order_bonus: 0.0-0.75, rewards first correct predictor, merged PR only (gated: raw correctness >= 0.66) - -All bonuses require raw correctness >= ORDER_CORRECTNESS_THRESHOLD to activate. -Issue score: weighted_mean × coverage, where merged PR gets weight=N, non-merged weight=1, - and coverage = prs_predicted / total_prs. -""" - -from dataclasses import dataclass -from datetime import datetime - -from gittensor.constants import ( - PREDICTIONS_CORRECTNESS_EXPONENT, - PREDICTIONS_EMA_BETA, - PREDICTIONS_MAX_CONSENSUS_BONUS, - PREDICTIONS_MAX_ORDER_BONUS, - PREDICTIONS_MAX_TIMELINESS_BONUS, - PREDICTIONS_ORDER_CORRECTNESS_THRESHOLD, - PREDICTIONS_TIMELINESS_EXPONENT, -) - -# ============================================================================= -# Data structures -# ============================================================================= - - -@dataclass -class PrPrediction: - pr_number: int - prediction: float # 0.0-1.0 - prediction_time: datetime # when this PR prediction was submitted - variance_at_prediction: float - - -@dataclass -class PrOutcome: - pr_number: int - outcome: float # 1.0 for merged PR, 0.0 for all others - pr_open_time: datetime # when this PR was opened on GitHub - - -@dataclass -class PrScore: - pr_number: int - correctness: float - timeliness_bonus: float - consensus_bonus: float - order_bonus: float - score: float # correctness³ * (1 + timeliness + consensus + order) - - -@dataclass -class MinerIssueScore: - uid: int - pr_scores: list[PrScore] - issue_score: float # weighted mean (merged PR weight=N, non-merged weight=1) - - -# 
============================================================================= -# Scoring functions -# ============================================================================= - - -def raw_correctness(prediction: float, outcome: float) -> float: - """Log-loss derived correctness before exponentiation. - - Merged PR (outcome=1.0): score = prediction. - Non-merged PR (outcome=0.0): score = 1 - prediction. - """ - return prediction if outcome == 1.0 else 1.0 - prediction - - -def score_correctness(prediction: float, outcome: float) -> float: - """Cubed correctness. Heavily punishes inaccuracy.""" - return raw_correctness(prediction, outcome) ** PREDICTIONS_CORRECTNESS_EXPONENT - - -def score_timeliness(prediction_time: datetime, settlement_time: datetime, pr_open_time: datetime) -> float: - """Bounded timeliness bonus (0.0 to MAX_TIMELINESS_BONUS). - - Rewards earlier predictions within the PR's lifetime window. - """ - total_window = (settlement_time - pr_open_time).total_seconds() - if total_window <= 0: - return 0.0 - - time_remaining = (settlement_time - prediction_time).total_seconds() - ratio = max(0.0, min(1.0, time_remaining / total_window)) - return PREDICTIONS_MAX_TIMELINESS_BONUS * ratio**PREDICTIONS_TIMELINESS_EXPONENT - - -def score_consensus_bonus(prediction_time: datetime, peak_variance_time: datetime, settlement_time: datetime) -> float: - """Bounded consensus bonus (0.0 to MAX_CONSENSUS_BONUS). - - Rewards predictions made before or near peak disagreement. - Pre-peak: full bonus. Post-peak: linearly decays to 0 at settlement. 
- """ - if prediction_time <= peak_variance_time: - return PREDICTIONS_MAX_CONSENSUS_BONUS - - remaining_window = (settlement_time - peak_variance_time).total_seconds() - if remaining_window <= 0: - return 0.0 - - time_after_peak = (prediction_time - peak_variance_time).total_seconds() - ratio = max(0.0, min(1.0, time_after_peak / remaining_window)) - return PREDICTIONS_MAX_CONSENSUS_BONUS * (1.0 - ratio) - - -def score_order_bonus(rank: int) -> float: - """Order bonus for the merged PR only. bonus = max / rank. - - Rank 0 means unqualified (below correctness threshold). Returns 0.0. - """ - if rank <= 0: - return 0.0 - return PREDICTIONS_MAX_ORDER_BONUS / rank - - -# ============================================================================= -# Order ranking (cross-miner) -# ============================================================================= - - -def compute_merged_pr_order_ranks( - all_miners_predictions: dict[int, list[PrPrediction]], - merged_pr_number: int, -) -> dict[int, int]: - """Rank miners by who first correctly predicted the merged PR. - - Only miners with raw correctness >= threshold qualify. - Ranked by prediction_time (earliest first). - - Returns: - dict mapping uid -> rank (1-indexed). Unqualified miners are absent. 
- """ - qualifying = [] - - for uid, predictions in all_miners_predictions.items(): - for pred in predictions: - if pred.pr_number != merged_pr_number: - continue - rc = raw_correctness(pred.prediction, 1.0) - if rc >= PREDICTIONS_ORDER_CORRECTNESS_THRESHOLD: - qualifying.append((uid, pred.prediction_time)) - break - - qualifying.sort(key=lambda x: x[1]) - - return {uid: rank for rank, (uid, _) in enumerate(qualifying, start=1)} - - -# ============================================================================= -# Aggregation -# ============================================================================= - - -def score_miner_issue( - uid: int, - predictions: list[PrPrediction], - outcomes: list[PrOutcome], - settlement_time: datetime, - peak_variance_time: datetime, - merged_pr_order_ranks: dict[int, int], -) -> MinerIssueScore: - """Score a single miner's predictions for one issue. - - Fills unpredicted PRs, scores each PR, then computes a weighted issue score - where the merged PR gets weight=N (total PRs) and non-merged get weight=1. 
- """ - outcome_map = {o.pr_number: o for o in outcomes} - merged_prs = {o.pr_number for o in outcomes if o.outcome == 1.0} - n_prs = len(outcomes) - - miner_rank = merged_pr_order_ranks.get(uid, 0) - - pr_scores = [] - for pred in predictions: - outcome = outcome_map.get(pred.pr_number) - if outcome is None: - continue - - rc = raw_correctness(pred.prediction, outcome.outcome) - correctness = rc**PREDICTIONS_CORRECTNESS_EXPONENT - qualifies_for_bonus = rc >= PREDICTIONS_ORDER_CORRECTNESS_THRESHOLD - - timeliness_bonus = ( - score_timeliness(pred.prediction_time, settlement_time, outcome.pr_open_time) - if qualifies_for_bonus - else 0.0 - ) - consensus_bonus = ( - score_consensus_bonus(pred.prediction_time, peak_variance_time, settlement_time) - if qualifies_for_bonus - else 0.0 - ) - - is_merged = pred.pr_number in merged_prs - order_bonus = score_order_bonus(miner_rank) if is_merged and qualifies_for_bonus else 0.0 - - score = correctness * (1.0 + timeliness_bonus + consensus_bonus + order_bonus) - pr_scores.append( - PrScore( - pr_number=pred.pr_number, - correctness=correctness, - timeliness_bonus=timeliness_bonus, - consensus_bonus=consensus_bonus, - order_bonus=order_bonus, - score=score, - ) - ) - - # Weighted mean: merged PR gets weight=N, non-merged get weight=1 - total_weight = 0.0 - weighted_sum = 0.0 - for ps in pr_scores: - weight = n_prs if ps.pr_number in merged_prs else 1.0 - weighted_sum += ps.score * weight - total_weight += weight - - raw_issue_score = weighted_sum / total_weight if total_weight > 0 else 0.0 - - # Coverage multiplier: reward miners who reviewed the full field - prs_predicted = len(pr_scores) - coverage = prs_predicted / n_prs if n_prs > 0 else 0.0 - issue_score = raw_issue_score * coverage - - return MinerIssueScore(uid=uid, pr_scores=pr_scores, issue_score=issue_score) - - -def update_ema(current_round_score: float, previous_ema: float) -> float: - """Exponential moving average for a miner's prediction track record.""" - return 
PREDICTIONS_EMA_BETA * current_round_score + (1.0 - PREDICTIONS_EMA_BETA) * previous_ema diff --git a/gittensor/validator/merge_predictions/settlement.py b/gittensor/validator/merge_predictions/settlement.py deleted file mode 100644 index 4b7e76cc..00000000 --- a/gittensor/validator/merge_predictions/settlement.py +++ /dev/null @@ -1,411 +0,0 @@ -# Entrius 2025 - -"""Settlement orchestrator for merge predictions. - -Queries COMPLETED and CANCELLED issues from the smart contract and scores -miners' predictions, updating their EMA. - -- COMPLETED issues: scored normally, predictions deleted after settlement. -- CANCELLED issues with a merged PR: scored (solver wasn't an eligible miner, - but the PR was still merged — predictions are still valid). -- CANCELLED issues without a merged PR: voided — predictions deleted, no EMA impact. - -The `settled_issues` table is the durable settled marker — once an issue is -recorded there, subsequent passes skip it regardless of prediction state. -""" - -from collections import defaultdict -from datetime import datetime, timezone -from typing import TYPE_CHECKING, Dict - -import bittensor as bt - -from gittensor.classes import MinerEvaluation -from gittensor.utils.github_api_tools import check_github_issue_closed, get_pr_open_times -from gittensor.validator.issue_competitions.contract_client import IssueCompetitionContractClient, IssueStatus -from gittensor.validator.merge_predictions.scoring import ( - MinerIssueScore, - PrOutcome, - PrPrediction, - compute_merged_pr_order_ranks, - score_miner_issue, - update_ema, -) -from gittensor.validator.utils.config import GITTENSOR_VALIDATOR_PAT -from gittensor.validator.utils.issue_competitions import get_contract_address - -if TYPE_CHECKING: - from neurons.validator import Validator - - -# ============================================================================= -# Helper functions -# ============================================================================= - - -def 
db_storage_void(validator: 'Validator', issue_id: int) -> None: - """Best-effort mirror of a voided issue to Postgres.""" - if validator.db_storage: - now = datetime.now(timezone.utc).isoformat() - validator.db_storage.store_settled_issue(issue_id, 'voided', None, now) - - -def _build_outcomes( - predictions: list[dict], - merged_pr_number: int, - repository: str, - pr_open_times: dict[int, datetime], - settlement_time: datetime, -) -> list[PrOutcome]: - """Build PrOutcome list from raw prediction rows + merged PR number.""" - predicted_pr_numbers = list({p['pr_number'] for p in predictions}) - - if merged_pr_number not in predicted_pr_numbers: - predicted_pr_numbers.append(merged_pr_number) - - outcomes: list[PrOutcome] = [] - for pr_num in predicted_pr_numbers: - outcome_value = 1.0 if pr_num == merged_pr_number else 0.0 - pr_open_time = pr_open_times.get(pr_num) - if pr_open_time is None: - pr_pred_times = [datetime.fromisoformat(p['timestamp']) for p in predictions if p['pr_number'] == pr_num] - pr_open_time = min(pr_pred_times) if pr_pred_times else settlement_time - - outcomes.append(PrOutcome(pr_number=pr_num, outcome=outcome_value, pr_open_time=pr_open_time)) - - return outcomes - - -def _group_miner_predictions( - predictions: list[dict], - metagraph, -) -> tuple[dict[int, list[PrPrediction]], dict[int, str]]: - """Filter deregistered miners and group predictions by UID. 
- - Returns: - (all_miners_predictions, uid_to_github_id) - """ - all_miners_predictions: dict[int, list[PrPrediction]] = defaultdict(list) - uid_to_github_id: dict[int, str] = {} - - for p in predictions: - uid = p['uid'] - if uid >= len(metagraph.hotkeys) or metagraph.hotkeys[uid] != p['hotkey']: - bt.logging.debug(f'Merge predictions: skipping deregistered miner uid={uid} hotkey={p["hotkey"][:12]}...') - continue - - all_miners_predictions[uid].append( - PrPrediction( - pr_number=p['pr_number'], - prediction=p['prediction'], - prediction_time=datetime.fromisoformat(p['timestamp']), - variance_at_prediction=p.get('variance_at_prediction', 0.0) or 0.0, - ) - ) - uid_to_github_id[uid] = p['github_id'] - - return dict(all_miners_predictions), uid_to_github_id - - -def _score_and_update_emas( - validator: 'Validator', - miners_preds: dict[int, list[PrPrediction]], - uid_to_github_id: dict[int, str], - outcomes: list[PrOutcome], - settlement_time: datetime, - peak_variance_time: datetime, - order_ranks: dict[int, int], -) -> list[dict]: - """Score each miner and update EMA. 
Returns list of result dicts for logging.""" - mp_storage = validator.mp_storage - results = [] - - for uid, miner_preds in miners_preds.items(): - github_id = uid_to_github_id.get(uid) - if not github_id: - bt.logging.warning(f'Merge predictions: no github_id for uid={uid}, skipping EMA update') - continue - - issue_score: MinerIssueScore = score_miner_issue( - uid=uid, - predictions=miner_preds, - outcomes=outcomes, - settlement_time=settlement_time, - peak_variance_time=peak_variance_time, - merged_pr_order_ranks=order_ranks, - ) - - previous_ema = mp_storage.get_ema(github_id) - new_ema = update_ema(issue_score.issue_score, previous_ema) - mp_storage.update_ema(github_id, new_ema) - - # Mirror EMA to Postgres - if validator.db_storage: - now = datetime.now(timezone.utc).isoformat() - validator.db_storage.store_prediction_ema(github_id, new_ema, 1, now) - - results.append( - { - 'uid': uid, - 'github_id': github_id, - 'score': issue_score.issue_score, - 'previous_ema': previous_ema, - 'new_ema': new_ema, - 'rank': order_ranks.get(uid, 0), - 'prs_predicted': len(miner_preds), - } - ) - - return results - - -def _log_issue_settlement( - issue_label: str, - merged_pr_number: int, - all_miners_predictions: dict[int, list[PrPrediction]], - uid_to_github_id: dict[int, str], - miner_results: list[dict], -) -> None: - """Rich per-issue logging block.""" - # Submission summary - total_submissions = sum(len(preds) for preds in all_miners_predictions.values()) - bt.logging.info(f' {total_submissions} submissions from {len(all_miners_predictions)} miners:') - - for uid, preds in all_miners_predictions.items(): - gh_id = uid_to_github_id.get(uid, '?') - merged_preds = [p for p in preds if p.pr_number == merged_pr_number] - avg_on_merged = sum(p.prediction for p in merged_preds) / len(merged_preds) if merged_preds else 0.0 - bt.logging.info( - f' UID: {uid} (gh: {gh_id}) PRs predicted: {len(preds)} ' - f'avg on merged PR #{merged_pr_number}: {avg_on_merged:.2f}' - ) - - # 
Scoring results - if miner_results: - sorted_results = sorted(miner_results, key=lambda r: r['score'], reverse=True) - bt.logging.info(' Scoring results:') - for r in sorted_results: - rank_str = f'rank #{r["rank"]}' if r['rank'] > 0 else 'unranked' - marker = '\u2605' if r == sorted_results[0] else ' ' - bt.logging.info( - f' {marker} UID: {r["uid"]} score: {r["score"]:.4f} ' - f'ema: {r["previous_ema"]:.4f} \u2192 {r["new_ema"]:.4f} ({rank_str})' - ) - - -def _settle_issue( - validator: 'Validator', - issue, - issue_label: str, - merged_pr_number: int, - settlement_reason: str = 'completed', -) -> bool: - """Full settlement pipeline for one issue. - - Loads predictions, builds outcomes, scores, updates EMAs, logs, deletes. - Shared by both COMPLETED and CANCELLED-with-merge paths. - - Returns True if settled successfully. - """ - mp_storage = validator.mp_storage - - predictions = mp_storage.get_predictions_for_issue(issue.id) - if not predictions: - return False - - unique_prs = {p['pr_number'] for p in predictions} - bt.logging.info( - f'--- Settling {settlement_reason} issue ID: {issue.id}, ' - f'{issue.repository_full_name}#{issue.issue_number}, ' - f'{len(unique_prs)} PRs submitted (merged PR #{merged_pr_number}) ---' - ) - - settlement_time = datetime.now(timezone.utc) - - peak_variance_time = mp_storage.get_peak_variance_time(issue.id) - if peak_variance_time is None: - peak_variance_time = settlement_time - - # Collect unique PR numbers for open-time lookup - predicted_pr_numbers = list({p['pr_number'] for p in predictions}) - if merged_pr_number not in predicted_pr_numbers: - predicted_pr_numbers.append(merged_pr_number) - - pr_open_times = get_pr_open_times(issue.repository_full_name, predicted_pr_numbers, GITTENSOR_VALIDATOR_PAT) - - outcomes = _build_outcomes( - predictions, merged_pr_number, issue.repository_full_name, pr_open_times, settlement_time - ) - all_miners_predictions, uid_to_github_id = _group_miner_predictions(predictions, 
validator.metagraph) - - if not all_miners_predictions: - bt.logging.debug(f'Merge predictions: no active miners had predictions for {issue_label}') - rows_deleted = mp_storage.delete_predictions_for_issue(issue.id) - bt.logging.info(f' Predictions deleted ({rows_deleted} rows)') - return False - - order_ranks = compute_merged_pr_order_ranks(all_miners_predictions, merged_pr_number) - - miner_results = _score_and_update_emas( - validator, - all_miners_predictions, - uid_to_github_id, - outcomes, - settlement_time, - peak_variance_time, - order_ranks, - ) - - _log_issue_settlement(issue_label, merged_pr_number, all_miners_predictions, uid_to_github_id, miner_results) - - rows_deleted = mp_storage.delete_predictions_for_issue(issue.id) - bt.logging.info(f' Predictions deleted ({rows_deleted} rows)') - - mp_storage.mark_issue_settled(issue.id, 'scored', merged_pr_number) - - # Mirror settlement + delete to Postgres - if validator.db_storage: - now = datetime.now(timezone.utc).isoformat() - validator.db_storage.store_settled_issue(issue.id, 'scored', merged_pr_number, now) - - return True - - -# ============================================================================= -# Main settlement function -# ============================================================================= - - -async def merge_predictions( - self: 'Validator', - miner_evaluations: Dict[int, MinerEvaluation], -) -> None: - """Settle merge predictions for COMPLETED and CANCELLED issues. - - 1. Query COMPLETED issues from contract - - Skip if already in settled_issues table - - check_github_issue_closed to get merged PR number - - Score miners, update EMAs, delete predictions, record in settled_issues - - 2. 
Query CANCELLED issues from contract - - Skip if already in settled_issues table - - check_github_issue_closed to determine WHY it was cancelled: - a) Merged PR exists -> score + delete + record as 'scored' - b) No merged PR -> void: delete predictions + record as 'voided', no EMA impact - """ - try: - if not GITTENSOR_VALIDATOR_PAT: - bt.logging.warning( - 'GITTENSOR_VALIDATOR_PAT not set, skipping merge predictions settlement. (This DOES affect vstrust/consensus)' - ) - return - - contract_addr = get_contract_address() - if not contract_addr: - bt.logging.warning('Merge predictions: no contract address configured') - return - - bt.logging.info('***** Starting Merge Predictions Settlement *****') - - contract_client = IssueCompetitionContractClient( - contract_address=contract_addr, - subtensor=self.subtensor, - ) - - completed_settled = 0 - cancelled_settled = 0 - voided = 0 - skipped = 0 - - # --- COMPLETED issues --- - completed_issues = contract_client.get_issues_by_status(IssueStatus.COMPLETED) - bt.logging.info(f'Merge predictions: {len(completed_issues)} completed issues to check') - - for issue in completed_issues: - issue_label = f'{issue.repository_full_name}#{issue.issue_number} (id={issue.id})' - try: - if self.mp_storage.is_issue_settled(issue.id): - skipped += 1 - continue - - github_state = check_github_issue_closed( - issue.repository_full_name, issue.issue_number, GITTENSOR_VALIDATOR_PAT - ) - - if github_state is None: - bt.logging.debug(f'Merge predictions: could not check GitHub state for {issue_label}') - continue - - merged_pr_number = github_state.get('pr_number') - if not merged_pr_number: - bt.logging.warning( - f'Merge predictions: completed issue {issue_label} has no merged PR on GitHub, voiding' - ) - rows_deleted = self.mp_storage.delete_predictions_for_issue(issue.id) - bt.logging.info( - f' Voiding completed issue {issue_label} — no merged PR found, ' - f'{rows_deleted} predictions deleted, no EMA impact' - ) - 
self.mp_storage.mark_issue_settled(issue.id, 'voided') - db_storage_void(self, issue.id) - voided += 1 - continue - - if _settle_issue(self, issue, issue_label, merged_pr_number): - completed_settled += 1 - else: - skipped += 1 - - except Exception as e: - bt.logging.error(f'Merge predictions: error processing completed {issue_label}: {e}') - - # --- CANCELLED issues --- - cancelled_issues = contract_client.get_issues_by_status(IssueStatus.CANCELLED) - bt.logging.info(f'Merge predictions: {len(cancelled_issues)} cancelled issues to check') - - for issue in cancelled_issues: - issue_label = f'{issue.repository_full_name}#{issue.issue_number} (id={issue.id})' - try: - if self.mp_storage.is_issue_settled(issue.id): - skipped += 1 - continue - - github_state = check_github_issue_closed( - issue.repository_full_name, issue.issue_number, GITTENSOR_VALIDATOR_PAT - ) - - if github_state is None: - bt.logging.debug(f'Merge predictions: could not check GitHub state for {issue_label}') - continue - - merged_pr_number = github_state.get('pr_number') - - if merged_pr_number: - # Cancelled but PR was merged (solver not in subnet) — still score - if _settle_issue(self, issue, issue_label, merged_pr_number, settlement_reason='cancelled'): - cancelled_settled += 1 - else: - skipped += 1 - else: - # No merged PR — void: delete predictions, no EMA impact - rows_deleted = self.mp_storage.delete_predictions_for_issue(issue.id) - bt.logging.info( - f' Voiding cancelled issue ID {issue.id}, {issue.repository_full_name}' - f'#{issue.issue_number} — closed without merge, ' - f'{rows_deleted} predictions deleted, no EMA impact' - ) - self.mp_storage.mark_issue_settled(issue.id, 'voided') - db_storage_void(self, issue.id) - voided += 1 - - except Exception as e: - bt.logging.error(f'Merge predictions: error processing cancelled {issue_label}: {e}') - - bt.logging.info( - f'***** Merge Predictions Settlement Complete: ' - f'{completed_settled} completed settled, {cancelled_settled} cancelled 
settled, ' - f'{voided} voided, {skipped} skipped *****' - ) - - except Exception as e: - bt.logging.error(f'Merge predictions settlement failed: {e}') diff --git a/gittensor/validator/merge_predictions/validation.py b/gittensor/validator/merge_predictions/validation.py deleted file mode 100644 index c6de5f25..00000000 --- a/gittensor/validator/merge_predictions/validation.py +++ /dev/null @@ -1,26 +0,0 @@ -# Entrius 2025 - -"""Pure input validation for prediction payloads.""" - -from gittensor.constants import ( - PREDICTIONS_MAX_VALUE, - PREDICTIONS_MIN_VALUE, -) - - -def validate_prediction_values(predictions: dict[int, float]) -> str | None: - """Validate prediction payload structure and values.""" - if not predictions: - return 'Empty predictions' - - for pr_number, value in predictions.items(): - if not isinstance(pr_number, int) or pr_number <= 0: - return f'Invalid PR number: {pr_number}' - if not (PREDICTIONS_MIN_VALUE <= value <= PREDICTIONS_MAX_VALUE): - return f'Prediction for PR #{pr_number} out of range: {value} (must be {PREDICTIONS_MIN_VALUE}-{PREDICTIONS_MAX_VALUE})' - - total = sum(predictions.values()) - if total > 1.0: - return f'Submission total exceeds 1.0: {total:.4f}' - - return None diff --git a/gittensor/validator/oss_contributions/credibility.py b/gittensor/validator/oss_contributions/credibility.py index faef444b..1b405d06 100644 --- a/gittensor/validator/oss_contributions/credibility.py +++ b/gittensor/validator/oss_contributions/credibility.py @@ -1,171 +1,65 @@ # The MIT License (MIT) # Copyright © 2025 Entrius -from typing import TYPE_CHECKING, Dict, List +from typing import TYPE_CHECKING, List, Tuple import bittensor as bt -from gittensor.validator.oss_contributions.tier_config import ( - TIERS, - TIERS_ORDER, - Tier, - TierStats, - get_tier_from_config, +from gittensor.constants import ( + CREDIBILITY_MULLIGAN_COUNT, + MIN_CREDIBILITY, + MIN_TOKEN_SCORE_FOR_BASE_SCORE, + MIN_VALID_MERGED_PRS, ) if TYPE_CHECKING: from 
gittensor.classes import PullRequest -def get_tier(pr: 'PullRequest') -> Tier | None: - if pr.repository_tier_configuration: - return get_tier_from_config(pr.repository_tier_configuration) - return None - - -def calculate_tier_stats( - merged_prs: List['PullRequest'], - closed_prs: List['PullRequest'], - open_prs: List['PullRequest'] = [], - include_scoring_details: bool = False, -) -> Dict[Tier, TierStats]: - """Calculate merged/closed counts per tier.""" - from collections import defaultdict - - stats: Dict[Tier, TierStats] = {tier: TierStats() for tier in Tier} - repos_per_tier: Dict[Tier, set] = {tier: set() for tier in Tier} - # Track token scores per repository per tier - repo_token_scores_per_tier: Dict[Tier, Dict[str, float]] = {tier: defaultdict(float) for tier in Tier} - - for pr in merged_prs: - if tier := get_tier(pr): - stats[tier].merged_count += 1 - repos_per_tier[tier].add(pr.repository_full_name) - repo_token_scores_per_tier[tier][pr.repository_full_name] += pr.token_score - if include_scoring_details: - stats[tier].earned_score += pr.earned_score - # Aggregate token scoring breakdown - stats[tier].token_score += pr.token_score - stats[tier].structural_count += pr.structural_count - stats[tier].structural_score += pr.structural_score - stats[tier].leaf_count += pr.leaf_count - stats[tier].leaf_score += pr.leaf_score - - for pr in closed_prs: - if tier := get_tier(pr): - stats[tier].closed_count += 1 - - for pr in open_prs: - if tier := get_tier(pr): - stats[tier].open_count += 1 - if include_scoring_details: - stats[tier].collateral_score += pr.collateral_score - - for tier in TIERS_ORDER: - stats[tier].unique_repo_contribution_count = len(repos_per_tier[tier]) - # Calculate qualified repos based on tier's min token score per repo requirement - config = TIERS[tier] - if config.required_min_token_score_per_repo is not None: - qualified_count = sum( - 1 - for score in repo_token_scores_per_tier[tier].values() - if score >= 
config.required_min_token_score_per_repo - ) - stats[tier].qualified_unique_repo_count = qualified_count - else: - # If no min token score per repo required, all unique repos qualify - stats[tier].qualified_unique_repo_count = len(repos_per_tier[tier]) - - return stats - - -def is_tier_unlocked(tier: Tier, tier_stats: Dict[Tier, TierStats], log_reasons: bool = True) -> bool: - """ - Check if a tier is unlocked by verifying this tier and all below meet their own requirements. +def calculate_credibility(merged_prs: List['PullRequest'], closed_prs: List['PullRequest']) -> float: + """Calculate flat credibility ratio with mulligan applied. - Each tier's requirements define what's needed to maintain THAT tier. + Mulligan: up to CREDIBILITY_MULLIGAN_COUNT closed PRs are erased entirely — + they don't count in the denominator (merged + closed). - Args: - tier: The tier to check - tier_stats: Dictionary of tier statistics - log_reasons: Whether to log the reason when a tier is locked (default True) - """ - tier_idx = TIERS_ORDER.index(tier) - - for i in range(tier_idx + 1): # include current tier - check_tier = TIERS_ORDER[i] - config = TIERS[check_tier] - stats = tier_stats[check_tier] - - if config.required_credibility is not None: - if stats.credibility < config.required_credibility: - if log_reasons: - bt.logging.info( - f'{tier.value} locked: {check_tier.value} needs {config.required_credibility:.2f} credibility, has {stats.credibility:.2f}' - ) - return False - - if config.required_min_token_score is not None: - if stats.token_score < config.required_min_token_score: - if log_reasons: - bt.logging.info( - f'{tier.value} locked: {check_tier.value} needs {config.required_min_token_score:.1f} total token score, has {stats.token_score:.1f}' - ) - return False - - # Check unique repos with min token score requirement - if config.required_unique_repos_count is not None: - if stats.qualified_unique_repo_count < config.required_unique_repos_count: - if log_reasons: - 
min_score_str = ( - f' with {config.required_min_token_score_per_repo:.1f}+ token score' - if config.required_min_token_score_per_repo - else '' - ) - bt.logging.info( - f'{tier.value} locked: {check_tier.value} needs {config.required_unique_repos_count} unique repos{min_score_str}, has {stats.qualified_unique_repo_count}' - ) - return False - - return True - - -def calculate_credibility_per_tier( - merged_prs: List['PullRequest'], - closed_prs: List['PullRequest'], -) -> Dict[Tier, float]: + Returns credibility in [0.0, 1.0], or 0.0 if no attempts after mulligan. """ - Calculate credibility for each tier, enforcing tier progression. + merged_count = len(merged_prs) + closed_count = max(0, len(closed_prs) - CREDIBILITY_MULLIGAN_COUNT) + total_attempts = merged_count + closed_count - Returns dict of tier -> credibility (0.0 if tier not unlocked, else merged/total ratio). - """ - tier_stats = calculate_tier_stats(merged_prs, closed_prs) - tier_credibility: Dict[Tier, float] = {} - tier_display_parts = [] + if total_attempts == 0: + return 0.0 - for tier in Tier: - stats: TierStats = tier_stats[tier] + return merged_count / total_attempts - # Check if tier is unlocked (includes checking lower tiers) - # Suppress logging here - tier unlock reasons are logged in finalize_miner_scores - tier_unlocked = is_tier_unlocked(tier, tier_stats, log_reasons=False) - # No activity in this tier - if stats.total_attempts == 0: - tier_display_parts.append(f'{tier.value}: LOCKED') - continue +def check_eligibility(merged_prs: List['PullRequest'], closed_prs: List['PullRequest']) -> Tuple[bool, float, str]: + """Check if a miner passes the eligibility gate. + + Gate requires: + 1. At least MIN_VALID_MERGED_PRS merged PRs with token_score >= MIN_TOKEN_SCORE_FOR_BASE_SCORE + (after mulligan — if a closed PR was "valid", it no longer counts toward the minimum) + 2. 
At least MIN_CREDIBILITY credibility (after mulligan) + + Returns: + (is_eligible, credibility, reason) + reason is empty string if eligible, otherwise explains why not. + """ + credibility = calculate_credibility(merged_prs, closed_prs) - # Has activity but tier not unlocked - if not tier_unlocked: - tier_credibility[tier] = 0.0 - tier_display_parts.append(f'{tier.value}: LOCKED') - continue + # Count valid merged PRs (token_score >= threshold) + valid_merged_count = sum(1 for pr in merged_prs if pr.token_score >= MIN_TOKEN_SCORE_FOR_BASE_SCORE) - # Calculate actual credibility - credibility = stats.credibility - tier_credibility[tier] = credibility - tier_display_parts.append(f'{tier.value}: {stats.merged_count}/{stats.total_attempts} ({credibility:.2f})') + if valid_merged_count < MIN_VALID_MERGED_PRS: + reason = f'{valid_merged_count}/{MIN_VALID_MERGED_PRS} valid merged PRs (need {MIN_VALID_MERGED_PRS})' + bt.logging.info(f'Ineligible: {reason}') + return False, credibility, reason - bt.logging.info(f'Credibility: {" | ".join(tier_display_parts)}') + if credibility < MIN_CREDIBILITY: + reason = f'Credibility {credibility:.2f} < {MIN_CREDIBILITY} minimum' + bt.logging.info(f'Ineligible: {reason}') + return False, credibility, reason - return tier_credibility + bt.logging.info(f'Eligible: {valid_merged_count} valid PRs, credibility {credibility:.2f}') + return True, credibility, '' diff --git a/gittensor/validator/oss_contributions/dynamic_emissions.py b/gittensor/validator/oss_contributions/dynamic_emissions.py deleted file mode 100644 index 3cdaff33..00000000 --- a/gittensor/validator/oss_contributions/dynamic_emissions.py +++ /dev/null @@ -1,75 +0,0 @@ -from typing import Dict, Set - -import bittensor as bt -import numpy as np - -from gittensor.classes import MinerEvaluation -from gittensor.constants import ( - RECYCLE_UID, - TOKEN_SCORE_MAX_RECYCLE, - TOKEN_SCORE_RECYCLE_DECAY_RATE, - UNIQUE_REPOS_MAX_RECYCLE, - UNIQUE_REPOS_RECYCLE_DECAY_RATE, -) - - -def 
_exponential_unlock_scalar(value: float, max_recycle: float, decay_rate: float) -> float: - """Calculate scalar using exponential unlock curve, capped at 1.0.""" - return min(1.0, (1 - max_recycle) + max_recycle * (1 - np.exp(-decay_rate * value))) - - -def get_network_totals(miner_evaluations: Dict[int, MinerEvaluation]) -> tuple[int, float]: - """Extract unique repos count and total token score from tiered miners only. - - Only miners with a tier (bronze, silver, gold) are counted. - This excludes miners who haven't reached any tier yet. - """ - unique_repos: Set[str] = set() - total_token_score = 0.0 - - for evaluation in miner_evaluations.values(): - # Only count contributions from miners who have achieved a tier - if evaluation.current_tier is not None: - total_token_score += evaluation.total_token_score - - if repos := evaluation.unique_repos_contributed_to: - unique_repos.update(repos) - - return len(unique_repos), total_token_score - - -def apply_dynamic_emissions_using_network_contributions( - normalized_rewards: Dict[int, float], miner_evaluations: Dict[int, MinerEvaluation] -) -> Dict[int, float]: - """Scale normalized rewards based on network-wide contributions.""" - if not normalized_rewards: - bt.logging.warning('No normalized rewards provided for scaling') - return {} - - total_unique_repos, total_token_score = get_network_totals(miner_evaluations) - - unique_repo_scalar = _exponential_unlock_scalar( - total_unique_repos, UNIQUE_REPOS_MAX_RECYCLE, UNIQUE_REPOS_RECYCLE_DECAY_RATE - ) - token_score_scalar = _exponential_unlock_scalar( - total_token_score, TOKEN_SCORE_MAX_RECYCLE, TOKEN_SCORE_RECYCLE_DECAY_RATE - ) - final_scalar = (unique_repo_scalar + token_score_scalar) / 2.0 - - # Apply scaling and calculate recycled amount - total_original = sum(normalized_rewards.values()) - total_recycled = total_original * (1 - final_scalar) - - scaled_rewards = {uid: reward * final_scalar for uid, reward in normalized_rewards.items()} - 
scaled_rewards[RECYCLE_UID] = scaled_rewards.get(RECYCLE_UID, 0.0) + max( - total_recycled, 1.0 if total_original <= 0 else 0.0 - ) - - recycle_percentage = (total_recycled / total_original * 100) if total_original > 0 else 100.0 - - bt.logging.info( - f'Dynamic emissions: unique_repos={unique_repo_scalar:.2f}, token_score={token_score_scalar:.2f}, ' - f'recycle_scalar={final_scalar:.2f}, recycled={total_recycled:.2f} ({recycle_percentage:.2f}%)' - ) - - return scaled_rewards diff --git a/gittensor/validator/oss_contributions/inspections.py b/gittensor/validator/oss_contributions/inspections.py index 7cb90626..9f8398ba 100644 --- a/gittensor/validator/oss_contributions/inspections.py +++ b/gittensor/validator/oss_contributions/inspections.py @@ -2,13 +2,12 @@ # Copyright © 2025 Entrius from collections import defaultdict -from typing import Dict, List +from typing import Dict, List, Optional import bittensor as bt from gittensor.classes import MinerEvaluation from gittensor.constants import RECYCLE_UID -from gittensor.synapses import GitPatSynapse from gittensor.validator.utils.github_validation import validate_github_credentials @@ -44,13 +43,17 @@ def detect_and_penalize_miners_sharing_github(miner_evaluations: Dict[int, Miner bt.logging.info(f'Total duplicate miners penalized: {duplicate_count}') -def validate_response_and_initialize_miner_evaluation(uid: int, response: GitPatSynapse) -> MinerEvaluation: +def validate_response_and_initialize_miner_evaluation( + uid: int, hotkey: str, pat: Optional[str], stale_hotkey: Optional[str] = None +) -> MinerEvaluation: """ - Validate a miner's response and initialize their evaluation object. + Validate a miner's stored PAT and initialize their evaluation object. 
Args: uid: The miner's unique identifier - response: The GitPatSynapse response from the miner (may be None if miner didn't respond) + hotkey: The miner's hotkey + pat: The miner's GitHub PAT from local storage (may be None if not stored) + stale_hotkey: If set, the UID has a stored PAT from this old hotkey (re-registration detected) Returns: MinerEvaluation: Initialized evaluation object with failure reason if validation failed @@ -59,18 +62,26 @@ def validate_response_and_initialize_miner_evaluation(uid: int, response: GitPat if uid == RECYCLE_UID: return MinerEvaluation(uid=uid, hotkey='', failed_reason='SPECIAL CASE UID 0 - RECYCLE UID') - # Check for null response before accessing any attributes to prevent crashes - if not response or not response.axon: - return MinerEvaluation(uid=uid, hotkey='', failed_reason=f'No response provided by miner {uid}') + if not hotkey: + return MinerEvaluation(uid=uid, hotkey='', failed_reason=f'No hotkey for miner {uid}') - # Now safe to access response.axon.hotkey - miner_eval = MinerEvaluation(uid=uid, hotkey=response.axon.hotkey) + if not pat: + if stale_hotkey: + reason = ( + f'New miner registered on UID {uid}: ' + f'hotkey changed {stale_hotkey[:16]}... → {hotkey[:16]}... 
— miner must run `gitt miner post`' + ) + else: + reason = f'No stored PAT for miner {uid} — miner must run `gitt miner post`' + return MinerEvaluation(uid=uid, hotkey=hotkey, failed_reason=reason) - github_id, error = validate_github_credentials(uid, response.github_access_token) + miner_eval = MinerEvaluation(uid=uid, hotkey=hotkey) + + github_id, error = validate_github_credentials(uid, pat) if error: miner_eval.failed_reason = error return miner_eval miner_eval.github_id = github_id - miner_eval.github_pat = response.github_access_token + miner_eval.github_pat = pat return miner_eval diff --git a/gittensor/validator/oss_contributions/reward.py b/gittensor/validator/oss_contributions/reward.py index 0b15ef3e..9a28740f 100644 --- a/gittensor/validator/oss_contributions/reward.py +++ b/gittensor/validator/oss_contributions/reward.py @@ -2,16 +2,14 @@ # Copyright © 2025 Entrius from __future__ import annotations -from typing import TYPE_CHECKING, Dict, Tuple +from typing import TYPE_CHECKING, Dict, Optional, Tuple import bittensor as bt import numpy as np -from aiohttp import ClientConnectorError from gittensor.classes import MinerEvaluation -from gittensor.synapses import GitPatSynapse from gittensor.utils.github_api_tools import load_miners_prs -from gittensor.validator.oss_contributions.dynamic_emissions import apply_dynamic_emissions_using_network_contributions +from gittensor.validator import pat_storage from gittensor.validator.oss_contributions.inspections import ( detect_and_penalize_miners_sharing_github, validate_response_and_initialize_miner_evaluation, @@ -21,7 +19,6 @@ finalize_miner_scores, score_miner_prs, ) -from gittensor.validator.oss_contributions.tier_config import allocate_emissions_by_tier from gittensor.validator.utils.load_weights import LanguageConfig, RepositoryConfig, TokenConfig # NOTE: there was a circular import error, needed this if to resolve it @@ -29,50 +26,26 @@ from neurons.validator import Validator -async def query_miner(self, 
uid: int) -> GitPatSynapse: - """ - Returns: - GitPatSynapse: A gittensor protocol object with a miner github pat - """ - - bt.logging.debug(f'\nQuerying UID {uid}') - - try: - response = await self.dendrite( - axons=[self.metagraph.axons[uid]], - synapse=GitPatSynapse(), - # Don't deserialize, get the GitPatSynapse objects directly - deserialize=False, - ) - - # Extract the single response from the list - miner_response = response[0] if response else None - return miner_response - - except ClientConnectorError: - bt.logging.warning(f'Cannot connect to UID {uid} - miner unreachable') - return None - except Exception as e: - bt.logging.error(f'Error querying miner UID {uid}: {e}') - return None - - async def evaluate_miners_pull_requests( uid: int, - response: GitPatSynapse, + hotkey: str, + pat: Optional[str], master_repositories: Dict[str, RepositoryConfig], programming_languages: Dict[str, LanguageConfig], token_config: TokenConfig, + stale_hotkey: Optional[str] = None, ) -> MinerEvaluation: """ - Entry point from taking a miners response -> Get PRs -> Score PRs by tier + Entry point from taking a miners response -> Get PRs -> Score PRs Args: uid: The uid of the miner being evaluated - response: The GitPatSynapse (github access token) returned by the miner + hotkey: The miner's hotkey + pat: The miner's GitHub PAT (from local storage), or None if not available master_repositories: The incentivized repositories and their RepositoryConfig objects programming_languages: The programming languages and their weights token_config: Token-based scoring weights configuration + stale_hotkey: If set, the UID has a stored PAT from this old hotkey (re-registration detected) Returns: MinerEvaluation: The object containing scores, valid_prs, etc. 
@@ -80,7 +53,7 @@ async def evaluate_miners_pull_requests( bt.logging.info(f'******* Reward function called for UID: {uid} *******') - miner_eval = validate_response_and_initialize_miner_evaluation(uid, response) + miner_eval = validate_response_and_initialize_miner_evaluation(uid, hotkey, pat, stale_hotkey=stale_hotkey) if miner_eval.failed_reason is not None: bt.logging.info(f'UID {uid} not being evaluated: {miner_eval.failed_reason}') return miner_eval @@ -89,7 +62,7 @@ async def evaluate_miners_pull_requests( score_miner_prs(miner_eval, master_repositories, programming_languages, token_config) - # Clear PAT after scoring to avoid storing sensitive data + # Clear PAT after scoring to avoid storing sensitive data in memory miner_eval.github_pat = None bt.logging.info('*' * 50 + '\n') @@ -102,31 +75,46 @@ async def get_rewards( master_repositories: Dict[str, RepositoryConfig], programming_languages: Dict[str, LanguageConfig], token_config: TokenConfig, -) -> Tuple[np.ndarray, Dict[int, MinerEvaluation]]: - """ - Args: - uids (set[int]): All valid miner uids in the subnet - master_repositories (Dict[str, RepositoryConfig]): The dict of repositories (name -> RepositoryConfig) - programming_languages (Dict[str, LanguageConfig]): The dict of languages (extension -> LanguageConfig) - token_config (TokenConfig): Token-based scoring weights configuration +) -> Tuple[np.ndarray, Dict[int, MinerEvaluation], set]: + """Score OSS contributions for all miners. + Returns: - rewards (array[int]): An array of scores for all miners in sorted fashion, miner n score = index[n] + Tuple of (normalized_rewards_array, miner_evaluations, cached_uids). + DB storage and emission blending are handled by the caller (forward.py). """ bt.logging.info(f'UIDs: {uids}') - responses: Dict[int, GitPatSynapse] = {} + # Snapshot PATs once at the start of the scoring round. + # Mid-round broadcasts update the JSON file but do not affect this round. 
+ all_pats = pat_storage.load_all_pats() + pat_by_uid = {entry['uid']: entry for entry in all_pats} + + bt.logging.info(f'PAT storage snapshot: {len(pat_by_uid)} miners have stored PATs') + miner_evaluations: Dict[int, MinerEvaluation] = {} - # Query miners and calculate score. + # Look up PATs and calculate score. for uid in uids: - # Retrieve PAT - miner_response = await query_miner(self, uid) - responses[uid] = miner_response + hotkey = self.metagraph.hotkeys[uid] + pat_entry = pat_by_uid.get(uid) + pat = None + stale_hotkey = None + if pat_entry: + if pat_entry.get('hotkey') == hotkey: + pat = pat_entry['pat'] + else: + stale_hotkey = pat_entry.get('hotkey') # Calculate score miner_evaluation = await evaluate_miners_pull_requests( - uid, miner_response, master_repositories, programming_languages, token_config + uid, + hotkey, + pat, + master_repositories, + programming_languages, + token_config, + stale_hotkey=stale_hotkey, ) miner_evaluations[uid] = miner_evaluation @@ -136,22 +124,14 @@ async def get_rewards( # Adjust scores for duplicate accounts detect_and_penalize_miners_sharing_github(miner_evaluations) - # Finalize scores: apply pioneer dividends, credibility, sum totals, deduct collateral + # Finalize scores: apply eligibility gate, credibility, pioneer dividends, collateral finalize_miner_scores(miner_evaluations) - # Allocate emissions by tier: replace total_score with tier-weighted allocations - allocate_emissions_by_tier(miner_evaluations) - - # Normalize the rewards between [0,1] + # Normalize the rewards between [0,1] — single flat pool normalized_rewards = normalize_rewards_linear(miner_evaluations) - # Scale rewards according to dynamic emission curve based off of miners total contributions. 
- final_rewards = apply_dynamic_emissions_using_network_contributions(normalized_rewards, miner_evaluations) - - # Store miner evaluations after calculating all scores - await self.bulk_store_evaluation(miner_evaluations, skip_uids=cached_uids) - return ( - np.array([final_rewards.get(uid, 0.0) for uid in sorted(uids)]), + np.array([normalized_rewards.get(uid, 0.0) for uid in sorted(uids)]), miner_evaluations, + cached_uids, ) diff --git a/gittensor/validator/oss_contributions/scoring.py b/gittensor/validator/oss_contributions/scoring.py index be9471e4..a253d46d 100644 --- a/gittensor/validator/oss_contributions/scoring.py +++ b/gittensor/validator/oss_contributions/scoring.py @@ -3,22 +3,25 @@ import math from datetime import datetime, timezone -from typing import Dict, Optional, Tuple +from typing import Dict, Tuple import bittensor as bt -from gittensor.classes import Issue, MinerEvaluation, PrScoringResult, PRState, PullRequest +from gittensor.classes import ( + Issue, + MinerEvaluation, + PrScoringResultCategorized, + PRState, + PullRequest, +) from gittensor.constants import ( - DEFAULT_MERGED_PR_BASE_SCORE, EXCESSIVE_PR_PENALTY_BASE_THRESHOLD, MAINTAINER_ASSOCIATIONS, - MAINTAINER_ISSUE_BONUS, - MAX_CODE_DENSITY_MULTIPLIER, - MAX_ISSUE_AGE_BONUS, - MAX_ISSUE_AGE_FOR_MAX_SCORE, + MAINTAINER_ISSUE_MULTIPLIER, MAX_ISSUE_CLOSE_WINDOW_DAYS, MAX_OPEN_PR_THRESHOLD, MIN_TOKEN_SCORE_FOR_BASE_SCORE, + OPEN_PR_COLLATERAL_PERCENT, OPEN_PR_THRESHOLD_TOKEN_SCORE, PIONEER_DIVIDEND_MAX_RATIO, PIONEER_DIVIDEND_RATE_1ST, @@ -27,6 +30,7 @@ REVIEW_PENALTY_RATE, SECONDS_PER_DAY, SECONDS_PER_HOUR, + STANDARD_ISSUE_MULTIPLIER, TIME_DECAY_GRACE_PERIOD_HOURS, TIME_DECAY_MIN_MULTIPLIER, TIME_DECAY_SIGMOID_MIDPOINT, @@ -35,22 +39,11 @@ from gittensor.utils.github_api_tools import ( FileContentPair, fetch_file_contents_with_base, + get_merge_base_sha, get_pull_request_file_changes, get_pull_request_maintainer_changes_requested_count, ) -from 
gittensor.validator.oss_contributions.credibility import ( - calculate_credibility_per_tier, - calculate_tier_stats, - is_tier_unlocked, -) -from gittensor.validator.oss_contributions.tier_config import ( - TIERS, - TIERS_ORDER, - Tier, - TierConfig, - TierStats, - get_tier_from_config, -) +from gittensor.validator.oss_contributions.credibility import check_eligibility from gittensor.validator.utils.load_weights import LanguageConfig, RepositoryConfig, TokenConfig from gittensor.validator.utils.tree_sitter_scoring import calculate_token_score_from_file_changes @@ -89,11 +82,12 @@ def score_pull_request( programming_languages: Dict[str, LanguageConfig], token_config: TokenConfig, ) -> None: - """Scores a single PR and populates relevant PullRequest fields (tier_config, etc.)""" + """Scores a single PR and populates relevant PullRequest fields.""" + assert miner_eval.github_pat is not None, f'UID {miner_eval.uid} has no github_pat' - pr.repository_tier_configuration = get_tier_config(pr.repository_full_name, master_repositories) - if not pr.repository_tier_configuration: - bt.logging.warning('No repository configuration found.') + repo_config = master_repositories.get(pr.repository_full_name) + if not repo_config: + bt.logging.warning(f'{pr.repository_full_name} not in master repositories. Skipping...') return # Only fetch file changes from GitHub if not already loaded (they are preloaded for testing only) @@ -124,6 +118,9 @@ def score_pull_request( def fetch_file_contents_for_pr(pr: PullRequest, github_pat: str) -> Dict[str, FileContentPair]: """Fetch both base and head file contents for all files in a PR using GraphQL batch fetch. + Uses the merge-base commit (common ancestor) as the "before" state rather than + the base branch tip, so the tree-diff only scores the PR's own changes. 
+ Returns: Dict mapping filename to FileContentPair(old_content, new_content) - old_content: File content before the PR (None for new files) @@ -132,7 +129,6 @@ def fetch_file_contents_for_pr(pr: PullRequest, github_pat: str) -> Dict[str, Fi if not pr.file_changes or not pr.head_ref_oid or not pr.base_ref_oid: return {} - # Extract owner and repo name parts = pr.repository_full_name.split('/') if len(parts) != 2: bt.logging.warning(f'Invalid repository name format: {pr.repository_full_name}') @@ -140,21 +136,16 @@ def fetch_file_contents_for_pr(pr: PullRequest, github_pat: str) -> Dict[str, Fi owner, repo_name = parts - return fetch_file_contents_with_base( - owner, repo_name, pr.base_ref_oid, pr.head_ref_oid, pr.file_changes, github_pat - ) - - -def get_tier_config(repo_full_name: str, master_repositories: Dict[str, RepositoryConfig]) -> Optional[TierConfig]: - """Get tier configuration for a repository.""" - repo_config = master_repositories.get(repo_full_name) - if not repo_config: - return None + # Resolve merge-base to avoid scoring unrelated changes from the base branch. + # baseRefOid is the base branch tip, which may include commits not in this PR. + merge_base = get_merge_base_sha(pr.repository_full_name, pr.base_ref_oid, pr.head_ref_oid, github_pat) + base_sha = merge_base if merge_base else pr.base_ref_oid + if merge_base and merge_base != pr.base_ref_oid: + bt.logging.debug( + f'PR #{pr.number}: using merge-base {merge_base[:8]} instead of base_ref {pr.base_ref_oid[:8]}' + ) - tier_config = TIERS.get(repo_config.tier) if repo_config.tier else None - if not tier_config: - bt.logging.warning(f'{repo_full_name} is not configured to a tier. 
Skipping...') - return tier_config + return fetch_file_contents_with_base(owner, repo_name, base_sha, pr.head_ref_oid, pr.file_changes, github_pat) def calculate_base_score( @@ -163,9 +154,9 @@ def calculate_base_score( token_config: TokenConfig, file_contents: Dict[str, FileContentPair], ) -> float: - """Calculate base score using code density scaling + contribution bonus.""" - scoring_result: PrScoringResult = calculate_token_score_from_file_changes( - pr.file_changes, + """Calculate base score using per-category code density scaling + contribution bonus.""" + scoring_result: PrScoringResultCategorized = calculate_token_score_from_file_changes( + pr.file_changes or [], file_contents, token_config, programming_languages, @@ -179,27 +170,12 @@ def calculate_base_score( pr.leaf_count = scoring_result.score_breakdown.leaf_count pr.leaf_score = scoring_result.score_breakdown.leaf_score - # Calculate total lines changed across all files - total_lines = sum(f.total_lines for f in scoring_result.file_results) - - # Check minimum token score threshold for base score. 
PRs below threshold get 0 base score - if pr.token_score < MIN_TOKEN_SCORE_FOR_BASE_SCORE: - code_density = 0.0 - initial_base_score = 0.0 - elif total_lines > 0: - # Calculate code density (token_score / total_lines), capped - code_density = min(pr.token_score / total_lines, MAX_CODE_DENSITY_MULTIPLIER) - initial_base_score = DEFAULT_MERGED_PR_BASE_SCORE * code_density - else: - code_density = 0.0 - initial_base_score = 0.0 + initial_base_score = scoring_result.calculate_initial_base_score() - # Calculate contribution bonus, capped - tier_config: TierConfig = pr.repository_tier_configuration - bonus_percent = min(1.0, scoring_result.total_score / tier_config.contribution_score_for_full_bonus) - contribution_bonus = round(bonus_percent * tier_config.contribution_score_max_bonus, 2) + # Calculate contribution bonus from SOURCE category only + contribution_bonus = scoring_result.calculate_contribution_bonus() - # Final base score = density-scaled base + contribution bonus + # Final base score = sum of per-category density bases + contribution bonus base_score = round(initial_base_score + contribution_bonus, 2) # Log with note if below token threshold @@ -209,8 +185,7 @@ def calculate_base_score( else '' ) bt.logging.info( - f'Base score: {initial_base_score:.2f} (density {code_density:.2f}){threshold_note} + {contribution_bonus} bonus ' - f'({bonus_percent * 100:.0f}% of max {tier_config.contribution_score_max_bonus}) = {base_score:.2f}' + f'Base score: {initial_base_score:.2f}{threshold_note} + {contribution_bonus} bonus = {base_score:.2f}' ) return base_score @@ -219,16 +194,7 @@ def calculate_base_score( def calculate_review_quality_multiplier(changes_requested_count: int) -> float: """Calculate the review quality multiplier based on maintainer CHANGES_REQUESTED reviews. - Each CHANGES_REQUESTED review from a maintainer reduces the multiplier - by REVIEW_PENALTY_RATE cumulatively, floored at 0.0. 
- Formula: max(0.0, 1.0 - REVIEW_PENALTY_RATE × N) - - Args: - changes_requested_count: Number of CHANGES_REQUESTED reviews from maintainers - - Returns: - float: Multiplier in [0.0, 1.0] """ multiplier = max(0.0, 1.0 - REVIEW_PENALTY_RATE * changes_requested_count) if changes_requested_count > 0: @@ -250,13 +216,10 @@ def calculate_pr_multipliers( pr.issue_multiplier = round(calculate_issue_multiplier(pr), 2) if is_merged: - # Spam multiplier is recalculated in finalize_miner_scores with tier stats - # Set to 1.0 here as placeholder; will be updated when tier unlock status is known + # Spam multiplier is recalculated in finalize_miner_scores with total token score pr.open_pr_spam_multiplier = 1.0 pr.time_decay_multiplier = round(calculate_time_decay_multiplier(pr), 2) - pr.review_quality_multiplier = round(calculate_review_quality_multiplier(pr.changes_requested_count), 2) - else: pr.open_pr_spam_multiplier = 1.0 pr.time_decay_multiplier = 1.0 @@ -264,51 +227,29 @@ def calculate_pr_multipliers( pr.review_quality_multiplier = 1.0 -def calculate_open_pr_threshold( - tier_stats: Dict[Tier, TierStats] = None, -) -> int: - """ - Calculate dynamic open PR threshold based on total token score across unlocked tiers. - - Bonus = floor(total_unlocked_token_score / 500) - Example: 1500 token score across unlocked tiers / 500 = +3 bonus +def calculate_open_pr_threshold(total_token_score: float = 0.0) -> int: + """Calculate dynamic open PR threshold based on total token score. 
+ Bonus = floor(total_token_score / OPEN_PR_THRESHOLD_TOKEN_SCORE) Threshold = min(BASE_THRESHOLD + bonus, MAX_OPEN_PR_THRESHOLD) """ - if tier_stats is None: - return EXCESSIVE_PR_PENALTY_BASE_THRESHOLD - - # Sum token scores from all unlocked tiers - total_unlocked_token_score = 0.0 - for tier in TIERS_ORDER: - if is_tier_unlocked(tier, tier_stats, log_reasons=False): - total_unlocked_token_score += tier_stats[tier].token_score - - bonus = int(total_unlocked_token_score // OPEN_PR_THRESHOLD_TOKEN_SCORE) + bonus = int(total_token_score // OPEN_PR_THRESHOLD_TOKEN_SCORE) return min(EXCESSIVE_PR_PENALTY_BASE_THRESHOLD + bonus, MAX_OPEN_PR_THRESHOLD) -def calculate_pr_spam_penalty_multiplier( - total_open_prs: int, - tier_stats: Dict[Tier, TierStats] = None, -) -> float: - """ - Apply penalty for excessive open PRs. - - Binary multiplier: - - 1.0 if open PRs <= threshold - - 0.0 otherwise +def calculate_pr_spam_penalty_multiplier(total_open_prs: int, total_token_score: float = 0.0) -> float: + """Apply penalty for excessive open PRs. - The threshold is dynamic based on the miner's total token score - across unlocked tiers. + Binary multiplier: 1.0 if open PRs <= threshold, 0.0 otherwise. """ - threshold = calculate_open_pr_threshold(tier_stats) + threshold = calculate_open_pr_threshold(total_token_score) return 1.0 if total_open_prs <= threshold else 0.0 def calculate_time_decay_multiplier(pr: PullRequest) -> float: """Calculate time decay multiplier for a single PR based on merge date.""" + assert pr.merged_at is not None, f'PR #{pr.number} has no merged_at' now = datetime.now(timezone.utc) hours_since_merge = (now - pr.merged_at).total_seconds() / SECONDS_PER_HOUR @@ -334,14 +275,14 @@ def calculate_pioneer_dividends( Must be called AFTER all earned_scores have been computed. 
""" - # Build index: (repo, uid) -> eligible PRs, and per-repo aggregates for ordering - pr_index: Dict[str, Dict[int, list]] = {} # repo -> {uid: [eligible PRs]} + pr_index: Dict[str, Dict[int, list]] = {} repo_contributions: Dict[str, Dict[int, Tuple[datetime, int, float]]] = {} for evaluation in miner_evaluations.values(): for pr in evaluation.merged_pull_requests: if not pr.is_pioneer_eligible(): continue + assert pr.merged_at is not None repo = pr.repository_full_name pr_index.setdefault(repo, {}).setdefault(pr.uid, []).append(pr) @@ -356,16 +297,13 @@ def calculate_pioneer_dividends( else: repo_contributions[repo][pr.uid] = (earliest_at, earliest_num, new_total) - # For each repo: rank contributors, calculate dividend, apply to pioneer PR for repo, uid_entries in repo_contributions.items(): sorted_uids = sorted(uid_entries.items(), key=lambda x: (x[1][0], x[1][1])) - # Set pioneer_rank via index lookup (no full evaluation scan) for rank_pos, (uid, _) in enumerate(sorted_uids): for pr in pr_index[repo][uid]: pr.pioneer_rank = rank_pos + 1 - # Calculate dividend from followers' earned_scores dividend = 0.0 for pos, (_, entry) in enumerate(sorted_uids[1:]): follower_earned = entry[2] @@ -379,14 +317,13 @@ def calculate_pioneer_dividends( if dividend <= 0: continue - # Find pioneer's earliest PR via index and apply capped dividend pioneer_uid = sorted_uids[0][0] pioneer_pr_number = sorted_uids[0][1][1] pioneer_pr = next(pr for pr in pr_index[repo][pioneer_uid] if pr.number == pioneer_pr_number) max_dividend = pioneer_pr.earned_score * PIONEER_DIVIDEND_MAX_RATIO capped = min(dividend, max_dividend) pioneer_pr.pioneer_dividend = round(capped, 2) - pioneer_pr.earned_score += pioneer_pr.pioneer_dividend + pioneer_pr.earned_score = round(pioneer_pr.earned_score + pioneer_pr.pioneer_dividend, 2) cap_note = f' (capped from {dividend:.2f})' if capped < dividend else '' bt.logging.info( @@ -417,36 +354,31 @@ def finalize_miner_scores(miner_evaluations: Dict[int, 
MinerEvaluation]) -> None has_contributions = len(evaluation.merged_pull_requests) > 0 or len(evaluation.closed_pull_requests) > 0 if not has_contributions: - bt.logging.info('No merged or closed PRs - skipping tier evaluation') + bt.logging.info('No merged or closed PRs - skipping evaluation') continue - evaluation.credibility_by_tier = calculate_credibility_per_tier( + # Check eligibility gate (credibility with mulligan + min valid PRs) + is_eligible, credibility, reason = check_eligibility( evaluation.merged_pull_requests, evaluation.closed_pull_requests ) + evaluation.is_eligible = is_eligible + evaluation.credibility = credibility - # Calculate tier stats early to determine unlocked tiers for spam multiplier - tier_stats = calculate_tier_stats( - merged_prs=evaluation.merged_pull_requests, - closed_prs=evaluation.closed_pull_requests, - open_prs=evaluation.open_pull_requests, - include_scoring_details=False, # Will recalculate with scoring details later - ) + if not is_eligible: + bt.logging.info(f'UID {uid} ineligible: {reason} — score set to 0') + continue - # Calculate spam multiplier once per miner (same for all their merged PRs) - spam_multiplier = calculate_pr_spam_penalty_multiplier(evaluation.total_open_prs, tier_stats) + # Calculate spam multiplier once per miner using total token score + # We need to compute total_token_score first from all merged PRs + preliminary_token_score = sum(pr.token_score for pr in evaluation.merged_pull_requests) + spam_multiplier = calculate_pr_spam_penalty_multiplier(evaluation.total_open_prs, preliminary_token_score) # Process merged PRs for pr in evaluation.merged_pull_requests: - # Apply spam multiplier (calculated once per miner based on unlocked tiers) pr.open_pr_spam_multiplier = spam_multiplier - # Apply tier level credibility^k to each PRs score - tier_config = pr.repository_tier_configuration - tier = get_tier_from_config(tier_config) - credibility = evaluation.credibility_by_tier.get(tier, 1.0) if tier else 1.0 
- pr.raw_credibility = credibility - pr.credibility_scalar = tier_config.credibility_scalar - pr.credibility_multiplier = round(credibility**tier_config.credibility_scalar, 2) + # Apply linear credibility multiplier (k=1) + pr.credibility_multiplier = round(credibility, 2) pr.calculate_final_earned_score() @@ -458,10 +390,9 @@ def finalize_miner_scores(miner_evaluations: Dict[int, MinerEvaluation]) -> None evaluation.total_leaf_score += pr.leaf_score # Phase 2: Calculate pioneer dividends from follower earned_scores - # Must happen after Phase 1 so all earned_scores are available calculate_pioneer_dividends(miner_evaluations) - # Phase 3: Aggregate totals (including dividends), collateral, tier stats, logging + # Phase 3: Aggregate totals (including dividends), collateral, logging for uid, evaluation in miner_evaluations.items(): if not evaluation: continue @@ -476,42 +407,11 @@ def finalize_miner_scores(miner_evaluations: Dict[int, MinerEvaluation]) -> None evaluation.total_score += pr.earned_score evaluation.total_nodes_scored += pr.total_nodes_scored - # Apply collateral deduction (0 - 0 = 0 for empty miners) + # Apply collateral deduction earned_score = evaluation.total_score evaluation.total_score = max(0.0, earned_score - evaluation.total_collateral_score) evaluation.unique_repos_count = len(evaluation.unique_repos_contributed_to) - # Calculate tier stats (empty stats for no contributions, used for logging + dashboard) - tier_stats = calculate_tier_stats( - merged_prs=evaluation.merged_pull_requests, - closed_prs=evaluation.closed_pull_requests, - open_prs=evaluation.open_pull_requests, - include_scoring_details=True, - ) - - # Determine miner's current tier based on what tiers they've unlocked - for tier in TIERS.keys(): - evaluation.stats_by_tier[tier] = tier_stats[tier] - if is_tier_unlocked(tier, tier_stats): - evaluation.current_tier = tier - - # Set overall qualified unique repos count (Bronze threshold is lowest, so use that for overall count) - 
evaluation.qualified_unique_repos_count = ( - tier_stats[Tier.BRONZE].qualified_unique_repo_count - + tier_stats[Tier.SILVER].qualified_unique_repo_count - + tier_stats[Tier.GOLD].qualified_unique_repo_count - ) - - # Determine next tier for display - current_tier_str = evaluation.current_tier.value if evaluation.current_tier else 'None' - if evaluation.current_tier is None: - next_tier_str = f' (Next: {TIERS_ORDER[0].value})' - elif evaluation.current_tier == TIERS_ORDER[-1]: - next_tier_str = ' (Max)' - else: - next_idx = TIERS_ORDER.index(evaluation.current_tier) + 1 - next_tier_str = f' (Next: {TIERS_ORDER[next_idx].value})' - # UID summary bt.logging.info('') bt.logging.info('Summary:') @@ -521,26 +421,18 @@ def finalize_miner_scores(miner_evaluations: Dict[int, MinerEvaluation]) -> None bt.logging.info( f'├─ PRs: {evaluation.total_merged_prs} merged | {evaluation.total_open_prs} open | {evaluation.total_closed_prs} closed' ) - bt.logging.info(f'├─ Tier: {current_tier_str}{next_tier_str}') - bronze = evaluation.stats_by_tier[Tier.BRONZE] - silver = evaluation.stats_by_tier[Tier.SILVER] - gold = evaluation.stats_by_tier[Tier.GOLD] - bt.logging.info( - f'└─ Per-Tier: Bronze({bronze.merged_count}/{bronze.total_attempts}) | Silver({silver.merged_count}/{silver.total_attempts}) | Gold({gold.merged_count}/{gold.total_attempts})' - ) + bt.logging.info(f'└─ Eligible: {evaluation.is_eligible} | Credibility: {evaluation.credibility:.2f}') bt.logging.info('Finalization complete.') def calculate_issue_multiplier(pr: PullRequest) -> float: """ - Calculate PR score multiplier based on the first valid linked issue's age. - - Works for both merged PRs (uses issue.closed_at) and open PRs (uses current time). - Only the first valid issue is scored. Adds bonus if issue was created by a maintainer. + Calculate PR score multiplier based on the first valid linked issue. 
- Returns: - float: Multiplier between 1.0 and 2.0 + Returns a flat multiplier: MAINTAINER_ISSUE_MULTIPLIER (1.66) if the issue author + is a maintainer (OWNER/MEMBER/COLLABORATOR), otherwise STANDARD_ISSUE_MULTIPLIER (1.33). + Returns 1.0 if no valid linked issues. """ if not pr.issues: bt.logging.info(f'PR #{pr.number} - Contains no linked issues') @@ -552,39 +444,17 @@ def calculate_issue_multiplier(pr: PullRequest) -> float: return 1.0 issue = valid_issues[0] - is_merged = pr.pr_state == PRState.MERGED - - # Check if issue was created by a maintainer (extra bonus) - is_maintainer_issue = issue.author_association in MAINTAINER_ASSOCIATIONS if issue.author_association else False - maintainer_bonus = MAINTAINER_ISSUE_BONUS if is_maintainer_issue else 0.0 - maintainer_str = ' (maintainer)' if is_maintainer_issue else '' - - if not issue.created_at: - bonus = maintainer_bonus - bt.logging.info(f'Issue #{issue.number} - No creation date | bonus: {bonus:.2f}{maintainer_str}') - return 1.0 + bonus - - try: - end_date = issue.closed_at if (is_merged and issue.closed_at) else datetime.now(timezone.utc) - days_open = (end_date - issue.created_at).days - # Scale age bonus from 0 to MAX_ISSUE_AGE_BONUS based on sqrt of days open - age_ratio = math.sqrt(min(days_open, MAX_ISSUE_AGE_FOR_MAX_SCORE)) / math.sqrt(MAX_ISSUE_AGE_FOR_MAX_SCORE) - age_bonus = MAX_ISSUE_AGE_BONUS * age_ratio - total_bonus = age_bonus + maintainer_bonus - bt.logging.info(f'Issue #{issue.number} - Open for {days_open} days | bonus: {total_bonus:.2f}{maintainer_str}') - return 1.0 + total_bonus - except (ValueError, AttributeError) as e: - bt.logging.warning( - f'Issue #{issue.number} - Could not calculate age. Using maintainer bonus only: {maintainer_bonus:.2f}. 
Exception: {e}' - ) - return 1.0 + maintainer_bonus + is_maintainer = issue.author_association in MAINTAINER_ASSOCIATIONS if issue.author_association else False + multiplier = MAINTAINER_ISSUE_MULTIPLIER if is_maintainer else STANDARD_ISSUE_MULTIPLIER + label = 'maintainer' if is_maintainer else 'standard' + bt.logging.info(f'Issue #{issue.number} - {label} issue | multiplier: {multiplier}') + return multiplier def is_valid_issue(issue: Issue, pr: PullRequest) -> bool: """Check if issue is valid for bonus calculation (works for both merged and open PRs).""" is_merged = pr.pr_state == PRState.MERGED - # Common checks (both merged and open) if not issue.author_login: bt.logging.warning(f'Skipping issue #{issue.number} - Issue is missing author information') return False @@ -597,8 +467,7 @@ def is_valid_issue(issue: Issue, pr: PullRequest) -> bool: bt.logging.warning(f'Skipping issue #{issue.number} - Issue was created after PR was created') return False - # Merged-only checks - if is_merged: + if is_merged and pr.merged_at: if pr.last_edited_at and pr.last_edited_at > pr.merged_at: bt.logging.warning(f'Skipping issue #{issue.number} - PR was edited after merge') return False @@ -627,34 +496,26 @@ def calculate_open_pr_collateral_score(pr: PullRequest) -> float: """ Calculate collateral score for an open PR. - Collateral = base_score * applicable_multipliers * DEFAULT_COLLATERAL_PERCENT + Collateral = base_score * applicable_multipliers * OPEN_PR_COLLATERAL_PERCENT Applicable multipliers: repo_weight, issue NOT applicable: time_decay (merge-based), credibility_multiplier (merge-based), - uniqueness (cross-miner), open_pr_spam (not for collateral) + open_pr_spam (not for collateral) """ from math import prod - # Guard against missing tier configuration - if pr.repository_tier_configuration is None: - bt.logging.warning( - f'OPEN PR #{pr.number} in {pr.repository_full_name} has no tier configuration. Skipping collateral calculation.' 
- ) - return 0.0 - multipliers = { 'repo_weight': pr.repo_weight_multiplier, 'issue': pr.issue_multiplier, } potential_score = pr.base_score * prod(multipliers.values()) - collateral_percent = pr.repository_tier_configuration.open_pr_collateral_percentage - collateral_score = potential_score * collateral_percent + collateral_score = potential_score * OPEN_PR_COLLATERAL_PERCENT mult_str = ' | '.join([f'{k}: {v:.2f}' for k, v in multipliers.items()]) bt.logging.info( f'OPEN PR #{pr.number} | base: {pr.base_score:.2f} | {mult_str} | ' - f'potential: {potential_score:.2f} | collateral ({collateral_percent * 100:.0f}%): {collateral_score:.2f}' + f'potential: {potential_score:.2f} | collateral ({OPEN_PR_COLLATERAL_PERCENT * 100:.0f}%): {collateral_score:.2f}' ) return collateral_score diff --git a/gittensor/validator/oss_contributions/tier_config.py b/gittensor/validator/oss_contributions/tier_config.py deleted file mode 100644 index 5da7ca8e..00000000 --- a/gittensor/validator/oss_contributions/tier_config.py +++ /dev/null @@ -1,257 +0,0 @@ -from __future__ import annotations - -from dataclasses import dataclass -from enum import Enum -from typing import TYPE_CHECKING, Dict, Optional - -import bittensor as bt - -from gittensor.constants import ( - DEFAULT_COLLATERAL_PERCENT, - DEFAULT_MAX_CONTRIBUTION_SCORE_FOR_FULL_BONUS, - DEFAULT_MERGED_PR_BASE_SCORE, - MAX_CONTRIBUTION_BONUS, - TIER_EMISSION_SPLITS, -) - -if TYPE_CHECKING: - from gittensor.classes import MinerEvaluation - - -@dataclass -class TierStats: - """Statistics for a single tier.""" - - merged_count: int = 0 - closed_count: int = 0 - open_count: int = 0 - - unique_repo_contribution_count: int = 0 - # Unique repos that meet a min token score threshold - qualified_unique_repo_count: int = 0 - - # Included as scoring details at the tier level - earned_score: float = 0.0 - collateral_score: float = 0.0 - - # Token scoring breakdown for this tier - token_score: float = 0.0 - structural_count: int = 0 - 
structural_score: float = 0.0 - leaf_count: int = 0 - leaf_score: float = 0.0 - - @property - def total_attempts(self) -> int: - return self.merged_count + self.closed_count - - @property - def total_prs(self) -> int: - return self.merged_count + self.closed_count + self.open_count - - @property - def credibility(self) -> float: - return self.merged_count / self.total_attempts if self.total_attempts > 0 else 0.0 - - -class Tier(str, Enum): - BRONZE = 'Bronze' - SILVER = 'Silver' - GOLD = 'Gold' - - -TIER_DEFAULTS = { - 'merged_pr_base_score': DEFAULT_MERGED_PR_BASE_SCORE, - 'contribution_score_for_full_bonus': DEFAULT_MAX_CONTRIBUTION_SCORE_FOR_FULL_BONUS, - 'contribution_score_max_bonus': MAX_CONTRIBUTION_BONUS, - 'open_pr_collateral_percentage': DEFAULT_COLLATERAL_PERCENT, -} - - -@dataclass(frozen=True) -class TierConfig: - required_credibility: Optional[float] - required_min_token_score: Optional[float] # Minimum total token score to unlock tier - # Unique repos with min token score requirement (both must be set or both None) - required_unique_repos_count: Optional[int] # Number of unique repos needed - required_min_token_score_per_repo: Optional[float] # Min token score each repo must have - - # Tier-specific scaling - credibility_scalar: int - - # Defaults (can override per-tier if needed) - merged_pr_base_score: int = TIER_DEFAULTS['merged_pr_base_score'] - contribution_score_for_full_bonus: int = TIER_DEFAULTS['contribution_score_for_full_bonus'] - contribution_score_max_bonus: int = TIER_DEFAULTS['contribution_score_max_bonus'] - open_pr_collateral_percentage: int = TIER_DEFAULTS['open_pr_collateral_percentage'] - - -TIERS: dict[Tier, TierConfig] = { - Tier.BRONZE: TierConfig( - required_credibility=0.70, - required_min_token_score=None, - required_unique_repos_count=3, - required_min_token_score_per_repo=5.0, # At least n initial unique repos must have at least x token score - credibility_scalar=1.0, - ), - Tier.SILVER: TierConfig( - 
required_credibility=0.65, - required_min_token_score=300.0, # Minimum total token score for Silver unlock - required_unique_repos_count=3, - required_min_token_score_per_repo=89.0, # At least n repos must have at least x token score - credibility_scalar=1.5, - ), - Tier.GOLD: TierConfig( - required_credibility=0.60, - required_min_token_score=500.0, # Minimum total token score for Gold unlock - required_unique_repos_count=3, - required_min_token_score_per_repo=144.0, # At least n unique repos must have at least x token score - credibility_scalar=2.0, - ), -} -TIERS_ORDER: list[Tier] = list(TIERS.keys()) - - -def get_next_tier(current: Tier) -> Optional[Tier]: - """Returns the next tier, or None if already at top.""" - idx = TIERS_ORDER.index(current) - if idx + 1 < len(TIERS_ORDER): - return TIERS_ORDER[idx + 1] - return None - - -def get_tier_from_config(tier_config: TierConfig) -> Optional[Tier]: - """Reverse lookup tier from TierConfig.""" - for tier, config in TIERS.items(): - if config == tier_config: - return tier - return None - - -def allocate_emissions_by_tier(miner_evaluations: Dict[int, MinerEvaluation]) -> None: - """ - Replace each miner's total_score with tier-weighted emission allocations. - 15% of emissions will go to Bronze tier contributions, 35% to silver, and 50% to gold. - - Algorithm: - 1. Calculate net score per miner per tier: max(0, earned - collateral) - 2. Sum network totals per tier - 3. Determine max tier reached across all miners - 4. Redistribute allocations from inactive tiers to active tiers - 5. Calculate each miner's share within each tier - 6. Replace total_score with sum of tier allocations - - Args: - miner_evaluations: Dict mapping uid to MinerEvaluation (modified in place) - - Note: MinerEvaluation is imported via TYPE_CHECKING for type hints only - (avoids circular import with gittensor.classes). 
- """ - # Step 1 & 2: Calculate net scores and network totals per tier - network_tier_totals: Dict[Tier, float] = {tier: 0.0 for tier in TIERS_ORDER} - miner_net_scores: Dict[int, Dict[Tier, float]] = {} - - max_tier: Tier = None - - for uid, evaluation in miner_evaluations.items(): - if not evaluation or evaluation.current_tier is None: - continue - - # Track the highest tier reached across all miners - if max_tier is None: - max_tier = evaluation.current_tier - elif TIERS_ORDER.index(evaluation.current_tier) > TIERS_ORDER.index(max_tier): - max_tier = evaluation.current_tier - - miner_net_scores[uid] = {} - - for tier in TIERS_ORDER: - stats = evaluation.stats_by_tier.get(tier) - if stats is None: - miner_net_scores[uid][tier] = 0.0 - continue - - # Net score floors at 0 - negative in one tier doesn't affect others - net_score = max(0.0, stats.earned_score - stats.collateral_score) - miner_net_scores[uid][tier] = net_score - network_tier_totals[tier] += net_score - - # If no miners have a tier, all scores remain 0 - if max_tier is None: - bt.logging.info('Tier emissions: No tiered miners found, all scores set to 0') - for evaluation in miner_evaluations.values(): - if evaluation: - evaluation.total_score = 0.0 - return - - # Step 3 & 4: Determine active tiers and calculate final percentages - max_tier_idx = TIERS_ORDER.index(max_tier) - active_tiers = TIERS_ORDER[: max_tier_idx + 1] - - # Calculate sum of active tier percentages for redistribution - active_pct_sum = sum(TIER_EMISSION_SPLITS[tier.value] for tier in active_tiers) - - # Final percentages after redistribution. I.e, if gold is not yet unlocked, its 50% allocation will be - # proportionally distributed to the bronze/silver tiers. 
- final_tier_pcts: Dict[Tier, float] = {} - for tier in TIERS_ORDER: - if tier in active_tiers: - original_pct = TIER_EMISSION_SPLITS[tier.value] - final_tier_pcts[tier] = original_pct / active_pct_sum - else: - final_tier_pcts[tier] = 0.0 - - # Log tier allocation summary - bt.logging.info('') - bt.logging.info('=' * 50) - bt.logging.info('Tier-Based Emission Allocation') - bt.logging.info('=' * 50) - bt.logging.info(f'Max tier reached: {max_tier.value}') - bt.logging.info(f'Active tiers: {[t.value for t in active_tiers]}') - bt.logging.info('Network totals per tier:') - for tier in TIERS_ORDER: - status = 'active' if tier in active_tiers else 'redistributed' - bt.logging.info( - f' {tier.value}: {network_tier_totals[tier]:.2f} total | ' - f'{final_tier_pcts[tier] * 100:.1f}% allocation ({status})' - ) - - # Step 5 & 6: Calculate miner allocations and replace total_score - bt.logging.info('') - bt.logging.info('Per-miner allocations:') - - for uid, evaluation in miner_evaluations.items(): - if not evaluation: - continue - - if uid not in miner_net_scores: - evaluation.total_score = 0.0 - continue - - total_allocation = 0.0 - tier_allocations: Dict[Tier, float] = {} - - for tier in TIERS_ORDER: - net_score = miner_net_scores[uid].get(tier, 0.0) - network_total = network_tier_totals[tier] - tier_pct = final_tier_pcts[tier] - - if network_total > 0 and net_score > 0: - miner_share = net_score / network_total - tier_allocation = miner_share * tier_pct - else: - tier_allocation = 0.0 - - tier_allocations[tier] = tier_allocation - total_allocation += tier_allocation - - evaluation.total_score = total_allocation - - # Log non-zero allocations - if total_allocation > 0: - alloc_parts = [ - f'{tier.value}={tier_allocations[tier]:.4f}' for tier in TIERS_ORDER if tier_allocations[tier] > 0 - ] - bt.logging.info(f' UID {uid}: {" + ".join(alloc_parts)} = {total_allocation:.4f}') - - bt.logging.info('=' * 50) diff --git a/gittensor/validator/pat_handler.py 
b/gittensor/validator/pat_handler.py new file mode 100644 index 00000000..7b9f6aa7 --- /dev/null +++ b/gittensor/validator/pat_handler.py @@ -0,0 +1,184 @@ +# Entrius 2025 + +"""Axon handlers for miner PAT broadcasting and checking. + +Miners push their GitHub PAT to validators via PatBroadcastSynapse. +Miners check if a validator has their PAT via PatCheckSynapse. +""" + +from typing import TYPE_CHECKING, Optional, Tuple + +import bittensor as bt +import requests + +from gittensor.constants import BASE_GITHUB_API_URL +from gittensor.synapses import PatBroadcastSynapse, PatCheckSynapse +from gittensor.validator import pat_storage +from gittensor.validator.utils.github_validation import validate_github_credentials + +if TYPE_CHECKING: + from neurons.validator import Validator + + +def _get_hotkey(synapse: bt.Synapse) -> str: + """Extract the caller's hotkey from a synapse, raising if missing.""" + assert synapse.dendrite is not None and synapse.dendrite.hotkey is not None + return synapse.dendrite.hotkey + + +# --------------------------------------------------------------------------- +# PatBroadcastSynapse handlers +# --------------------------------------------------------------------------- + + +async def handle_pat_broadcast(validator: 'Validator', synapse: PatBroadcastSynapse) -> PatBroadcastSynapse: + """Validate and store a miner's GitHub PAT.""" + hotkey = _get_hotkey(synapse) + + def _reject(reason: str) -> PatBroadcastSynapse: + synapse.accepted = False + synapse.rejection_reason = reason + synapse.github_access_token = '' + bt.logging.warning(f'PAT broadcast rejected — hotkey: {hotkey[:16]}... reason: {reason}') + return synapse + + # 1. Verify hotkey is registered on the subnet + if hotkey not in validator.metagraph.hotkeys: + return _reject('Hotkey not registered on subnet') + + uid = validator.metagraph.hotkeys.index(hotkey) + + # 2. 
Validate PAT (checks it works, extracts github_id, verifies account age) + github_id, error = validate_github_credentials(uid, synapse.github_access_token) + if error: + return _reject(error) + + # 3. Enforce GitHub identity pinning — same hotkey cannot switch GitHub accounts + existing = pat_storage.get_pat_by_uid(uid) + if existing and existing.get('hotkey') == hotkey and existing.get('github_id'): + if existing['github_id'] != github_id: + return _reject( + 'GitHub identity is locked for this hotkey. Deregister and re-register to change GitHub accounts.' + ) + + # 4. Test query against a known repo to catch org-restricted PATs + test_error = _test_pat_against_repo(synapse.github_access_token) + if test_error: + return _reject(f'PAT test query failed: {test_error}') + + # 5. Store PAT (github_id guaranteed non-None after validate_github_credentials success) + pat_storage.save_pat(uid=uid, hotkey=hotkey, pat=synapse.github_access_token, github_id=github_id or '0') + + # Clear PAT from response so it isn't echoed back + synapse.github_access_token = '' + synapse.accepted = True + bt.logging.success(f'PAT broadcast accepted — UID: {uid}, hotkey: {hotkey[:16]}..., github_id: {github_id}') + return synapse + + +async def blacklist_pat_broadcast(validator: 'Validator', synapse: PatBroadcastSynapse) -> Tuple[bool, str]: + """Reject PAT broadcasts from unregistered hotkeys.""" + hotkey = _get_hotkey(synapse) + if hotkey not in validator.metagraph.hotkeys: + return True, f'Hotkey {hotkey[:16]}... 
not registered' + return False, 'Hotkey recognized' + + +async def priority_pat_broadcast(validator: 'Validator', synapse: PatBroadcastSynapse) -> float: + """Prioritize PAT broadcasts by stake.""" + hotkey = _get_hotkey(synapse) + if hotkey not in validator.metagraph.hotkeys: + return 0.0 + uid = validator.metagraph.hotkeys.index(hotkey) + return float(validator.metagraph.S[uid]) + + +# --------------------------------------------------------------------------- +# PatCheckSynapse handlers +# --------------------------------------------------------------------------- + + +async def handle_pat_check(validator: 'Validator', synapse: PatCheckSynapse) -> PatCheckSynapse: + """Check if the validator has the miner's PAT stored and re-validate it.""" + hotkey = _get_hotkey(synapse) + uid = validator.metagraph.hotkeys.index(hotkey) + entry = pat_storage.get_pat_by_uid(uid) + + bt.logging.info(f'PAT check request — UID: {uid}, hotkey: {hotkey[:16]}...') + + # Check if PAT exists and hotkey matches (not a stale entry from a previous miner) + if entry is None or entry.get('hotkey') != hotkey: + synapse.has_pat = False + synapse.pat_valid = False + synapse.rejection_reason = 'No PAT stored for this miner' + bt.logging.info(f'PAT check result — UID: {uid}: no PAT stored') + return synapse + + synapse.has_pat = True + + # Re-validate the stored PAT + _, error = validate_github_credentials(uid, entry['pat']) + if error: + synapse.pat_valid = False + synapse.rejection_reason = error + bt.logging.warning(f'PAT check result — UID: {uid}: validation failed: {error}') + return synapse + + test_error = _test_pat_against_repo(entry['pat']) + if test_error: + synapse.pat_valid = False + synapse.rejection_reason = f'PAT test query failed: {test_error}' + bt.logging.warning(f'PAT check result — UID: {uid}: test query failed: {test_error}') + return synapse + + synapse.pat_valid = True + bt.logging.success(f'PAT check result — UID: {uid}: valid') + return synapse + + +async def 
blacklist_pat_check(validator: 'Validator', synapse: PatCheckSynapse) -> Tuple[bool, str]: + """Reject PAT checks from unregistered hotkeys.""" + hotkey = _get_hotkey(synapse) + if hotkey not in validator.metagraph.hotkeys: + return True, f'Hotkey {hotkey[:16]}... not registered' + return False, 'Hotkey recognized' + + +async def priority_pat_check(validator: 'Validator', synapse: PatCheckSynapse) -> float: + """Prioritize PAT checks by stake.""" + hotkey = _get_hotkey(synapse) + if hotkey not in validator.metagraph.hotkeys: + return 0.0 + uid = validator.metagraph.hotkeys.index(hotkey) + return float(validator.metagraph.S[uid]) + + +# --------------------------------------------------------------------------- +# Internal helpers +# --------------------------------------------------------------------------- + +_TEST_GRAPHQL_QUERY = '{ viewer { login } }' + + +def _test_pat_against_repo(pat: str) -> Optional[str]: + """Run a test GraphQL call to verify the PAT has the access scoring requires. + + Scoring uses the GraphQL API to fetch miner PRs, so this mirrors the real path. + Returns an error string on failure, None on success. + """ + headers = {'Authorization': f'bearer {pat}', 'Accept': 'application/json'} + try: + response = requests.post( + f'{BASE_GITHUB_API_URL}/graphql', + json={'query': _TEST_GRAPHQL_QUERY}, + headers=headers, + timeout=15, + ) + if response.status_code != 200: + return f'GitHub GraphQL API returned {response.status_code}' + data = response.json() + if 'errors' in data: + return f'GraphQL error: {data["errors"][0].get("message", "unknown")}' + return None + except requests.RequestException as e: + return str(e) diff --git a/gittensor/validator/pat_storage.py b/gittensor/validator/pat_storage.py new file mode 100644 index 00000000..3c1a2346 --- /dev/null +++ b/gittensor/validator/pat_storage.py @@ -0,0 +1,104 @@ +# Entrius 2025 + +"""Thread-safe JSON storage for miner GitHub PATs. 
+ +Validators store PATs received via PatBroadcastSynapse in miner_pats.json at the project root. +The scoring loop snapshots the full file once per round via load_all_pats(); mid-round +broadcasts update the file but do not affect the current scoring round. +""" + +import json +import os +import tempfile +import threading +from datetime import datetime, timezone +from pathlib import Path +from typing import Optional + +PATS_FILE = Path(__file__).resolve().parents[2] / 'data' / 'miner_pats.json' + +_lock = threading.Lock() + + +def ensure_pats_file() -> None: + """Create the PATs file with an empty list if it doesn't exist. Called on validator boot.""" + with _lock: + if not PATS_FILE.exists(): + _write_file([]) + + +def load_all_pats() -> list[dict]: + """Read all stored PAT entries. Returns empty list if file is missing or corrupt.""" + with _lock: + return _read_file() + + +def save_pat(uid: int, hotkey: str, pat: str, github_id: str) -> None: + """Upsert a PAT entry by UID. Creates the file if needed.""" + with _lock: + entries = _read_file() + + entry = { + 'uid': uid, + 'hotkey': hotkey, + 'pat': pat, + 'github_id': github_id, + 'stored_at': datetime.now(timezone.utc).isoformat(), + } + + for i, existing in enumerate(entries): + if existing.get('uid') == uid: + entries[i] = entry + break + else: + entries.append(entry) + + _write_file(entries) + + +def get_pat_by_uid(uid: int) -> Optional[dict]: + """Look up a single PAT entry by UID. Returns None if not found.""" + with _lock: + for entry in _read_file(): + if entry.get('uid') == uid: + return entry + return None + + +def remove_pat(uid: int) -> bool: + """Remove a PAT entry by UID. Returns True if an entry was removed.""" + with _lock: + entries = _read_file() + filtered = [e for e in entries if e.get('uid') != uid] + if len(filtered) == len(entries): + return False + _write_file(filtered) + return True + + +def _read_file() -> list[dict]: + """Read and parse the JSON file. 
Must be called while holding _lock.""" + if not PATS_FILE.exists(): + return [] + try: + return json.loads(PATS_FILE.read_text()) + except (json.JSONDecodeError, OSError): + return [] + + +def _write_file(entries: list[dict]) -> None: + """Atomically write entries to JSON file. Must be called while holding _lock.""" + PATS_FILE.parent.mkdir(parents=True, exist_ok=True) + # Write to temp file then atomically replace to avoid partial reads + fd, tmp_path = tempfile.mkstemp(dir=PATS_FILE.parent, suffix='.tmp') + try: + with os.fdopen(fd, 'w') as f: + json.dump(entries, f, indent=2) + os.replace(tmp_path, PATS_FILE) + except BaseException: + # Clean up temp file on any failure + try: + os.unlink(tmp_path) + except OSError: + pass + raise diff --git a/gittensor/validator/storage/database.py b/gittensor/validator/storage/database.py index ade821d4..eb54a6e3 100644 --- a/gittensor/validator/storage/database.py +++ b/gittensor/validator/storage/database.py @@ -3,7 +3,7 @@ """ import os -from typing import Optional +from typing import Any, Optional import bittensor as bt @@ -16,7 +16,7 @@ bt.logging.warning('psycopg2 not installed. Database storage features will be disabled.') -def create_database_connection() -> Optional[object]: +def create_database_connection() -> Optional[Any]: """ Create a PostgreSQL database connection using environment variables. 
diff --git a/gittensor/validator/storage/queries.py b/gittensor/validator/storage/queries.py index 4fa1d79e..1260575e 100644 --- a/gittensor/validator/storage/queries.py +++ b/gittensor/validator/storage/queries.py @@ -9,18 +9,27 @@ AND created_at <= %s """ -CLEANUP_STALE_MINER_TIER_STATS = """ -DELETE FROM miner_tier_stats +CLEANUP_STALE_MINERS = """ +DELETE FROM miners WHERE github_id = %s AND github_id != '0' AND (uid != %s OR hotkey != %s) """ -CLEANUP_STALE_MINERS = """ +# Reverse cleanup: Remove stale data when a (uid, hotkey) re-links to a new github_id +CLEANUP_STALE_MINER_EVALUATIONS_BY_HOTKEY = """ +DELETE FROM miner_evaluations +WHERE uid = %s AND hotkey = %s + AND github_id != %s + AND github_id != '0' + AND created_at <= %s +""" + +CLEANUP_STALE_MINERS_BY_HOTKEY = """ DELETE FROM miners -WHERE github_id = %s +WHERE uid = %s AND hotkey = %s + AND github_id != %s AND github_id != '0' - AND (uid != %s OR hotkey != %s) """ # Miner Queries @@ -38,7 +47,7 @@ merged_at, pr_created_at, pr_state, repo_weight_multiplier, base_score, issue_multiplier, open_pr_spam_multiplier, pioneer_dividend, pioneer_rank, time_decay_multiplier, - credibility_multiplier, review_quality_multiplier, raw_credibility, credibility_scalar, + credibility_multiplier, review_quality_multiplier, earned_score, collateral_score, additions, deletions, commits, total_nodes_scored, merged_by_login, description, last_edited_at, @@ -61,8 +70,6 @@ time_decay_multiplier = EXCLUDED.time_decay_multiplier, credibility_multiplier = EXCLUDED.credibility_multiplier, review_quality_multiplier = EXCLUDED.review_quality_multiplier, - raw_credibility = EXCLUDED.raw_credibility, - credibility_scalar = EXCLUDED.credibility_scalar, earned_score = EXCLUDED.earned_score, collateral_score = EXCLUDED.collateral_score, additions = EXCLUDED.additions, @@ -84,7 +91,12 @@ BULK_UPSERT_ISSUES = """ INSERT INTO issues ( number, pr_number, repository_full_name, title, created_at, closed_at, - author_login, state, 
author_association + author_login, state, author_association, + author_github_id, is_transferred, updated_at, + discovery_base_score, discovery_earned_score, + discovery_review_quality_multiplier, discovery_repo_weight_multiplier, + discovery_time_decay_multiplier, discovery_credibility_multiplier, + discovery_open_issue_spam_multiplier ) VALUES %s ON CONFLICT (number, pr_number, repository_full_name) DO UPDATE SET @@ -92,7 +104,17 @@ closed_at = EXCLUDED.closed_at, author_login = EXCLUDED.author_login, state = EXCLUDED.state, - author_association = EXCLUDED.author_association + author_association = EXCLUDED.author_association, + author_github_id = EXCLUDED.author_github_id, + is_transferred = EXCLUDED.is_transferred, + updated_at = EXCLUDED.updated_at, + discovery_base_score = EXCLUDED.discovery_base_score, + discovery_earned_score = EXCLUDED.discovery_earned_score, + discovery_review_quality_multiplier = EXCLUDED.discovery_review_quality_multiplier, + discovery_repo_weight_multiplier = EXCLUDED.discovery_repo_weight_multiplier, + discovery_time_decay_multiplier = EXCLUDED.discovery_time_decay_multiplier, + discovery_credibility_multiplier = EXCLUDED.discovery_credibility_multiplier, + discovery_open_issue_spam_multiplier = EXCLUDED.discovery_open_issue_spam_multiplier """ # File Change Queries @@ -115,9 +137,10 @@ INSERT INTO miner_evaluations ( uid, hotkey, github_id, failed_reason, base_total_score, total_score, total_collateral_score, total_nodes_scored, total_open_prs, total_closed_prs, total_merged_prs, total_prs, - unique_repos_count, qualified_unique_repos_count, - current_tier, - total_token_score, total_structural_count, total_structural_score, total_leaf_count, total_leaf_score + unique_repos_count, is_eligible, credibility, + total_token_score, total_structural_count, total_structural_score, total_leaf_count, total_leaf_score, + issue_discovery_score, issue_token_score, issue_credibility, is_issue_eligible, + total_solved_issues, 
total_valid_solved_issues, total_closed_issues, total_open_issues ) VALUES %s ON CONFLICT (uid, hotkey, github_id) DO UPDATE SET @@ -131,100 +154,20 @@ total_merged_prs = EXCLUDED.total_merged_prs, total_prs = EXCLUDED.total_prs, unique_repos_count = EXCLUDED.unique_repos_count, - qualified_unique_repos_count = EXCLUDED.qualified_unique_repos_count, - current_tier = EXCLUDED.current_tier, + is_eligible = EXCLUDED.is_eligible, + credibility = EXCLUDED.credibility, total_token_score = EXCLUDED.total_token_score, total_structural_count = EXCLUDED.total_structural_count, total_structural_score = EXCLUDED.total_structural_score, total_leaf_count = EXCLUDED.total_leaf_count, total_leaf_score = EXCLUDED.total_leaf_score, + issue_discovery_score = EXCLUDED.issue_discovery_score, + issue_token_score = EXCLUDED.issue_token_score, + issue_credibility = EXCLUDED.issue_credibility, + is_issue_eligible = EXCLUDED.is_issue_eligible, + total_solved_issues = EXCLUDED.total_solved_issues, + total_valid_solved_issues = EXCLUDED.total_valid_solved_issues, + total_closed_issues = EXCLUDED.total_closed_issues, + total_open_issues = EXCLUDED.total_open_issues, updated_at = NOW() """ - -# Miner Tier Stats Queries (joins on uid, hotkey, github_id) -BULK_UPSERT_MINER_TIER_STATS = """ -INSERT INTO miner_tier_stats ( - uid, hotkey, github_id, - bronze_merged_prs, bronze_closed_prs, bronze_total_prs, bronze_collateral_score, bronze_score, - bronze_unique_repos, bronze_qualified_unique_repos, - bronze_token_score, bronze_structural_count, bronze_structural_score, bronze_leaf_count, bronze_leaf_score, - silver_merged_prs, silver_closed_prs, silver_total_prs, silver_collateral_score, silver_score, - silver_unique_repos, silver_qualified_unique_repos, - silver_token_score, silver_structural_count, silver_structural_score, silver_leaf_count, silver_leaf_score, - gold_merged_prs, gold_closed_prs, gold_total_prs, gold_collateral_score, gold_score, - gold_unique_repos, gold_qualified_unique_repos, - 
gold_token_score, gold_structural_count, gold_structural_score, gold_leaf_count, gold_leaf_score -) VALUES %s -ON CONFLICT (uid, hotkey, github_id) -DO UPDATE SET - bronze_merged_prs = EXCLUDED.bronze_merged_prs, - bronze_closed_prs = EXCLUDED.bronze_closed_prs, - bronze_total_prs = EXCLUDED.bronze_total_prs, - bronze_collateral_score = EXCLUDED.bronze_collateral_score, - bronze_score = EXCLUDED.bronze_score, - bronze_unique_repos = EXCLUDED.bronze_unique_repos, - bronze_qualified_unique_repos = EXCLUDED.bronze_qualified_unique_repos, - bronze_token_score = EXCLUDED.bronze_token_score, - bronze_structural_count = EXCLUDED.bronze_structural_count, - bronze_structural_score = EXCLUDED.bronze_structural_score, - bronze_leaf_count = EXCLUDED.bronze_leaf_count, - bronze_leaf_score = EXCLUDED.bronze_leaf_score, - silver_merged_prs = EXCLUDED.silver_merged_prs, - silver_closed_prs = EXCLUDED.silver_closed_prs, - silver_total_prs = EXCLUDED.silver_total_prs, - silver_collateral_score = EXCLUDED.silver_collateral_score, - silver_score = EXCLUDED.silver_score, - silver_unique_repos = EXCLUDED.silver_unique_repos, - silver_qualified_unique_repos = EXCLUDED.silver_qualified_unique_repos, - silver_token_score = EXCLUDED.silver_token_score, - silver_structural_count = EXCLUDED.silver_structural_count, - silver_structural_score = EXCLUDED.silver_structural_score, - silver_leaf_count = EXCLUDED.silver_leaf_count, - silver_leaf_score = EXCLUDED.silver_leaf_score, - gold_merged_prs = EXCLUDED.gold_merged_prs, - gold_closed_prs = EXCLUDED.gold_closed_prs, - gold_total_prs = EXCLUDED.gold_total_prs, - gold_collateral_score = EXCLUDED.gold_collateral_score, - gold_score = EXCLUDED.gold_score, - gold_unique_repos = EXCLUDED.gold_unique_repos, - gold_qualified_unique_repos = EXCLUDED.gold_qualified_unique_repos, - gold_token_score = EXCLUDED.gold_token_score, - gold_structural_count = EXCLUDED.gold_structural_count, - gold_structural_score = EXCLUDED.gold_structural_score, - 
gold_leaf_count = EXCLUDED.gold_leaf_count, - gold_leaf_score = EXCLUDED.gold_leaf_score, - updated_at = NOW() -""" - -# Merge Prediction Queries -UPSERT_PREDICTION = """ -INSERT INTO predictions ( - uid, hotkey, github_id, issue_id, repository, - issue_number, pr_number, prediction, variance_at_prediction, timestamp -) VALUES (%s, %s, %s, %s, %s, %s, %s, %s, %s, %s) -ON CONFLICT (uid, hotkey, github_id, issue_id, pr_number) -DO UPDATE SET - prediction = EXCLUDED.prediction, - variance_at_prediction = EXCLUDED.variance_at_prediction, - timestamp = EXCLUDED.timestamp -""" - -UPSERT_PREDICTION_EMA = """ -INSERT INTO prediction_emas (github_id, ema_score, rounds, updated_at) -VALUES (%s, %s, %s, %s) -ON CONFLICT (github_id) -DO UPDATE SET - ema_score = EXCLUDED.ema_score, - rounds = prediction_emas.rounds + 1, - updated_at = EXCLUDED.updated_at -""" - -UPSERT_SETTLED_ISSUE = """ -INSERT INTO settled_issues (issue_id, outcome, merged_pr_number, settled_at) -VALUES (%s, %s, %s, %s) -ON CONFLICT (issue_id) DO NOTHING -""" - -DELETE_PREDICTIONS_FOR_ISSUE = """ -DELETE FROM predictions WHERE issue_id = %s -""" diff --git a/gittensor/validator/storage/repository.py b/gittensor/validator/storage/repository.py index bd5b8e65..2a6f8fe6 100644 --- a/gittensor/validator/storage/repository.py +++ b/gittensor/validator/storage/repository.py @@ -13,22 +13,17 @@ import numpy as np from gittensor.classes import FileChange, Issue, Miner, MinerEvaluation, PullRequest -from gittensor.validator.oss_contributions.tier_config import Tier from .queries import ( BULK_UPSERT_FILE_CHANGES, BULK_UPSERT_ISSUES, BULK_UPSERT_MINER_EVALUATION, - BULK_UPSERT_MINER_TIER_STATS, BULK_UPSERT_PULL_REQUESTS, CLEANUP_STALE_MINER_EVALUATIONS, - CLEANUP_STALE_MINER_TIER_STATS, + CLEANUP_STALE_MINER_EVALUATIONS_BY_HOTKEY, CLEANUP_STALE_MINERS, - DELETE_PREDICTIONS_FOR_ISSUE, + CLEANUP_STALE_MINERS_BY_HOTKEY, SET_MINER, - UPSERT_PREDICTION, - UPSERT_PREDICTION_EMA, - UPSERT_SETTLED_ISSUE, ) T = TypeVar('T') @@ 
-117,7 +112,7 @@ def cleanup_stale_miner_data(self, evaluation: MinerEvaluation) -> None: """ Remove stale evaluation data when a miner re-registers on a new uid/hotkey. - Deletes miner_evaluations, miner_tier_stats, and miners rows for the same + Deletes miner_evaluations and miners rows for the same github_id but under a different (uid, hotkey) pair, ensuring only one evaluation per real github user exists in the database. @@ -130,10 +125,16 @@ def cleanup_stale_miner_data(self, evaluation: MinerEvaluation) -> None: params = (evaluation.github_id, evaluation.uid, evaluation.hotkey) eval_params = params + (evaluation.evaluation_timestamp,) + # Clean up when same github_id re-registers on a new uid/hotkey self.execute_command(CLEANUP_STALE_MINER_EVALUATIONS, eval_params) - self.execute_command(CLEANUP_STALE_MINER_TIER_STATS, params) self.execute_command(CLEANUP_STALE_MINERS, params) + # Clean up when same (uid, hotkey) re-links to a new github_id + reverse_params = (evaluation.uid, evaluation.hotkey, evaluation.github_id) + reverse_eval_params = reverse_params + (evaluation.evaluation_timestamp,) + self.execute_command(CLEANUP_STALE_MINER_EVALUATIONS_BY_HOTKEY, reverse_eval_params) + self.execute_command(CLEANUP_STALE_MINERS_BY_HOTKEY, reverse_params) + def store_pull_requests_bulk(self, pull_requests: List[PullRequest]) -> int: """ Bulk insert/update pull requests with efficient SQL conflict resolution @@ -175,8 +176,6 @@ def store_pull_requests_bulk(self, pull_requests: List[PullRequest]) -> int: pr.time_decay_multiplier, pr.credibility_multiplier, pr.review_quality_multiplier, - pr.raw_credibility, - pr.credibility_scalar, pr.earned_score, pr.collateral_score, pr.additions, @@ -240,6 +239,16 @@ def store_issues_bulk(self, issues: List[Issue]) -> int: issue.author_login, issue.state, issue.author_association, + issue.author_github_id, + issue.is_transferred, + issue.updated_at, + issue.discovery_base_score, + issue.discovery_earned_score, + 
issue.discovery_review_quality_multiplier, + issue.discovery_repo_weight_multiplier, + issue.discovery_time_decay_multiplier, + issue.discovery_credibility_multiplier, + issue.discovery_open_issue_spam_multiplier, ) ) @@ -304,7 +313,8 @@ def store_file_changes_bulk(self, file_changes: List[FileChange]) -> int: return len(values) except Exception as e: self.db.rollback() - self.logger.error(f'Error in bulk file change storage: {e}') + prs = {(fc.pr_number, fc.repository_full_name) for fc in file_changes} + self.logger.error(f'Error in bulk file change storage: {e} | PRs: {prs}') return 0 def set_miner_evaluation(self, evaluation: MinerEvaluation) -> bool: @@ -332,13 +342,21 @@ def set_miner_evaluation(self, evaluation: MinerEvaluation) -> bool: evaluation.total_merged_prs, evaluation.total_prs, evaluation.unique_repos_count, - evaluation.qualified_unique_repos_count, - evaluation.current_tier.value if evaluation.current_tier else None, + evaluation.is_eligible, + evaluation.credibility, evaluation.total_token_score, evaluation.total_structural_count, evaluation.total_structural_score, evaluation.total_leaf_count, evaluation.total_leaf_score, + evaluation.issue_discovery_score, + evaluation.issue_token_score, + evaluation.issue_credibility, + evaluation.is_issue_eligible, + evaluation.total_solved_issues, + evaluation.total_valid_solved_issues, + evaluation.total_closed_issues, + evaluation.total_open_issues, ) ] @@ -353,121 +371,3 @@ def set_miner_evaluation(self, evaluation: MinerEvaluation) -> bool: self.db.rollback() self.logger.error(f'Error in miner evaluation storage: {e}') return False - - def set_miner_tier_stats(self, evaluation: MinerEvaluation) -> bool: - """ - Insert or update miner tier stats. 
- - Args: - evaluation: MinerEvaluation object containing tier stats - - Returns: - True if successful, False otherwise - """ - bronze = evaluation.stats_by_tier[Tier.BRONZE] - silver = evaluation.stats_by_tier[Tier.SILVER] - gold = evaluation.stats_by_tier[Tier.GOLD] - - tier_stats_values = [ - ( - evaluation.uid, - evaluation.hotkey, - evaluation.github_id, - # Bronze tier - bronze.merged_count, - bronze.closed_count, - bronze.total_prs, - bronze.collateral_score, - bronze.earned_score, - bronze.unique_repo_contribution_count, - bronze.qualified_unique_repo_count, - bronze.token_score, - bronze.structural_count, - bronze.structural_score, - bronze.leaf_count, - bronze.leaf_score, - # Silver tier - silver.merged_count, - silver.closed_count, - silver.total_prs, - silver.collateral_score, - silver.earned_score, - silver.unique_repo_contribution_count, - silver.qualified_unique_repo_count, - silver.token_score, - silver.structural_count, - silver.structural_score, - silver.leaf_count, - silver.leaf_score, - # Gold tier - gold.merged_count, - gold.closed_count, - gold.total_prs, - gold.collateral_score, - gold.earned_score, - gold.unique_repo_contribution_count, - gold.qualified_unique_repo_count, - gold.token_score, - gold.structural_count, - gold.structural_score, - gold.leaf_count, - gold.leaf_score, - ) - ] - - try: - with self.get_cursor() as cursor: - from psycopg2.extras import execute_values - - execute_values(cursor, BULK_UPSERT_MINER_TIER_STATS, tier_stats_values) - self.db.commit() - return True - except Exception as e: - self.db.rollback() - self.logger.error(f'Error in miner tier stats storage: {e}') - return False - - # Merge Prediction Storage - def store_prediction( - self, - uid: int, - hotkey: str, - github_id: str, - issue_id: int, - repository: str, - issue_number: int, - pr_number: int, - prediction: float, - variance_at_prediction: float, - timestamp: str, - ) -> bool: - params = ( - uid, - hotkey, - github_id, - issue_id, - repository, - 
issue_number, - pr_number, - prediction, - variance_at_prediction, - timestamp, - ) - return self.set_entity(UPSERT_PREDICTION, params) - - def store_prediction_ema(self, github_id: str, ema_score: float, rounds: int, updated_at: str) -> bool: - params = (github_id, ema_score, rounds, updated_at) - return self.set_entity(UPSERT_PREDICTION_EMA, params) - - def store_settled_issue( - self, - issue_id: int, - outcome: str, - merged_pr_number: int | None, - settled_at: str, - ) -> bool: - params = (issue_id, outcome, merged_pr_number, settled_at) - return self.set_entity(UPSERT_SETTLED_ISSUE, params) - - def delete_predictions_for_issue(self, issue_id: int) -> bool: - return self.execute_command(DELETE_PREDICTIONS_FOR_ISSUE, (issue_id,)) diff --git a/gittensor/validator/utils/config.py b/gittensor/validator/utils/config.py index e0dde779..39d11f75 100644 --- a/gittensor/validator/utils/config.py +++ b/gittensor/validator/utils/config.py @@ -1,5 +1,4 @@ import os -from pathlib import Path import bittensor as bt @@ -15,9 +14,6 @@ # optional env vars STORE_DB_RESULTS = os.getenv('STORE_DB_RESULTS', 'false').lower() == 'true' -# Merge predictions DB path — defaults to /app/data/ so it lands inside the Docker volume -MP_DB_PATH = os.getenv('MP_DB_PATH', str(Path(__file__).resolve().parents[3] / 'data' / 'gt-merge-preds.db')) - # log values bt.logging.info(f'VALIDATOR_WAIT: {VALIDATOR_WAIT}') bt.logging.info(f'VALIDATOR_STEPS_INTERVAL: {VALIDATOR_STEPS_INTERVAL}') diff --git a/gittensor/validator/utils/github_validation.py b/gittensor/validator/utils/github_validation.py index 5fbda02b..dd194629 100644 --- a/gittensor/validator/utils/github_validation.py +++ b/gittensor/validator/utils/github_validation.py @@ -5,11 +5,7 @@ from typing import Optional, Tuple -from gittensor.constants import MIN_GITHUB_ACCOUNT_AGE -from gittensor.utils.github_api_tools import ( - get_github_account_age_days, - get_github_id, -) +from gittensor.utils.github_api_tools import get_github_id def 
validate_github_credentials(uid: int, pat: Optional[str]) -> Tuple[Optional[str], Optional[str]]: @@ -21,10 +17,4 @@ def validate_github_credentials(uid: int, pat: Optional[str]) -> Tuple[Optional[ if not github_id: return None, f"No Github id found for miner {uid}'s PAT" - account_age = get_github_account_age_days(pat) - if not account_age: - return None, f'Could not determine Github account age for miner {uid}' - if account_age < MIN_GITHUB_ACCOUNT_AGE: - return None, f"Miner {uid}'s Github account too young ({account_age} < {MIN_GITHUB_ACCOUNT_AGE} days)" - return github_id, None diff --git a/gittensor/validator/utils/load_weights.py b/gittensor/validator/utils/load_weights.py index e9763c9c..6611f1cf 100644 --- a/gittensor/validator/utils/load_weights.py +++ b/gittensor/validator/utils/load_weights.py @@ -8,7 +8,6 @@ import bittensor as bt from gittensor.constants import NON_CODE_EXTENSIONS -from gittensor.validator.oss_contributions.tier_config import Tier @dataclass @@ -32,13 +31,12 @@ class RepositoryConfig: weight: Repository weight for scoring inactive_at: ISO timestamp when repository became inactive (None if active) additional_acceptable_branches: List of additional branch patterns to accept (None if only default branch) - tier: Repository tier (Bronze, Silver, Gold) - None if not assigned + """ weight: float inactive_at: Optional[str] = None additional_acceptable_branches: Optional[List[str]] = None - tier: Optional[Tier] = None @dataclass @@ -108,16 +106,10 @@ def load_master_repo_weights() -> Dict[str, RepositoryConfig]: normalized_data: Dict[str, RepositoryConfig] = {} for repo_name, metadata in data.items(): try: - # Extract tier if present, convert to Tier enum - tier_str = metadata.get('tier') - tier = Tier(tier_str) if tier_str else None - - # Create RepositoryConfig object config = RepositoryConfig( weight=float(metadata.get('weight', 0.01)), inactive_at=metadata.get('inactive_at'), 
additional_acceptable_branches=metadata.get('additional_acceptable_branches'), - tier=tier, ) normalized_data[repo_name.lower()] = config except (ValueError, TypeError) as e: diff --git a/gittensor/validator/utils/storage.py b/gittensor/validator/utils/storage.py index 2f779dcf..3f8a709c 100644 --- a/gittensor/validator/utils/storage.py +++ b/gittensor/validator/utils/storage.py @@ -45,10 +45,11 @@ def store_evaluation(self, miner_eval: MinerEvaluation) -> StorageResult: try: # Start transaction + assert self.db_connection is not None and self.repo is not None self.db_connection.autocommit = False # Store all entities using bulk methods - miner = Miner(miner_eval.uid, miner_eval.hotkey, miner_eval.github_id) + miner = Miner(miner_eval.uid, miner_eval.hotkey, miner_eval.github_id or '') result.stored_counts['miners'] = self.repo.set_miner(miner) result.stored_counts['merged_pull_requests'] = self.repo.store_pull_requests_bulk( @@ -66,7 +67,6 @@ def store_evaluation(self, miner_eval: MinerEvaluation) -> StorageResult: self.repo.cleanup_stale_miner_data(miner_eval) result.stored_counts['evaluations'] = 1 if self.repo.set_miner_evaluation(miner_eval) else 0 - result.stored_counts['tier_stats'] = 1 if self.repo.set_miner_tier_stats(miner_eval) else 0 # Commit transaction self.db_connection.commit() @@ -74,8 +74,9 @@ def store_evaluation(self, miner_eval: MinerEvaluation) -> StorageResult: except Exception as ex: # Rollback transaction - self.db_connection.rollback() - self.db_connection.autocommit = True + if self.db_connection is not None: + self.db_connection.rollback() + self.db_connection.autocommit = True error_msg = f'Failed to store evaluation data for UID {miner_eval.uid}: {str(ex)}' result.success = False @@ -84,71 +85,6 @@ def store_evaluation(self, miner_eval: MinerEvaluation) -> StorageResult: return result - def store_prediction( - self, - uid: int, - hotkey: str, - github_id: str, - issue_id: int, - repository: str, - issue_number: int, - pr_number: int, - 
prediction: float, - variance_at_prediction: float, - timestamp: str, - ) -> bool: - if not self.is_enabled(): - return False - try: - return self.repo.store_prediction( - uid, - hotkey, - github_id, - issue_id, - repository, - issue_number, - pr_number, - prediction, - variance_at_prediction, - timestamp, - ) - except Exception as e: - self.logger.warning(f'Postgres merge prediction write failed (non-fatal): {e}') - return False - - def store_prediction_ema(self, github_id: str, ema_score: float, rounds: int, updated_at: str) -> bool: - if not self.is_enabled(): - return False - try: - return self.repo.store_prediction_ema(github_id, ema_score, rounds, updated_at) - except Exception as e: - self.logger.warning(f'Postgres merge prediction EMA write failed (non-fatal): {e}') - return False - - def store_settled_issue( - self, - issue_id: int, - outcome: str, - merged_pr_number: int | None, - settled_at: str, - ) -> bool: - if not self.is_enabled(): - return False - try: - return self.repo.store_settled_issue(issue_id, outcome, merged_pr_number, settled_at) - except Exception as e: - self.logger.warning(f'Postgres merge settled issue write failed (non-fatal): {e}') - return False - - def delete_predictions_for_issue(self, issue_id: int) -> bool: - if not self.is_enabled(): - return False - try: - return self.repo.delete_predictions_for_issue(issue_id) - except Exception as e: - self.logger.warning(f'Postgres merge prediction delete failed (non-fatal): {e}') - return False - def _log_storage_summary(self, counts: Dict[str, int]): """Log a summary of what was stored""" self.logger.info('Storage Summary:') diff --git a/gittensor/validator/utils/tree_sitter_scoring.py b/gittensor/validator/utils/tree_sitter_scoring.py index ba100510..2a204a8d 100644 --- a/gittensor/validator/utils/tree_sitter_scoring.py +++ b/gittensor/validator/utils/tree_sitter_scoring.py @@ -8,12 +8,14 @@ from gittensor.classes import ( FileScoreResult, - PrScoringResult, + PrScoringResultCategorized, 
ScoreBreakdown, ) from gittensor.constants import ( COMMENT_NODE_TYPES, DEFAULT_PROGRAMMING_LANGUAGE_WEIGHT, + INLINE_TEST_EXTENSIONS, + INLINE_TEST_PATTERNS, MAX_FILE_SIZE_BYTES, MAX_LINES_SCORED_FOR_NON_CODE_EXT, NON_CODE_EXTENSIONS, @@ -47,7 +49,7 @@ def get_parser(language: str) -> Optional[Parser]: try: from tree_sitter_language_pack import get_parser as get_ts_parser - parser = get_ts_parser(language) + parser = get_ts_parser(language) # type: ignore[arg-type] _parser_cache[language] = parser return parser except Exception as e: @@ -138,6 +140,21 @@ def walk_node(node: Node) -> None: return signatures +def has_inline_tests(content: str, extension: str) -> bool: + """Check whether source code contains inline test markers. + + Uses simple pattern matching to detect language-specific test constructs + that live inside production source files. Currently supports: + - Rust: ``#[cfg(test)]``, ``#![cfg(test)]``, ``#[test]``, ``#[tokio::test]`` + - Zig: ``test "name" { ... }``, ``test { ... }`` + - D: ``unittest { ... 
}`` + """ + pattern = INLINE_TEST_PATTERNS.get(extension) + if pattern is None: + return False + return pattern.search(content) is not None + + def score_tree_diff( old_content: Optional[str], new_content: Optional[str], @@ -186,12 +203,12 @@ def score_tree_diff( # Score added nodes for signature, count in added.items(): if signature[0] == 'structural': - _, node_type = signature + node_type = signature[1] weight = weights.get_structural_weight(node_type) breakdown.structural_added_count += count breakdown.structural_added_score += weight * count else: # leaf - _, node_type, _ = signature + node_type = signature[1] weight = weights.get_leaf_weight(node_type) breakdown.leaf_added_count += count breakdown.leaf_added_score += weight * count @@ -199,12 +216,12 @@ def score_tree_diff( # Score deleted nodes for signature, count in deleted.items(): if signature[0] == 'structural': - _, node_type = signature + node_type = signature[1] weight = weights.get_structural_weight(node_type) breakdown.structural_deleted_count += count breakdown.structural_deleted_score += weight * count else: # leaf - _, node_type, _ = signature + node_type = signature[1] weight = weights.get_leaf_weight(node_type) breakdown.leaf_deleted_count += count breakdown.leaf_deleted_score += weight * count @@ -217,7 +234,7 @@ def calculate_token_score_from_file_changes( file_contents: Dict[str, FileContentPair], weights: TokenConfig, programming_languages: Dict[str, LanguageConfig], -) -> PrScoringResult: +) -> PrScoringResultCategorized: """ Calculate contribution score using tree-sitter AST comparison. 
@@ -228,21 +245,20 @@ def calculate_token_score_from_file_changes( programming_languages: Language weight mapping (for fallback/documentation files) Returns: - PrScoringResult with total score and per-file details + PrScoringResultCategorized with total score, per-file details, and per-category breakdowns """ if not file_changes: - return PrScoringResult( + return PrScoringResultCategorized( total_score=0.0, total_nodes_scored=0, - file_results=[], + score_breakdown=None, + by_category={}, ) file_results: List[FileScoreResult] = [] - total_score = 0.0 - total_nodes_scored = 0 for file in file_changes: - ext = file.file_extension + ext = file.file_extension or '' is_test_file = file.is_test_file() file_weight = TEST_FILE_CONTRIBUTION_WEIGHT if is_test_file else 1.0 @@ -267,8 +283,6 @@ def calculate_token_score_from_file_changes( lang_weight = lang_config.weight if lang_config else DEFAULT_PROGRAMMING_LANGUAGE_WEIGHT file_score = lang_weight * lines_to_score * file_weight - total_score += file_score - file_results.append( FileScoreResult( filename=file.short_name, @@ -341,6 +355,13 @@ def calculate_token_score_from_file_changes( lang_config = programming_languages.get(ext) lang_weight = lang_config.weight if lang_config else 1.0 + # For non-test files in inline-test languages, check if the current + # file contains inline tests and downweight the entire file if so. 
+ if not is_test_file and ext in INLINE_TEST_EXTENSIONS: + if has_inline_tests(new_content, ext): + is_test_file = True + file_weight = TEST_FILE_CONTRIBUTION_WEIGHT + # Apply combined weight: language weight × test file weight combined_weight = lang_weight * file_weight file_breakdown = file_breakdown.with_weight(combined_weight) @@ -349,9 +370,6 @@ def calculate_token_score_from_file_changes( # Track nodes scored for this file nodes_scored = file_breakdown.added_count + file_breakdown.deleted_count - total_score += file_score - total_nodes_scored += nodes_scored - file_results.append( FileScoreResult( filename=file.short_name, @@ -364,23 +382,13 @@ def calculate_token_score_from_file_changes( ) ) - # Compute total raw lines for logging - total_raw_lines = sum(f.total_lines for f in file_results) - - # Compute aggregate breakdown from file_results - breakdowns = [r.breakdown for r in file_results if r.breakdown is not None] - total_breakdown = sum(breakdowns, start=ScoreBreakdown()) if breakdowns else None + result = PrScoringResultCategorized.from_file_results(file_results) log_scoring_results( file_results, - total_score, - total_raw_lines, - total_breakdown, + result.total_score, + sum(r.total_lines for r in result.by_category.values()), + result.score_breakdown, ) - return PrScoringResult( - total_score=total_score, - total_nodes_scored=total_nodes_scored, - file_results=file_results, - score_breakdown=total_breakdown, - ) + return result diff --git a/gittensor/validator/weights/master_repositories.json b/gittensor/validator/weights/master_repositories.json index 56009784..6d8ce124 100644 --- a/gittensor/validator/weights/master_repositories.json +++ b/gittensor/validator/weights/master_repositories.json @@ -1,5749 +1,716 @@ { - "2factorauth/twofactorauth": { - "tier": "Bronze", - "weight": 0.18 - }, "404-Repo/404-base-miner-gs": { - "tier": "Bronze", - "weight": 0.41 + "weight": 0.0487 }, "404-Repo/404-gen-subnet": { - "tier": "Bronze", - "weight": 3 - }, - 
"abi/screenshot-to-code": { - "tier": "Bronze", - "weight": 0.15 - }, - "abraham/twitteroauth": { - "tier": "Bronze", - "weight": 0.15 - }, - "actions/checkout": { - "tier": "Bronze", - "weight": 0.16 - }, - "actions/setup-node": { - "tier": "Bronze", - "weight": 0.16 - }, - "actions/starter-workflows": { - "tier": "Bronze", - "weight": 0.2 - }, - "activeadmin/activeadmin": { - "tier": "Bronze", - "weight": 0.14 - }, - "activemerchant/active_merchant": { - "tier": "Bronze", - "weight": 0.13 - }, - "Activiti/Activiti": { - "tier": "Bronze", - "weight": 0.2 + "weight": 0.1025 }, "AffineFoundation/affine-cortex": { - "tier": "Silver", - "weight": 17.36 + "weight": 0.2017 }, "AffineFoundation/affinetes": { - "tier": "Silver", - "weight": 15.28 + "weight": 0.1957 }, "AffineFoundation/liveweb-arena": { - "tier": "Silver", - "weight": 14.87 - }, - "aframevr/aframe": { - "tier": "Bronze", - "weight": 0.12 + "weight": 0.1168 }, "afterpartyai/bittensor-conversation-genome-project": { - "tier": "Bronze", - "weight": 0.51 - }, - "ageron/handson-ml3": { - "tier": "Bronze", - "weight": 0.13 + "weight": 0.0536 }, - "ai/size-limit": { - "tier": "Bronze", - "weight": 0.11 + "AgentOps-AI/agentops": { + "weight": 0.0363 }, - "Aider-AI/aider": { - "tier": "Bronze", - "weight": 0.12 + "All-Hands-AI/OpenHands": { + "weight": 0.0532 }, - "airbnb/javascript": { - "tier": "Bronze", - "weight": 0.25 + "AlphaCoreBittensor/alphacore": { + "weight": 0.0484 }, - "airbnb/lottie-android": { - "tier": "Bronze", - "weight": 0.13 + "ansible/ansible": { + "weight": 0.0372 }, - "airbytehq/airbyte": { - "tier": "Bronze", - "weight": 0.12 + "ant-design/ant-design": { + "additional_acceptable_branches": [ + "feature" + ], + "weight": 0.0715 }, - "alacritty/alacritty": { - "tier": "Bronze", - "weight": 0.24 + "antoniorodr/cronboard": { + "weight": 0.0349 }, - "alex-shpak/hugo-book": { - "tier": "Bronze", - "weight": 0.12 + "appwrite/appwrite": { + "weight": 0.0579 }, - "alextselegidis/easyappointments": { 
- "additional_acceptable_branches": ["develop"], - "tier": "Bronze", - "weight": 0.15 + "autoppia/autoppia_iwa": { + "additional_acceptable_branches": [ + "contribution/*" + ], + "weight": 0.1259 }, - "alibaba/arthas": { - "tier": "Bronze", - "weight": 0.2 + "autoppia/autoppia_web_agents_subnet": { + "additional_acceptable_branches": [ + "dev", + "dev-gittensor" + ], + "weight": 0.1321 }, - "alibaba/canal": { - "tier": "Bronze", - "weight": 0.32 + "autoppia/autoppia_webs_demo": { + "additional_acceptable_branches": [ + "feature/*", + "fix/*" + ], + "weight": 0.1239 }, - "alibaba/COLA": { - "tier": "Bronze", - "weight": 0.11 + "aws/aws-cli": { + "weight": 0.0978 }, - "alibaba/DataX": { - "tier": "Bronze", - "weight": 0.2 + "axios/axios": { + "weight": 0.0352 }, - "alibaba/druid": { - "tier": "Bronze", - "weight": 0.22 + "backend-developers-ltd/ComputeHorde": { + "weight": 0.0504 }, - "alibaba/formily": { - "tier": "Bronze", - "weight": 0.24 + "backend-developers-ltd/InfiniteHash": { + "weight": 0.0438 }, - "alibaba/hooks": { - "tier": "Bronze", - "weight": 0.24 + "Barbariandev/MANTIS": { + "weight": 0.0549 }, - "alibaba/nacos": { - "tier": "Bronze", - "weight": 0.43 + "BerriAI/litellm": { + "weight": 0.0529 }, - "alibaba/Sentinel": { - "tier": "Bronze", - "weight": 0.28 + "bevyengine/bevy": { + "weight": 0.0571 }, - "alibaba/spring-cloud-alibaba": { - "tier": "Bronze", - "weight": 0.3 + "bitcast-network/bitcast": { + "weight": 0.0501 }, - "AlphaCoreBittensor/alphacore": { - "tier": "Bronze", - "weight": 0.41 + "bitcoin/bips": { + "weight": 0.3303 }, - "alshedivat/al-folio": { - "tier": "Bronze", - "weight": 0.15 + "bitcoin/bitcoin": { + "weight": 0.4274 }, - "ampproject/amphtml": { - "tier": "Bronze", - "weight": 0.12 + "bitcoinj/bitcoinj": { + "weight": 0.3094 }, - "amueller/word_cloud": { - "tier": "Bronze", - "weight": 0.11 + "bitcoinjs/bitcoinjs-lib": { + "weight": 0.2917 }, - "anasty17/mirror-leech-telegram-bot": { - "tier": "Bronze", - "weight": 0.2 + 
"BitMind-AI/bitmind-subnet": { + "additional_acceptable_branches": [ + "testnet" + ], + "weight": 0.1013 }, - "android/camera-samples": { - "tier": "Bronze", - "weight": 0.13 + "bitpay/bitcore": { + "weight": 0.0967 }, - "android/ndk-samples": { - "tier": "Bronze", - "weight": 0.16 + "bitrecs/bitrecs-subnet": { + "weight": 0.0435 }, - "angular/angular": { - "tier": "Bronze", - "weight": 0.28 + "Bitsec-AI/subnet": { + "weight": 0.0481 }, - "angular/angular-cli": { - "tier": "Bronze", - "weight": 0.22 + "bitwarden/server": { + "weight": 0.0956 }, - "angular/components": { - "tier": "Bronze", - "weight": 0.12 + "brave/brave-browser": { + "weight": 0.0946 }, - "anomalyco/opencode": { - "tier": "Bronze", - "weight": 0.47 + "browser-use/browser-use": { + "weight": 0.0526 }, - "ansible/ansible": { - "tier": "Bronze", - "weight": 0.34 + "byteleapai/byteleap-Miner": { + "weight": 0.0433 }, - "ansible/awx": { - "tier": "Bronze", - "weight": 0.12 + "calcom/cal.com": { + "weight": 0.0347 }, - "ant-design/ant-design": { - "additional_acceptable_branches": ["feature"], - "tier": "Bronze", - "weight": 1 + "ChutesAI/chutes": { + "weight": 0.0989 }, - "ant-design/ant-design-pro": { - "tier": "Bronze", - "weight": 0.12 + "Cinnamon/kotaemon": { + "weight": 0.0473 }, - "ant-design/pro-components": { - "tier": "Bronze", - "weight": 0.12 + "cli/cli": { + "weight": 0.0936 }, - "anthropics/claude-code-action": { - "tier": "Bronze", - "weight": 0.36 + "ClickHouse/ClickHouse": { + "weight": 0.1152 }, - "antlr/grammars-v4": { - "tier": "Bronze", - "weight": 0.12 + "comfyanonymous/ComfyUI": { + "weight": 0.0523 }, - "antoniorodr/cronboard": { - "tier": "Bronze", - "weight": 0.23 + "commaai/openpilot": { + "weight": 0.0926 }, - "antonmedv/fx": { - "tier": "Bronze", - "weight": 0.15 + "cosmos/cosmos-sdk": { + "weight": 0.0916 }, - "apache/airflow": { - "tier": "Bronze", - "weight": 0.66 + "CreativeBuilds/sn77": { + "weight": 0.0546 }, - "apache/arrow": { - "tier": "Bronze", - "weight": 0.25 + 
"crewAIInc/crewAI": { + "weight": 0.052 }, - "apache/beam": { - "tier": "Bronze", - "weight": 0.19 + "curl/curl": { + "weight": 0.037 }, - "apache/calcite": { - "tier": "Bronze", - "weight": 0.18 + "Datura-ai/lium-io": { + "weight": 0.0906 }, - "apache/camel": { - "tier": "Bronze", - "weight": 0.2 + "dbeaver/dbeaver": { + "weight": 0.2409 }, - "apache/cassandra": { - "tier": "Bronze", - "weight": 0.19 + "deepset-ai/haystack": { + "weight": 0.0567 }, - "apache/cordova-android": { - "tier": "Bronze", - "weight": 0.18 + "denoland/deno": { + "weight": 0.1106 }, - "apache/dolphinscheduler": { - "tier": "Bronze", - "weight": 0.2 + "Desearch-ai/linkedin-dms": { + "weight": 0.1221 }, - "apache/doris": { - "tier": "Bronze", - "weight": 0.11 + "Desearch-ai/subnet-22": { + "weight": 0.1367 }, - "apache/druid": { - "tier": "Bronze", - "weight": 0.11 + "django/django": { + "weight": 0.0366 }, - "apache/dubbo": { - "tier": "Bronze", - "weight": 0.25 + "docker/compose": { + "weight": 0.0897 }, - "apache/dubbo-admin": { - "tier": "Bronze", - "weight": 0.14 + "dogecoin/dogecoin": { + "weight": 0.0417 }, - "apache/echarts": { - "tier": "Bronze", - "weight": 0.28 + "dstrbtd/DistributedTraining": { + "weight": 0.0457 }, - "apache/flink": { - "tier": "Bronze", - "weight": 0.22 + "EfficientFrontier-SignalPlus/EfficientFrontier": { + "weight": 0.0478 }, - "apache/flink-cdc": { - "tier": "Bronze", - "weight": 0.18 + "eigent-ai/eigent": { + "weight": 0.1392 }, - "apache/groovy": { - "tier": "Bronze", - "weight": 0.18 + "entrius/allways": { + "weight": 1.0 }, - "apache/hadoop": { - "tier": "Bronze", - "weight": 0.22 + "entrius/allways-ui": { + "weight": 1.0 }, - "apache/hbase": { - "tier": "Bronze", - "weight": 0.18 + "entrius/gittensor": { + "weight": 1.0 }, - "apache/hive": { - "tier": "Bronze", - "weight": 0.2 + "entrius/gittensor-ui": { + "weight": 1.0 }, - "apache/httpd": { - "tier": "Bronze", - "weight": 0.34 + "espressif/arduino-esp32": { + "weight": 0.1444 }, - "apache/hudi": { - 
"tier": "Bronze", - "weight": 0.18 + "ethereum-optimism/optimism": { + "weight": 0.0888 }, - "apache/iceberg": { - "tier": "Bronze", - "weight": 0.18 + "ethereum/EIPs": { + "weight": 0.0879 }, - "apache/ignite": { - "tier": "Bronze", - "weight": 0.18 + "ethereum/go-ethereum": { + "weight": 0.1849 }, - "apache/incubator-kie-drools": { - "tier": "Bronze", - "weight": 0.18 + "ethers-io/ethers.js": { + "weight": 0.087 }, - "apache/incubator-seata": { - "tier": "Bronze", - "weight": 0.22 + "excalidraw/excalidraw": { + "weight": 0.059 }, - "apache/kafka": { - "tier": "Bronze", - "weight": 0.34 + "flowsurface-rs/flowsurface": { + "weight": 0.051 }, - "apache/kylin": { - "tier": "Bronze", - "weight": 0.16 + "fx-integral/metahash": { + "weight": 0.0431 }, - "apache/linkis": { - "tier": "Bronze", - "weight": 0.16 + "General-Tao-Ventures/cartha-cli": { + "weight": 0.0476 }, - "apache/logging-log4j2": { - "tier": "Bronze", - "weight": 0.16 + "General-Tao-Ventures/cartha-validator": { + "weight": 0.047 }, - "apache/maven": { - "tier": "Bronze", - "weight": 0.3 + "ggml-org/llama.cpp": { + "weight": 0.0339 }, - "apache/nifi": { - "tier": "Bronze", - "weight": 0.17 + "git/git": { + "weight": 0.0862 }, - "apache/nuttx": { - "tier": "Bronze", - "weight": 0.14 + "godotengine/godot": { + "weight": 0.0611 }, - "apache/pulsar": { - "tier": "Bronze", - "weight": 0.11 + "gopher-lab/subnet-42": { + "weight": 0.0428 }, - "apache/rocketmq": { - "tier": "Bronze", - "weight": 0.22 + "gradients-ai/G.O.D": { + "weight": 0.0853 }, - "apache/shardingsphere": { - "tier": "Bronze", - "weight": 0.21 + "grafana/grafana": { + "weight": 0.1136 }, - "apache/shardingsphere-elasticjob": { - "tier": "Bronze", - "weight": 0.18 + "GraphiteAI/Graphite-Subnet": { + "weight": 0.0468 }, - "apache/shenyu": { - "tier": "Bronze", - "weight": 0.18 + "Homebrew/brew": { + "weight": 0.0342 }, - "apache/shiro": { - "tier": "Bronze", - "weight": 0.17 + "hoppscotch/hoppscotch": { + "additional_acceptable_branches": [ + 
"next" + ], + "weight": 0.1801 }, - "apache/skywalking": { - "tier": "Bronze", - "weight": 0.12 + "huggingface/lerobot": { + "weight": 0.0629 }, - "apache/spark": { - "tier": "Bronze", - "weight": 0.35 + "huggingface/transformers": { + "weight": 0.0845 }, - "apache/storm": { - "tier": "Bronze", - "weight": 0.19 + "immich-app/immich": { + "weight": 0.0587 }, - "apache/superset": { - "tier": "Bronze", - "weight": 0.21 + "impel-intelligence/dippy-studio-bittensor-miner": { + "weight": 0.0426 }, - "apache/thrift": { - "tier": "Bronze", - "weight": 0.16 + "impel-intelligence/dippy-studio-bittensor-orchestrator": { + "weight": 0.0424 }, - "apache/tomcat": { - "tier": "Bronze", - "weight": 0.2 + "inference-labs-inc/subnet-2": { + "weight": 0.0422 }, - "apache/tvm": { - "tier": "Bronze", - "weight": 3 + "infiniflow/ragflow": { + "weight": 0.1185 }, - "apache/zeppelin": { - "tier": "Bronze", - "weight": 0.17 + "It-s-AI/llm-detection": { + "weight": 0.0465 }, - "apache/zookeeper": { - "tier": "Bronze", - "weight": 0.21 + "janhq/jan": { + "weight": 0.0704 }, - "apereo/cas": { - "tier": "Bronze", - "weight": 0.19 + "jellyfin/jellyfin": { + "weight": 0.0607 }, - "ApolloAuto/apollo": { - "tier": "Bronze", - "weight": 0.18 + "jesseduffield/lazygit": { + "weight": 0.0357 }, - "apolloconfig/apollo": { - "tier": "Bronze", - "weight": 0.22 + "JetBrains/kotlin": { + "weight": 0.0354 }, - "AppFlowy-IO/AppFlowy": { - "tier": "Bronze", - "weight": 0.54 + "junegunn/fzf": { + "weight": 0.0351 }, - "appium/appium": { - "tier": "Bronze", - "weight": 0.22 + "jupyter/jupyter": { + "weight": 0.0837 }, - "apple/container": { - "tier": "Bronze", - "weight": 0.27 + "jupyterlab/jupyterlab": { + "weight": 0.1755 }, - "apple/coremltools": { - "tier": "Bronze", - "weight": 0.24 + "keras-team/keras": { + "weight": 0.1091 }, - "appsmithorg/appsmith": { - "tier": "Bronze", - "weight": 0.13 + "labring/FastGPT": { + "weight": 0.0633 }, - "appwrite/appwrite": { - "tier": "Bronze", - "weight": 0.61 + 
"langchain-ai/langchain": { + "weight": 0.0615 }, - "aptos-labs/aptos-core": { - "tier": "Bronze", - "weight": 0.15 + "langgenius/dify": { + "weight": 0.0336 }, - "Arduino-IRremote/Arduino-IRremote": { - "tier": "Bronze", - "weight": 0.15 + "laravel/framework": { + "weight": 0.0346 }, - "ArduPilot/ardupilot": { - "tier": "Bronze", - "weight": 0.2 + "latent-to/async-substrate-interface": { + "weight": 0.2764 }, - "argoproj/argo-cd": { - "tier": "Bronze", - "weight": 0.16 + "latent-to/bittensor": { + "additional_acceptable_branches": [ + "staging", + "SDKv10" + ], + "weight": 0.3556 }, - "armbian/build": { - "tier": "Bronze", - "weight": 0.12 + "latent-to/btcli": { + "additional_acceptable_branches": [ + "staging" + ], + "weight": 0.2514 }, - "asLody/VirtualApp": { - "tier": "Bronze", - "weight": 0.11 + "latent-to/btwallet": { + "weight": 0.2631 }, - "aspnetboilerplate/aspnetboilerplate": { - "tier": "Bronze", - "weight": 0.17 + "latent-to/taohash": { + "weight": 0.0575 }, - "astral-sh/ruff": { - "tier": "Silver", - "weight": 13.72 + "leadpoet/leadpoet": { + "weight": 0.0498 }, - "astropy/astropy": { - "tier": "Bronze", - "weight": 0.17 + "letta-ai/letta": { + "weight": 0.0829 }, - "AtsushiSakai/PythonRobotics": { - "tier": "Bronze", - "weight": 0.12 + "llmsresearch/paperbanana": { + "weight": 0.1077 }, - "audreyfeldroy/cookiecutter-pypackage": { - "tier": "Bronze", - "weight": 0.17 + "macrocosm-os/apex": { + "weight": 0.0419 }, - "autoppia/autoppia_iwa": { - "additional_acceptable_branches": ["contribution/*"], - "tier": "Silver", - "weight": 5.33 + "macrocosm-os/data-universe": { + "additional_acceptable_branches": [ + "dev" + ], + "weight": 0.0495 }, - "autoppia/autoppia_web_agents_subnet": { - "additional_acceptable_branches": ["dev", "dev-gittensor"], - "tier": "Silver", - "weight": 5.79 + "macrocosm-os/iota": { + "weight": 0.0387 }, - "autoppia/autoppia_webs_demo": { - "additional_acceptable_branches": ["feature/*", "fix/*"], - "tier": "Silver", - "weight": 5.2 
+ "manifold-inc/hone": { + "weight": 0.0821 }, - "autorope/donkeycar": { - "tier": "Bronze", - "weight": 0.16 + "manifold-inc/targon": { + "weight": 0.0814 }, - "autowarefoundation/autoware": { - "tier": "Bronze", - "weight": 0.12 + "mark3labs/mcp-go": { + "weight": 0.0359 }, - "aws/aws-cdk": { - "tier": "Bronze", - "weight": 0.43 + "mastodon/mastodon": { + "weight": 0.0564 }, - "aws/aws-cli": { - "tier": "Bronze", - "weight": 3 + "medusajs/medusa": { + "additional_acceptable_branches": [ + "develop" + ], + "weight": 0.0603 }, - "aws/aws-sdk-java": { - "tier": "Bronze", - "weight": 0.17 + "mem0ai/mem0": { + "weight": 0.0455 }, - "aws/aws-sdk-js": { - "tier": "Bronze", - "weight": 0.11 + "MetaMask/metamask-extension": { + "weight": 0.0806 }, - "aws/aws-sdk-ruby": { - "tier": "Bronze", - "weight": 0.13 + "metanova-labs/nova": { + "weight": 0.0799 }, - "aws/serverless-application-model": { - "tier": "Bronze", - "weight": 0.11 + "mobiusfund/etf": { + "weight": 0.0415 }, - "axios/axios": { - "tier": "Bronze", - "weight": 0.26 + "mode-network/synth-subnet": { + "weight": 0.0385 }, - "AykutSarac/jsoncrack.com": { - "tier": "Bronze", - "weight": 0.11 + "modelcontextprotocol/inspector": { + "weight": 0.0452 }, - "ayn2op/discordo": { - "tier": "Bronze", - "weight": 0.15 + "modelcontextprotocol/python-sdk": { + "weight": 0.045 }, - "azat-io/you-dont-know-js-ru": { - "tier": "Bronze", - "weight": 0.12 + "modelcontextprotocol/registry": { + "weight": 0.0447 }, - "azerothcore/azerothcore-wotlk": { - "tier": "Bronze", - "weight": 0.88 + "modelcontextprotocol/servers": { + "weight": 0.0516 }, - "Azure-Samples/azure-search-openai-demo": { - "tier": "Bronze", - "weight": 0.2 + "modelcontextprotocol/typescript-sdk": { + "weight": 0.0445 }, - "Azure-Samples/cognitive-services-speech-sdk": { - "tier": "Bronze", - "weight": 0.15 + "monero-project/monero": { + "weight": 0.0792 }, - "Azure/azure-cli": { - "tier": "Bronze", - "weight": 0.19 + "mrdoob/three.js": { + "weight": 0.0733 }, - 
"Azure/azure-powershell": { - "tier": "Bronze", - "weight": 0.17 + "MystenLabs/sui": { + "weight": 0.0542 }, - "Azure/azure-sdk-for-net": { - "tier": "Bronze", - "weight": 0.17 + "neovim/neovim": { + "weight": 0.0785 }, - "Azure/azure-sdk-for-python": { - "tier": "Bronze", - "weight": 0.19 + "nextcloud/android": { + "weight": 0.0778 }, - "Azure/Azure-Sentinel": { - "tier": "Bronze", - "weight": 0.19 + "nextcloud/desktop": { + "weight": 0.0771 }, - "AzureAD/microsoft-authentication-library-for-js": { - "tier": "Bronze", - "weight": 0.17 + "nextcloud/server": { + "weight": 0.0764 }, - "babaohuang/GeminiProChat": { - "tier": "Bronze", - "weight": 0.21 + "nginx/nginx": { + "weight": 0.0758 }, - "babel/babel": { - "tier": "Bronze", - "weight": 0.13 + "nimbusdotstorage/Nimbus": { + "weight": 0.0333 }, - "backend-developers-ltd/ComputeHorde": { - "tier": "Bronze", - "weight": 0.45 + "nocodb/nocodb": { + "additional_acceptable_branches": [ + "develop" + ], + "weight": 0.0751 }, - "backend-developers-ltd/InfiniteHash": { - "tier": "Bronze", - "weight": 0.39 + "numinouslabs/numinous": { + "weight": 0.0413 }, - "backstage/backstage": { - "tier": "Bronze", - "weight": 0.12 + "ohmyzsh/ohmyzsh": { + "weight": 0.0721 }, - "badges/shields": { - "tier": "Bronze", - "weight": 0.12 + "ollama/ollama": { + "weight": 0.0594 }, - "bagisto/bagisto": { - "tier": "Bronze", - "weight": 0.23 + "omegalabsinc/omegalabs-anytoany-bittensor": { + "weight": 0.0411 }, - "baomidou/mybatis-plus": { - "tier": "Bronze", - "weight": 0.12 + "omegalabsinc/omegalabs-bittensor-subnet": { + "weight": 0.0409 }, - "Barbariandev/MANTIS": { - "tier": "Bronze", - "weight": 0.52 + "one-covenant/basilica": { + "weight": 0.1713 }, - "base/node": { - "tier": "Bronze", - "weight": 0.25 + "one-covenant/bittensor-rs": { + "weight": 0.1417 }, - "basecamp/omarchy": { - "tier": "Bronze", - "weight": 3 + "one-covenant/grail": { + "weight": 0.1672 }, - "bazelbuild/bazel": { - "tier": "Bronze", - "weight": 0.28 + 
"one-covenant/templar": { + "weight": 0.1634 }, - "beefproject/beef": { - "tier": "Bronze", - "weight": 0.12 + "oneoneone-io/subnet-111": { + "weight": 0.0407 }, - "benoitc/gunicorn": { - "tier": "Bronze", - "weight": 0.24 + "openclaw/openclaw": { + "weight": 0.2316 }, - "betaflight/betaflight": { - "tier": "Bronze", - "weight": 0.15 + "OpenGradient/BitQuant-Subnet": { + "weight": 0.0462 }, - "bevyengine/bevy": { - "tier": "Bronze", - "weight": 0.6 + "OpenHands/OpenHands": { + "weight": 0.1532 }, - "bia-pain-bache/BPB-Worker-Panel": { - "tier": "Bronze", - "weight": 0.23 + "openprose/prose": { + "weight": 0.0492 }, - "bigbluebutton/bigbluebutton": { - "tier": "Bronze", - "weight": 0.22 + "opentensor/subtensor": { + "additional_acceptable_branches": [ + "devnet-ready" + ], + "weight": 0.387 }, - "bigskysoftware/htmx": { - "additional_acceptable_branches": ["dev"], - "tier": "Bronze", - "weight": 0.12 + "OpenZeppelin/openzeppelin-contracts": { + "weight": 0.0341 }, - "Billionmail/BillionMail": { - "tier": "Bronze", - "weight": 0.47 + "Orpheus-AI/Zeus": { + "weight": 0.0539 }, - "binance/binance-spot-api-docs": { - "tier": "Bronze", - "weight": 0.11 + "oven-sh/bun": { + "weight": 0.0624 }, - "binary-husky/gpt_academic": { - "tier": "Bronze", - "weight": 0.14 + "pandas-dev/pandas": { + "weight": 0.1121 }, - "binarywang/WxJava": { - "tier": "Bronze", - "weight": 0.12 + "paperclipai/paperclip": { + "weight": 0.2231 }, - "biopython/biopython": { - "tier": "Bronze", - "weight": 0.17 + "paritytech/polkadot-sdk": { + "weight": 0.0745 }, - "bitcast-network/bitcast": { - "tier": "Bronze", - "weight": 0.44 + "penpot/penpot": { + "weight": 0.1472 }, - "bitcoin/bips": { - "tier": "Gold", - "weight": 37.4 + "phidatahq/phidata": { + "weight": 0.0442 }, - "bitcoin/bitcoin": { - "tier": "Gold", - "weight": 100 + "pi-hole/web": { + "additional_acceptable_branches": [ + "development" + ], + "weight": 0.0739 }, - "bitcoinj/bitcoinj": { - "tier": "Gold", - "weight": 32.92 + 
"PlatformNetwork/platform": { + "weight": 0.046 }, - "bitcoinjs/bitcoinjs-lib": { - "tier": "Gold", - "weight": 29.55 + "postgres/postgres": { + "weight": 0.0556 }, - "BitMind-AI/bitmind-subnet": { - "additional_acceptable_branches": ["testnet"], - "tier": "Bronze", - "weight": 3 + "pydantic/pydantic-ai": { + "weight": 0.0513 }, - "bitnami/charts": { - "tier": "Bronze", - "weight": 0.13 + "qbittensor-labs/quantum": { + "weight": 0.0727 }, - "bitpay/bitcore": { - "tier": "Bronze", - "weight": 3 + "qwibitai/nanoclaw": { + "weight": 0.2153 }, - "bitrecs/bitrecs-subnet": { - "tier": "Bronze", - "weight": 0.39 + "rails/rails": { + "weight": 0.0356 }, - "Bitsec-AI/subnet": { - "tier": "Bronze", - "weight": 0.41 + "ray-project/ray": { + "weight": 0.1599 }, - "bitwarden/clients": { - "tier": "Bronze", - "weight": 0.66 + "reboot-org/reboot-subnet": { + "weight": 0.0405 }, - "bitwarden/server": { - "tier": "Bronze", - "weight": 3 + "redis/redis": { + "weight": 0.0364 }, - "blockscout/blockscout": { - "tier": "Bronze", - "weight": 0.12 + "RedTeamSubnet/RedTeam": { + "additional_acceptable_branches": [ + "dev" + ], + "weight": 0.044 }, - "bnsreenu/python_for_microscopists": { - "tier": "Bronze", - "weight": 0.12 + "resi-labs-ai/resi": { + "weight": 0.0402 }, - "bokeh/bokeh": { - "additional_acceptable_branches": ["branch-*.*"], - "tier": "Bronze", - "weight": 0.11 + "ridgesai/ridges": { + "weight": 0.0709 }, - "boto/boto3": { - "tier": "Bronze", - "weight": 0.29 + "run-llama/llama_index": { + "weight": 0.1501 }, - "brave/brave-browser": { - "tier": "Bronze", - "weight": 3 + "sbt/sbt": { + "weight": 0.1001 }, - "BretFisher/udemy-docker-mastery": { - "tier": "Bronze", - "weight": 0.22 + "score-technologies/turbovision": { + "weight": 0.0698 }, - "brianfrankcooper/YCSB": { - "tier": "Bronze", - "weight": 0.17 + "sentient-agi/ROMA": { + "weight": 0.0507 }, - "brokespace/code": { - "tier": "Bronze", - "weight": 0.39 + "shiftlayer-llc/brainplay-subnet": { + 
"additional_acceptable_branches": [ + "dev" + ], + "weight": 0.04 }, - "browser-use/browser-use": { - "tier": "Bronze", - "weight": 0.14 + "Significant-Gravitas/AutoGPT": { + "additional_acceptable_branches": [ + "dev" + ], + "weight": 0.062 }, - "BruceDevices/firmware": { - "tier": "Bronze", - "weight": 0.27 + "smartcontractkit/chainlink": { + "weight": 0.0693 }, - "btcsuite/btcd": { - "tier": "Bronze", - "weight": 0.15 + "sportstensor/sn41": { + "weight": 0.0553 }, - "bytedance/trae-agent": { - "tier": "Bronze", - "weight": 0.23 + "Stirling-Tools/Stirling-PDF": { + "weight": 0.0642 }, - "byteleapai/byteleap-Miner": { - "tier": "Bronze", - "weight": 0.39 + "strapi/strapi": { + "weight": 0.0338 }, - "caddyserver/caddy": { - "tier": "Bronze", - "weight": 0.31 + "supabase/supabase": { + "weight": 0.0687 }, - "CaiJimmy/hugo-theme-stack": { - "tier": "Bronze", - "weight": 0.12 + "sveltejs/svelte": { + "weight": 0.0652 }, - "cakephp/cakephp": { - "tier": "Bronze", - "weight": 0.16 + "Swap-Subnet/swap-subnet": { + "weight": 0.0373 }, - "calcom/cal.com": { - "tier": "Bronze", - "weight": 0.2 + "swarm-subnet/Langostino": { + "weight": 0.0383 }, - "CareyWang/sub-web": { - "tier": "Bronze", - "weight": 0.12 + "swarm-subnet/swarm": { + "weight": 0.049 }, - "CarGuo/GSYVideoPlayer": { - "tier": "Bronze", - "weight": 0.12 + "SWE-agent/SWE-agent": { + "weight": 0.0682 }, - "carla-simulator/carla": { - "tier": "Bronze", - "weight": 0.15 + "taofu-labs/tpn-subnet": { + "additional_acceptable_branches": [ + "development" + ], + "weight": 0.0398 }, - "cashubtc/Numo": { - "weight": 0.52, - "tier": "Bronze" + "taoshidev/vanta-network": { + "weight": 0.0396 }, - "catboost/catboost": { - "tier": "Bronze", - "weight": 0.34 + "tatsuproject/chipforge_sn84": { + "weight": 0.0394 }, - "ccxt/ccxt": { - "tier": "Bronze", - "weight": 0.21 + "tauri-apps/tauri": { + "additional_acceptable_branches": [ + "dev" + ], + "weight": 0.0583 }, - "cdnjs/cdnjs": { - "tier": "Bronze", - "weight": 0.12 + 
"Team-Rizzo/talisman-ai": { + "weight": 0.056 }, - "ceph/ceph": { - "tier": "Bronze", - "weight": 0.17 + "tensorplex-labs/dojo": { + "weight": 0.0392 }, - "cert-manager/cert-manager": { - "tier": "Bronze", - "weight": 0.31 + "thenervelab/thebrain": { + "weight": 0.039 }, - "CesiumGS/cesium": { - "tier": "Bronze", - "weight": 0.12 + "threetau/kinitro": { + "weight": 0.0368 }, - "cf-pages/Telegraph-Image": { - "tier": "Bronze", - "weight": 0.13 + "tinygrad/tinygrad": { + "weight": 0.0677 }, - "Chainlit/chainlit": { - "inactive_at": "2025-11-29T17:45:38.525Z", - "tier": "Bronze", - "weight": 0.01 + "tmux/tmux": { + "weight": 0.0672 }, - "chakra-ui/chakra-ui": { - "tier": "Bronze", - "weight": 0.11 + "ToolJet/ToolJet": { + "additional_acceptable_branches": [ + "develop" + ], + "weight": 0.0599 }, - "charmbracelet/crush": { - "tier": "Bronze", - "weight": 0.3 + "trishoolai/trishool-subnet": { + "weight": 0.0388 }, - "charmbracelet/glow": { - "tier": "Bronze", - "weight": 0.15 + "twentyhq/twenty": { + "weight": 0.0662 }, - "chartjs/Chart.js": { - "tier": "Bronze", - "weight": 0.15 + "unarbos/agcli": { + "weight": 0.2083 }, - "chatboxai/chatbox": { - "tier": "Bronze", - "weight": 0.13 + "Uniswap/v4-core": { + "weight": 0.0667 }, - "chatchat-space/Langchain-Chatchat": { - "tier": "Bronze", - "weight": 0.13 + "Unstructured-IO/unstructured": { + "weight": 0.1344 }, - "ChatGPTNextWeb/NextChat": { - "tier": "Bronze", - "weight": 0.26 + "v0idai/SN106": { + "weight": 0.0381 }, - "chavyleung/scripts": { - "tier": "Bronze", - "weight": 0.11 + "vercel/next.js": { + "weight": 0.0657 }, - "checkstyle/checkstyle": { - "tier": "Bronze", - "weight": 0.19 + "vidaio-subnet/vidaio-subnet": { + "weight": 0.0379 }, - "chef/chef": { - "tier": "Bronze", + "virattt/dexter": { "weight": 0.13 }, - "chinabugotech/hutool": { - "tier": "Bronze", - "weight": 0.12 - }, - "chipsalliance/rocket-chip": { - "tier": "Bronze", - "weight": 0.12 - }, - "chriskacerguis/codeigniter-restserver": { - "tier": 
"Bronze", - "weight": 0.15 - }, - "chromium/chromium": { - "tier": "Bronze", - "weight": 0.43 - }, - "chubin/cheat.sh": { - "tier": "Bronze", - "weight": 0.11 - }, - "ChutesAI/chutes": { - "tier": "Bronze", - "weight": 3 - }, - "citation-style-language/styles": { - "tier": "Bronze", - "weight": 0.14 - }, - "citizenfx/fivem": { - "tier": "Bronze", - "weight": 0.15 - }, - "ckan/ckan": { - "tier": "Bronze", - "weight": 0.17 - }, - "ckeditor/ckeditor5": { - "tier": "Bronze", - "weight": 0.12 - }, - "clash-verge-rev/clash-verge-rev": { - "tier": "Bronze", - "weight": 0.11 + "vitejs/vite": { + "weight": 0.0647 }, - "CleverRaven/Cataclysm-DDA": { - "tier": "Bronze", - "weight": 0.16 + "vllm-project/vllm": { + "weight": 0.0377 }, - "cli/cli": { - "tier": "Bronze", - "weight": 3 + "vuejs/core": { + "weight": 0.0335 }, - "ClickHouse/ClickHouse": { - "tier": "Silver", - "weight": 4.84 + "we-promise/sure": { + "weight": 0.1279 }, - "cline/cline": { - "tier": "Bronze", - "weight": 0.13 + "withastro/astro": { + "weight": 0.1064 }, - "cloudflare/cloudflare-docs": { - "tier": "Bronze", - "weight": 0.86 + "yanez-compliance/MIID-subnet": { + "weight": 0.0375 }, - "cloudwu/skynet": { - "tier": "Bronze", - "weight": 0.15 - }, - "cmliu/CF-Workers-SUB": { - "tier": "Bronze", - "weight": 0.22 - }, - "cmliu/WorkerVless2sub": { - "tier": "Bronze", - "weight": 0.22 - }, - "cmu-db/bustub": { - "tier": "Bronze", - "weight": 0.15 - }, - "cncf/curriculum": { - "tier": "Bronze", - "weight": 0.12 - }, - "CocoaPods/Specs": { - "tier": "Bronze", - "weight": 0.13 - }, - "code4craft/webmagic": { - "tier": "Bronze", - "weight": 0.19 - }, - "codecentric/spring-boot-admin": { - "tier": "Bronze", - "weight": 0.11 - }, - "codecombat/codecombat": { - "tier": "Bronze", - "weight": 0.2 - }, - "codeguy/php-the-right-way": { - "tier": "Bronze", - "weight": 0.13 - }, - "codeigniter4/CodeIgniter4": { - "tier": "Bronze", - "weight": 0.15 - }, - "coder/code-server": { - "tier": "Bronze", - "weight": 0.11 - }, - 
"coleam00/context-engineering-intro": { - "tier": "Bronze", - "weight": 0.11 - }, - "coleam00/ottomator-agents": { - "tier": "Bronze", - "weight": 0.17 - }, - "collectd/collectd": { - "tier": "Bronze", - "weight": 0.14 - }, - "ColorlibHQ/AdminLTE": { - "tier": "Bronze", - "weight": 0.23 - }, - "Comfy-Org/ComfyUI": { - "tier": "Bronze", - "weight": 0.15 - }, - "commaai/openpilot": { - "tier": "Bronze", - "weight": 2.87 - }, - "community/community": { - "tier": "Bronze", - "weight": 0.14 - }, - "conda/conda": { - "inactive_at": "2026-02-05T00:00:00Z", - "tier": "Silver", - "weight": 3.53 - }, - "conduktor/kafka-stack-docker-compose": { - "tier": "Bronze", - "weight": 0.11 - }, - "containerd/containerd": { - "tier": "Bronze", - "weight": 0.34 - }, - "containers/ramalama": { - "tier": "Bronze", - "weight": 0.46 - }, - "cookiecutter/cookiecutter-django": { - "tier": "Bronze", - "weight": 0.11 - }, - "coolsnowwolf/lede": { - "tier": "Bronze", - "weight": 0.2 - }, - "cosmos/cosmos-sdk": { - "tier": "Bronze", - "weight": 2.7 - }, - "cotes2020/jekyll-theme-chirpy": { - "tier": "Bronze", - "weight": 0.13 - }, - "cq-panda/Vue.NetCore": { - "tier": "Bronze", - "weight": 0.15 - }, - "CreativeBuilds/sn77": { - "tier": "Bronze", - "weight": 0.51 - }, - "CreditTone/hooker": { - "tier": "Bronze", - "weight": 0.11 - }, - "crewAIInc/crewAI": { - "tier": "Bronze", - "weight": 0.42 - }, - "crossoverJie/cim": { - "tier": "Bronze", - "weight": 0.17 - }, - "crossplane/crossplane": { - "tier": "Bronze", - "weight": 0.3 - }, - "cs231n/cs231n.github.io": { - "tier": "Bronze", - "weight": 0.13 - }, - "curl/curl": { - "tier": "Bronze", - "weight": 0.33 - }, - "cvat-ai/cvat": { - "tier": "Bronze", - "weight": 0.11 - }, - "cyberbotics/webots": { - "tier": "Bronze", - "weight": 0.15 - }, - "cypress-io/cypress": { - "tier": "Bronze", - "weight": 0.11 - }, - "cypress-io/cypress-realworld-app": { - "tier": "Bronze", - "weight": 0.16 - }, - "cython/cython": { - "tier": "Bronze", - "weight": 0.23 - }, 
- "d3/d3": { - "tier": "Bronze", - "weight": 0.14 - }, - "D4Vinci/Scrapling": { - "weight": 0.15, - "tier": "Bronze" - }, - "danielmiessler/SecLists": { - "tier": "Bronze", - "weight": 0.22 - }, - "daniulive/SmarterStreaming": { - "tier": "Bronze", - "weight": 0.11 - }, - "dankogai/js-base64": { - "tier": "Bronze", - "weight": 0.17 - }, - "dariusk/corpora": { - "tier": "Bronze", - "weight": 0.11 - }, - "Dash-Industry-Forum/dash.js": { - "tier": "Bronze", - "weight": 0.17 - }, - "dataabc/weiboSpider": { - "tier": "Bronze", - "weight": 0.11 - }, - "datahub-project/datahub": { - "tier": "Bronze", - "weight": 0.11 - }, - "DataLinkDC/dinky": { - "tier": "Bronze", - "weight": 0.15 - }, - "Datalux/Osintgram": { - "tier": "Bronze", - "weight": 0.11 - }, - "Datura-ai/lium-io": { - "tier": "Bronze", - "weight": 2.55 - }, - "dbeaver/dbeaver": { - "tier": "Silver", - "weight": 20 - }, - "dcloudio/mui": { - "tier": "Bronze", - "weight": 0.22 - }, - "dcloudio/uni-app": { - "tier": "Bronze", - "weight": 0.12 - }, - "debezium/debezium": { - "tier": "Bronze", - "weight": 0.11 - }, - "deepchem/deepchem": { - "tier": "Bronze", - "weight": 0.17 - }, - "deepfakes/faceswap": { - "tier": "Bronze", - "weight": 0.27 - }, - "deepinsight/insightface": { - "tier": "Bronze", - "weight": 0.12 - }, - "DeepLabCut/DeepLabCut": { - "tier": "Bronze", - "weight": 0.17 - }, - "deepseek-ai/DeepSeek-V3": { - "tier": "Bronze", - "weight": 0.15 - }, - "deepset-ai/haystack": { - "tier": "Bronze", - "weight": 0.6 - }, - "deepspeedai/DeepSpeed": { - "tier": "Bronze", - "weight": 0.13 - }, - "DefectDojo/django-DefectDojo": { - "tier": "Bronze", - "weight": 0.12 - }, - "DefinitelyTyped/DefinitelyTyped": { - "tier": "Bronze", - "weight": 0.25 - }, - "denoland/deno": { - "tier": "Silver", - "weight": 4.54 - }, - "Desearch-ai/linkedin-dms": { - "tier": "Silver", - "weight": 5.07 - }, - "Desearch-ai/subnet-22-desearch": { - "tier": "Silver", - "weight": 6.15 - }, - "deskflow/deskflow": { - "inactive_at": 
"2026-01-23T00:00:00.000Z", - "tier": "Bronze", - "weight": 0.61 - }, - "desktop/desktop": { - "tier": "Bronze", - "weight": 0.2 - }, - "devbridge/jQuery-Autocomplete": { - "tier": "Bronze", - "weight": 0.17 - }, - "deviantony/docker-elk": { - "tier": "Bronze", - "weight": 0.13 - }, - "dgkanatsios/CKAD-exercises": { - "tier": "Bronze", - "weight": 0.13 - }, - "dgtlmoon/changedetection.io": { - "tier": "Bronze", - "weight": 0.25 - }, - "digitalinnovationone/dio-lab-open-source": { - "tier": "Bronze", - "weight": 0.16 - }, - "dillonzq/LoveIt": { - "tier": "Bronze", - "weight": 0.17 - }, - "discourse/discourse": { - "tier": "Bronze", - "weight": 0.11 - }, - "django-cms/django-cms": { - "tier": "Bronze", - "weight": 0.19 - }, - "django-haystack/django-haystack": { - "tier": "Bronze", - "weight": 0.16 - }, - "django-oscar/django-oscar": { - "tier": "Bronze", - "weight": 0.18 - }, - "django/django": { - "tier": "Bronze", - "weight": 0.32 - }, - "dmlc/dgl": { - "tier": "Bronze", - "weight": 0.11 - }, - "dmlc/xgboost": { - "tier": "Bronze", - "weight": 0.59 - }, - "docker-library/docs": { - "tier": "Bronze", - "weight": 0.12 - }, - "docker-library/official-images": { - "tier": "Bronze", - "weight": 0.12 - }, - "docker-library/php": { - "tier": "Bronze", - "weight": 0.11 - }, - "docker/cli": { - "tier": "Bronze", - "weight": 0.28 - }, - "docker/compose": { - "tier": "Bronze", - "weight": 2.42 - }, - "docker/docker-py": { - "tier": "Bronze", - "weight": 0.31 - }, - "docker/docs": { - "tier": "Bronze", - "weight": 0.13 - }, - "docling-project/docling": { - "tier": "Bronze", - "weight": 0.11 - }, - "doctrine-extensions/DoctrineExtensions": { - "tier": "Bronze", - "weight": 0.15 - }, - "dogecoin/dogecoin": { - "tier": "Bronze", - "weight": 0.37 - }, - "doggy8088/Learn-Git-in-30-days": { - "tier": "Bronze", - "weight": 0.11 - }, - "Dolibarr/dolibarr": { - "tier": "Bronze", - "weight": 0.15 - }, - "domoticz/domoticz": { - "tier": "Bronze", - "weight": 0.13 - }, - 
"doocs/advanced-java": { - "tier": "Bronze", - "weight": 0.12 - }, - "doocs/jvm": { - "tier": "Bronze", - "weight": 0.11 - }, - "doocs/leetcode": { - "tier": "Bronze", - "weight": 0.12 - }, - "dotnet/aspnetcore": { - "tier": "Bronze", - "weight": 0.19 - }, - "dotnet/docs": { - "tier": "Bronze", - "weight": 0.13 - }, - "dotnet/dotnet-docker": { - "additional_acceptable_branches": ["nightly"], - "tier": "Bronze", - "weight": 0.32 - }, - "dotnet/efcore": { - "tier": "Bronze", - "weight": 0.26 - }, - "dotnet/eShop": { - "tier": "Bronze", - "weight": 0.16 - }, - "dotnet/roslyn": { - "tier": "Bronze", - "weight": 0.26 - }, - "dotnet/runtime": { - "tier": "Bronze", - "weight": 0.18 - }, - "dotnet/samples": { - "tier": "Bronze", - "weight": 0.17 - }, - "DotNetNext/SqlSugar": { - "inactive_at": "2025-11-29T17:45:38.525Z", - "tier": "Bronze", - "weight": 0.01 - }, - "doublesymmetry/react-native-track-player": { - "tier": "Bronze", - "weight": 0.12 - }, - "dreamhunter2333/cloudflare_temp_email": { - "tier": "Bronze", - "weight": 0.11 - }, - "DrKLO/Telegram": { - "tier": "Bronze", - "weight": 0.22 - }, - "dromara/lamp-cloud": { - "tier": "Bronze", - "weight": 0.16 - }, - "dropwizard/dropwizard": { - "tier": "Bronze", - "weight": 0.18 - }, - "drupal/drupal": { - "tier": "Bronze", - "weight": 0.15 - }, - "dstrbtd/DistributedTraining": { - "tier": "Bronze", - "weight": 0.4 - }, - "EastWorld/wechat-app-mall": { - "tier": "Bronze", - "weight": 0.21 - }, - "EasyDarwin/EasyDarwin": { - "tier": "Bronze", - "weight": 0.12 - }, - "easzlab/kubeasz": { - "tier": "Bronze", - "weight": 0.12 - }, - "Ebazhanov/linkedin-skill-assessments-quizzes": { - "inactive_at": "2025-11-04T02:18:33.094Z", - "tier": "Bronze", - "weight": 0.01 - }, - "eclipse-sumo/sumo": { - "tier": "Bronze", - "weight": 0.15 - }, - "ed-donner/llm_engineering": { - "tier": "Bronze", - "weight": 0.27 - }, - "EfficientFrontier-SignalPlus/EfficientFrontier": { - "tier": "Bronze", - "weight": 0.41 - }, - "egametang/ET": { - 
"tier": "Bronze", - "weight": 0.17 - }, - "eigent-ai/eigent": { - "tier": "Silver", - "weight": 6.36 - }, - "eKoopmans/html2pdf.js": { - "tier": "Bronze", - "weight": 0.17 - }, - "elastic/beats": { - "tier": "Bronze", - "weight": 0.15 - }, - "elastic/elasticsearch": { - "tier": "Bronze", - "weight": 0.33 - }, - "elastic/elasticsearch-net": { - "tier": "Bronze", - "weight": 0.14 - }, - "elastic/kibana": { - "tier": "Bronze", - "weight": 0.19 - }, - "elastic/logstash": { - "tier": "Bronze", - "weight": 0.11 - }, - "elebumm/RedditVideoMakerBot": { - "additional_acceptable_branches": ["develop"], - "tier": "Bronze", - "weight": 0.11 - }, - "electron/electron": { - "tier": "Bronze", - "weight": 0.13 - }, - "electron/minimal-repro": { - "tier": "Bronze", - "weight": 0.2 - }, - "element-plus/element-plus": { - "tier": "Bronze", - "weight": 0.21 - }, - "EleutherAI/lm-evaluation-harness": { - "tier": "Bronze", - "weight": 0.11 - }, - "elizaOS/eliza": { - "tier": "Bronze", - "weight": 0.19 - }, - "elunez/eladmin": { - "tier": "Bronze", - "weight": 0.2 - }, - "elunez/eladmin-web": { - "tier": "Bronze", - "weight": 0.12 - }, - "emacs-mirror/emacs": { - "tier": "Bronze", - "weight": 0.29 - }, - "emilybache/GildedRose-Refactoring-Kata": { - "tier": "Bronze", - "weight": 0.12 - }, - "encode/django-rest-framework": { - "tier": "Bronze", - "weight": 0.12 - }, - "entrius/gittensor": { - "tier": "Gold", - "weight": 53.48 - }, - "entrius/gittensor-ui": { - "tier": "Gold", - "weight": 26.91 - }, - "entrius/venth": { - "tier": "Silver", - "weight": 3.52, - "inactive_at": "2026-03-14" - }, - "envoyproxy/envoy": { - "tier": "Bronze", - "weight": 0.33 - }, - "eooce/Sing-box": { - "tier": "Bronze", - "weight": 0.11 - }, - "epicweb-dev/react-fundamentals": { - "tier": "Bronze", - "weight": 0.17 - }, - "erguotou520/bye": { - "tier": "Bronze", - "weight": 0.11 - }, - "erigontech/erigon": { - "tier": "Bronze", - "weight": 0.13 - }, - "erxes/erxes": { - "tier": "Bronze", - "weight": 0.87 - }, - 
"eslint/eslint": { - "tier": "Bronze", - "weight": 0.24 - }, - "EsotericSoftware/spine-runtimes": { - "tier": "Bronze", - "weight": 0.15 - }, - "esp8266/Arduino": { - "tier": "Bronze", - "weight": 0.19 - }, - "espnet/espnet": { - "tier": "Bronze", - "weight": 0.11 - }, - "espressif/arduino-esp32": { - "tier": "Silver", - "weight": 6.82 - }, - "espressif/esp-idf": { - "tier": "Bronze", - "weight": 0.56 - }, - "etcd-io/etcd": { - "tier": "Bronze", - "weight": 0.33 - }, - "eternnoir/pyTelegramBotAPI": { - "tier": "Bronze", - "weight": 0.11 - }, - "ether/etherpad-lite": { - "tier": "Bronze", - "weight": 0.23 - }, - "ethereum-lists/chains": { - "tier": "Bronze", - "weight": 0.14 - }, - "ethereum-optimism/optimism": { - "tier": "Bronze", - "weight": 2.31 - }, - "ethereum/consensus-specs": { - "tier": "Bronze", - "weight": 0.15 - }, - "ethereum/EIPs": { - "tier": "Bronze", - "weight": 2.21 - }, - "ethereum/ethereum-org-website": { - "tier": "Bronze", - "weight": 0.12 - }, - "ethereum/go-ethereum": { - "tier": "Silver", - "weight": 12.49 - }, - "ethereum/web3.py": { - "tier": "Bronze", - "weight": 0.27 - }, - "ethers-io/ethers.js": { - "tier": "Bronze", - "weight": 2.11 - }, - "Eugeny/tabby": { - "tier": "Bronze", - "weight": 0.11 - }, - "EvolutionAPI/evolution-api": { - "tier": "Bronze", - "weight": 0.18 - }, - "excalidraw/excalidraw": { - "tier": "Bronze", - "weight": 0.62 - }, - "Expensify/App": { - "tier": "Bronze", - "weight": 0.17 - }, - "expo/expo": { - "tier": "Bronze", - "weight": 0.19 - }, - "expressjs/express": { - "tier": "Bronze", - "weight": 0.26 - }, - "expressjs/expressjs.com": { - "tier": "Bronze", - "weight": 0.12 - }, - "fabric8io/kubernetes-client": { - "tier": "Bronze", - "weight": 0.15 - }, - "facebook/docusaurus": { - "tier": "Bronze", - "weight": 0.13 - }, - "facebook/facebook-android-sdk": { - "tier": "Bronze", - "weight": 0.13 - }, - "facebook/facebook-ios-sdk": { - "tier": "Bronze", - "weight": 0.27 - }, - "facebook/fresco": { - "tier": "Bronze", 
- "weight": 0.11 - }, - "facebook/prophet": { - "tier": "Bronze", - "weight": 0.11 - }, - "facebook/react": { - "tier": "Bronze", - "weight": 0.39 - }, - "facebook/react-native": { - "tier": "Bronze", - "weight": 0.19 - }, - "facebook/rocksdb": { - "tier": "Bronze", - "weight": 0.26 - }, - "facebookincubator/velox": { - "tier": "Bronze", - "weight": 0.14 - }, - "facebookresearch/detectron2": { - "tier": "Bronze", - "weight": 0.12 - }, - "facebookresearch/fairseq": { - "tier": "Bronze", - "weight": 0.12 - }, - "facebookresearch/faiss": { - "tier": "Bronze", - "weight": 0.12 - }, - "faif/python-patterns": { - "inactive_at": "2025-11-29T17:45:38.525Z", - "tier": "Bronze", - "weight": 0.01 - }, - "faridrashidi/kaggle-solutions": { - "tier": "Bronze", - "weight": 0.12 - }, - "FaridSafi/react-native-gifted-chat": { - "tier": "Bronze", - "weight": 0.11 - }, - "fastapi/fastapi": { - "tier": "Bronze", - "weight": 0.27 - }, - "fastlane/fastlane": { - "tier": "Bronze", - "weight": 0.11 - }, - "fatedier/frp": { - "tier": "Bronze", - "weight": 0.12 - }, - "fatfreecrm/fat_free_crm": { - "tier": "Bronze", - "weight": 0.13 - }, - "felixrieseberg/windows95": { - "tier": "Bronze", - "weight": 0.17 - }, - "fengyuhetao/shell": { - "tier": "Bronze", - "weight": 0.11 - }, - "firebase/FirebaseUI-Android": { - "tier": "Bronze", - "weight": 0.16 - }, - "firebase/flutterfire": { - "tier": "Bronze", - "weight": 0.12 - }, - "firebase/functions-samples": { - "tier": "Bronze", - "weight": 0.19 - }, - "firebase/quickstart-android": { - "tier": "Bronze", - "weight": 0.14 - }, - "firebase/quickstart-js": { - "tier": "Bronze", - "weight": 0.17 - }, - "firecrawl/firecrawl": { - "tier": "Bronze", - "weight": 0.11 - }, - "fish-shell/fish-shell": { - "tier": "Bronze", - "weight": 0.15 - }, - "fishercoder1534/Leetcode": { - "tier": "Bronze", - "weight": 0.15 - }, - "flannel-io/flannel": { - "tier": "Bronze", - "weight": 0.14 - }, - "flow-typed/flow-typed": { - "tier": "Bronze", - "weight": 0.17 - }, - 
"flowable/flowable-engine": { - "tier": "Bronze", - "weight": 0.17 - }, - "FlowiseAI/Flowise": { - "tier": "Bronze", - "weight": 0.25 - }, - "flowsurface-rs/flowsurface": { - "tier": "Bronze", - "weight": 0.49 - }, - "flutter/flutter": { - "tier": "Bronze", - "weight": 0.27 - }, - "flutter/packages": { - "tier": "Bronze", - "weight": 0.12 - }, - "flutter/samples": { - "tier": "Bronze", - "weight": 0.13 - }, - "folke/flash.nvim": { - "tier": "Bronze", - "weight": 0.29 - }, - "folke/lazy.nvim": { - "tier": "Bronze", - "weight": 0.29 - }, - "folke/snacks.nvim": { - "tier": "Bronze", - "weight": 0.29 - }, - "folke/tokyonight.nvim": { - "tier": "Bronze", - "weight": 0.28 - }, - "folke/trouble.nvim": { - "tier": "Bronze", - "weight": 0.28 - }, - "folke/which-key.nvim": { - "tier": "Bronze", - "weight": 0.28 - }, - "fortra/impacket": { - "tier": "Bronze", - "weight": 0.11 - }, - "FoundationAgents/MetaGPT": { - "tier": "Bronze", - "weight": 0.14 - }, - "FoundationAgents/OpenManus": { - "tier": "Bronze", - "weight": 0.14 - }, - "francescopace/espectre": { - "tier": "Bronze", - "weight": 0.46 - }, - "frappe/erpnext": { - "tier": "Bronze", - "weight": 0.21, - "inactive_at": "2026-03-14" - }, - "frappe/frappe": { - "tier": "Bronze", - "weight": 0.93, - "inactive_at": "2026-03-14" - }, - "frappe/gantt": { - "tier": "Bronze", - "weight": 0.11, - "inactive_at": "2026-03-14" - }, - "frappe/hrms": { - "tier": "Bronze", - "weight": 0.11, - "inactive_at": "2026-03-14" - }, - "freebsd/freebsd-src": { - "tier": "Bronze", - "weight": 0.14 - }, - "FreeRDP/FreeRDP": { - "tier": "Bronze", - "weight": 0.19 - }, - "FreeRTOS/FreeRTOS-Kernel": { - "tier": "Bronze", - "weight": 0.13 - }, - "freqtrade/freqtrade": { - "tier": "Bronze", - "weight": 0.21 - }, - "FRRouting/frr": { - "tier": "Bronze", - "weight": 0.13 - }, - "fullstackhero/dotnet-starter-kit": { - "tier": "Bronze", - "weight": 0.15 - }, - "FunkinCrew/Funkin": { - "tier": "Bronze", - "weight": 0.12 - }, - "fuzhengwei/CodeGuide": { - 
"tier": "Bronze", - "weight": 0.12 - }, - "fx-integral/metahash": { - "tier": "Bronze", - "weight": 0.38 - }, - "Fyrd/caniuse": { - "tier": "Bronze", - "weight": 0.11 - }, - "garylab/dnmp": { - "tier": "Bronze", - "weight": 0.11 - }, - "gatsbyjs/gatsby": { - "tier": "Bronze", - "weight": 0.14 - }, - "gatsbyjs/gatsby-starter-blog": { - "tier": "Bronze", - "weight": 0.18 - }, - "gcc-mirror/gcc": { - "tier": "Bronze", - "weight": 0.16 - }, - "General-Tao-Ventures/cartha-cli": { - "tier": "Bronze", - "weight": 0.41 - }, - "General-Tao-Ventures/cartha-validator": { - "tier": "Bronze", - "weight": 0.4 - }, - "geoserver/geoserver": { - "tier": "Bronze", - "weight": 0.17 - }, - "getmoto/moto": { - "tier": "Bronze", - "weight": 0.11 - }, - "getsentry/sentry": { - "tier": "Bronze", - "weight": 0.13 - }, - "ggml-org/llama.cpp": { - "tier": "Bronze", - "weight": 0.12 - }, - "ggml-org/whisper.cpp": { - "tier": "Bronze", - "weight": 0.12 - }, - "gin-gonic/gin": { - "tier": "Bronze", - "weight": 0.26 - }, - "git/git": { - "tier": "Bronze", - "weight": 2.03 - }, - "gitbutlerapp/gitbutler": { - "tier": "Bronze", - "weight": 0.46 - }, - "github-linguist/linguist": { - "tier": "Bronze", - "weight": 0.14 - }, - "github/choosealicense.com": { - "tier": "Bronze", - "weight": 0.12 - }, - "github/docs": { - "tier": "Bronze", - "weight": 0.23 - }, - "github/explore": { - "tier": "Bronze", - "weight": 0.15 - }, - "github/markup": { - "tier": "Bronze", - "weight": 0.13 - }, - "givanz/VvvebJs": { - "tier": "Bronze", - "weight": 0.11 - }, - "glfw/glfw": { - "tier": "Bronze", - "weight": 0.16 - }, - "gnuradio/gnuradio": { - "tier": "Bronze", - "weight": 0.14 - }, - "go-gitea/gitea": { - "tier": "Bronze", - "weight": 0.12 - }, - "go-gorm/gorm": { - "tier": "Bronze", - "weight": 0.11 - }, - "godotengine/godot": { - "tier": "Bronze", - "weight": 0.65 - }, - "godotengine/godot-docs": { - "tier": "Bronze", - "weight": 0.12 - }, - "gofiber/fiber": { - "tier": "Bronze", - "weight": 0.24 - }, - 
"gogs/gogs": { - "tier": "Bronze", - "weight": 0.12 - }, - "going-doer/Paper2Code": { - "tier": "Bronze", - "weight": 0.49 - }, - "golang/go": { - "tier": "Bronze", - "weight": 1.96 - }, - "google-gemini/gemini-cli": { - "tier": "Bronze", - "weight": 0.27 - }, - "google-research/football": { - "tier": "Bronze", - "weight": 0.16 - }, - "google/adk-python": { - "tier": "Bronze", - "weight": 0.24 - }, - "google/adk-samples": { - "tier": "Bronze", - "weight": 0.26 - }, - "google/auto": { - "tier": "Bronze", - "weight": 0.23 - }, - "google/benchmark": { - "tier": "Bronze", - "weight": 0.26 - }, - "google/closure-compiler": { - "tier": "Bronze", - "weight": 0.23 - }, - "google/clusterfuzz": { - "tier": "Bronze", - "weight": 0.23 - }, - "google/flatbuffers": { - "tier": "Bronze", - "weight": 0.28 - }, - "google/googletest": { - "tier": "Bronze", - "weight": 0.27 - }, - "google/gson": { - "tier": "Bronze", - "weight": 0.24 - }, - "google/guava": { - "tier": "Bronze", - "weight": 0.2 - }, - "google/recaptcha": { - "tier": "Bronze", - "weight": 0.24 - }, - "google/zx": { - "tier": "Bronze", - "weight": 0.11 - }, - "googleapis/google-api-php-client": { - "tier": "Bronze", - "weight": 0.16 - }, - "googleapis/google-api-python-client": { - "tier": "Bronze", - "weight": 0.29 - }, - "googleapis/google-cloud-go": { - "tier": "Bronze", - "weight": 0.27 - }, - "googleapis/google-cloud-python": { - "tier": "Bronze", - "weight": 0.17 - }, - "googleapis/googleapis": { - "tier": "Bronze", - "weight": 0.5 - }, - "GoogleChrome/lighthouse": { - "tier": "Bronze", - "weight": 0.23 - }, - "GoogleChrome/samples": { - "tier": "Bronze", - "weight": 0.18 - }, - "GoogleCloudPlatform/golang-samples": { - "tier": "Bronze", - "weight": 0.13 - }, - "GoogleCloudPlatform/python-docs-samples": { - "tier": "Bronze", - "weight": 0.12 - }, - "GoogleCloudPlatform/training-data-analyst": { - "tier": "Bronze", - "weight": 0.12 - }, - "googlemaps/android-maps-utils": { - "tier": "Bronze", - "weight": 0.15 - }, 
- "googlesamples/mlkit": { - "tier": "Bronze", - "weight": 0.17 - }, - "gopher-lab/subnet-42": { - "tier": "Bronze", - "weight": 0.38 - }, - "gorhill/uBlock": { - "tier": "Bronze", - "weight": 0.43 - }, - "gradients-ai/G.O.D": { - "tier": "Bronze", - "weight": 1.89 - }, - "gradio-app/gradio": { - "tier": "Bronze", - "weight": 0.11 - }, - "gradle/gradle": { - "tier": "Bronze", - "weight": 0.26 - }, - "grafana/grafana": { - "tier": "Silver", - "weight": 4.73 - }, - "graphite-project/graphite-web": { - "tier": "Bronze", - "weight": 0.11 - }, - "GraphiteAI/Graphite-Subnet": { - "tier": "Bronze", - "weight": 0.4 - }, - "graphql/graphql-js": { - "tier": "Bronze", - "weight": 0.23 - }, - "Grasscutters/Grasscutter": { - "tier": "Bronze", - "weight": 0.11 - }, - "grpc/grpc": { - "tier": "Bronze", - "weight": 0.17 - }, - "grpc/grpc-java": { - "tier": "Bronze", - "weight": 0.18 - }, - "gunthercox/ChatterBot": { - "tier": "Bronze", - "weight": 0.19 - }, - "guodongxiaren/README": { - "tier": "Bronze", - "weight": 0.12 - }, - "Guovin/iptv-api": { - "tier": "Bronze", - "weight": 0.2 - }, - "gz-yami/mall4cloud": { - "tier": "Bronze", - "weight": 0.11 - }, - "gz-yami/mall4j": { - "tier": "Bronze", - "weight": 0.11 - }, - "h5bp/html5-boilerplate": { - "tier": "Bronze", - "weight": 0.12 - }, - "HabitRPG/habitica": { - "tier": "Bronze", - "weight": 0.2 - }, - "Hacker0x01/react-datepicker": { - "inactive_at": "2025-11-29T17:45:38.525Z", - "tier": "Bronze", - "weight": 0.01 - }, - "hacksider/Deep-Live-Cam": { - "additional_acceptable_branches": ["premain"], - "tier": "Bronze", - "weight": 0.14 - }, - "HackTricks-wiki/hacktricks": { - "tier": "Bronze", - "weight": 0.11 - }, - "hadley/r4ds": { - "tier": "Bronze", - "weight": 0.12 - }, - "hak5/usbrubberducky-payloads": { - "tier": "Bronze", - "weight": 0.11 - }, - "hakimel/reveal.js": { - "tier": "Bronze", - "weight": 0.21 - }, - "halo-dev/halo": { - "tier": "Bronze", - "weight": 0.2 - }, - "hankcs/HanLP": { - "tier": "Bronze", - "weight": 
0.24 - }, - "happyfish100/fastdfs": { - "inactive_at": "2025-11-29T17:45:38.525Z", - "tier": "Bronze", - "weight": 0.01 - }, - "hashicorp/terraform": { - "tier": "Bronze", - "weight": 0.33 - }, - "hashicorp/terraform-provider-aws": { - "tier": "Bronze", - "weight": 0.24 - }, - "hashicorp/terraform-provider-azurerm": { - "tier": "Bronze", - "weight": 0.14 - }, - "hashicorp/vault": { - "tier": "Bronze", - "weight": 0.29 - }, - "hect0x7/JMComic-Crawler-Python": { - "tier": "Bronze", - "weight": 0.21 - }, - "helix-editor/helix": { - "tier": "Bronze", - "weight": 0.58 - }, - "helm/helm": { - "tier": "Bronze", - "weight": 0.33 - }, - "hexojs/hexo": { - "tier": "Bronze", - "weight": 0.12 - }, - "hhyo/Archery": { - "tier": "Bronze", - "weight": 0.11 - }, - "hibernate/hibernate-orm": { - "tier": "Bronze", - "weight": 0.18 - }, - "highcharts/highcharts": { - "tier": "Bronze", - "weight": 0.18 - }, - "hiifeng/V2ray-for-Doprax": { - "tier": "Bronze", - "weight": 0.14 - }, - "hiroi-sora/Umi-OCR": { - "tier": "Bronze", - "weight": 0.11 - }, - "hiyouga/LlamaFactory": { - "tier": "Bronze", - "weight": 0.14 - }, - "Homebrew/brew": { - "tier": "Bronze", - "weight": 0.15 - }, - "Homebrew/homebrew-cask": { - "tier": "Bronze", - "weight": 0.15 - }, - "Homebrew/homebrew-core": { - "tier": "Bronze", - "weight": 0.42 - }, - "honza/vim-snippets": { - "tier": "Bronze", - "weight": 0.31 - }, - "hoochanlon/hamuleite": { - "tier": "Bronze", - "weight": 0.11 - }, - "hoppscotch/hoppscotch": { - "additional_acceptable_branches": ["next"], - "tier": "Silver", - "weight": 11.5 - }, - "hpcaitech/ColossalAI": { - "tier": "Bronze", - "weight": 0.13 - }, - "hs-web/hsweb-framework": { - "tier": "Bronze", - "weight": 0.18 - }, - "hsliuping/TradingAgents-CN": { - "tier": "Bronze", - "weight": 0.1 - }, - "httprunner/httprunner": { - "tier": "Bronze", - "weight": 0.13 - }, - "hubotio/hubot": { - "tier": "Bronze", - "weight": 0.12 - }, - "huggingface/diffusers": { - "tier": "Bronze", - "weight": 0.12 - }, - 
"huggingface/lerobot": { - "tier": "Bronze", - "weight": 0.69 - }, - "huggingface/pytorch-image-models": { - "tier": "Bronze", - "weight": 0.35 - }, - "huggingface/transformers": { - "tier": "Bronze", - "weight": 1.82 - }, - "HugoBlox/kit": { - "tier": "Bronze", - "weight": 0.12 - }, - "hummingbot/hummingbot": { - "additional_acceptable_branches": ["development"], - "tier": "Bronze", - "weight": 0.11 - }, - "hybridauth/hybridauth": { - "tier": "Bronze", - "weight": 0.14 - }, - "hydralauncher/hydra": { - "tier": "Bronze", - "weight": 0.1 - }, - "hyperledger/fabric": { - "tier": "Bronze", - "weight": 0.16 - }, - "hzeller/rpi-rgb-led-matrix": { - "tier": "Bronze", - "weight": 0.14 - }, - "i18next/react-i18next": { - "inactive_at": "2025-11-29T17:45:38.525Z", - "tier": "Bronze", - "weight": 0.01 - }, - "iam-veeramalla/terraform-zero-to-hero": { - "tier": "Bronze", - "weight": 0.13 - }, - "iamkun/dayjs": { - "tier": "Bronze", - "weight": 0.11 - }, - "iamseancheney/python_for_data_analysis_2nd_chinese_version": { - "tier": "Bronze", - "weight": 0.12 - }, - "idurar/idurar-erp-crm": { - "additional_acceptable_branches": ["dev"], - "tier": "Bronze", - "weight": 0.19 - }, - "iGaoWei/BigDataView": { - "tier": "Bronze", - "weight": 0.1 - }, - "illuspas/Node-Media-Server": { - "tier": "Bronze", - "weight": 0.1 - }, - "iluwatar/java-design-patterns": { - "tier": "Bronze", - "weight": 0.12 - }, - "immich-app/immich": { - "tier": "Bronze", - "weight": 0.62 - }, - "immortalwrt/immortalwrt": { - "tier": "Bronze", - "weight": 0.14 - }, - "impel-intelligence/dippy-studio-bittensor": { - "tier": "Bronze", - "weight": 0.38 - }, - "impel-intelligence/dippy-studio-bittensor-miner": { - "tier": "Bronze", - "weight": 0.38 - }, - "impel-intelligence/dippy-studio-bittensor-orchestrator": { - "tier": "Bronze", - "weight": 0.38 - }, - "impress/impress.js": { - "tier": "Bronze", - "weight": 0.14 - }, - "iNavFlight/inav": { - "tier": "Bronze", - "weight": 0.14 - }, - 
"inference-labs-inc/subnet-2": { - "tier": "Bronze", - "weight": 0.38 - }, - "infiniflow/ragflow": { - "tier": "Silver", - "weight": 4.95 - }, - "influxdata/telegraf": { - "tier": "Bronze", - "weight": 0.15 - }, - "instructure/canvas-lms": { - "tier": "Bronze", - "weight": 0.13 - }, - "internetarchive/openlibrary": { - "tier": "Bronze", - "weight": 0.1 - }, - "ionic-team/ionic-conference-app": { - "tier": "Bronze", - "weight": 0.15 - }, - "ionic-team/ionic-framework": { - "tier": "Bronze", - "weight": 0.2 - }, - "ipython/ipython": { - "tier": "Bronze", - "weight": 1.77 - }, - "ireader/media-server": { - "tier": "Bronze", - "weight": 0.13 - }, - "is-a-dev/register": { - "tier": "Bronze", - "weight": 0.23 - }, - "isaac-sim/IsaacLab": { - "tier": "Bronze", - "weight": 0.58 - }, - "istio/istio": { - "tier": "Bronze", - "weight": 0.33 - }, - "It-s-AI/llm-detection": { - "tier": "Bronze", - "weight": 0.4 - }, - "ivy-llc/ivy": { - "tier": "Bronze", - "weight": 0.2 - }, - "JabRef/jabref": { - "tier": "Bronze", - "weight": 0.17 - }, - "jackyzha0/quartz": { - "tier": "Bronze", - "weight": 0.17 - }, - "janhq/jan": { - "tier": "Bronze", - "weight": 0.96 - }, - "jbeder/yaml-cpp": { - "tier": "Bronze", - "weight": 0.14 - }, - "jeecgboot/JeecgBoot": { - "tier": "Bronze", - "weight": 0.24 - }, - "JeffreySu/WeiXinMPSDK": { - "additional_acceptable_branches": ["Developer"], - "tier": "Bronze", - "weight": 0.16 - }, - "jekyll/jekyll": { - "tier": "Bronze", - "weight": 0.14 - }, - "jekyll/minima": { - "tier": "Bronze", - "weight": 0.12 - }, - "jellyfin/jellyfin": { - "tier": "Bronze", - "weight": 0.64 - }, - "jenkinsci/docker": { - "tier": "Bronze", - "weight": 0.12 - }, - "jenkinsci/jenkins": { - "tier": "Bronze", - "weight": 0.21 - }, - "jeroennoten/Laravel-AdminLTE": { - "inactive_at": "2025-11-29T17:45:38.525Z", - "tier": "Bronze", - "weight": 0.01 - }, - "jesseduffield/lazydocker": { - "tier": "Bronze", - "weight": 0.33 - }, - "jesseduffield/lazygit": { - "tier": "Bronze", - 
"weight": 0.29 - }, - "jestjs/jest": { - "tier": "Bronze", - "weight": 0.13 - }, - "JetBrains/intellij-community": { - "tier": "Bronze", - "weight": 0.11 - }, - "JetBrains/kotlin": { - "tier": "Bronze", - "weight": 0.26 - }, - "jetty/jetty.project": { - "tier": "Bronze", - "weight": 0.16 - }, - "jgamblin/Mirai-Source-Code": { - "tier": "Bronze", - "weight": 0.14 - }, - "jgm/pandoc": { - "tier": "Bronze", - "weight": 0.23 - }, - "jgraph/drawio-desktop": { - "tier": "Bronze", - "weight": 0.12 - }, - "jhy/jsoup": { - "tier": "Bronze", - "weight": 0.1 - }, - "jishenghua/jshERP": { - "tier": "Bronze", - "weight": 0.15 - }, - "jitsi/docker-jitsi-meet": { - "tier": "Bronze", - "weight": 0.1 - }, - "jitsi/jitsi-meet": { - "tier": "Bronze", - "weight": 0.11 - }, - "joomla/joomla-cms": { - "additional_acceptable_branches": ["*-dev"], - "tier": "Bronze", - "weight": 0.16 - }, - "jpetazzo/container.training": { - "tier": "Bronze", - "weight": 0.11 - }, - "jqlang/jq": { - "tier": "Bronze", - "weight": 0.26 - }, - "jquery-validation/jquery-validation": { - "tier": "Bronze", - "weight": 0.11 - }, - "jquery/jquery": { - "tier": "Bronze", - "weight": 0.25 - }, - "jquery/jquery-mousewheel": { - "tier": "Bronze", - "weight": 0.12 - }, - "jquery/jquery-ui": { - "tier": "Bronze", - "weight": 0.21 - }, - "jrowberg/i2cdevlib": { - "tier": "Bronze", - "weight": 0.16 - }, - "js-org/js.org": { - "tier": "Bronze", - "weight": 0.19 - }, - "judasn/IntelliJ-IDEA-Tutorial": { - "tier": "Bronze", - "weight": 0.12 - }, - "juice-shop/juice-shop": { - "additional_acceptable_branches": ["develop"], - "tier": "Bronze", - "weight": 0.21 - }, - "juliangarnier/anime": { - "additional_acceptable_branches": ["dev"], - "tier": "Bronze", - "weight": 0.26 - }, - "junegunn/fzf": { - "tier": "Bronze", - "weight": 0.24 - }, - "junyanz/pytorch-CycleGAN-and-pix2pix": { - "tier": "Bronze", - "weight": 0.12 - }, - "jupyter/docker-stacks": { - "tier": "Bronze", - "weight": 0.18 - }, - "jupyter/jupyter": { - "tier": 
"Bronze", - "weight": 1.71 - }, - "jupyter/notebook": { - "tier": "Bronze", - "weight": 0.12 - }, - "jupyterhub/jupyterhub": { - "tier": "Bronze", - "weight": 0.1 - }, - "jupyterlab/jupyterlab": { - "tier": "Silver", - "weight": 10.69 - }, - "just-the-docs/just-the-docs": { - "tier": "Bronze", - "weight": 0.12 - }, - "kaldi-asr/kaldi": { - "tier": "Bronze", - "weight": 0.12 - }, - "kamyu104/LeetCode-Solutions": { - "tier": "Bronze", - "weight": 0.14 - }, - "kananinirav/AWS-Certified-Cloud-Practitioner-Notes": { - "tier": "Bronze", - "weight": 0.11 - }, - "KartikTalwar/gmail.js": { - "inactive_at": "2025-11-29T17:45:38.525Z", - "tier": "Bronze", - "weight": 0.01 - }, - "kavishdevar/librepods": { - "tier": "Bronze", - "weight": 0.19 - }, - "kekingcn/kkFileView": { - "tier": "Bronze", - "weight": 0.1 - }, - "kenwheeler/slick": { - "tier": "Bronze", - "weight": 0.11 - }, - "kenzok8/openwrt-packages": { - "tier": "Bronze", - "weight": 0.12 - }, - "keras-team/keras": { - "tier": "Silver", - "weight": 4.45 - }, - "kevin-wayne/algs4": { - "tier": "Bronze", - "weight": 0.16 - }, - "keycloak/keycloak": { - "tier": "Bronze", - "weight": 0.11 - }, - "kiddin9/Kwrt": { - "tier": "Bronze", - "weight": 0.12 - }, - "Kitware/CMake": { - "tier": "Bronze", - "weight": 0.14 - }, - "kivy/python-for-android": { - "tier": "Bronze", - "weight": 0.1 - }, - "Klipper3d/klipper": { - "tier": "Bronze", - "weight": 0.15 - }, - "knowm/XChange": { - "tier": "Bronze", - "weight": 0.16 - }, - "koajs/koa": { - "tier": "Bronze", - "weight": 0.11 - }, - "Kong/insomnia": { - "tier": "Bronze", - "weight": 0.22 - }, - "Kong/kong": { - "tier": "Bronze", - "weight": 0.31 - }, - "krishnaik06/Roadmap-To-Learn-Generative-AI-In-2025": { - "tier": "Bronze", - "weight": 0.11 - }, - "ktbyers/netmiko": { - "tier": "Bronze", - "weight": 0.16 - }, - "kubeflow/kubeflow": { - "tier": "Bronze", - "weight": 0.23 - }, - "kubeflow/pipelines": { - "tier": "Bronze", - "weight": 0.42 - }, - "kubernetes-client/java": { - 
"tier": "Bronze", - "weight": 1.66 - }, - "kubernetes-client/python": { - "tier": "Bronze", - "weight": 1.61 - }, - "kubernetes-sigs/aws-load-balancer-controller": { - "tier": "Bronze", - "weight": 1.57 - }, - "kubernetes-sigs/cluster-api": { - "tier": "Bronze", - "weight": 1.53 - }, - "kubernetes-sigs/external-dns": { - "tier": "Bronze", - "weight": 1.49 - }, - "kubernetes-sigs/kubespray": { - "tier": "Bronze", - "weight": 0.79 - }, - "kubernetes-sigs/kustomize": { - "tier": "Bronze", - "weight": 0.79 - }, - "kubernetes-sigs/metrics-server": { - "tier": "Bronze", - "weight": 0.78 - }, - "kubernetes/autoscaler": { - "tier": "Bronze", - "weight": 0.77 - }, - "kubernetes/client-go": { - "tier": "Bronze", - "weight": 0.76 - }, - "kubernetes/community": { - "tier": "Bronze", - "weight": 0.76 - }, - "kubernetes/enhancements": { - "tier": "Bronze", - "weight": 0.75 - }, - "kubernetes/ingress-nginx": { - "tier": "Bronze", - "weight": 0.74 - }, - "kubernetes/kops": { - "tier": "Bronze", - "weight": 0.73 - }, - "kubernetes/kube-state-metrics": { - "tier": "Bronze", - "weight": 0.73 - }, - "kubernetes/kubernetes": { - "tier": "Bronze", - "weight": 1.45 - }, - "kubernetes/sample-controller": { - "tier": "Bronze", - "weight": 0.72 - }, - "kubernetes/test-infra": { - "tier": "Bronze", - "weight": 0.72 - }, - "kubernetes/website": { - "tier": "Bronze", - "weight": 0.71 - }, - "labring/FastGPT": { - "tier": "Bronze", - "weight": 0.7 - }, - "langchain-ai/langchain": { - "tier": "Bronze", - "weight": 0.67 - }, - "langflow-ai/langflow": { - "tier": "Bronze", - "weight": 0.11 - }, - "langgenius/dify": { - "tier": "Bronze", - "weight": 0.8 - }, - "laradock/laradock": { - "tier": "Bronze", - "weight": 0.12 - }, - "Laravel-Lang/lang": { - "tier": "Bronze", - "weight": 0.15 - }, - "laravel/framework": { - "tier": "Bronze", - "weight": 0.19 - }, - "laravel/laravel": { - "tier": "Bronze", - "weight": 0.14 - }, - "laruence/yaf": { - "tier": "Bronze", - "weight": 0.13 - }, - 
"latent-to/taohash": { - "tier": "Bronze", - "weight": 0.61 - }, - "laurent22/joplin": { - "tier": "Bronze", - "weight": 0.13 - }, - "layui/layui": { - "tier": "Bronze", - "weight": 0.11 - }, - "lballabio/QuantLib": { - "tier": "Bronze", - "weight": 0.14 - }, - "leadpoet/leadpoet": { - "tier": "Bronze", - "weight": 0.44 - }, - "Leaflet/Leaflet": { - "tier": "Bronze", - "weight": 0.14 - }, - "leethomason/tinyxml2": { - "tier": "Bronze", - "weight": 0.14 - }, - "letscontrolit/ESPEasy": { - "tier": "Bronze", - "weight": 0.14 - }, - "letta-ai/letta": { - "tier": "Bronze", - "weight": 1.42 - }, - "LFDT-web3j/web3j": { - "tier": "Bronze", - "weight": 0.16 - }, - "liangliangyy/DjangoBlog": { - "tier": "Bronze", - "weight": 0.18 - }, - "libgdx/libgdx": { - "tier": "Bronze", - "weight": 0.11 - }, - "libgit2/libgit2": { - "tier": "Bronze", - "weight": 0.3 - }, - "libopencm3/libopencm3": { - "tier": "Bronze", - "weight": 0.13 - }, - "librenms/librenms": { - "tier": "Bronze", - "weight": 0.15 - }, - "libusb/libusb": { - "tier": "Bronze", - "weight": 0.14 - }, - "lichess-org/chessground": { - "tier": "Bronze", - "weight": 0.28 - }, - "lichess-org/lila": { - "tier": "Bronze", - "weight": 0.28 - }, - "Lienol/openwrt": { - "tier": "Bronze", - "weight": 0.14 - }, - "LimeSurvey/LimeSurvey": { - "additional_acceptable_branches": ["develop-minor", "develop-major"], - "tier": "Bronze", - "weight": 0.16 - }, - "LineageOS/android": { - "tier": "Bronze", - "weight": 0.11 - }, - "liquibase/liquibase": { - "tier": "Bronze", - "weight": 0.16 - }, - "lllyasviel/Fooocus": { - "tier": "Bronze", - "weight": 0.14 - }, - "llmsresearch/paperbanana": { - "weight": 4.36, - "tier": "Silver" - }, - "llmware-ai/llmware": { - "tier": "Bronze", - "weight": 0.26 - }, - "llvm/llvm-project": { - "tier": "Bronze", - "weight": 0.27 - }, - "lobehub/lobehub": { - "tier": "Bronze", - "weight": 0.2 - }, - "localstack/localstack": { - "tier": "Bronze", - "weight": 0.11 - }, - 
"loiane/javascript-datastructures-algorithms": { - "inactive_at": "2025-11-08T04:21:05.319Z", - "tier": "Bronze", - "weight": 0.01 - }, - "louislam/uptime-kuma": { - "additional_acceptable_branches": ["3.0.X"], - "inactive_at": "2026-01-26T02:52:00.000Z", - "tier": "Bronze", - "weight": 0.19 - }, - "LSPosed/MagiskOnWSALocal": { - "tier": "Bronze", - "weight": 0.14 - }, - "Lucaslhm/Flipper-IRDB": { - "tier": "Bronze", - "weight": 0.1 - }, - "ly525/luban-h5": { - "tier": "Bronze", - "weight": 0.1 - }, - "lynndylanhurley/devise_token_auth": { - "tier": "Bronze", - "weight": 0.12 - }, - "lyogavin/airllm": { - "weight": 0.32, - "tier": "Bronze" - }, - "lyswhut/lx-music-desktop": { - "tier": "Bronze", - "weight": 0.13 - }, - "macrocosm-os/apex": { - "tier": "Bronze", - "weight": 0.38 - }, - "macrocosm-os/data-universe": { - "additional_acceptable_branches": ["dev"], - "tier": "Bronze", - "weight": 0.44 - }, - "macrocosm-os/iota": { - "tier": "Bronze", - "weight": 0.35 - }, - "macrozheng/mall": { - "tier": "Bronze", - "weight": 0.25 - }, - "macrozheng/mall-admin-web": { - "tier": "Bronze", - "weight": 0.12 - }, - "macrozheng/mall-swarm": { - "tier": "Bronze", - "weight": 0.19 - }, - "magento/magento2": { - "tier": "Bronze", - "weight": 0.18 - }, - "MagicMirrorOrg/MagicMirror": { - "tier": "Bronze", - "weight": 0.11 - }, - "makeplane/plane": { - "tier": "Bronze", - "weight": 0.22 - }, - "manifold-inc/hone": { - "tier": "Bronze", - "weight": 1.39 - }, - "manifold-inc/targon": { - "tier": "Bronze", - "weight": 1.36 - }, - "markedjs/marked": { - "tier": "Bronze", - "weight": 0.11 - }, - "MarlinFirmware/Marlin": { - "tier": "Bronze", - "weight": 0.19 - }, - "marmelab/react-admin": { - "tier": "Bronze", - "weight": 0.11 - }, - "mason-org/mason.nvim": { - "tier": "Bronze", - "weight": 0.31 - }, - "mastodon/mastodon": { - "tier": "Bronze", - "weight": 0.6 - }, - "matplotlib/matplotlib": { - "tier": "Bronze", - "weight": 0.21 - }, - "MatrixTM/MHDDoS": { - "tier": "Bronze", - 
"weight": 0.11 - }, - "mattermost/mattermost": { - "tier": "Bronze", - "weight": 0.3 - }, - "mautic/mautic": { - "tier": "Bronze", - "weight": 0.15 - }, - "mavlink/qgroundcontrol": { - "tier": "Bronze", - "weight": 0.15 - }, - "mayswind/ezbookkeeping": { - "tier": "Bronze", - "weight": 0.48 - }, - "MAZHARMIK/Interview_DS_Algo": { - "tier": "Bronze", - "weight": 0.14 - }, - "Mbed-TLS/mbedtls": { - "tier": "Bronze", - "weight": 0.14 - }, - "mdn/browser-compat-data": { - "tier": "Bronze", - "weight": 0.11 - }, - "mdn/content": { - "tier": "Bronze", - "weight": 0.14 - }, - "medusajs/medusa": { - "additional_acceptable_branches": ["develop"], - "tier": "Bronze", - "weight": 0.64 - }, - "meilisearch/meilisearch": { - "tier": "Bronze", - "weight": 0.28 - }, - "mem0ai/mem0": { - "tier": "Bronze", - "weight": 0.13 - }, - "mermaid-js/mermaid": { - "tier": "Bronze", - "weight": 0.11 - }, - "meshery/meshery": { - "tier": "Bronze", - "weight": 0.18 - }, - "mesonbuild/meson": { - "tier": "Bronze", - "weight": 0.1 - }, - "metabase/metabase": { - "tier": "Bronze", - "weight": 0.25 - }, - "MetaMask/metamask-extension": { - "tier": "Bronze", - "weight": 1.33 - }, - "metanova-labs/nova": { - "tier": "Bronze", - "weight": 1.3 - }, - "meteor/meteor": { - "tier": "Bronze", - "weight": 0.14 - }, - "metersphere/metersphere": { - "tier": "Bronze", - "weight": 0.1 - }, - "miantiao-me/Sink": { - "tier": "Bronze", - "weight": 0.11 - }, - "MIC-DKFZ/nnUNet": { - "tier": "Bronze", - "weight": 0.1 - }, - "microfeed/microfeed": { - "tier": "Bronze", - "weight": 0.16 - }, - "micropython/micropython": { - "tier": "Bronze", - "weight": 0.16 - }, - "microsoft/autogen": { - "tier": "Bronze", - "weight": 0.14 - }, - "microsoft/azure-pipelines-tasks": { - "tier": "Bronze", - "weight": 0.16 - }, - "microsoft/DirectX-Graphics-Samples": { - "tier": "Bronze", - "weight": 0.14 - }, - "microsoft/dotnet": { - "tier": "Bronze", - "weight": 0.31 - }, - "microsoft/fluentui": { - "tier": "Bronze", - "weight": 0.23 
- }, - "microsoft/generative-ai-for-beginners": { - "tier": "Bronze", - "weight": 0.16 - }, - "microsoft/LightGBM": { - "tier": "Bronze", - "weight": 0.25 - }, - "microsoft/markitdown": { - "tier": "Bronze", - "weight": 0.11 - }, - "microsoft/monaco-editor": { - "tier": "Bronze", - "weight": 0.11 - }, - "microsoft/playwright": { - "tier": "Bronze", - "weight": 0.1 - }, - "microsoft/PowerToys": { - "tier": "Bronze", - "weight": 0.1 - }, - "microsoft/qlib": { - "tier": "Bronze", - "weight": 0.24 - }, - "microsoft/referencesource": { - "tier": "Bronze", - "weight": 0.14 - }, - "microsoft/sql-server-samples": { - "tier": "Bronze", - "weight": 0.13 - }, - "microsoft/terminal": { - "tier": "Bronze", - "weight": 0.27 - }, - "microsoft/TypeScript": { - "tier": "Bronze", - "weight": 0.3 - }, - "microsoft/vscode": { - "tier": "Bronze", - "weight": 1.28 - }, - "microsoft/vscode-docs": { - "tier": "Bronze", - "weight": 0.12 - }, - "microsoft/WinAppDriver": { - "tier": "Bronze", - "weight": 0.14 - }, - "microsoft/Windows-driver-samples": { - "tier": "Bronze", - "weight": 0.15 - }, - "microsoft/winget-pkgs": { - "tier": "Bronze", - "weight": 0.12 - }, - "microsoft/WPF-Samples": { - "tier": "Bronze", - "weight": 0.15 - }, - "MicrosoftDocs/azure-docs": { - "tier": "Bronze", - "weight": 0.14 - }, - "MicrosoftLearning/AZ-104-MicrosoftAzureAdministrator": { - "tier": "Bronze", - "weight": 0.11 - }, - "miguelgrinberg/python-socketio": { - "inactive_at": "2025-11-29T17:45:38.525Z", - "tier": "Bronze", - "weight": 0.01 - }, - "mikefarah/yq": { - "tier": "Bronze", - "weight": 0.15 - }, - "mikel-brostrom/boxmot": { - "tier": "Bronze", - "weight": 0.1 - }, - "milvus-io/milvus": { - "tier": "Bronze", - "weight": 0.28 - }, - "mindsdb/mindsdb": { - "tier": "Bronze", - "weight": 0.13 - }, - "MinecraftForge/MinecraftForge": { - "tier": "Bronze", - "weight": 0.16 - }, - "mingrammer/diagrams": { - "tier": "Bronze", - "weight": 0.11 - }, - "minio/minio": { - "tier": "Bronze", - "weight": 0.38 - }, 
- "Mintplex-Labs/anything-llm": { - "tier": "Bronze", - "weight": 0.27 - }, - "mit-pdos/xv6-riscv": { - "tier": "Bronze", - "weight": 0.14 - }, - "mitmproxy/mitmproxy": { - "tier": "Bronze", - "weight": 0.13 - }, - "mlflow/mlflow": { - "tier": "Bronze", - "weight": 0.11 - }, - "mlpack/mlpack": { - "tier": "Bronze", - "weight": 0.14 - }, - "mmistakes/minimal-mistakes": { - "tier": "Bronze", - "weight": 0.15 - }, - "mobiusfund/etf": { - "tier": "Bronze", - "weight": 0.37 - }, - "moby/moby": { - "tier": "Bronze", - "weight": 0.16 - }, - "mode-network/synth-subnet": { - "tier": "Bronze", - "weight": 0.35 - }, - "modelcontextprotocol/python-sdk": { - "tier": "Bronze", - "weight": 0.29 - }, - "modelcontextprotocol/servers": { - "tier": "Bronze", - "weight": 0.13 - }, - "monero-project/monero": { - "tier": "Bronze", - "weight": 1.25 - }, - "mongodb/mongo": { - "tier": "Bronze", - "weight": 0.25 - }, - "mongodb/mongoid": { - "tier": "Bronze", - "weight": 0.12 - }, - "mongodb/node-mongodb-native": { - "tier": "Bronze", - "weight": 0.22 - }, - "moodle/moodle": { - "tier": "Bronze", - "weight": 0.18 - }, - "mozilla-mobile/firefox-ios": { - "tier": "Bronze", - "weight": 0.24 - }, - "mozilla/pdf.js": { - "tier": "Bronze", - "weight": 0.21 - }, - "mrdoob/three.js": { - "tier": "Bronze", - "weight": 1.03 - }, - "MudBlazor/MudBlazor": { - "inactive_at": "2025-11-29T17:45:38.525Z", - "tier": "Bronze", - "weight": 0.01 - }, - "mudler/LocalAI": { - "tier": "Bronze", - "weight": 0.55 - }, - "mui/material-ui": { - "tier": "Bronze", - "weight": 0.34 - }, - "MvvmCross/MvvmCross": { - "tier": "Bronze", - "weight": 0.14 - }, - "mybatis/generator": { - "tier": "Bronze", - "weight": 0.16 - }, - "mybatis/mybatis-3": { - "tier": "Bronze", - "weight": 0.21 - }, - "mybatis/spring-boot-starter": { - "tier": "Bronze", - "weight": 0.15 - }, - "mysql/mysql-server": { - "tier": "Bronze", - "weight": 0.56 - }, - "MystenLabs/sui": { - "tier": "Bronze", - "weight": 0.51 - }, - "n8n-io/n8n": { - "tier": 
"Bronze", - "weight": 0.29 - }, - "NaiboWang/EasySpider": { - "tier": "Bronze", - "weight": 0.14 - }, - "NanmiCoder/MediaCrawler": { - "tier": "Bronze", - "weight": 0.21 - }, - "naptha/tesseract.js": { - "tier": "Bronze", - "weight": 0.11 - }, - "NationalSecurityAgency/ghidra": { - "tier": "Bronze", - "weight": 0.13 - }, - "neetcode-gh/leetcode": { - "tier": "Bronze", - "weight": 0.43 - }, - "neovim/neovim": { - "tier": "Bronze", - "weight": 1.23 - }, - "NervJS/taro": { - "tier": "Bronze", - "weight": 0.12 - }, - "nestjs/nest": { - "tier": "Bronze", - "weight": 0.13 - }, - "netbirdio/netbird": { - "tier": "Bronze", - "weight": 0.48 - }, - "Netflix/zuul": { - "tier": "Bronze", - "weight": 0.23 - }, - "netty/netty": { - "tier": "Bronze", - "weight": 0.21 - }, - "networkx/networkx": { - "tier": "Bronze", - "weight": 0.11 - }, - "neuralinternet/SN27": { - "tier": "Bronze", - "weight": 0.44 - }, - "nextcloud/android": { - "tier": "Bronze", - "weight": 1.2 - }, - "nextcloud/desktop": { - "tier": "Bronze", - "weight": 1.18 - }, - "nextcloud/news": { - "tier": "Bronze", - "weight": 1.16 - }, - "nextcloud/server": { - "tier": "Bronze", - "weight": 1.14 - }, - "NG-ZORRO/ng-zorro-antd": { - "tier": "Bronze", - "weight": 0.18 - }, - "nginx/docker-nginx": { - "tier": "Bronze", - "weight": 0.29 - }, - "nginx/kubernetes-ingress": { - "tier": "Bronze", - "weight": 0.13 - }, - "nginx/nginx": { - "tier": "Bronze", - "weight": 1.12 - }, - "NginxProxyManager/nginx-proxy-manager": { - "tier": "Bronze", - "weight": 0.24 - }, - "nimbusdotstorage/Nimbus": { - "tier": "Bronze", - "weight": 0.1 - }, - "NixOS/nixpkgs": { - "tier": "Bronze", - "weight": 0.14 - }, - "nl8590687/ASRT_SpeechRecognition": { - "tier": "Bronze", - "weight": 0.1 - }, - "nlohmann/json": { - "tier": "Bronze", - "weight": 0.12 - }, - "nltk/nltk": { - "tier": "Bronze", - "weight": 0.11 - }, - "nmap/nmap": { - "tier": "Bronze", - "weight": 0.29 - }, - "NobyDa/Script": { - "tier": "Bronze", - "weight": 0.19 - }, - 
"nocodb/nocodb": { - "additional_acceptable_branches": ["develop"], - "tier": "Bronze", - "weight": 1.1 - }, - "nodejs/node": { - "tier": "Bronze", - "weight": 0.5 - }, - "nodejs/nodejs.org": { - "tier": "Bronze", - "weight": 0.19 - }, - "nodemcu/nodemcu-firmware": { - "additional_acceptable_branches": ["dev"], - "tier": "Bronze", - "weight": 0.14 - }, - "nolimits4web/swiper": { - "tier": "Bronze", - "weight": 0.21 - }, - "nopSolutions/nopCommerce": { - "tier": "Bronze", - "weight": 0.18 - }, - "NousResearch/atropos": { - "tier": "Bronze", - "weight": 0.45 - }, - "novicezk/midjourney-proxy": { - "tier": "Bronze", - "weight": 0.16 - }, - "novuhq/novu": { - "tier": "Bronze", - "weight": 0.12 - }, - "npm/cli": { - "tier": "Bronze", - "weight": 0.31 - }, - "numinouslabs/numinous": { - "tier": "Bronze", - "weight": 0.37 - }, - "numpy/numpy": { - "tier": "Bronze", - "weight": 0.55 - }, - "nushell/nushell": { - "tier": "Bronze", - "weight": 0.98 - }, - "nuxt/nuxt": { - "tier": "Bronze", - "weight": 0.1 - }, - "NVIDIA-Omniverse/IsaacSim-dockerfiles": { - "tier": "Bronze", - "weight": 0.57 - }, - "NVIDIA/apex": { - "tier": "Bronze", - "weight": 0.23 - }, - "NVIDIA/Megatron-LM": { - "tier": "Bronze", - "weight": 0.11 - }, - "nvim-lua/kickstart.nvim": { - "tier": "Bronze", - "weight": 0.15 - }, - "nvim-mini/mini.nvim": { - "tier": "Bronze", - "weight": 0.29 - }, - "nvim-treesitter/nvim-treesitter": { - "tier": "Bronze", - "weight": 0.48 - }, - "nwjs/nw.js": { - "tier": "Bronze", - "weight": 0.11 - }, - "OAI/OpenAPI-Specification": { - "tier": "Bronze", - "weight": 0.34 - }, - "obsidianmd/obsidian-releases": { - "tier": "Bronze", - "weight": 0.25 - }, - "obsidianmd/obsidian-sample-plugin": { - "tier": "Bronze", - "weight": 0.15 - }, - "obsproject/obs-studio": { - "tier": "Bronze", - "weight": 0.11 - }, - "oceanbase/miniob": { - "tier": "Bronze", - "weight": 0.14 - }, - "ocornut/imgui": { - "tier": "Bronze", - "weight": 0.12 - }, - "odoo/odoo": { - "tier": "Bronze", - "weight": 
0.25 - }, - "OffchainLabs/prysm": { - "tier": "Bronze", - "weight": 0.13 - }, - "ohmyzsh/ohmyzsh": { - "tier": "Bronze", - "weight": 1.01 - }, - "oldratlee/useful-scripts": { - "tier": "Bronze", - "weight": 0.11 - }, - "ollama/ollama": { - "tier": "Bronze", - "weight": 0.63 - }, - "olton/metroui": { - "tier": "Bronze", - "weight": 0.1 - }, - "omegalabsinc/omegalabs-anytoany-bittensor": { - "tier": "Bronze", - "weight": 0.37 - }, - "omegalabsinc/omegalabs-bittensor-subnet": { - "tier": "Bronze", - "weight": 0.37 - }, - "omkarcloud/botasaurus": { - "tier": "Bronze", - "weight": 0.45 - }, - "one-covenant/basilica": { - "tier": "Silver", - "weight": 10 - }, - "one-covenant/bittensor-rs": { - "tier": "Silver", - "weight": 6.58 - }, - "one-covenant/grail": { - "tier": "Silver", - "weight": 9.4 - }, - "one-covenant/templar": { - "tier": "Silver", - "weight": 8.89 - }, - "oneoneone-io/subnet-111": { - "tier": "Bronze", - "weight": 0.37 - }, - "oobabooga/text-generation-webui": { - "tier": "Bronze", - "weight": 0.13 - }, - "opa334/Dopamine": { - "tier": "Bronze", - "weight": 0.15 - }, - "open-telemetry/opentelemetry-collector": { - "tier": "Bronze", - "weight": 0.3 - }, - "open-telemetry/opentelemetry-collector-contrib": { - "tier": "Bronze", - "weight": 0.14 - }, - "open-telemetry/opentelemetry-go": { - "tier": "Bronze", - "weight": 0.29 - }, - "open-webui/open-webui": { - "tier": "Bronze", - "weight": 0.14 - }, - "openai/codex": { - "tier": "Bronze", - "weight": 0.28 - }, - "openai/openai-agents-python": { - "tier": "Bronze", - "weight": 0.24 - }, - "openai/openai-python": { - "tier": "Bronze", - "weight": 0.24 - }, - "openai/openai-realtime-console": { - "tier": "Bronze", - "weight": 0.16 - }, - "OpenAPITools/openapi-generator": { - "tier": "Bronze", - "weight": 0.11 - }, - "OpenBB-finance/OpenBB": { - "tier": "Bronze", - "weight": 0.11 - }, - "opencart/opencart": { - "tier": "Bronze", - "weight": 0.16 - }, - "openclaw/openclaw": { - "weight": 20, - "tier": "Silver" - }, 
- "opencv/opencv": { - "tier": "Bronze", - "weight": 0.32 - }, - "opencv/opencv_contrib": { - "tier": "Bronze", - "weight": 0.16 - }, - "opendatalab/MinerU": { - "tier": "Bronze", - "weight": 0.11 - }, - "OpenDroneMap/WebODM": { - "tier": "Bronze", - "weight": 0.15 - }, - "openedx/openedx-platform": { - "tier": "Bronze", - "weight": 0.19 - }, - "openemr/openemr": { - "tier": "Bronze", - "weight": 0.15 - }, - "OpenGradient/BitQuant-Subnet": { - "tier": "Bronze", - "weight": 0.4 - }, - "OpenHands/OpenHands": { - "tier": "Silver", - "weight": 7.69 - }, - "openinterpreter/open-interpreter": { - "tier": "Bronze", - "weight": 0.7 - }, - "openjdk/jdk": { - "tier": "Bronze", - "weight": 0.32 - }, - "openlayers/openlayers": { - "tier": "Bronze", - "weight": 0.11 - }, - "OpenMined/PySyft": { - "tier": "Bronze", - "weight": 0.1 - }, - "openprose/prose": { - "tier": "Bronze", - "weight": 0.44 - }, - "opensearch-project/OpenSearch": { - "tier": "Bronze", - "weight": 0.1 - }, - "openshift/origin": { - "tier": "Bronze", - "weight": 0.14 - }, - "OpenSignLabs/OpenSign": { - "tier": "Bronze", - "weight": 0.47 - }, - "opensourcepos/opensourcepos": { - "tier": "Bronze", - "weight": 0.15 - }, - "openspug/spug": { - "tier": "Bronze", - "weight": 0.11 - }, - "openssh/openssh-portable": { - "tier": "Bronze", - "weight": 0.14 - }, - "openstreetmap/iD": { - "tier": "Bronze", - "weight": 0.16 - }, - "opentensor/async-substrate-interface": { - "tier": "Gold", - "weight": 24.78 - }, - "opentensor/bittensor": { - "additional_acceptable_branches": ["staging", "SDKv10"], - "tier": "Gold", - "weight": 43.72 - }, - "opentensor/btcli": { - "additional_acceptable_branches": ["staging"], - "tier": "Gold", - "weight": 21.54 - }, - "opentensor/btwallet": { - "tier": "Gold", - "weight": 23.02 - }, - "opentensor/subtensor": { - "additional_acceptable_branches": ["devnet-ready"], - "tier": "Gold", - "weight": 71.03 - }, - "openvinotoolkit/open_model_zoo": { - "tier": "Bronze", - "weight": 0.15 - }, - 
"openvinotoolkit/openvino": { - "tier": "Bronze", - "weight": 0.14 - }, - "OpenVPN/openvpn": { - "tier": "Bronze", - "weight": 0.25 - }, - "openvswitch/ovs": { - "tier": "Bronze", - "weight": 0.13 - }, - "openwrt/luci": { - "tier": "Bronze", - "weight": 0.92 - }, - "openwrt/openwrt": { - "tier": "Bronze", - "weight": 0.18 - }, - "OpenZeppelin/openzeppelin-contracts": { - "tier": "Bronze", - "weight": 0.13 - }, - "ophub/amlogic-s9xxx-armbian": { - "tier": "Bronze", - "weight": 0.11 - }, - "oppia/oppia": { - "tier": "Bronze", - "weight": 0.19 - }, - "oracle/docker-images": { - "tier": "Bronze", - "weight": 0.12 - }, - "OrchardCMS/OrchardCore": { - "tier": "Bronze", - "weight": 0.14 - }, - "ordinals/ord": { - "tier": "Bronze", - "weight": 0.13 - }, - "orion-lib/OrionTV": { - "tier": "Bronze", - "weight": 0.14 - }, - "Orpheus-AI/Zeus": { - "tier": "Bronze", - "weight": 0.51 - }, - "otavioschwanck/github-pr-reviewer.nvim": { - "tier": "Bronze", - "weight": 0.69 - }, - "othneildrew/Best-README-Template": { - "tier": "Bronze", - "weight": 0.13 - }, - "ourongxing/newsnow": { - "tier": "Bronze", - "weight": 0.17 - }, - "oven-sh/bun": { - "tier": "Bronze", - "weight": 0.68 - }, - "owncast/owncast": { - "tier": "Bronze", - "weight": 0.21 - }, - "owncloud/android": { - "tier": "Bronze", - "weight": 0.12 - }, - "PaddlePaddle/PaddleDetection": { - "tier": "Bronze", - "weight": 0.11 - }, - "PaddlePaddle/PaddleNLP": { - "tier": "Bronze", - "weight": 0.11 - }, - "pagefaultgames/pokerogue": { - "tier": "Bronze", - "weight": 0.15 - }, - "pagehelper-org/Mybatis-PageHelper": { - "tier": "Bronze", - "weight": 0.1 - }, - "pallets-eco/flask-admin": { - "tier": "Bronze", - "weight": 0.1 - }, - "pallets/flask": { - "tier": "Bronze", - "weight": 0.21 - }, - "pandas-dev/pandas": { - "tier": "Silver", - "weight": 4.63 - }, - "pantsbuild/pants": { - "tier": "Bronze", - "weight": 0.26 - }, - "paperclipai/paperclip": { - "weight": 20, - "tier": "Silver" - }, - "PaperMC/Paper": { - "tier": 
"Bronze", - "weight": 0.1 - }, - "paradigmxyz/reth": { - "tier": "Bronze", - "weight": 0.13 - }, - "paramiko/paramiko": { - "tier": "Bronze", - "weight": 0.1 - }, - "parcadei/Continuous-Claude-v3": { - "tier": "Bronze", - "weight": 0.48 - }, - "parcel-bundler/parcel": { - "tier": "Bronze", - "weight": 0.11 - }, - "paritytech/polkadot-sdk": { - "tier": "Bronze", - "weight": 1.09 - }, - "parse-community/parse-dashboard": { - "tier": "Bronze", - "weight": 0.16 - }, - "parse-community/parse-server": { - "tier": "Bronze", - "weight": 0.11 - }, - "PathOfBuildingCommunity/PathOfBuilding": { - "tier": "Bronze", - "weight": 0.11 - }, - "pathwaycom/pathway": { - "tier": "Bronze", - "weight": 0.1 - }, - "paulirish/dotfiles": { - "tier": "Bronze", - "weight": 0.11 - }, - "payloadcms/payload": { - "tier": "Bronze", - "weight": 0.1 - }, - "pebble-dev/mobile-app": { - "tier": "Bronze", - "weight": 0.54 - }, - "pebble-dev/pebble-firmware": { - "tier": "Bronze", - "weight": 0.53 - }, - "pedroslopez/whatsapp-web.js": { - "tier": "Bronze", - "weight": 0.11 - }, - "pennersr/django-allauth": { - "tier": "Bronze", - "weight": 0.18 - }, - "penpot/penpot": { - "tier": "Silver", - "weight": 7.08 - }, - "pentaho/pentaho-kettle": { - "tier": "Bronze", - "weight": 0.17 - }, - "phaserjs/phaser": { - "tier": "Bronze", - "weight": 0.13 - }, - "photon-hq/imessage-kit": { - "tier": "Bronze", - "weight": 0.45 - }, - "photoprism/photoprism": { - "tier": "Bronze", - "weight": 0.58 - }, - "php/php-src": { - "tier": "Bronze", - "weight": 0.32 - }, - "PHPMailer/PHPMailer": { - "tier": "Bronze", - "weight": 0.19 - }, - "phpmyadmin/phpmyadmin": { - "tier": "Bronze", - "weight": 0.15 - }, - "pi-hole/pi-hole": { - "tier": "Bronze", - "weight": 0.42 - }, - "pi-hole/web": { - "additional_acceptable_branches": ["development"], - "tier": "Bronze", - "weight": 1.07 - }, - "pimcore/pimcore": { - "tier": "Bronze", - "weight": 0.13 - }, - "pingcap/tidb": { - "tier": "Bronze", - "weight": 0.81 - }, - 
"pinpoint-apm/pinpoint": { - "tier": "Bronze", - "weight": 0.1 - }, - "PipedreamHQ/pipedream": { - "tier": "Bronze", - "weight": 0.21 - }, - "piskvorky/gensim": { - "tier": "Bronze", - "weight": 0.11 - }, - "pixijs/pixijs": { - "tier": "Bronze", - "weight": 0.12 - }, - "pjialin/py12306": { - "tier": "Bronze", - "weight": 0.11 - }, - "pk910/PoWFaucet": { - "tier": "Bronze", - "weight": 0.14 - }, - "Platane/snk": { - "tier": "Bronze", - "weight": 0.15 - }, - "PlatformNetwork/platform": { - "tier": "Bronze", - "weight": 0.4 - }, - "playframework/playframework": { - "tier": "Bronze", - "weight": 0.13 - }, - "pmmp/PocketMine-MP": { - "tier": "Bronze", - "weight": 0.13 - }, - "pnpm/pnpm": { - "tier": "Bronze", - "weight": 0.26 - }, - "pocketbase/pocketbase": { - "tier": "Bronze", - "weight": 0.57 - }, - "PointCloudLibrary/pcl": { - "tier": "Bronze", - "weight": 0.15 - }, - "pola-rs/polars": { - "tier": "Bronze", - "weight": 0.57 - }, - "Polymarket/agents": { - "tier": "Bronze", - "weight": 0.46 - }, - "postgres/postgres": { - "tier": "Bronze", - "weight": 0.55 - }, - "poteto/hiring-without-whiteboards": { - "tier": "Bronze", - "weight": 0.11 - }, - "PowerShell/PowerShell": { - "tier": "Bronze", - "weight": 0.27 - }, - "prakhar1989/docker-curriculum": { - "tier": "Bronze", - "weight": 0.11 - }, - "prasathmani/tinyfilemanager": { - "tier": "Bronze", - "weight": 0.14 - }, - "preactjs/preact": { - "tier": "Bronze", - "weight": 0.1 - }, - "PrestaShop/PrestaShop": { - "tier": "Bronze", - "weight": 0.16 - }, - "prestodb/presto": { - "tier": "Bronze", - "weight": 0.19 - }, - "prettier/prettier": { - "tier": "Bronze", - "weight": 0.11 - }, - "primefaces/primeng": { - "tier": "Bronze", - "weight": 0.18 - }, - "progit/progit2": { - "tier": "Bronze", - "weight": 0.11 - }, - "Project-OSRM/osrm-backend": { - "tier": "Bronze", - "weight": 0.14 - }, - "prometheus-community/helm-charts": { - "tier": "Bronze", - "weight": 0.12 - }, - "prometheus-operator/prometheus-operator": { - "tier": 
"Bronze", - "weight": 0.14 - }, - "prometheus/alertmanager": { - "tier": "Bronze", - "weight": 0.13 - }, - "prometheus/prometheus": { - "tier": "Bronze", - "weight": 0.32 - }, - "protocolbuffers/protobuf": { - "tier": "Bronze", - "weight": 0.18 - }, - "ProvableHQ/snarkOS": { - "tier": "Bronze", - "weight": 0.13 - }, - "psf/black": { - "tier": "Bronze", - "weight": 0.1 - }, - "psf/requests": { - "tier": "Bronze", - "weight": 0.13 - }, - "puikinsh/Adminator-admin-dashboard": { - "tier": "Bronze", - "weight": 0.11 - }, - "pulumi/pulumi": { - "inactive_at": "2026-03-04T17:03:48.522Z", - "tier": "Silver", - "weight": 3.73 - }, - "puppeteer/puppeteer": { - "tier": "Bronze", - "weight": 0.13 - }, - "PX4/PX4-Autopilot": { - "tier": "Bronze", - "weight": 0.65 - }, - "pyca/cryptography": { - "tier": "Silver", - "weight": 5.07 - }, - "PyGithub/PyGithub": { - "tier": "Bronze", - "weight": 0.1 - }, - "pymc-devs/pymc": { - "tier": "Bronze", - "weight": 0.1 - }, - "pypa/pip": { - "tier": "Bronze", - "weight": 0.3 - }, - "PyQt5/PyQt": { - "tier": "Bronze", - "weight": 0.1 - }, - "pytest-dev/pytest": { - "tier": "Bronze", - "weight": 0.1 - }, - "python-pillow/Pillow": { - "tier": "Bronze", - "weight": 0.23 - }, - "python-telegram-bot/python-telegram-bot": { - "tier": "Bronze", - "weight": 0.83 - }, - "python-visualization/folium": { - "tier": "Bronze", - "weight": 0.17 - }, - "python/cpython": { - "tier": "Bronze", - "weight": 1.05 - }, - "python/mypy": { - "tier": "Bronze", - "weight": 0.24 - }, - "python/peps": { - "tier": "Bronze", - "weight": 0.1 - }, - "python/typeshed": { - "tier": "Bronze", - "weight": 0.16 - }, - "pytorch/pytorch": { - "tier": "Bronze", - "weight": 1.04 - }, - "pytorch/vision": { - "tier": "Bronze", - "weight": 0.31 - }, - "qbittensor-labs/quantum": { - "tier": "Bronze", - "weight": 1.02 - }, - "qdrant/qdrant": { - "additional_acceptable_branches": ["dev"], - "tier": "Bronze", - "weight": 0.5 - }, - "qemu/qemu": { - "tier": "Bronze", - "weight": 0.15 - }, - 
"Qiskit/qiskit": { - "tier": "Bronze", - "weight": 0.91 - }, - "qist/tvbox": { - "tier": "Bronze", - "weight": 0.18 - }, - "qkqpttgf/OneManager-php": { - "tier": "Bronze", - "weight": 0.11 - }, - "qmk/qmk_firmware": { - "tier": "Bronze", - "weight": 0.21 - }, - "QuivrHQ/quivr": { - "tier": "Bronze", - "weight": 0.1 - }, - "qwibitai/nanoclaw": { - "weight": 20, - "tier": "Silver" - }, - "rabbitmq/rabbitmq-server": { - "tier": "Bronze", - "weight": 0.3 - }, - "rack/rack": { - "tier": "Bronze", - "weight": 0.12 - }, - "raga-ai-hub/RagaAI-Catalyst": { - "tier": "Bronze", - "weight": 0.1 - }, - "rails/rails": { - "tier": "Bronze", - "weight": 0.28 - }, - "RainerKuemmerle/g2o": { - "tier": "Bronze", - "weight": 0.13 - }, - "Raphire/Win11Debloat": { - "tier": "Bronze", - "weight": 0.21 - }, - "rapid7/metasploit-framework": { - "tier": "Bronze", - "weight": 0.22 - }, - "Rapptz/discord.py": { - "tier": "Bronze", - "weight": 0.27 - }, - "RasaHQ/rasa": { - "tier": "Bronze", - "weight": 0.11 - }, - "raspberrypi/documentation": { - "tier": "Bronze", - "weight": 0.16 - }, - "raspberrypi/linux": { - "tier": "Bronze", - "weight": 0.42 - }, - "raspberrypi/pico-sdk": { - "tier": "Bronze", - "weight": 0.41 - }, - "RaspberryPiFoundation/blockly": { - "tier": "Bronze", - "weight": 0.1 - }, - "ray-project/ray": { - "tier": "Silver", - "weight": 8.44 - }, - "raycast/extensions": { - "tier": "Bronze", - "weight": 0.17 - }, - "react-component/image": { - "tier": "Bronze", - "weight": 0.1 - }, - "react-component/picker": { - "tier": "Bronze", - "weight": 0.1 - }, - "react-component/select": { - "tier": "Bronze", - "weight": 0.1 - }, - "react-navigation/react-navigation": { - "tier": "Bronze", - "weight": 0.1 - }, - "reactchartjs/react-chartjs-2": { - "tier": "Bronze", - "weight": 0.15 - }, - "ReactiveX/RxJava": { - "tier": "Bronze", - "weight": 0.13 - }, - "reactjs/react.dev": { - "tier": "Bronze", - "weight": 1.01 - }, - "realpython/materials": { - "tier": "Bronze", - "weight": 0.13 - }, - 
"reboot-org/reboot-subnet": { - "tier": "Bronze", - "weight": 0.37 - }, - "redis/jedis": { - "tier": "Bronze", - "weight": 0.18 - }, - "redis/redis": { - "tier": "Bronze", - "weight": 0.32 - }, - "redisson/redisson": { - "tier": "Bronze", - "weight": 0.1 - }, - "redmine/redmine": { - "tier": "Bronze", - "weight": 0.13 - }, - "RedTeamSubnet/RedTeam": { - "additional_acceptable_branches": ["dev"], - "tier": "Bronze", - "weight": 0.39 - }, - "reduxjs/redux": { - "tier": "Bronze", - "weight": 0.2 - }, - "remix-run/react-router": { - "tier": "Bronze", - "weight": 0.13 - }, - "resemble-ai/chatterbox": { - "tier": "Bronze", - "weight": 0.47 - }, - "resi-labs-ai/resi": { - "tier": "Bronze", - "weight": 0.37 - }, - "rfordatascience/tidytuesday": { - "tier": "Bronze", - "weight": 0.11 - }, - "ricequant/rqalpha": { - "tier": "Bronze", - "weight": 0.1 - }, - "RichardLitt/standard-readme": { - "tier": "Bronze", - "weight": 0.11 - }, - "ridgesai/ridges": { - "tier": "Bronze", - "weight": 0.99 - }, - "RIOT-OS/RIOT": { - "tier": "Bronze", - "weight": 0.13 - }, - "riscv-collab/riscv-gnu-toolchain": { - "tier": "Bronze", - "weight": 0.13 - }, - "RobinHerbots/Inputmask": { - "tier": "Bronze", - "weight": 0.17 - }, - "robotframework/robotframework": { - "tier": "Bronze", - "weight": 0.1 - }, - "RobotLocomotion/drake": { - "tier": "Bronze", - "weight": 0.13 - }, - "RocketChat/Rocket.Chat": { - "tier": "Bronze", - "weight": 0.19 - }, - "RogueTensor/bitagent_subnet": { - "tier": "Bronze", - "weight": 0.39 - }, - "rolling-scopes-school/tasks": { - "tier": "Bronze", - "weight": 0.2 - }, - "romkatv/powerlevel10k": { - "tier": "Bronze", - "weight": 0.22 - }, - "ros-navigation/navigation2": { - "tier": "Bronze", - "weight": 0.13 - }, - "ros2/ros2": { - "tier": "Bronze", - "weight": 0.54 - }, - "rstudio/bookdown": { - "tier": "Bronze", - "weight": 0.16 - }, - "rstudio/shiny": { - "tier": "Bronze", - "weight": 0.1 - }, - "RT-Thread/rt-thread": { - "tier": "Bronze", - "weight": 0.14 - }, - 
"ruby-china/homeland": { - "tier": "Bronze", - "weight": 0.12 - }, - "ruby/ruby": { - "tier": "Bronze", - "weight": 0.98 - }, - "ruby/rubygems": { - "tier": "Bronze", - "weight": 0.13 - }, - "Rudrabha/Wav2Lip": { - "tier": "Bronze", - "weight": 0.1 - }, - "run-llama/llama_index": { - "tier": "Silver", - "weight": 7.37 - }, - "runelite/runelite": { - "tier": "Bronze", - "weight": 0.19 - }, - "rust-lang/rust": { - "tier": "Bronze", - "weight": 0.97 - }, - "rustdesk/rustdesk": { - "tier": "Bronze", - "weight": 0.11 - }, - "RVC-Boss/GPT-SoVITS": { - "tier": "Bronze", - "weight": 0.13 - }, - "RyanFitzgerald/devportfolio": { - "tier": "Bronze", - "weight": 0.11 - }, - "sahat/hackathon-starter": { - "tier": "Bronze", - "weight": 0.12 - }, - "saleor/saleor": { - "tier": "Bronze", - "weight": 0.11 - }, - "saltstack/salt": { - "tier": "Bronze", - "weight": 0.2 - }, - "samdutton/simpl": { - "tier": "Bronze", - "weight": 0.11 - }, - "sammchardy/python-binance": { - "tier": "Bronze", - "weight": 0.31 - }, - "sansan0/TrendRadar": { - "tier": "Bronze", - "weight": 0.17 - }, - "sbt/sbt": { - "additional_acceptable_branches": ["1.12.x"], - "tier": "Bronze", - "weight": 0.3 - }, - "scala/scala": { - "tier": "Bronze", - "weight": 0.23 - }, - "SchemaStore/schemastore": { - "tier": "Bronze", - "weight": 0.17 - }, - "schwabe/ics-openvpn": { - "tier": "Bronze", - "weight": 0.13 - }, - "scikit-image/scikit-image": { - "tier": "Bronze", - "weight": 0.17 - }, - "scipy/scipy": { - "tier": "Bronze", - "weight": 0.95 - }, - "score-technologies/turbovision": { - "tier": "Bronze", - "weight": 0.94 - }, - "scrapy/scrapy": { - "tier": "Bronze", - "weight": 0.13 - }, - "scratchfoundation/scratch-gui": { - "tier": "Bronze", - "weight": 0.19 - }, - "SeleniumHQ/docker-selenium": { - "tier": "Bronze", - "weight": 0.3 - }, - "SeleniumHQ/selenium": { - "tier": "Bronze", - "weight": 0.3 - }, - "sentient-agi/ROMA": { - "tier": "Bronze", - "weight": 0.46 - }, - "serverless/serverless": { - "tier": "Bronze", 
- "weight": 0.13 - }, - "shadcn-ui/ui": { - "tier": "Bronze", - "weight": 0.1 - }, - "shadowsocks/shadowsocks-android": { - "tier": "Bronze", - "weight": 0.22 - }, - "sharkdp/fd": { - "tier": "Bronze", - "weight": 0.23 - }, - "sharu725/online-cv": { - "tier": "Bronze", - "weight": 0.21 - }, - "sherlock-project/sherlock": { - "tier": "Bronze", - "weight": 0.13 - }, - "shidenggui/easytrader": { - "tier": "Bronze", - "weight": 0.18 - }, - "shiftlayer-llc/brainplay-subnet": { - "additional_acceptable_branches": ["dev"], - "tier": "Bronze", - "weight": 0.36 - }, - "SigmaHQ/sigma": { - "tier": "Bronze", - "weight": 0.1 - }, - "signalapp/Signal-Android": { - "tier": "Bronze", - "weight": 0.54 - }, - "signalapp/Signal-Server": { - "tier": "Bronze", - "weight": 0.1 - }, - "signalwire/freeswitch": { - "tier": "Bronze", - "weight": 0.13 - }, - "Significant-Gravitas/AutoGPT": { - "additional_acceptable_branches": ["dev"], - "tier": "Bronze", - "weight": 0.67 - }, - "SillyTavern/SillyTavern": { - "additional_acceptable_branches": ["staging"], - "tier": "Bronze", - "weight": 0.11 - }, - "singgel/JAVA": { - "tier": "Bronze", - "weight": 0.1 - }, - "sipeed/picoclaw": { - "tier": "Bronze", - "weight": 0.52 - }, - "siteserver/cms": { - "tier": "Bronze", - "weight": 0.16 - }, - "siyuan-note/siyuan": { - "tier": "Bronze", - "weight": 0.22 - }, - "Sjj1024/PakePlus": { - "tier": "Bronze", - "weight": 0.12 - }, - "skylot/jadx": { - "tier": "Bronze", - "weight": 0.13 - }, - "Skyvern-AI/skyvern": { - "tier": "Bronze", - "weight": 0.23 - }, - "slack-go/slack": { - "tier": "Bronze", - "weight": 0.1 - }, - "slackapi/node-slack-sdk": { - "tier": "Bronze", - "weight": 0.22 - }, - "slackapi/python-slack-sdk": { - "tier": "Bronze", - "weight": 0.22 - }, - "smartcontractkit/chainlink": { - "tier": "Bronze", - "weight": 0.93 - }, - "smogon/pokemon-showdown": { - "tier": "Bronze", - "weight": 0.16 - }, - "Snailclimb/JavaGuide": { - "inactive_at": "2025-11-29T17:45:38.525Z", - "tier": "Bronze", - 
"weight": 0.01 - }, - "socketio/socket.io": { - "tier": "Bronze", - "weight": 0.13 - }, - "sofastack/sofa-jraft": { - "tier": "Bronze", - "weight": 0.14 - }, - "sofastack/sofa-rpc": { - "tier": "Bronze", - "weight": 0.14 - }, - "soimort/you-get": { - "tier": "Bronze", - "weight": 0.13 - }, - "solana-foundation/anchor": { - "tier": "Bronze", - "weight": 0.92 - }, - "SonarSource/sonarqube": { - "tier": "Bronze", - "weight": 0.25 - }, - "sorin-ionescu/prezto": { - "tier": "Bronze", - "weight": 0.12 - }, - "spack/spack": { - "tier": "Bronze", - "weight": 0.17 - }, - "spdk/spdk": { - "tier": "Bronze", - "weight": 0.13 - }, - "spesmilo/electrum": { - "tier": "Bronze", - "weight": 0.18 - }, - "sphinx-doc/sphinx": { - "tier": "Bronze", - "weight": 0.17 - }, - "sportstensor/sn41": { - "tier": "Bronze", - "weight": 0.53 - }, - "spree/spree": { - "tier": "Bronze", - "weight": 0.13 - }, - "spring-cloud/spring-cloud-gateway": { - "tier": "Bronze", - "weight": 0.17 - }, - "spring-cloud/spring-cloud-kubernetes": { - "tier": "Bronze", - "weight": 0.25 - }, - "spring-cloud/spring-cloud-netflix": { - "tier": "Bronze", - "weight": 0.16 - }, - "spring-io/initializr": { - "tier": "Bronze", - "weight": 0.15 - }, - "spring-projects/spring-boot": { - "tier": "Bronze", - "weight": 0.31 - }, - "spring-projects/spring-framework": { - "tier": "Bronze", - "weight": 0.34 - }, - "spring-projects/spring-petclinic": { - "tier": "Bronze", - "weight": 0.14 - }, - "spring-projects/spring-security": { - "tier": "Bronze", - "weight": 0.19 - }, - "sqlite/sqlite": { - "tier": "Bronze", - "weight": 0.32 - }, - "sqlmapproject/sqlmap": { - "tier": "Bronze", - "weight": 0.13 - }, - "square/okhttp": { - "tier": "Bronze", - "weight": 0.1 - }, - "starship/starship": { - "tier": "Bronze", - "weight": 0.26 - }, - "statsmodels/statsmodels": { - "tier": "Bronze", - "weight": 0.1 - }, - "stefanprodan/podinfo": { - "tier": "Bronze", - "weight": 0.13 - }, - "stevenjoezhang/live2d-widget": { - "tier": "Bronze", - 
"weight": 0.1 - }, - "steveseguin/vdo.ninja": { - "tier": "Bronze", - "weight": 0.15 - }, - "Stirling-Tools/Stirling-PDF": { - "tier": "Bronze", - "weight": 0.81 - }, - "stleary/JSON-java": { - "tier": "Bronze", - "weight": 0.16 - }, - "storybookjs/storybook": { - "tier": "Bronze", - "weight": 0.12 - }, - "strapi/strapi": { - "tier": "Bronze", - "weight": 0.12 - }, - "streamich/react-use": { - "tier": "Bronze", - "weight": 0.1 - }, - "streamlit/streamlit": { - "tier": "Bronze", - "weight": 0.53 - }, - "Studio-42/elFinder": { - "tier": "Bronze", - "weight": 0.16 - }, - "sudheerj/javascript-interview-questions": { - "inactive_at": "2025-11-04T02:18:33.094Z", - "tier": "Bronze", - "weight": 0.01 - }, - "sudheerj/reactjs-interview-questions": { - "inactive_at": "2025-11-04T02:18:33.094Z", - "tier": "Bronze", - "weight": 0.01 - }, - "SuiteCRM/SuiteCRM": { - "tier": "Bronze", - "weight": 0.14 - }, - "supabase/supabase": { - "tier": "Bronze", - "weight": 0.91 - }, - "sveltejs/svelte": { - "tier": "Bronze", - "weight": 0.82 - }, - "svenfuchs/rails-i18n": { - "tier": "Bronze", - "weight": 0.12 - }, - "SVG-Edit/svgedit": { - "tier": "Bronze", - "weight": 0.1 - }, - "swagger-api/swagger-codegen": { - "tier": "Bronze", - "weight": 0.12 - }, - "swagger-api/swagger-editor": { - "tier": "Bronze", - "weight": 0.5 - }, - "swagger-api/swagger-ui": { - "tier": "Bronze", - "weight": 0.28 - }, - "Swap-Subnet/swap-subnet": { - "tier": "Bronze", - "weight": 0.34 - }, - "swarm-subnet/Langostino": { - "tier": "Bronze", - "weight": 0.35 - }, - "swarm-subnet/swarm": { - "tier": "Bronze", - "weight": 0.43 - }, - "SWE-agent/SWE-agent": { - "tier": "Bronze", - "weight": 0.9 - }, - "swiftlang/swift": { - "tier": "Bronze", - "weight": 0.27 - }, - "swimlane/ngx-datatable": { - "tier": "Bronze", - "weight": 0.14 - }, - "swisskyrepo/PayloadsAllTheThings": { - "tier": "Bronze", - "weight": 0.12 - }, - "symfony/symfony": { - "tier": "Bronze", - "weight": 0.19 - }, - "sympy/sympy": { - "tier": 
"Bronze", - "weight": 0.19 - }, - "systemd/systemd": { - "tier": "Bronze", - "weight": 0.32 - }, - "taikoxyz/taiko-mono": { - "tier": "Bronze", - "weight": 0.1 - }, - "tailwindlabs/tailwindcss": { - "tier": "Bronze", - "weight": 0.1 - }, - "tailwindlabs/tailwindcss.com": { - "tier": "Bronze", - "weight": 0.1 - }, - "tangly1024/NotionNext": { - "tier": "Bronze", - "weight": 0.22 - }, - "TanStack/query": { - "tier": "Bronze", - "weight": 0.1 - }, - "taofu-labs/tpn-subnet": { - "additional_acceptable_branches": ["development"], - "tier": "Bronze", - "weight": 0.36 - }, - "taoshidev/vanta-network": { - "tier": "Bronze", - "weight": 0.36 - }, - "tatsuproject/chipforge_sn84": { - "tier": "Bronze", - "weight": 0.36 - }, - "taubyte/tau": { - "tier": "Bronze", - "weight": 0.22 - }, - "tauri-apps/tauri": { - "additional_acceptable_branches": ["dev"], - "tier": "Bronze", - "weight": 0.62 - }, - "Team-Rizzo/talisman-ai": { - "tier": "Bronze", - "weight": 0.59 - }, - "teddysun/across": { - "tier": "Bronze", - "weight": 0.11 - }, - "Tencent/weui-wxss": { - "tier": "Bronze", - "weight": 0.12 - }, - "tensorflow/datasets": { - "tier": "Bronze", - "weight": 0.16 - }, - "tensorflow/docs": { - "tier": "Bronze", - "weight": 0.12 - }, - "tensorflow/models": { - "tier": "Bronze", - "weight": 0.25 - }, - "tensorflow/serving": { - "tier": "Bronze", - "weight": 0.13 - }, - "tensorflow/tensorboard": { - "tier": "Bronze", - "weight": 0.25 - }, - "tensorflow/tensorflow": { - "tier": "Bronze", - "weight": 0.88 - }, - "tensorplex-labs/dojo": { - "tier": "Bronze", - "weight": 0.36 - }, - "tensortrade-org/tensortrade": { - "tier": "Bronze", - "weight": 0.49 - }, - "termux/termux-app": { - "tier": "Bronze", - "weight": 0.12 - }, - "terraform-aws-modules/terraform-aws-eks": { - "tier": "Bronze", - "weight": 0.11 - }, - "tesseract-ocr/tesseract": { - "tier": "Bronze", - "weight": 0.12 - }, - "Textualize/rich": { - "tier": "Bronze", - "weight": 0.1 - }, - "TheAlgorithms/C-Sharp": { - "inactive_at": 
"2025-11-10T16:39:22.045Z", - "tier": "Bronze", - "weight": 0.01 - }, - "thedevs-network/kutt": { - "tier": "Bronze", - "weight": 0.23 - }, - "thenervelab/thebrain": { - "tier": "Bronze", - "weight": 0.36 - }, - "TheOdinProject/curriculum": { - "tier": "Bronze", - "weight": 0.95 - }, - "TheOdinProject/theodinproject": { - "tier": "Bronze", - "weight": 0.12 - }, - "TheWidlarzGroup/react-native-video": { - "tier": "Bronze", - "weight": 0.12 - }, - "thingsboard/thingsboard": { - "tier": "Bronze", - "weight": 0.1 - }, - "thinkgem/jeesite": { - "tier": "Bronze", - "weight": 0.12 - }, - "thomaspark/bootswatch": { - "tier": "Bronze", - "weight": 0.11 - }, - "thonny/thonny": { - "tier": "Bronze", - "weight": 0.14 - }, - "thoughtbot/factory_bot": { - "tier": "Bronze", - "weight": 0.12 - }, - "threetau/kinitro": { - "tier": "Bronze", - "weight": 0.33 - }, - "tianocore/edk2": { - "tier": "Bronze", - "weight": 0.13 - }, - "TiddlyWiki/TiddlyWiki5": { - "inactive_at": "2025-11-29T17:45:38.525Z", - "tier": "Bronze", - "weight": 0.01 - }, - "TideDra/zotero-arxiv-daily": { - "tier": "Bronze", - "weight": 0.18 - }, - "tidyverse/dplyr": { - "tier": "Bronze", - "weight": 0.1 - }, - "tidyverse/ggplot2": { - "tier": "Bronze", - "weight": 0.1 - }, - "timercrack/trader": { - "tier": "Bronze", - "weight": 0.1 - }, - "tinygrad/tinygrad": { - "tier": "Bronze", - "weight": 0.87 - }, - "tmk/tmk_keyboard": { - "tier": "Bronze", - "weight": 0.13 - }, - "tModLoader/tModLoader": { - "tier": "Bronze", - "weight": 0.14 - }, - "tmux/tmux": { - "tier": "Bronze", - "weight": 0.86 - }, - "toeverything/AFFiNE": { - "tier": "Bronze", - "weight": 0.1 - }, - "ton-blockchain/ton": { - "tier": "Bronze", - "weight": 0.13 - }, - "TonyChen56/WeChatRobot": { - "tier": "Bronze", - "weight": 0.14 - }, - "ToolJet/ToolJet": { - "additional_acceptable_branches": ["develop"], - "tier": "Bronze", - "weight": 0.63 - }, - "TooTallNate/Java-WebSocket": { - "tier": "Bronze", - "weight": 0.1 - }, - "topjohnwu/Magisk": { - 
"tier": "Bronze", - "weight": 0.15 - }, - "tornadoweb/tornado": { - "tier": "Bronze", - "weight": 0.11 - }, - "torvalds/linux": { - "tier": "Bronze", - "weight": 0.85 - }, - "traccar/traccar": { - "tier": "Bronze", - "weight": 0.16 - }, - "traefik/traefik": { - "tier": "Bronze", - "weight": 0.3 - }, - "transitive-bullshit/nextjs-notion-starter-kit": { - "tier": "Bronze", - "weight": 0.18 - }, - "travist/jsencrypt": { - "tier": "Bronze", - "weight": 0.1 - }, - "Trinea/android-open-project": { - "tier": "Bronze", - "weight": 0.12 - }, - "TrinityCore/TrinityCore": { - "tier": "Bronze", - "weight": 0.16 - }, - "trinodb/trino": { - "tier": "Bronze", - "weight": 0.1 - }, - "trishoolai/trishool-subnet": { - "tier": "Bronze", - "weight": 0.36 - }, - "trpc/trpc": { - "tier": "Bronze", - "weight": 0.25 - }, - "truenas/middleware": { - "tier": "Bronze", - "weight": 0.34 - }, - "trustwallet/assets": { - "tier": "Bronze", - "weight": 0.19 - }, - "TryGhost/Ghost": { - "tier": "Bronze", - "weight": 0.21 - }, - "tssovi/grokking-the-object-oriented-design-interview": { - "inactive_at": "2025-11-04T02:18:33.094Z", - "tier": "Bronze", - "weight": 0.01 - }, - "tursodatabase/agentfs": { - "tier": "Bronze", - "weight": 0.45 - }, - "tw93/Mole": { - "tier": "Bronze", - "weight": 0.49 - }, - "tw93/Pake": { - "additional_acceptable_branches": ["dev"], - "tier": "Bronze", - "weight": 0.1 - }, - "twbs/bootstrap": { - "tier": "Bronze", - "weight": 0.22 - }, - "tweepy/tweepy": { - "tier": "Bronze", - "weight": 0.19 - }, - "twentyhq/twenty": { - "tier": "Bronze", - "weight": 0.84 - }, - "typeorm/typeorm": { - "tier": "Bronze", - "weight": 0.31 - }, - "typescript-cheatsheets/react": { - "tier": "Bronze", - "weight": 0.11 - }, - "typicode/json-server": { - "tier": "Bronze", - "weight": 0.11 - }, - "typst/typst": { - "tier": "Bronze", - "weight": 0.24 - }, - "u-boot/u-boot": { - "tier": "Bronze", - "weight": 0.14 - }, - "Uberi/speech_recognition": { - "tier": "Bronze", - "weight": 0.1 - }, - 
"ubicloud/ubicloud": { - "tier": "Bronze", - "weight": 0.22 - }, - "Ultimaker/Cura": { - "tier": "Bronze", - "weight": 0.16 - }, - "ultralytics/ultralytics": { - "tier": "Bronze", - "weight": 0.13 - }, - "ultralytics/yolov3": { - "tier": "Bronze", - "weight": 0.19 - }, - "ultralytics/yolov5": { - "tier": "Bronze", - "weight": 0.24 - }, - "umbraco/Umbraco-CMS": { - "tier": "Bronze", - "weight": 0.89 - }, - "unclecode/crawl4ai": { - "tier": "Bronze", - "weight": 0.1 - }, - "unconst/agcli": { - "weight": 20, - "tier": "Silver" - }, - "Uniswap/interface": { - "tier": "Bronze", - "weight": 0.18 - }, - "Uniswap/v4-core": { - "tier": "Bronze", - "weight": 0.85 - }, - "Unitech/pm2": { - "tier": "Bronze", - "weight": 0.84 - }, - "Unity-Technologies/ml-agents": { - "tier": "Bronze", - "weight": 0.1 - }, - "unslothai/unsloth": { - "tier": "Bronze", - "weight": 0.1 - }, - "Unstructured-IO/unstructured": { - "tier": "Silver", - "weight": 5.96 - }, - "up-for-grabs/up-for-grabs.net": { - "tier": "Bronze", - "weight": 0.18 - }, - "urllib3/urllib3": { - "tier": "Bronze", - "weight": 0.15 - }, - "usebruno/bruno": { - "tier": "Bronze", - "weight": 0.1 - }, - "v0idai/SN106": { - "tier": "Bronze", - "weight": 0.35 - }, - "valor-software/ngx-bootstrap": { - "tier": "Bronze", - "weight": 0.14 - }, - "vercel/ai": { - "tier": "Bronze", - "weight": 0.23 - }, - "vercel/next.js": { - "tier": "Bronze", - "weight": 0.83 - }, - "vercel/openchat": { - "tier": "Bronze", - "weight": 0.18 - }, - "vercel/vercel": { - "tier": "Bronze", - "weight": 0.24 - }, - "VickScarlet/lifeRestart": { - "tier": "Bronze", - "weight": 0.1 - }, - "vidaio-subnet/vidaio-subnet": { - "tier": "Bronze", - "weight": 0.35 - }, - "videojs/video.js": { - "tier": "Bronze", - "weight": 0.13 - }, - "videolan/vlc": { - "tier": "Bronze", - "weight": 0.14 - }, - "vim/vim": { - "tier": "Bronze", - "weight": 0.26 - }, - "virattt/ai-hedge-fund": { - "tier": "Bronze", - "weight": 0.13 - }, - "virattt/dexter": { - "tier": "Silver", - 
"weight": 5.63 - }, - "vitejs/vite": { - "tier": "Bronze", - "weight": 0.82 - }, - "vllm-project/vllm": { - "tier": "Bronze", - "weight": 0.35 - }, - "vnpy/vnpy": { - "tier": "Bronze", - "weight": 0.21 - }, - "Vonng/ddia": { - "tier": "Bronze", - "weight": 0.1 - }, - "vuejs/core": { - "tier": "Bronze", - "weight": 0.12 - }, - "vuejs/vue-cli": { - "tier": "Bronze", - "weight": 0.11 - }, - "vuetifyjs/vuetify": { - "tier": "Bronze", - "weight": 0.12 - }, - "waditu/czsc": { - "tier": "Bronze", - "weight": 0.15 - }, - "wagtail/wagtail": { - "tier": "Bronze", - "weight": 0.1 - }, - "wang-bin/QtAV": { - "tier": "Bronze", - "weight": 0.13 - }, - "warmcat/libwebsockets": { - "tier": "Bronze", - "weight": 0.13 - }, - "we-promise/sure": { - "tier": "Silver", - "weight": 5.47 - }, - "web-platform-tests/wpt": { - "tier": "Bronze", - "weight": 0.12 - }, - "WebGoat/WebGoat": { - "tier": "Bronze", - "weight": 0.21 - }, - "webpack/webpack": { - "tier": "Bronze", - "weight": 0.13 - }, - "webrtc/samples": { - "tier": "Bronze", - "weight": 0.21 - }, - "wenzhixin/bootstrap-table": { - "tier": "Bronze", - "weight": 0.2 - }, - "whatwg/html": { - "tier": "Bronze", - "weight": 0.12 - }, - "WhiskeySockets/Baileys": { - "tier": "Bronze", - "weight": 0.17 - }, - "wine-mirror/wine": { - "tier": "Bronze", - "weight": 0.12 - }, - "wireshark/wireshark": { - "tier": "Bronze", - "weight": 0.33 - }, - "withastro/astro": { - "tier": "Silver", - "weight": 4.28 - }, - "withfig/autocomplete": { - "tier": "Bronze", - "weight": 0.1 - }, - "wkentaro/labelme": { - "tier": "Bronze", - "weight": 0.1 - }, - "woocommerce/woocommerce": { - "tier": "Bronze", - "weight": 0.18 - }, - "wordpress-mobile/WordPress-iOS": { - "tier": "Bronze", - "weight": 0.11 - }, - "WordPress/gutenberg": { - "tier": "Bronze", - "weight": 0.2 - }, - "WordPress/WordPress": { - "tier": "Bronze", - "weight": 0.42 - }, - "wzdnzd/aggregator": { - "tier": "Bronze", - "weight": 0.18 - }, - "xbmc/xbmc": { - "tier": "Bronze", - "weight": 0.16 - 
}, - "xiaolai/regular-investing-in-box": { - "tier": "Bronze", - "weight": 0.16 - }, - "xiaorouji/openwrt-passwall": { - "tier": "Bronze", - "weight": 0.11 - }, - "xinnan-tech/xiaozhi-esp32-server": { - "additional_acceptable_branches": ["live2d-actions"], - "tier": "Bronze", - "weight": 0.17 - }, - "XRPLF/rippled": { - "tier": "Bronze", - "weight": 0.13 - }, - "xtekky/gpt4free": { - "tier": "Bronze", - "weight": 0.2 - }, - "xuxueli/xxl-job": { - "tier": "Bronze", - "weight": 0.2 - }, - "XX-net/XX-Net": { - "tier": "Bronze", - "weight": 0.11 - }, - "yanez-compliance/MIID-subnet": { - "tier": "Bronze", - "weight": 0.35 - }, - "yangzongzhuan/RuoYi-Vue3": { - "tier": "Bronze", - "weight": 0.11 - }, - "ygs-code/vue": { - "tier": "Bronze", - "weight": 0.1 - }, - "yihong0618/running_page": { - "inactive_at": "2026-02-10T00:00:00.000Z", - "tier": "Bronze", - "weight": 0.28 - }, - "yiisoft/yii": { - "tier": "Bronze", - "weight": 0.14 - }, - "yiisoft/yii2": { - "tier": "Bronze", - "weight": 0.17 - }, - "yonggekkk/Cloudflare-vless-trojan": { - "tier": "Bronze", - "weight": 0.21 - }, - "yonggekkk/sing-box-yg": { - "tier": "Bronze", - "weight": 0.11 - }, - "Yorko/mlcourse.ai": { - "tier": "Bronze", - "weight": 0.2 - }, - "youzan/vant": { - "tier": "Bronze", - "weight": 0.2 - }, - "yt-dlp/yt-dlp": { - "tier": "Bronze", - "weight": 0.11 - }, - "ytdl-org/youtube-dl": { - "tier": "Bronze", - "weight": 0.11 - }, - "ytisf/theZoo": { - "tier": "Bronze", - "weight": 0.1 - }, - "YunaiV/ruoyi-vue-pro": { - "tier": "Bronze", - "weight": 0.11 - }, - "YunaiV/yudao-cloud": { - "tier": "Bronze", - "weight": 0.1 - }, - "yutiansut/QUANTAXIS": { - "tier": "Bronze", - "weight": 0.17 - }, - "zcash/zcash": { - "tier": "Bronze", - "weight": 0.8 + "zcash/zcash": { + "weight": 0.0638 }, "zed-industries/zed": { - "tier": "Silver", - "weight": 8.04 - }, - "zellij-org/zellij": { - "tier": "Bronze", - "weight": 0.25 - }, - "zephyrproject-rtos/zephyr": { - "tier": "Bronze", - "weight": 0.16 - }, - 
"zhayujie/chatgpt-on-wechat": { - "tier": "Bronze", - "weight": 0.2 - }, - "zio/zio": { - "tier": "Bronze", - "weight": 0.11 - }, - "zmkfirmware/zmk": { - "tier": "Bronze", - "weight": 0.14 - }, - "zsh-users/zsh": { - "tier": "Bronze", - "weight": 0.18 - }, - "zulip/zulip": { - "tier": "Bronze", - "weight": 0.2 - }, - "zxing/zxing": { - "tier": "Bronze", - "weight": 0.11 - }, - "zxlie/FeHelper": { - "tier": "Bronze", - "weight": 0.1 + "weight": 0.1564 } } diff --git a/issue_discovery/issue-discovery-rewards.md b/issue_discovery/issue-discovery-rewards.md new file mode 100644 index 00000000..02ad0107 --- /dev/null +++ b/issue_discovery/issue-discovery-rewards.md @@ -0,0 +1,411 @@ +# Issue Discovery Rewards + +## Overview + +A new reward mechanism where miners earn score for discovering issues in tracked repositories. The scoring model mirrors the existing merged PR model — issues that lead to merged PRs are treated as successes, while issues closed without resolution count against the discoverer. + +Issue discovery is purely additive — PRs are scored exactly as today whether or not they have a linked issue. A PR with no linked issue just means nobody earns a discovery score. The solver's score is unaffected. + +Issue discovery only applies when the issue creator is a registered miner. Non-miner issue creators don't participate — the PR solver gets normal scoring and nothing happens on the discovery side. + +--- + +## Credibility + +Issue discoverers build a separate `issue_credibility` score, stored independently from PR credibility (`credibility`): + +``` +issue_credibility = solved_issues / (solved_issues + closed_issues) +``` + +- **Solved issue**: an issue that was resolved via a merged PR. +- **Closed issue**: an issue closed without an attached merged PR — counts against credibility. 
+- **Transferred issue**: any issue that has been transferred at any point (detected via GitHub timeline API `TransferredEvent`) is treated as closed/failed — 0 discovery score and counts against credibility. No exceptions regardless of destination. Prevents exploit where discoverers transfer closed issues to burner repos to dodge credibility hits. + +Issue credibility is **computed fresh every scoring round** (stateless), same as PR credibility. No persistent state needed — count solved and closed issues within the lookback window each cycle. + +### Qualification Gates + +Stricter than OSS contributions to increase the runway required for eligibility and raise the cost of Sybil attacks: + +1. **Minimum 7 valid solved issues** — a "valid" solved issue is one where the solving PR has `token_score >= 5` (`MIN_TOKEN_SCORE_FOR_BASE_SCORE`). +2. **Minimum 80% issue credibility** (`MIN_ISSUE_CREDIBILITY = 0.80`). +3. **Credibility mulligan** — `CREDIBILITY_MULLIGAN_COUNT = 1`, mirroring OSS contributions. + +Eligibility is evaluated globally across all repos (not per-repo), same as OSS contributions. + +--- + +## Scoring + +### Base Score + +The issue discovery base score equals the base score of the PR that solved the issue. The discoverer's reward scales with the actual value of the work their issue generated. + +The quality signal for an issue is the resulting PR's token score. PRs with `token_score < 5` receive 0 base score (per `MIN_TOKEN_SCORE_FOR_BASE_SCORE` in `constants.py`), so trivial issues that generate trivial PRs yield near-zero discovery score. The discoverer still gets a small credibility bump from a solved issue — this is acceptable. + +### Contribution Bonus + +The contribution bonus is per-PR (based on `scoring_result.total_score`), not a miner-level historical metric. It passes through to the discoverer because it naturally scales with PR quality — it reflects the value of the work generated. 
The threshold (`CONTRIBUTION_SCORE_FOR_FULL_BONUS = 2000`) is high enough that farming is impractical. Can always lower `MAX_CONTRIBUTION_BONUS` if needed.
+
+### Same-Account Double Dipping
+
+When the same GitHub account is both the issue author and the PR author: **zero issue discovery score**, but the solved issue **still counts for issue credibility**.
+
+Rationale:
+- Discovery rewards are for finding problems *others* solve.
+- No reason to avoid linking your own issues (credibility still benefits).
+- Alt-account gaming is acknowledged but bounded by independent credibility gates on each account.
+- Can add heuristic detection later (timing patterns, always-same-solver, etc.) if needed.
+
+### Review Quality Multiplier (Cliff Model)
+
+Both solver and discoverer are penalized equally when changes are requested. Clean-PR bonus of `1.1` when zero `CHANGES_REQUESTED` rounds. Cliff model — the first `CHANGES_REQUESTED` round forfeits the 1.1 clean bonus and drops straight to 0.85 (`1.0 - 0.15`), then each additional round subtracts another 0.15 (linear after the cliff).
+
+- **Both solver and discoverer:** `1.1` clean bonus, then `1.0 - 0.15n` once changes are requested (n = number of `CHANGES_REQUESTED` rounds)
+
+| Rounds | Multiplier |
+|--------|------------|
+| 0 | 1.10 |
+| 1 | 0.85 |
+| 2 | 0.70 |
+| 3 | 0.55 |
+
+Rationale:
+- Same penalty for both sides — no need to differentiate since same-account double dipping is blocked (see above).
+- The 1.1 clean bonus rewards "perfect PRs out of the box" — the desired behavior.
+- The cliff from 1.1 → penalty makes the first `CHANGES_REQUESTED` round sting (~23% swing).
+- Maintainers already reserve `CHANGES_REQUESTED` for meaningful problems (minor stuff gets comments, no penalty). 
+ +### Shared Multipliers + +The following multipliers from PR scoring carry over to issue discovery: + +- **repo_weight_multiplier** — yes +- **time_decay_multiplier** — yes, anchored to solving PR's merge date (same as lookback window) +- **credibility_multiplier** — uses `issue_credibility`, not PR credibility +- **open_issue_spam_multiplier** — issue-specific threshold (see Spam Control below), replaces `open_pr_spam_multiplier` + +The following do **not** carry over: + +- **open_pr_spam_multiplier** — replaced by `open_issue_spam_multiplier` +- **pioneer_dividend** — does not exist for issue discovery (out of scope for this feature) + +--- + +## Issue-to-PR Linking + +### How issues are linked to solving PRs + +Uses the same mechanism as the existing issue bounty/competition system. GitHub's GraphQL API provides the `closingIssuesReferences` field, which natively resolves `fixes #N` / `closes #N` / `resolves #N` keywords into structured cross-reference events. The validator queries issue timeline items (`CROSS_REFERENCED_EVENT`) and validates: + +1. The PR's `baseRepository` matches the issue's repo. +2. The PR state is `MERGED`. +3. The issue number appears in the PR's `closingIssuesReferences`. + +See existing implementation: `gittensor/utils/github_api_tools.py` — `_search_issue_referencing_prs_graphql()` and `find_solver_from_cross_references()`. + +### One PR per issue + +Only the **most recent merged PR** that solved the issue counts (latest `mergedAt`), consistent with the existing issue competition logic (`github_api_tools.py:1107-1115`). If multiple PRs claim to solve the same issue, the latest merge wins. All others are ignored for issue discovery purposes. + +### One issue credited per PR + +When a single PR references multiple issues (`fixes #10, fixes #11, fixes #12`), only **one** issue discoverer receives the discovery score. The remaining issue creators still get credibility credit (solved/merged) but no score. 
+ +Selection heuristic: **TBD** (earliest-created is the likely default). Options: +- Credit the **earliest-created issue** (rewards the first discoverer). +- Credit the issue with the **most engagement** (comments, reactions). +- Split the score across discoverers. + +### Retroactive Linking + +If a PR is scored in cycle N but the issue link is established in cycle N+1 (e.g., maintainer links the issue after merge), the issue discovery score is awarded when the link is detected. We use whatever the API returns at scoring time. + +--- + +## Scoring Pipeline + +### Execution order + +1. Score PRs (existing pipeline — base score, multipliers, credibility). +2. Score issues using the solving PR's base score. +3. Apply issue-specific multipliers (review quality cliff model, issue bonus, repo weight, time decay, credibility). +4. Compute issue credibility. +5. Produce issue discovery scores. + +Issue discovery scores do not feed back into PR scoring. + +### Lookback Window + +Issue discovery uses the same lookback window as PRs (~35 days per roadmap). The window is anchored to the **solving PR's merge date**, not the issue creation date. + +> **Note:** This means an issue created 90 days ago but solved today is within the window. An issue created 30 days ago whose solving PR was merged 40 days ago is outside the window. + +### Weight / Pool Separation + +Issue discovery scores are split into their own pool in the weight vector, same approach as issue competitions and merge predictions. The validator manually splits the weight allocation in code — no chain-level changes needed. + +--- + +## Anti-Gaming + +### Post-PR Edit Protection + +If an issue is edited at any point after the solving PR's **`merged_at`** timestamp: + +- The issue receives **0 score**. +- The issue **counts as closed** (hurts credibility). + +Anchored to `merged_at` (not `created_at`) so discoverers can add clarifying context while a PR is in review without being penalized. 
+ +**Edit detection (current):** Uses `updated_at` as a rough proxy. Acknowledged that `updated_at` fires on bot activity, comments, labels, etc. — accept false positives for now. + +**Edit detection (future):** Upgrade to timeline/events API for body-only edit detection in a later update. + +### Timing / Sniping Protection + +If a miner files an issue for work already in progress (a PR is opened shortly after the issue), maintainers can close the issue as invalid. A closed-without-PR issue hurts the sniper's credibility. Maintainers can cross-reference timings between issue creation and PR creation to identify suspicious patterns. + +No automated minimum time gap is enforced — this is left to maintainer judgment to avoid penalizing legitimate fast turnaround. + +### Maintainer Influence + +Maintainers have power over issue lifecycle (closing issues, linking PRs). This is by design — the same trust model applies to PRs (maintainers decide what gets merged). The Gittensor team currently curates the repository list, which limits exposure to adversarial maintainers. As repository selection opens up to miners, maintainer trust becomes a larger concern and may need additional safeguards. + +### Trivial Issue Farming + +Filing trivial issues on active repos to farm credibility is mitigated by the token score threshold: PRs with `token_score < 5` get 0 base score, so the resulting issue discovery score is near-zero. The discoverer gains minor credibility, which is acceptable — the qualification gates and credibility thresholds prevent this from scaling into meaningful emissions. + +### Issue Deletion + +GitHub does not allow regular users to delete issues — only repo admins can, and it's a destructive action. If an admin deletes an issue, we lose tracking (both positive and negative credibility impact). This is a non-concern in practice. 
+ +### Forked Repo Issues + +Issues filed on forks of tracked repos are ignored entirely — no score (positive or negative) and no credibility impact. Only issues on the actual tracked repository count. This is enforced naturally by the existing linking mechanism: `closingIssuesReferences` and the repo-centric closed scan both operate on the tracked repo, not its forks. + +### Issue Transfers + +Any issue that has been transferred at any point is treated as **closed/failed** — 0 discovery score and counts against credibility. No exceptions regardless of destination. Detection via GitHub timeline API `TransferredEvent`. Prevents exploit where discoverers transfer closed issues to burner repos to dodge credibility hits. + +### State Transitions (Close → Reopen → Solve) + +Whatever state the API returns at scoring time is what counts. If an issue is closed (credibility hit in cycle N), then reopened and solved (credibility positive in cycle N+1), both events are reflected in their respective scoring cycles. No smoothing or retroactive correction. + +### Spam Control via Open Thresholds + +There is no collateral requirement for issues. Spam is controlled through open issue thresholds: + +- **Base threshold**: 5 open issues (half the PR base of 10). +- **Dynamic scaling**: +1 allowed open issue per 300 merged token score from solved issues. +- **Exceeding the threshold**: 0 score for all issues (binary, same as OSS contributions). + +--- + +## Emissions — Hardcoded Per Competition + +Dynamic emissions are being removed. Each competition gets a fixed percentage of total emissions, hardcoded in the validator. This replaces the exponential unlock curve (`dynamic_emissions.py`) that scaled rewards based on network-wide unique repos and token score. + +**Rationale:** Dynamic emissions added complexity without proportional benefit. Hardcoded splits are easier to reason about, tune, and audit. Adjustments happen via code changes with PR review, not opaque curves. 
+ +**Emission split:** + +| Competition | Share | Notes | +|---|---|---| +| OSS Contributions (PRs) | 30% | Shipping code | +| Issue Discovery | 30% | Finding problems others solve | +| Issue Competitions (Treasury) | 15% | Funds bounties via smart contract (UID 111) | +| Unallocated (burn/recycle) | 25% | Recycles to UID 0 | + +The unallocated 25% recycles to UID 0, same mechanism as today. Each pool normalizes scores independently — a miner's share within a pool is based on their score relative to other participants in that pool. + +**Early participation windfall:** If few miners participate in issue discovery early, they split the entire pool — potentially outsized rewards. This is intentional: miners who keep up with codebase updates and act early are rewarded for being first movers. No participation floor is needed. + +--- + +## GitHub API / Data Collection Strategy + +### What We Have vs What We Need + +The existing PR scoring pipeline fetches each miner's PRs via GraphQL, which includes `closingIssuesReferences(first: 3)` on every PR. This gives us issue metadata (author, state, dates) for issues linked to miner PRs — but only those issues. + +| Case | Effect | How to detect | Have it today? | API cost | +|---|---|---|---|---| +| **Solved by miner PR** | ✅ Positive credibility + discovery score | Miner B's merged PR has `closingIssuesReferences` → check if issue author is miner A | **Yes** | 0 extra calls | +| **Solved by non-miner PR** | ✅ Positive credibility (no score — solver not a miner) | A non-miner's merged PR solved miner A's issue. We never fetch non-miner PRs, so we're blind to this. | **No** | Timeline call per issue to find solver | +| **Closed without any PR** | ❌ Negative credibility | Miner's issue closed as wontfix/duplicate/invalid. No PR linkage exists, so invisible to current pipeline. 
| **No** | Part of repo scan |
+| **Open issues on tracked repos** | Spam threshold (0 score if over threshold) | Need count of open issues per miner, scoped to tracked repos only. | **No** | See options below |
+
+**Key constraint:** Cases 2 and 3 are found by the same repo-centric scan, but we can't distinguish them without a timeline fallback call per issue. Case 4 needs to be scoped to tracked repos only (counting all repos would unfairly penalize miners with legitimate open issues on personal projects).
+
+**PAT constraint:** Timeline API (`timelineItems` GraphQL) works reliably with classic PATs (validators) but has known issues with fine-grained PATs (miners). All timeline-dependent detection must run on the validator PAT. The validator PAT budget is 5,000 requests/hour.
+
+### Strategy: Repo-Centric Closed Scan (Cases 2 & 3)
+
+**Approach:** Scan each tracked repo's closed issues using the validator PAT.
+
+```
+GET /repos/{owner}/{repo}/issues?state=closed&since={lookback_date}&per_page=100
+```
+
+The `since` filter scopes to the lookback window (~35 days), keeping page counts manageable. Filter results client-side against known miner GitHub IDs. For each miner-authored closed issue:
+- If it matches a merged PR already in memory (from case 1) → skip, already counted
+- If not → call `find_solver_from_cross_references()` (timeline API, validator PAT) to check if any PR (miner or not) solved it
+ - If solver found → positive credibility (case 2)
+ - If no solver → negative credibility (case 3)
+
+**The fallback timeline call is what makes this expensive.** The scan itself is cheap, but distinguishing case 2 from case 3 requires 1 GraphQL call per unmatched issue. 
+ +### Budget Stress Test (256 tracked repos, validator PAT) + +Based on real closed issue volumes sampled 2026-04-08: + +**Sampled repos (closed issues in 35 days):** +- Heavy: openclaw=5978, zed=737, grafana=479, ClickHouse=466, deno=440, llama.cpp=397, pandas=320 +- Medium: paperclip=166, astro=160, nanoclaw=146, llama_index=139, beam=139, openlibrary=118 +- Light: bitcoin=56, dbeaver=87, desktop=91, ray=94, hoppscotch=18, subtensor=0 + +**Scan cost (pagination):** + +| Component | Calls | Notes | +|---|---|---| +| 32 known repos | ~122 pages | Based on actual volumes | +| ~224 remaining repos | ~224 pages | Assume 1 page each (light) | +| **Total scan** | **~346 calls (7%)** | | + +**Total cost (scan + fallback), by miner adoption rate:** + +"Miner rate" = what % of closed issues across all repos are authored by registered miners. Currently near 0%, but issue discovery incentivizes miners to file issues — could rise significantly. + +| Miner rate | Fallback calls | Total (scan + fallback + existing) | % of 5,000/hr | +|---|---|---|---| +| 0.5% (current) | ~70 | ~516 | 10.3% ✓ | +| 1% | ~140 | ~586 | 11.7% ✓ | +| 2% | ~280 | ~726 | 14.5% ✓ | +| 5% | ~700 | ~1,146 | 22.9% ✓ | +| 10% | ~1,400 | ~1,846 | 36.9% ✓ | +| 20% | ~2,800 | ~3,246 | 64.9% ✓ | + +**At 256 repos, budget stays under 75% even at 20% miner adoption.** The previous estimates that showed budget pressure were based on the old 1,375-repo list (revamp-repo-list branch reduces to ~256). + +**Assumptions:** These estimates assume the revamp-repo-list branch is merged before issue discovery ships. If the repo list stays at 1,375, the scan alone costs ~2,129 calls (43%) and the fallback pushes past 75% at moderate miner adoption. + +### Open Issue Counting (Case 4) — Unsolved + +Need: count of a miner's open issues **on tracked repos only**. Global count (all repos) would unfairly penalize miners with legitimate open issues on personal projects. 
+ +**Options considered:** + +| Option | How | Cost (per miner) | Cost (256 miners) | Drawback | +|---|---|---|---|---| +| **A. Global count on miner query** | Add `issues(states: [OPEN]) { totalCount }` to existing User node GraphQL query (miner PAT) | ~0 extra calls | ~0 | Counts ALL repos, not just tracked. Unfair to miners with personal projects. | +| **B. Batched per-repo GraphQL** | Alias ~20-30 repos per query: `repo1: repository(...) { issues(filterBy: {createdBy: $login}, states: OPEN) { totalCount } }` (miner PAT) | ~9-13 calls | ~2,300-3,300 on miner PATs | Significant per-miner cost. GraphQL complexity limits may cap batch size lower. | +| **C. Search API** | `GET /search/issues?q=author:{login}+is:issue+is:open+repo:{repo1}+repo:{repo2}` (miner PAT) | Multiple calls (max ~5 `repo:` qualifiers per query) | High, 30 req/min search limit | Very slow, separate rate limit (30/min shared). | +| **D. Defer for v1** | Don't implement open issue spam threshold at launch. Rely on credibility gate (80%) + qualification runway (7 solved issues) to catch spammers. Add threshold when mirror ships. | 0 | 0 | No spam threshold — but credibility + qualification gates still filter aggressively. | + +**No decision yet.** Option A is near-free but unfairly scoped. Option B works but is expensive. Option D is pragmatic if the credibility gates are sufficient. Needs decision. + +### Long-Term: GitHub Mirror (Non-Issue) + +The `github-mirror-spec.md` describes a webhook-based mirror service that captures all issue events in real-time via a GitHub App. When the mirror ships, ALL four cases become simple database queries — zero GitHub API calls, zero rate limit concerns. The mirror's `issues` table captures state, author, transfers, and timestamps via webhooks. Validators query the mirror's REST API instead of GitHub. + +The current API strategy is a bridge until the mirror is ready (estimated months away). 
+ +--- + +## Data Model + +> **Status: IMPLEMENTED** (2026-04-09) — All data model changes below have been implemented across all 4 repos on the `issue-discovery` branch: `gittensor-db` (schema), `gittensor` (classes.py, queries.py, repository.py), `das-gittensor` (TypeORM entities + miners query), `gittensor-ui` (MinerEvaluation type). + +### MinerEvaluation — New Fields (`classes.py` + `miner_evaluations` table) + +The existing `MinerEvaluation` tracks PR-based scoring. Issue discovery adds a parallel set of fields: + +| Field | Type | Default | Description | +|---|---|---|---| +| `issue_discovery_score` | `float` / `DECIMAL(15,6)` | 0.0 | Final aggregated issue discovery score (sum of all scored issues) | +| `issue_credibility` | `float` / `DECIMAL(15,6)` | 0.0 | `solved_issues / (solved_issues + closed_issues - mulligan)` | +| `is_issue_eligible` | `bool` / `BOOLEAN` | False | Meets issue discovery gates (≥7 valid solved issues AND ≥80% issue_credibility) | +| `total_solved_issues` | `int` / `INTEGER` | 0 | Issues resolved via merged PR (positive credibility) | +| `total_closed_issues` | `int` / `INTEGER` | 0 | Issues closed without merged PR or transferred (negative credibility) | +| `total_open_issues` | `int` / `INTEGER` | 0 | Currently open issues by this miner (for spam threshold) | + +These are independent from the existing PR fields — a miner has both `credibility` (PR-based, 90% threshold) and `issue_credibility` (issue-based, 80% threshold). + +### Issues Table — New Fields (`classes.py` Issue class + `issues` table) + +The existing `issues` table stores issue-to-PR relationships. 
Issue discovery needs additional fields for scoring: + +| Field | Type | Default | Description | +|---|---|---|---| +| `author_github_id` | `VARCHAR(255)` | NULL | Issue author's GitHub user ID (for miner matching) | +| `is_transferred` | `BOOLEAN` | FALSE | Whether issue was transferred (timeline API `TransferredEvent`) | +| `updated_at` | `TIMESTAMP` | NULL | GitHub's `updated_at` — rough proxy for edit detection | +| `discovery_base_score` | `DECIMAL(15,6)` | 0.0 | Base score inherited from solving PR | +| `discovery_earned_score` | `DECIMAL(15,6)` | 0.0 | Final score after all multipliers | +| `discovery_review_quality_multiplier` | `DECIMAL(15,6)` | 1.0 | Cliff model: `1.1` clean, then `1.0 - 0.15n` | +| `discovery_repo_weight_multiplier` | `DECIMAL(15,6)` | 1.0 | Inherited from solving PR's repo weight | +| `discovery_time_decay_multiplier` | `DECIMAL(15,6)` | 1.0 | Anchored to solving PR's merge date | +| `discovery_credibility_multiplier` | `DECIMAL(15,6)` | 1.0 | Based on `issue_credibility` | +| `discovery_open_issue_spam_multiplier` | `DECIMAL(15,6)` | 1.0 | 0.0 if over open issue threshold | + +### Issue Class — New Fields (`classes.py`) + +Implemented on the existing `Issue` dataclass: + +```python +# Miner matching +author_github_id: Optional[str] = None + +# Edit/transfer detection +is_transferred: bool = False +updated_at: Optional[datetime] = None + +# Discovery scoring (populated during issue scoring pipeline) +discovery_base_score: float = 0.0 +discovery_earned_score: float = 0.0 +discovery_review_quality_multiplier: float = 1.0 +discovery_repo_weight_multiplier: float = 1.0 +discovery_time_decay_multiplier: float = 1.0 +discovery_credibility_multiplier: float = 1.0 +discovery_open_issue_spam_multiplier: float = 1.0 +``` + +### Key Design Notes + +1. **Issues table PK stays `(number, pr_number, repository_full_name)`** — one issue can be linked from multiple PRs, but only the most recent merged PR's score flows into discovery scoring. +2. 
**`author_github_id`** (not `author_login`) is used for miner matching because GitHub IDs are immutable while logins can change. The existing `author_login` field is kept for display. +3. **Discovery multipliers are stored per-issue** (not just per-miner) for auditability — the dashboard can show exactly why each issue got its score. +4. **No new tables needed** — issue discovery piggybacks on the existing `issues` and `miner_evaluations` tables with additional columns. + +--- + +## Open / Needs Decision + +### Open Issue Counting (Case 4) +How to count a miner's open issues scoped to tracked repos only. See options table in API strategy section above. Needs decision before implementation — option D (defer) is pragmatic if credibility gates are sufficient for v1. + +### One-Issue-Per-PR Selection Heuristic +When a PR solves multiple issues, which issue gets the discovery score? Options: earliest-created, most engagement, or split score. Needs decision before implementation. + +### Repo-Centric Scan Cadence +Should the closed issue scan run every scoring cycle or less frequently (e.g., every 3rd cycle)? Running every cycle is simpler but uses more budget. Less frequent delays negative credibility signals but saves API calls. + +--- + +## Blocked On / Prerequisites + +### Revamp Repo List (revamp-repo-list branch) +The API budget estimates assume ~256 tracked repos. If the repo list stays at 1,375, the closed scan alone costs ~2,129 calls (43%) and the fallback pushes past 75% at moderate miner adoption. **Issue discovery should ship after the repo list revamp.** + +### Dynamic Emissions Removal +`dynamic_emissions.py` still needs to be removed and replaced with the hardcoded 30/30/15/25 split in code. + +--- + +## Deferred Post-Launch + +- **Edit detection upgrade** — current `updated_at` proxy false-positives on bot activity, comments, labels. Future: timeline/events API for body-only edits. 
+- **Retroactive linking timing** — if a PR merges in cycle N and issue link appears in cycle N+3, what base score is used? +- **Open issue spam threshold** — if deferred from v1 (option D), add once mirror ships and scoped counting is free. diff --git a/neurons/base/miner.py b/neurons/base/miner.py deleted file mode 100644 index 31cf3075..00000000 --- a/neurons/base/miner.py +++ /dev/null @@ -1,193 +0,0 @@ -# The MIT License (MIT) -# Copyright © 2023 Yuma Rao - -# Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated -# documentation files (the “Software”), to deal in the Software without restriction, including without limitation -# the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, -# and to permit persons to whom the Software is furnished to do so, subject to the following conditions: - -# The above copyright notice and this permission notice shall be included in all copies or substantial portions of -# the Software. - -# THE SOFTWARE IS PROVIDED “AS IS”, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO -# THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL -# THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION -# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER -# DEALINGS IN THE SOFTWARE. - -import argparse -import asyncio -import threading -import time -import traceback -from typing import Union - -import bittensor as bt - -from gittensor.utils.config import add_miner_args -from neurons.base.neuron import BaseNeuron - - -class BaseMinerNeuron(BaseNeuron): - """ - Base class for Bittensor miners. 
- """ - - neuron_type: str = 'MinerNeuron' - - @classmethod - def add_args(cls, parser: argparse.ArgumentParser): - super().add_args(parser) - add_miner_args(cls, parser) - - def __init__(self, config=None): - super().__init__(config=config) - - # Warn if allowing incoming requests from anyone. - if not self.config.blacklist.force_validator_permit: - bt.logging.warning( - 'You are allowing non-validators to send requests to your miner. This is a security risk.' - ) - if self.config.blacklist.allow_non_registered: - bt.logging.warning( - 'You are allowing non-registered entities to send requests to your miner. This is a security risk.' - ) - # The axon handles request processing, allowing validators to send this miner requests. - self.axon = bt.Axon( - wallet=self.wallet, - config=self.config() if callable(self.config) else self.config, - ) - - # Attach determiners which functions are called when servicing a request. - bt.logging.info('Attaching forward function to miner axon.') - self.axon.attach( - forward_fn=self.forward, - blacklist_fn=self.blacklist, - priority_fn=self.priority, - ) - bt.logging.info(f'Axon created: {self.axon}') - - # Instantiate runners - self.should_exit: bool = False - self.is_running: bool = False - self.thread: Union[threading.Thread, None] = None - self.lock = asyncio.Lock() - - def run(self): - """ - Initiates and manages the main loop for the miner on the Bittensor network. The main loop handles graceful shutdown on keyboard interrupts and logs unforeseen errors. - - This function performs the following primary tasks: - 1. Check for registration on the Bittensor network. - 2. Starts the miner's axon, making it active on the network. - 3. Periodically resynchronizes with the chain; updating the metagraph with the latest network state and setting weights. - - The miner continues its operations until `should_exit` is set to True or an external interruption occurs. 
- During each epoch of its operation, the miner waits for new blocks on the Bittensor network, updates its - knowledge of the network (metagraph), and sets its weights. This process ensures the miner remains active - and up-to-date with the network's latest state. - - Note: - - The function leverages the global configurations set during the initialization of the miner. - - The miner's axon serves as its interface to the Bittensor network, handling incoming and outgoing requests. - - Raises: - KeyboardInterrupt: If the miner is stopped by a manual interruption. - Exception: For unforeseen errors during the miner's operation, which are logged for diagnosis. - """ - - # Check that miner is registered on the network. - self.sync() - - # Serve passes the axon information to the network + netuid we are hosting on. - # This will auto-update if the axon port of external ip have changed. - bt.logging.info( - f'Serving miner axon {self.axon} on network: {self.config.subtensor.chain_endpoint} with netuid: {self.config.netuid}' - ) - self.axon.serve(netuid=self.config.netuid, subtensor=self.subtensor) - - # Start starts the miner's axon, making it active on the network. - self.axon.start() - - bt.logging.info(f'Miner starting at block: {self.block}') - - # This loop maintains the miner's operations until intentionally stopped. - try: - while not self.should_exit: - while self.block - self.metagraph.last_update[self.uid] < self.config.neuron.epoch_length: - # Wait before checking again. - time.sleep(1) - - # Check if we should exit. - if self.should_exit: - break - - # Sync metagraph and potentially set weights. - self.sync() - self.step += 1 - time.sleep(60) - - # If someone intentionally stops the miner, it'll safely terminate operations. - except KeyboardInterrupt: - self.axon.stop() - bt.logging.success('Miner killed by keyboard interrupt.') - exit() - - # In case of unforeseen errors, the miner will log the error and continue operations. 
- except Exception: - bt.logging.error(traceback.format_exc()) - - def run_in_background_thread(self): - """ - Starts the miner's operations in a separate background thread. - This is useful for non-blocking operations. - """ - if not self.is_running: - bt.logging.debug('Starting miner in background thread.') - self.should_exit = False - self.thread = threading.Thread(target=self.run, daemon=True) - self.thread.start() - self.is_running = True - bt.logging.debug('Started') - - def stop_run_thread(self): - """ - Stops the miner's operations that are running in the background thread. - """ - if self.is_running: - bt.logging.debug('Stopping miner in background thread.') - self.should_exit = True - if self.thread is not None: - self.thread.join(5) - self.is_running = False - bt.logging.debug('Stopped') - - def __enter__(self): - """ - Starts the miner's operations in a background thread upon entering the context. - This method facilitates the use of the miner in a 'with' statement. - """ - self.run_in_background_thread() - return self - - def __exit__(self, exc_type, exc_value, traceback): - """ - Stops the miner's background operations upon exiting the context. - This method facilitates the use of the miner in a 'with' statement. - - Args: - exc_type: The type of the exception that caused the context to be exited. - None if the context was exited without an exception. - exc_value: The instance of the exception that caused the context to be exited. - None if the context was exited without an exception. - traceback: A traceback object encoding the stack trace. - None if the context was exited without an exception. - """ - self.stop_run_thread() - - def resync_metagraph(self): - """Resyncs the metagraph and updates the hotkeys and moving averages based on the new metagraph.""" - bt.logging.debug('resync_metagraph()') - - # Sync the metagraph. 
- self.metagraph.sync(subtensor=self.subtensor) diff --git a/neurons/base/neuron.py b/neurons/base/neuron.py index 3ba39240..28ff38c7 100644 --- a/neurons/base/neuron.py +++ b/neurons/base/neuron.py @@ -23,7 +23,6 @@ from websockets.exceptions import ConnectionClosedError from gittensor import __spec_version__ as spec_version -from gittensor.mock import MockMetagraph, MockSubtensor # Sync calls set weights and also resyncs the metagraph. from gittensor.utils.config import add_args, check_config, config @@ -80,14 +79,9 @@ def __init__(self, config=None): bt.logging.info('Setting up bittensor objects.') # The wallet holds the cryptographic key pairs for the miner. - if self.config.mock: - self.wallet = bt.MockWallet(config=self.config) - self.subtensor = MockSubtensor(self.config.netuid, wallet=self.wallet) - self.metagraph = MockMetagraph(self.config.netuid, subtensor=self.subtensor) - else: - self.wallet = bt.Wallet(config=self.config) - self.subtensor = bt.Subtensor(config=self.config) - self.metagraph = self.subtensor.metagraph(self.config.netuid) + self.wallet = bt.Wallet(config=self.config) + self.subtensor = bt.Subtensor(config=self.config) + self.metagraph = self.subtensor.metagraph(self.config.netuid) bt.logging.info(f'Wallet: {self.wallet}') bt.logging.info(f'Subtensor: {self.subtensor}') @@ -105,8 +99,6 @@ def __init__(self, config=None): def _reconnect_subtensor(self): """Recreate subtensor connection when WebSocket goes stale.""" - if self.config.mock: - return # Don't reconnect in mock mode bt.logging.info('Reconnecting subtensor...') self.subtensor = bt.Subtensor(config=self.config) diff --git a/neurons/base/validator.py b/neurons/base/validator.py index 3c1484d6..bffc2fb3 100644 --- a/neurons/base/validator.py +++ b/neurons/base/validator.py @@ -26,7 +26,6 @@ import bittensor as bt import numpy as np -from gittensor.mock import MockDendrite from gittensor.utils.config import add_validator_args from neurons.base.neuron import BaseNeuron from 
neurons.base.utils.weight_utils import ( @@ -54,10 +53,7 @@ def __init__(self, config=None): self.hotkeys = copy.deepcopy(self.metagraph.hotkeys) # Dendrite lets us send messages to other nodes (axons) in the network. - if self.config.mock: - self.dendrite = MockDendrite(wallet=self.wallet) - else: - self.dendrite = bt.Dendrite(wallet=self.wallet) + self.dendrite = bt.Dendrite(wallet=self.wallet) bt.logging.info(f'Dendrite: {self.dendrite}') # Set up initial scoring weights for validation diff --git a/neurons/miner.py b/neurons/miner.py deleted file mode 100644 index 42c82573..00000000 --- a/neurons/miner.py +++ /dev/null @@ -1,101 +0,0 @@ -# The MIT License (MIT) -# Copyright © 2025 Entrius -# GitTensor Miner - -import time -import typing - -import bittensor as bt - -from gittensor.miner import token_mgmt -from gittensor.synapses import GitPatSynapse -from neurons.base.miner import BaseMinerNeuron - - -class Miner(BaseMinerNeuron): - def __init__(self, config=None): - super(Miner, self).__init__(config=config) - token_mgmt.init() - - if self.config.dev_mode: - bt.logging.info('DEV MODE ENABLED') - - async def forward(self, synapse: GitPatSynapse) -> GitPatSynapse: - """ - Processes the incoming GitPatSynapse by loading GitHub access token. - - Args: - synapse (GitPatSynapse): The synapse object representing the token request. - - Returns: - GitPatSynapse: The same synapse object with the GitHub access token set. - """ - - github_token = token_mgmt.load_token() - synapse.github_access_token = github_token - bt.logging.debug(f'synapse received from hotkey: {synapse.axon.hotkey}') - - return synapse - - async def blacklist(self, synapse: GitPatSynapse) -> typing.Tuple[bool, str]: - """ - Determines whether an incoming request should be blacklisted. 
- """ - - if self.config.dev_mode: - return False, 'Blacklist disabled in dev mode' - - if synapse.dendrite.hotkey == '5Dnffftud49iScqvvymjuvS4D1MP4ApenAQG2R5wg4bXGH7L': - return False, 'Owner hotkey accepted' - - bt.logging.info(f'Received synapse from {synapse.dendrite.hotkey}') - if synapse.dendrite is None or synapse.dendrite.hotkey is None: - bt.logging.warning('Received a request without a dendrite or hotkey.') - return True, 'Missing dendrite or hotkey' - - uid = self.metagraph.hotkeys.index(synapse.dendrite.hotkey) - if not self.config.blacklist.allow_non_registered and synapse.dendrite.hotkey not in self.metagraph.hotkeys: - # Ignore requests from un-registered entities. - bt.logging.trace(f'Blacklisting un-registered hotkey {synapse.dendrite.hotkey}') - return True, 'Unrecognized hotkey' - - if self.config.blacklist.force_validator_permit: - # If the config is set to force validator permit, then we should only allow requests from validators. - bt.logging.debug( - f'Validator permit: {self.metagraph.validator_permit[uid]}, Stake: {self.metagraph.S[uid]}' - ) - if not self.metagraph.validator_permit[uid] or self.metagraph.S[uid] < self.config.blacklist.min_stake: - bt.logging.warning(f'Blacklisting a request from non-validator hotkey {synapse.dendrite.hotkey}') - return True, 'Non-validator hotkey' - - bt.logging.trace(f'Not Blacklisting recognized hotkey {synapse.dendrite.hotkey}') - return False, 'Hotkey recognized!' - - async def priority(self, synapse: GitPatSynapse) -> float: - """ - Determines the processing priority for incoming token requests. - This function is unchanged. - """ - if synapse.dendrite is None or synapse.dendrite.hotkey is None: - bt.logging.warning('Received a request without a dendrite or hotkey.') - return 0.0 - - caller_uid = self.metagraph.hotkeys.index(synapse.dendrite.hotkey) # Get the caller index. - priority = float(self.metagraph.S[caller_uid]) # Return the stake as the priority. 
- bt.logging.trace(f'Prioritizing {synapse.dendrite.hotkey} with value: {priority}') - return priority - - -if __name__ == '__main__': - with Miner() as miner: - # load token on startup just to check if it's valid if not then exit - if not token_mgmt.load_token(): - exit(1) - - bt.logging.info( - 'Repeating an action makes a habit. Your habits create your character. And your character is your destiny.' - ) - - while True: - bt.logging.info('Gittensor miner running...') - time.sleep(100) diff --git a/neurons/validator.py b/neurons/validator.py index 284664d4..804f6e3d 100644 --- a/neurons/validator.py +++ b/neurons/validator.py @@ -16,6 +16,7 @@ # DEALINGS IN THE SOFTWARE. +import os import time from functools import partial from typing import Dict, List, Set @@ -25,9 +26,16 @@ from gittensor.__init__ import __version__ from gittensor.classes import MinerEvaluation, MinerEvaluationCache +from gittensor.validator import pat_storage from gittensor.validator.forward import forward -from gittensor.validator.merge_predictions.handler import blacklist_prediction, handle_prediction, priority_prediction -from gittensor.validator.merge_predictions.mp_storage import PredictionStorage +from gittensor.validator.pat_handler import ( + blacklist_pat_broadcast, + blacklist_pat_check, + handle_pat_broadcast, + handle_pat_check, + priority_pat_broadcast, + priority_pat_check, +) from gittensor.validator.utils.config import STORE_DB_RESULTS, WANDB_PROJECT, WANDB_VALIDATOR_NAME from gittensor.validator.utils.storage import DatabaseStorage from neurons.base.validator import BaseValidatorNeuron @@ -46,17 +54,24 @@ class Validator(BaseValidatorNeuron): def __init__(self, config=None): super(Validator, self).__init__(config=config) - # Merge predictions — SQLite storage + axon handler - self.mp_storage = PredictionStorage() + if os.environ.get('DEV_MODE'): + bt.logging.warning('⚠ DEV_MODE is active — maintainer PR filtering is bypassed') + + # Ensure PAT storage file exists on boot + 
pat_storage.ensure_pats_file() + + # Attach PAT broadcast and check handlers to the axon if hasattr(self, 'axon') and self.axon is not None: self.axon.attach( - forward_fn=partial(handle_prediction, self), - blacklist_fn=partial(blacklist_prediction, self), - priority_fn=partial(priority_prediction, self), + forward_fn=partial(handle_pat_broadcast, self), + blacklist_fn=partial(blacklist_pat_broadcast, self), + priority_fn=partial(priority_pat_broadcast, self), + ) + self.axon.attach( + forward_fn=partial(handle_pat_check, self), + blacklist_fn=partial(blacklist_pat_check, self), + priority_fn=partial(priority_pat_check, self), ) - bt.logging.info('Merge predictions handler attached to axon') - else: - bt.logging.warning('Axon not available, skipping prediction handler attachment') # Init in-memory cache for miner evaluations (fallback when GitHub API fails) self.evaluation_cache = MinerEvaluationCache() diff --git a/pyproject.toml b/pyproject.toml index d28c8f9a..36b9a809 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,18 +1,49 @@ -[tool.black] -line-length = 120 -skip-string-normalization = true -target-version = ['py39'] +[project] +name = "gittensor" +version = "5.0.0" +description = "gittensor" +readme = "README.md" +license = "MIT" +requires-python = ">=3.12" +dependencies = [ + "bittensor==10.0.1", + "bittensor-cli==9.17.0", + "bittensor-commit-reveal==0.4.0", + "bittensor-wallet==4.0.0", + "click", + "levenshtein==0.27.3", + "psycopg2-binary==2.9.10", + "python-dotenv==1.2.1", + "pytz==2025.2", + "rich", + "substrate-interface", + "tree-sitter==0.24.0", + "tree-sitter-language-pack==0.7.2", + "wandb==0.21.3", +] + +[project.optional-dependencies] +dev = [ + "pytest==9.0.0", + "pyright", + "ruff==0.14.10", +] +debug = [ + "debugpy==1.8.11", + "fastapi==0.110.1", + "uvicorn==0.32.0", +] + +[project.scripts] +gitt = "gittensor.cli.main:main" -[tool.isort] -profile = "black" -line_length = 120 -multi_line_output = 3 -include_trailing_comma = true 
-skip_gitignore = true +[build-system] +requires = ["hatchling"] +build-backend = "hatchling.build" [tool.ruff] line-length = 120 -target-version = "py39" +target-version = "py312" [tool.ruff.lint] select = ["E", "F", "I"] @@ -21,3 +52,5 @@ ignore = ["E501"] [tool.ruff.format] quote-style = 'single' +[tool.pytest.ini_options] +testpaths = ["tests"] diff --git a/pyrightconfig.json b/pyrightconfig.json new file mode 100644 index 00000000..6a6b1637 --- /dev/null +++ b/pyrightconfig.json @@ -0,0 +1,8 @@ +{ + "typeCheckingMode": "basic", + "include": ["gittensor", "tests"], + "exclude": ["gittensor-venv", "neurons"], + "pythonVersion": "3.12", + "reportMissingImports": true, + "reportMissingTypeStubs": false +} diff --git a/requirements.txt b/requirements.txt deleted file mode 100644 index a94c26db..00000000 --- a/requirements.txt +++ /dev/null @@ -1,21 +0,0 @@ -bittensor==10.0.1 -bittensor-cli==9.17.0 -bittensor-commit-reveal==0.4.0 -bittensor-wallet==4.0.0 -levenshtein==0.27.3 -pytest==9.0.0 -ruff==0.14.10 -tree-sitter==0.24.0 -tree-sitter-language-pack==0.7.2 -wandb==0.21.3 - -# For remote debugging API -fastapi==0.110.1 -uvicorn==0.32.0 -debugpy==1.8.11 - -# For validator database storage (not required for validators to run) -pytz==2025.2 -psycopg2-binary==2.9.10 -python-dotenv==1.2.1 -substrate-interface \ No newline at end of file diff --git a/scripts/miner-entrypoint.sh b/scripts/miner-entrypoint.sh deleted file mode 100755 index 9834dd82..00000000 --- a/scripts/miner-entrypoint.sh +++ /dev/null @@ -1,18 +0,0 @@ -#!/bin/bash - -if [ -z "$NETUID" ]; then echo "NETUID is not set" && exit 1; fi -if [ -z "$WALLET_NAME" ]; then echo "WALLET_NAME is not set" && exit 1; fi -if [ -z "$HOTKEY_NAME" ]; then echo "HOTKEY_NAME is not set" && exit 1; fi -if [ -z "$SUBTENSOR_NETWORK" ]; then echo "SUBTENSOR_NETWORK is not set" && exit 1; fi -if [ -z "$PORT" ]; then echo "PORT is not set" && exit 1; fi -if [ -z "$LOG_LEVEL" ]; then echo "LOG_LEVEL is not set" && exit 1; fi -if [ 
-z "$GITTENSOR_MINER_PAT" ]; then echo "GITTENSOR_MINER_PAT is not set" && exit 1; fi - -exec python neurons/miner.py \ - --netuid ${NETUID} \ - --wallet.name ${WALLET_NAME} \ - --wallet.hotkey ${HOTKEY_NAME} \ - --subtensor.network ${SUBTENSOR_NETWORK} \ - --axon.port ${PORT} \ - --logging.${LOG_LEVEL} \ - "$@" diff --git a/setup.py b/setup.py deleted file mode 100644 index 95f38a66..00000000 --- a/setup.py +++ /dev/null @@ -1,95 +0,0 @@ -# The MIT License (MIT) -# Copyright © 2023 Yuma Rao -# Copyright © 2025 Entrius - -# Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated -# documentation files (the “Software”), to deal in the Software without restriction, including without limitation -# the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, -# and to permit persons to whom the Software is furnished to do so, subject to the following conditions: - -# The above copyright notice and this permission notice shall be included in all copies or substantial portions of -# the Software. - -# THE SOFTWARE IS PROVIDED “AS IS”, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO -# THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL -# THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION -# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER -# DEALINGS IN THE SOFTWARE. 
- -import codecs -import os -import re -from io import open -from os import path - -from setuptools import find_packages, setup - - -def read_requirements(path): - with open(path, 'r') as f: - requirements = f.read().splitlines() - processed_requirements = [] - - for req in requirements: - # For git or other VCS links - if req.startswith('git+') or '@' in req: - pkg_name = re.search(r'(#egg=)([\w\-_]+)', req) - if pkg_name: - processed_requirements.append(pkg_name.group(2)) - else: - # You may decide to raise an exception here, - # if you want to ensure every VCS link has an #egg= at the end - continue - else: - processed_requirements.append(req) - return processed_requirements - - -requirements = read_requirements('requirements.txt') -here = path.abspath(path.dirname(__file__)) - -with open(path.join(here, 'README.md'), encoding='utf-8') as f: - long_description = f.read() - -# loading version from setup.py -with codecs.open(os.path.join(here, 'gittensor/__init__.py'), encoding='utf-8') as init_file: - version_match = re.search(r"^__version__ = ['\"]([^'\"]*)['\"]", init_file.read(), re.M) - version_string = version_match.group(1) - -setup( - name='gittensor', - version=version_string, - description='gittensor', - long_description=long_description, - long_description_content_type='text/markdown', - url='https://github.com/entrius/gittensor', - author='Entrius', - packages=find_packages(), - include_package_data=True, - author_email='', - license='MIT', - python_requires='>=3.8', - install_requires=requirements, - entry_points={ - 'console_scripts': [ - 'gitt=gittensor.cli.main:main', - ], - }, - classifiers=[ - 'Development Status :: 3 - Alpha', - 'Intended Audience :: Developers', - 'Topic :: Software Development :: Build Tools', - # Pick your license as you wish - 'License :: OSI Approved :: MIT License', - 'Programming Language :: Python :: 3 :: Only', - 'Programming Language :: Python :: 3.8', - 'Programming Language :: Python :: 3.9', - 'Programming Language 
:: Python :: 3.10', - 'Topic :: Scientific/Engineering', - 'Topic :: Scientific/Engineering :: Mathematics', - 'Topic :: Scientific/Engineering :: Artificial Intelligence', - 'Topic :: Software Development', - 'Topic :: Software Development :: Libraries', - 'Topic :: Software Development :: Libraries :: Python Modules', - ], -) diff --git a/smart-contracts/issues-v0/errors.rs b/smart-contracts/issues-v0/errors.rs index bae1d817..92b86391 100644 --- a/smart-contracts/issues-v0/errors.rs +++ b/smart-contracts/issues-v0/errors.rs @@ -20,7 +20,7 @@ pub enum Error { InvalidIssueNumber, /// Issue is not in Active status IssueNotActive, - /// Solver is not a valid miner (bronze+ tier required) + /// Solver is not an eligible miner InvalidSolver, /// Caller has already voted on this proposal AlreadyVoted, diff --git a/tests/cli/conftest.py b/tests/cli/conftest.py index 3b5c53fb..b918b422 100644 --- a/tests/cli/conftest.py +++ b/tests/cli/conftest.py @@ -60,7 +60,7 @@ def is_hotkey_registered(self, *args, **kwargs): Wallet=_FakeWallet, Subtensor=_FakeSubtensor, ) - sys.modules['bittensor'] = fake_bt + sys.modules['bittensor'] = fake_bt # type: ignore[assignment] _STUBBED_BITTENSOR = True diff --git a/tests/cli/test_issue_predict.py b/tests/cli/test_issue_predict.py deleted file mode 100644 index 03c6e429..00000000 --- a/tests/cli/test_issue_predict.py +++ /dev/null @@ -1,146 +0,0 @@ -# The MIT License (MIT) -# Copyright © 2025 Entrius - -"""CLI tests for `issues predict` command.""" - -import json -from unittest.mock import patch - - -def test_predict_interactive_continue_cancel_skips_miner_validation(cli_root, runner, sample_issue, sample_prs): - with ( - patch('gittensor.cli.issue_commands.predict.get_contract_address', return_value='0xabc'), - patch('gittensor.cli.issue_commands.predict.resolve_network', return_value=('ws://x', 'test')), - patch('gittensor.cli.issue_commands.predict.resolve_netuid_from_contract', return_value=1), - 
patch('gittensor.cli.issue_commands.predict.fetch_issue_from_contract', return_value=sample_issue), - patch('gittensor.cli.issue_commands.predict.fetch_open_issue_pull_requests', return_value=sample_prs), - patch('gittensor.cli.issue_commands.predict._is_interactive', return_value=True), - patch('gittensor.cli.issue_commands.predict._resolve_registered_miner_hotkey') as mock_resolve_miner, - ): - result = runner.invoke( - cli_root, - ['issues', 'predict', '--id', '42'], - input='n\n', - catch_exceptions=False, - ) - - assert result.exit_code == 0 - assert 'Prediction cancelled' in result.output - mock_resolve_miner.assert_not_called() - - -def test_predict_json_success_payload_schema(cli_root, runner, sample_issue, sample_prs): - with ( - patch('gittensor.cli.issue_commands.predict.get_contract_address', return_value='0xabc'), - patch('gittensor.cli.issue_commands.predict.resolve_network', return_value=('ws://x', 'test')), - patch('gittensor.cli.issue_commands.predict.resolve_netuid_from_contract', return_value=1), - patch('gittensor.cli.issue_commands.predict.fetch_issue_from_contract', return_value=sample_issue), - patch('gittensor.cli.issue_commands.predict.fetch_open_issue_pull_requests', return_value=sample_prs), - patch( - 'gittensor.cli.issue_commands.predict._resolve_registered_miner_hotkey', - return_value='5FakeHotkey123', - ), - patch('gittensor.cli.issue_commands.predict.broadcast_predictions') as mock_broadcast_stub, - ): - mock_broadcast_stub.return_value = { - 'issue_id': 42, - 'repository': 'entrius/gittensor', - 'predictions': {'101': 0.7}, - } - result = runner.invoke( - cli_root, - ['issues', 'predict', '--id', '42', '--pr', '101', '--probability', '0.7', '--json'], - catch_exceptions=False, - ) - - assert result.exit_code == 0 - mock_broadcast_stub.assert_called_once() - call_kwargs = mock_broadcast_stub.call_args - payload = call_kwargs.kwargs['payload'] - assert {'issue_id', 'repository', 'predictions'} <= set(payload.keys()) - assert 
payload['issue_id'] == 42 - assert payload['repository'] == 'entrius/gittensor' - assert payload['predictions'] == {101: 0.7} - - -def test_predict_json_requires_non_interactive_inputs(runner, cli_root): - result = runner.invoke( - cli_root, - ['issues', 'predict', '--id', '42', '--json'], - catch_exceptions=False, - ) - assert result.exit_code != 0 - payload = json.loads(result.output) - assert payload['success'] is False - assert '--json mode requires --pr/--probability or --json-input.' in payload['error']['message'] - - -def test_predict_rejects_probability_out_of_range(runner, cli_root): - result = runner.invoke( - cli_root, - ['issues', 'predict', '--id', '42', '--pr', '101', '--probability', '1.1', '--json'], - catch_exceptions=False, - ) - assert result.exit_code != 0 - payload = json.loads(result.output) - assert payload['success'] is False - assert 'Probability must be between 0.0 and 1.0' in payload['error']['message'] - - -def test_predict_rejects_invalid_json_input(runner, cli_root): - result = runner.invoke( - cli_root, - ['issues', 'predict', '--id', '42', '--json-input', '{bad json}', '--json'], - catch_exceptions=False, - ) - assert result.exit_code != 0 - payload = json.loads(result.output) - assert payload['success'] is False - assert 'Invalid JSON' in payload['error']['message'] - - -def test_predict_rejects_json_input_probability_total_over_one(runner, cli_root): - result = runner.invoke( - cli_root, - ['issues', 'predict', '--id', '42', '--json-input', '{"101": 0.8, "103": 0.3}', '--json'], - catch_exceptions=False, - ) - assert result.exit_code != 0 - payload = json.loads(result.output) - assert payload['success'] is False - assert 'Sum of probabilities must be <= 1.0' in payload['error']['message'] - - -def test_predict_rejects_pr_not_in_open_set_before_miner_validation(cli_root, runner, sample_issue, sample_prs): - with ( - patch('gittensor.cli.issue_commands.predict.get_contract_address', return_value='0xabc'), - 
patch('gittensor.cli.issue_commands.predict.resolve_network', return_value=('ws://x', 'test')), - patch('gittensor.cli.issue_commands.predict.resolve_netuid_from_contract', return_value=1), - patch('gittensor.cli.issue_commands.predict.fetch_issue_from_contract', return_value=sample_issue), - patch('gittensor.cli.issue_commands.predict.fetch_open_issue_pull_requests', return_value=sample_prs), - patch('gittensor.cli.issue_commands.predict._resolve_registered_miner_hotkey') as mock_resolve_miner, - ): - result = runner.invoke( - cli_root, - ['issues', 'predict', '--id', '42', '--pr', '999', '--probability', '0.2', '--json'], - catch_exceptions=False, - ) - - assert result.exit_code != 0 - payload = json.loads(result.output) - assert payload['success'] is False - assert 'is not an open PR for this issue' in payload['error']['message'] - mock_resolve_miner.assert_not_called() - - -def test_predict_invalid_issue_id_returns_bad_parameter(runner, cli_root): - for invalid_issue_id in [0, -1, 1_000_000]: - result = runner.invoke( - cli_root, - ['issues', 'predict', '--id', str(invalid_issue_id), '--json'], - catch_exceptions=False, - ) - assert result.exit_code != 0 - payload = json.loads(result.output) - assert payload['success'] is False - assert payload['error']['type'] == 'bad_parameter' diff --git a/tests/cli/test_issue_submission.py b/tests/cli/test_issue_submission.py index 8e0ba6da..8724d78b 100644 --- a/tests/cli/test_issue_submission.py +++ b/tests/cli/test_issue_submission.py @@ -113,3 +113,16 @@ def test_submissions_human_no_open_prs_message(cli_root, runner, sample_issue): assert result.exit_code == 0 assert 'No open submissions available' in result.output + + +def test_submissions_help_via_issue_alias_routes_to_command_help(cli_root, runner): + result = runner.invoke( + cli_root, + ['i', 'submissions', '--help'], + catch_exceptions=False, + ) + + assert result.exit_code == 0 + assert 'On-chain issue ID' in result.output + assert '--id' in result.output + 
assert '--logging.debug' not in result.output diff --git a/tests/cli/test_issues_list_json.py b/tests/cli/test_issues_list_json.py new file mode 100644 index 00000000..9c07db1d --- /dev/null +++ b/tests/cli/test_issues_list_json.py @@ -0,0 +1,35 @@ +# The MIT License (MIT) +# Copyright © 2025 Entrius + +"""Regression tests for `issues list --json --id` not-found handling.""" + +import json +from unittest.mock import patch + +FAKE_ISSUES = [ + { + 'id': 1, + 'repository_full_name': 'owner/repo', + 'issue_number': 10, + 'bounty_amount': 50_000_000_000, + 'target_bounty': 100_000_000_000, + 'status': 'Active', + }, +] + + +def test_issues_list_json_missing_issue_returns_structured_error(cli_root, runner): + """Requesting a nonexistent issue ID must return a structured JSON error with non-zero exit.""" + with ( + patch('gittensor.cli.issue_commands.view.get_contract_address', return_value='5Fakeaddr'), + patch('gittensor.cli.issue_commands.view.resolve_network', return_value=('ws://x', 'test')), + patch('gittensor.cli.issue_commands.view.read_issues_from_contract', return_value=FAKE_ISSUES), + ): + result = runner.invoke(cli_root, ['issues', 'list', '--json', '--id', '999'], catch_exceptions=False) + + assert result.exit_code != 0 + + payload = json.loads(result.output) + assert payload['success'] is False + assert payload['error']['type'] == 'not_found' + assert '999' in payload['error']['message'] diff --git a/tests/cli/test_miner_commands.py b/tests/cli/test_miner_commands.py new file mode 100644 index 00000000..75add3e2 --- /dev/null +++ b/tests/cli/test_miner_commands.py @@ -0,0 +1,69 @@ +# Entrius 2025 + +"""Tests for gitt miner post and gitt miner check CLI commands.""" + +import json +from unittest.mock import patch + +import pytest +from click.testing import CliRunner + +from gittensor.cli.main import cli + + +@pytest.fixture +def runner(): + return CliRunner() + + +class TestMinerPost: + def test_no_pat_prompts_interactively(self, runner, monkeypatch): + 
monkeypatch.delenv('GITTENSOR_MINER_PAT', raising=False) + result = runner.invoke(cli, ['miner', 'post', '--wallet', 'test', '--hotkey', 'test'], input='') + assert 'Enter your GitHub Personal Access Token' in result.output + + def test_no_pat_json_mode_exits(self, runner, monkeypatch): + monkeypatch.delenv('GITTENSOR_MINER_PAT', raising=False) + result = runner.invoke(cli, ['miner', 'post', '--json-output', '--wallet', 'test', '--hotkey', 'test']) + assert result.exit_code != 0 + output = json.loads(result.output) + assert output['success'] is False + + @patch('gittensor.cli.miner_commands.post._validate_pat_locally', return_value=False) + def test_pat_flag_used(self, mock_validate, runner, monkeypatch): + monkeypatch.delenv('GITTENSOR_MINER_PAT', raising=False) + result = runner.invoke(cli, ['miner', 'post', '--pat', 'ghp_test123', '--wallet', 'test', '--hotkey', 'test']) + assert result.exit_code != 0 + assert 'invalid' in result.output.lower() or 'expired' in result.output.lower() + mock_validate.assert_called_once_with('ghp_test123') + + @patch('gittensor.cli.miner_commands.post._validate_pat_locally', return_value=False) + def test_invalid_pat_exits(self, mock_validate, runner, monkeypatch): + monkeypatch.setenv('GITTENSOR_MINER_PAT', 'ghp_invalid') + result = runner.invoke(cli, ['miner', 'post', '--wallet', 'test', '--hotkey', 'test']) + assert result.exit_code != 0 + assert 'invalid' in result.output.lower() or 'expired' in result.output.lower() + + def test_help_text(self, runner): + result = runner.invoke(cli, ['miner', 'post', '--help']) + assert result.exit_code == 0 + assert 'Broadcast your GitHub PAT' in result.output + + def test_miner_alias(self, runner): + """gitt m post should work as alias for gitt miner post.""" + result = runner.invoke(cli, ['m', 'post', '--help']) + assert result.exit_code == 0 + assert 'Broadcast your GitHub PAT' in result.output + + +class TestMinerCheck: + def test_help_text(self, runner): + result = runner.invoke(cli, 
['miner', 'check', '--help']) + assert result.exit_code == 0 + assert 'Check how many validators' in result.output + + def test_check_alias(self, runner): + """gitt m check should work as alias for gitt miner check.""" + result = runner.invoke(cli, ['m', 'check', '--help']) + assert result.exit_code == 0 + assert 'Check how many validators' in result.output diff --git a/tests/utils/conftest.py b/tests/utils/conftest.py index 9cca3b07..b1f95192 100644 --- a/tests/utils/conftest.py +++ b/tests/utils/conftest.py @@ -5,19 +5,3 @@ """ Pytest configuration for utils tests. """ - -import pytest - - -@pytest.fixture -def clear_github_cache(): - """Clear the GitHub user cache before and after test.""" - # Import here to avoid issues during collection - try: - import gittensor.utils.github_api_tools as api_tools - - api_tools._GITHUB_USER_CACHE.clear() - yield - api_tools._GITHUB_USER_CACHE.clear() - except (ImportError, AttributeError): - yield diff --git a/tests/utils/test_github_api_tools.py b/tests/utils/test_github_api_tools.py index e0829bc8..b409907f 100644 --- a/tests/utils/test_github_api_tools.py +++ b/tests/utils/test_github_api_tools.py @@ -15,6 +15,8 @@ Run with: python run_tests.py tests/utils/ """ +from datetime import datetime, timedelta, timezone +from typing import Dict, Optional from unittest.mock import Mock, call, patch import pytest @@ -28,6 +30,7 @@ get_github_id = github_api_tools.get_github_id get_github_account_age_days = github_api_tools.get_github_account_age_days get_pull_request_file_changes = github_api_tools.get_pull_request_file_changes +get_merge_base_sha = github_api_tools.get_merge_base_sha find_prs_for_issue = github_api_tools.find_prs_for_issue execute_graphql_query = github_api_tools.execute_graphql_query @@ -67,16 +70,6 @@ def mock_response_502(): return response -@pytest.fixture -def clear_github_cache(): - """Clear the GitHub user cache before test.""" - import gittensor.utils.github_api_tools as api_tools - - 
api_tools._GITHUB_USER_CACHE.clear() - yield - api_tools._GITHUB_USER_CACHE.clear() - - # ============================================================================ # GraphQL Retry Logic Tests # ============================================================================ @@ -367,7 +360,7 @@ class TestOtherGitHubAPIFunctions: @patch('gittensor.utils.github_api_tools.requests.get') @patch('gittensor.utils.github_api_tools.time.sleep') @patch('gittensor.utils.github_api_tools.bt.logging') - def test_get_github_id_retry_logic(self, mock_logging, mock_sleep, mock_get, clear_github_cache): + def test_get_github_id_retry_logic(self, mock_logging, mock_sleep, mock_get): """Test that get_github_id retries on failure.""" mock_response_success = Mock() mock_response_success.status_code = 200 @@ -387,7 +380,7 @@ def test_get_github_id_retry_logic(self, mock_logging, mock_sleep, mock_get, cle @patch('gittensor.utils.github_api_tools.requests.get') @patch('gittensor.utils.github_api_tools.time.sleep') @patch('gittensor.utils.github_api_tools.bt.logging') - def test_get_github_account_age_retry_logic(self, mock_logging, mock_sleep, mock_get, clear_github_cache): + def test_get_github_account_age_retry_logic(self, mock_logging, mock_sleep, mock_get): """Test that get_github_account_age_days retries on failure.""" mock_response_success = Mock() mock_response_success.status_code = 200 @@ -1149,17 +1142,19 @@ def test_null_closing_issues_skips_bad_pr_continues_rest(self, mock_logging, moc from gittensor.classes import MinerEvaluation from gittensor.validator.utils.load_weights import RepositoryConfig - good_pr_before = _make_pr_node( - 1, 'goodorg', 'goodrepo', created_at='2026-02-15T00:00:00Z', merged_at='2026-02-16T00:00:00Z' - ) - bad_pr = _make_pr_node( - 2, 'affinefoundation', 'affinetes', created_at='2026-02-10T00:00:00Z', merged_at='2026-02-11T00:00:00Z' - ) + now = datetime.now(timezone.utc) + recent = (now - timedelta(days=5)).strftime('%Y-%m-%dT%H:%M:%SZ') + recent_merge 
= (now - timedelta(days=4)).strftime('%Y-%m-%dT%H:%M:%SZ') + mid = (now - timedelta(days=10)).strftime('%Y-%m-%dT%H:%M:%SZ') + mid_merge = (now - timedelta(days=9)).strftime('%Y-%m-%dT%H:%M:%SZ') + older = (now - timedelta(days=15)).strftime('%Y-%m-%dT%H:%M:%SZ') + older_merge = (now - timedelta(days=14)).strftime('%Y-%m-%dT%H:%M:%SZ') + + good_pr_before = _make_pr_node(1, 'goodorg', 'goodrepo', created_at=recent, merged_at=recent_merge) + bad_pr = _make_pr_node(2, 'affinefoundation', 'affinetes', created_at=mid, merged_at=mid_merge) # Simulate the banned repo returning null for closingIssuesReferences bad_pr['closingIssuesReferences'] = None - good_pr_after = _make_pr_node( - 3, 'goodorg', 'goodrepo', created_at='2026-02-05T00:00:00Z', merged_at='2026-02-06T00:00:00Z' - ) + good_pr_after = _make_pr_node(3, 'goodorg', 'goodrepo', created_at=older, merged_at=older_merge) mock_graphql_query.return_value = _make_graphql_response([good_pr_before, bad_pr, good_pr_after]) @@ -1183,5 +1178,471 @@ def test_null_closing_issues_skips_bad_pr_continues_rest(self, mock_logging, moc assert any('PR #2' in w for w in warning_calls), f'Expected a warning about PR #2, got: {warning_calls}' +# ============================================================================ +# GraphQL Batch-Size Limit Tests +# ============================================================================ + +fetch_file_contents_batch = github_api_tools.fetch_file_contents_batch +fetch_file_contents_with_base = github_api_tools.fetch_file_contents_with_base +FileContentPair = github_api_tools.FileContentPair + + +def _make_blob_response(text: str) -> Dict: + """Create a mock GraphQL Blob response object.""" + return {'text': text, 'byteSize': len(text), 'isBinary': False} + + +def _make_file_change(filename: str, status: str = 'modified', previous_filename: Optional[str] = None): + """Create a mock FileChange-like object for fetch_file_contents_with_base tests.""" + change = Mock() + change.filename = 
filename + change.status = status + change.previous_filename = previous_filename + return change + + +class TestFetchFileContentsBatch: + """Tests for batch-size limiting in fetch_file_contents_batch.""" + + @patch('gittensor.utils.github_api_tools.execute_graphql_query') + def test_empty_paths_returns_empty_dict(self, mock_execute): + """No GraphQL call should be made when file_paths is empty.""" + result = fetch_file_contents_batch('owner', 'repo', 'abc123', [], 'token') + assert result == {} + mock_execute.assert_not_called() + + @patch('gittensor.utils.github_api_tools.execute_graphql_query') + def test_single_batch_under_limit(self, mock_execute): + """Files under the batch limit should be fetched in a single GraphQL request.""" + paths = [f'src/file{i}.py' for i in range(5)] + mock_execute.return_value = { + 'data': {'repository': {f'file{i}': _make_blob_response(f'content_{i}') for i in range(5)}} + } + + result = fetch_file_contents_batch('owner', 'repo', 'sha1', paths, 'token') + + assert mock_execute.call_count == 1 + assert len(result) == 5 + for i, path in enumerate(paths): + assert result[path] == f'content_{i}' + + @patch('gittensor.utils.github_api_tools.MAX_FILES_PER_GRAPHQL_BATCH', 3) + @patch('gittensor.utils.github_api_tools.execute_graphql_query') + def test_multiple_batches_splits_correctly(self, mock_execute): + """Files exceeding the batch limit should be split into multiple GraphQL requests.""" + paths = [f'src/file{i}.py' for i in range(7)] + + def mock_query_side_effect(query, variables, token): + # Count how many file aliases are in the query + file_count = query.count('object(expression:') + return { + 'data': { + 'repository': {f'file{i}': _make_blob_response(f'batch_content_{i}') for i in range(file_count)} + } + } + + mock_execute.side_effect = mock_query_side_effect + + result = fetch_file_contents_batch('owner', 'repo', 'sha1', paths, 'token') + + # 7 files / batch size 3 = 3 batches (3 + 3 + 1) + assert mock_execute.call_count == 3 
+ assert len(result) == 7 + + @patch('gittensor.utils.github_api_tools.MAX_FILES_PER_GRAPHQL_BATCH', 3) + @patch('gittensor.utils.github_api_tools.execute_graphql_query') + @patch('gittensor.utils.github_api_tools.bt.logging') + def test_failed_batch_returns_none_without_losing_other_batches(self, mock_logging, mock_execute): + """A failed batch should return None for its files but not affect other batches.""" + paths = [f'file{i}.py' for i in range(6)] + + # First batch succeeds, second batch fails + mock_execute.side_effect = [ + {'data': {'repository': {f'file{i}': _make_blob_response(f'ok_{i}') for i in range(3)}}}, + None, # second batch fails + ] + + result = fetch_file_contents_batch('owner', 'repo', 'sha1', paths, 'token') + + assert len(result) == 6 + # First batch files have content + assert result['file0.py'] == 'ok_0' + assert result['file1.py'] == 'ok_1' + assert result['file2.py'] == 'ok_2' + # Second batch files are None due to failure + assert result['file3.py'] is None + assert result['file4.py'] is None + assert result['file5.py'] is None + + @patch('gittensor.utils.github_api_tools.execute_graphql_query') + def test_binary_and_oversized_files_return_none(self, mock_execute): + """Binary files and files exceeding MAX_FILE_SIZE_BYTES should return None.""" + paths = ['normal.py', 'binary.bin', 'huge.py'] + mock_execute.return_value = { + 'data': { + 'repository': { + 'file0': _make_blob_response('print("hello")'), + 'file1': {'text': None, 'byteSize': 100, 'isBinary': True}, + 'file2': {'text': 'x' * 100, 'byteSize': 2_000_000, 'isBinary': False}, + } + } + } + + result = fetch_file_contents_batch('owner', 'repo', 'sha1', paths, 'token') + + assert result['normal.py'] == 'print("hello")' + assert result['binary.bin'] is None + assert result['huge.py'] is None + + @patch('gittensor.utils.github_api_tools.MAX_FILES_PER_GRAPHQL_BATCH', 50) + @patch('gittensor.utils.github_api_tools.execute_graphql_query') + def 
test_exactly_batch_size_uses_single_request(self, mock_execute): + """Exactly MAX_FILES_PER_GRAPHQL_BATCH files should use a single request.""" + paths = [f'file{i}.py' for i in range(50)] + mock_execute.return_value = { + 'data': {'repository': {f'file{i}': _make_blob_response(f'c{i}') for i in range(50)}} + } + + result = fetch_file_contents_batch('owner', 'repo', 'sha1', paths, 'token') + + assert mock_execute.call_count == 1 + assert len(result) == 50 + + +class TestFetchFileContentsWithBase: + """Tests for batch-size limiting in fetch_file_contents_with_base.""" + + @patch('gittensor.utils.github_api_tools.execute_graphql_query') + def test_empty_file_changes_returns_empty_dict(self, mock_execute): + """No GraphQL call should be made when file_changes is empty.""" + result = fetch_file_contents_with_base('owner', 'repo', 'base', 'head', [], 'token') + assert result == {} + mock_execute.assert_not_called() + + @patch('gittensor.utils.github_api_tools.execute_graphql_query') + def test_single_batch_fetches_base_and_head(self, mock_execute): + """Modified files should have both old and new content fetched.""" + changes = [_make_file_change('app.py', status='modified')] + mock_execute.return_value = { + 'data': { + 'repository': { + 'base0': _make_blob_response('old code'), + 'head0': _make_blob_response('new code'), + } + } + } + + result = fetch_file_contents_with_base('owner', 'repo', 'base_sha', 'head_sha', changes, 'token') + + assert len(result) == 1 + assert result['app.py'].old_content == 'old code' + assert result['app.py'].new_content == 'new code' + + @patch('gittensor.utils.github_api_tools.execute_graphql_query') + def test_added_file_has_no_old_content(self, mock_execute): + """Newly added files should only fetch head content, not base.""" + changes = [_make_file_change('new_file.py', status='added')] + mock_execute.return_value = { + 'data': { + 'repository': { + 'head0': _make_blob_response('brand new'), + } + } + } + + result = 
fetch_file_contents_with_base('owner', 'repo', 'base', 'head', changes, 'token') + + assert result['new_file.py'].old_content is None + assert result['new_file.py'].new_content == 'brand new' + + @patch('gittensor.utils.github_api_tools.execute_graphql_query') + def test_removed_file_has_no_new_content(self, mock_execute): + """Deleted files should only fetch base content, not head.""" + changes = [_make_file_change('old_file.py', status='removed')] + mock_execute.return_value = { + 'data': { + 'repository': { + 'base0': _make_blob_response('deleted code'), + } + } + } + + result = fetch_file_contents_with_base('owner', 'repo', 'base', 'head', changes, 'token') + + assert result['old_file.py'].old_content == 'deleted code' + assert result['old_file.py'].new_content is None + + @patch('gittensor.utils.github_api_tools.MAX_FILES_PER_GRAPHQL_BATCH', 2) + @patch('gittensor.utils.github_api_tools.execute_graphql_query') + def test_multiple_batches_splits_file_changes(self, mock_execute): + """File changes exceeding the batch limit should be split into multiple requests.""" + changes = [_make_file_change(f'file{i}.py') for i in range(5)] + + def mock_side_effect(query, variables, token): + file_count = query.count('base') + return { + 'data': { + 'repository': { + **{f'base{i}': _make_blob_response(f'old_{i}') for i in range(file_count)}, + **{f'head{i}': _make_blob_response(f'new_{i}') for i in range(file_count)}, + } + } + } + + mock_execute.side_effect = mock_side_effect + + result = fetch_file_contents_with_base('owner', 'repo', 'base', 'head', changes, 'token') + + # 5 files / batch size 2 = 3 batches (2 + 2 + 1) + assert mock_execute.call_count == 3 + assert len(result) == 5 + + @patch('gittensor.utils.github_api_tools.MAX_FILES_PER_GRAPHQL_BATCH', 2) + @patch('gittensor.utils.github_api_tools.execute_graphql_query') + @patch('gittensor.utils.github_api_tools.bt.logging') + def test_failed_batch_isolates_failure(self, mock_logging, mock_execute): + """A failed 
batch should return None pairs without affecting other batches.""" + changes = [_make_file_change(f'f{i}.py') for i in range(4)] + + mock_execute.side_effect = [ + { + 'data': { + 'repository': { + 'base0': _make_blob_response('old_0'), + 'head0': _make_blob_response('new_0'), + 'base1': _make_blob_response('old_1'), + 'head1': _make_blob_response('new_1'), + } + } + }, + None, # second batch fails + ] + + result = fetch_file_contents_with_base('owner', 'repo', 'base', 'head', changes, 'token') + + assert len(result) == 4 + # First batch succeeded + assert result['f0.py'].old_content == 'old_0' + assert result['f0.py'].new_content == 'new_0' + # Second batch failed — None pairs + assert result['f2.py'].old_content is None + assert result['f2.py'].new_content is None + + @patch('gittensor.utils.github_api_tools.execute_graphql_query') + def test_renamed_file_fetches_from_previous_filename(self, mock_execute): + """Renamed files should fetch base content from the previous filename.""" + changes = [_make_file_change('new_name.py', status='renamed', previous_filename='old_name.py')] + mock_execute.return_value = { + 'data': { + 'repository': { + 'base0': _make_blob_response('original'), + 'head0': _make_blob_response('updated'), + } + } + } + + result = fetch_file_contents_with_base('owner', 'repo', 'base_sha', 'head_sha', changes, 'token') + + assert result['new_name.py'].old_content == 'original' + assert result['new_name.py'].new_content == 'updated' + # Verify the base expression uses old_name.py + query_arg = mock_execute.call_args[0][0] + assert 'base_sha:old_name.py' in query_arg + assert 'head_sha:new_name.py' in query_arg + + +# ============================================================================ +# Merge Base SHA Tests +# ============================================================================ + + +class TestGetMergeBaseSha: + """Test suite for get_merge_base_sha using GitHub compare API.""" + + 
@patch('gittensor.utils.github_api_tools.requests.get') + def test_returns_merge_base_sha_on_success(self, mock_get): + """Successful compare API call returns the merge_base_commit SHA.""" + mock_response = Mock(status_code=200) + mock_response.json.return_value = { + 'merge_base_commit': {'sha': 'abc123merge'}, + } + mock_get.return_value = mock_response + + result = get_merge_base_sha('owner/repo', 'base_sha', 'head_sha', 'fake_token') + + assert result == 'abc123merge' + assert mock_get.call_count == 1 + + @patch('gittensor.utils.github_api_tools.requests.get') + @patch('gittensor.utils.github_api_tools.time.sleep') + @patch('gittensor.utils.github_api_tools.bt.logging') + def test_retries_on_failure_then_succeeds(self, mock_logging, mock_sleep, mock_get): + """Retries on HTTP error and succeeds on second attempt.""" + mock_500 = Mock(status_code=500, text='Internal Server Error') + mock_200 = Mock(status_code=200) + mock_200.json.return_value = {'merge_base_commit': {'sha': 'abc123merge'}} + + mock_get.side_effect = [mock_500, mock_200] + + result = get_merge_base_sha('owner/repo', 'base_sha', 'head_sha', 'fake_token') + + assert result == 'abc123merge' + assert mock_get.call_count == 2 + assert mock_sleep.call_count == 1 + + @patch('gittensor.utils.github_api_tools.requests.get') + @patch('gittensor.utils.github_api_tools.time.sleep') + @patch('gittensor.utils.github_api_tools.bt.logging') + def test_returns_none_after_all_attempts_fail(self, mock_logging, mock_sleep, mock_get): + """Returns None after 3 failed attempts.""" + mock_500 = Mock(status_code=500, text='Internal Server Error') + mock_get.return_value = mock_500 + + result = get_merge_base_sha('owner/repo', 'base_sha', 'head_sha', 'fake_token') + + assert result is None + assert mock_get.call_count == 3 + assert mock_sleep.call_count == 2 + + @patch('gittensor.utils.github_api_tools.requests.get') + @patch('gittensor.utils.github_api_tools.bt.logging') + def 
test_returns_none_when_merge_base_commit_missing(self, mock_logging, mock_get): + """Returns None when response lacks merge_base_commit field.""" + mock_response = Mock(status_code=200) + mock_response.json.return_value = {'status': 'ahead'} + mock_get.return_value = mock_response + + result = get_merge_base_sha('owner/repo', 'base_sha', 'head_sha', 'fake_token') + + assert result is None + mock_logging.warning.assert_called() + + @patch('gittensor.utils.github_api_tools.requests.get') + @patch('gittensor.utils.github_api_tools.time.sleep') + @patch('gittensor.utils.github_api_tools.bt.logging') + def test_retries_on_connection_error(self, mock_logging, mock_sleep, mock_get): + """Retries on connection errors and succeeds.""" + import requests + + mock_200 = Mock(status_code=200) + mock_200.json.return_value = {'merge_base_commit': {'sha': 'abc123merge'}} + mock_get.side_effect = [requests.exceptions.ConnectionError('refused'), mock_200] + + result = get_merge_base_sha('owner/repo', 'base_sha', 'head_sha', 'fake_token') + + assert result == 'abc123merge' + assert mock_get.call_count == 2 + assert mock_sleep.call_count == 1 + + @patch('gittensor.utils.github_api_tools.requests.get') + @patch('gittensor.utils.github_api_tools.time.sleep') + @patch('gittensor.utils.github_api_tools.bt.logging') + def test_returns_none_after_all_connection_errors(self, mock_logging, mock_sleep, mock_get): + """Returns None after 3 connection errors.""" + import requests + + mock_get.side_effect = requests.exceptions.ConnectionError('refused') + + result = get_merge_base_sha('owner/repo', 'base_sha', 'head_sha', 'fake_token') + + assert result is None + assert mock_get.call_count == 3 + + +# ============================================================================ +# fetch_file_contents_for_pr Merge Base Integration Tests +# ============================================================================ + + +class TestFetchFileContentsForPrMergeBase: + """Test that 
fetch_file_contents_for_pr resolves merge-base instead of using base_ref_oid directly.""" + + @patch('gittensor.validator.oss_contributions.scoring.fetch_file_contents_with_base') + @patch('gittensor.validator.oss_contributions.scoring.get_merge_base_sha') + def test_uses_merge_base_when_available(self, mock_merge_base, mock_fetch): + """When merge-base resolves successfully, it should be used instead of base_ref_oid.""" + from gittensor.classes import FileChange, PRState, PullRequest + from gittensor.validator.oss_contributions.scoring import fetch_file_contents_for_pr + + mock_merge_base.return_value = 'merge_base_sha_123' + mock_fetch.return_value = {} + + pr = PullRequest( + number=1, + repository_full_name='owner/repo', + uid=0, + hotkey='hk', + github_id='1', + title='test', + author_login='user', + merged_at=None, + created_at=__import__('datetime').datetime.now(__import__('datetime').timezone.utc), + pr_state=PRState.MERGED, + base_ref_oid='base_branch_tip_sha', + head_ref_oid='head_sha', + file_changes=[ + FileChange( + pr_number=1, + repository_full_name='owner/repo', + filename='test.py', + status='modified', + changes=5, + additions=3, + deletions=2, + ), + ], + ) + + fetch_file_contents_for_pr(pr, 'fake_token') + + mock_merge_base.assert_called_once_with('owner/repo', 'base_branch_tip_sha', 'head_sha', 'fake_token') + # Verify merge-base SHA was passed, not the original base_ref_oid + mock_fetch.assert_called_once() + call_args = mock_fetch.call_args + assert call_args[0][2] == 'merge_base_sha_123', 'Should pass merge-base SHA as base_sha' + + @patch('gittensor.validator.oss_contributions.scoring.fetch_file_contents_with_base') + @patch('gittensor.validator.oss_contributions.scoring.get_merge_base_sha') + def test_falls_back_to_base_ref_oid_when_merge_base_fails(self, mock_merge_base, mock_fetch): + """When merge-base resolution fails, should fall back to base_ref_oid.""" + from gittensor.classes import FileChange, PRState, PullRequest + from 
gittensor.validator.oss_contributions.scoring import fetch_file_contents_for_pr + + mock_merge_base.return_value = None + mock_fetch.return_value = {} + + pr = PullRequest( + number=1, + repository_full_name='owner/repo', + uid=0, + hotkey='hk', + github_id='1', + title='test', + author_login='user', + merged_at=None, + created_at=__import__('datetime').datetime.now(__import__('datetime').timezone.utc), + pr_state=PRState.MERGED, + base_ref_oid='base_branch_tip_sha', + head_ref_oid='head_sha', + file_changes=[ + FileChange( + pr_number=1, + repository_full_name='owner/repo', + filename='test.py', + status='modified', + changes=5, + additions=3, + deletions=2, + ), + ], + ) + + fetch_file_contents_for_pr(pr, 'fake_token') + + # Should fall back to base_ref_oid + call_args = mock_fetch.call_args + assert call_args[0][2] == 'base_branch_tip_sha', 'Should fall back to base_ref_oid' + + if __name__ == '__main__': pytest.main([__file__, '-v']) diff --git a/tests/validator/conftest.py b/tests/validator/conftest.py index ec946934..a46ec6eb 100644 --- a/tests/validator/conftest.py +++ b/tests/validator/conftest.py @@ -4,7 +4,7 @@ """ Pytest fixtures for validator tests. -This module provides reusable fixtures for testing tier credibility, +Provides reusable fixtures for testing credibility, eligibility, scoring, and other validator functionality. 
""" @@ -15,35 +15,6 @@ import pytest from gittensor.classes import PRState, PullRequest -from gittensor.validator.oss_contributions.tier_config import ( - TIERS, - Tier, - TierConfig, - TierStats, -) - -# ============================================================================ -# Tier Config Fixtures -# ============================================================================ - - -@pytest.fixture -def bronze_config() -> TierConfig: - """Bronze tier configuration.""" - return TIERS[Tier.BRONZE] - - -@pytest.fixture -def silver_config() -> TierConfig: - """Silver tier configuration.""" - return TIERS[Tier.SILVER] - - -@pytest.fixture -def gold_config() -> TierConfig: - """Gold tier configuration.""" - return TIERS[Tier.GOLD] - # ============================================================================ # PR Factory Fixture @@ -56,15 +27,11 @@ class PRBuilder: Builder for creating mock PullRequests with sensible defaults. Usage: - pr = pr_factory.merged(tier=bronze_config) - pr = pr_factory.closed(tier=silver_config, number=5) - pr = pr_factory.open(tier=gold_config) - - # Or use the generic create method: - pr = pr_factory.create(state=PRState.MERGED, tier=bronze_config) - - # Create PRs with unique repos (for unique repo requirement testing): - prs = pr_factory.merged_batch(tier=bronze_config, count=3, unique_repos=True) + pr = pr_factory.merged() + pr = pr_factory.closed(number=5) + pr = pr_factory.open() + pr = pr_factory.merged(token_score=50.0) + prs = pr_factory.merged_batch(count=5, unique_repos=True) """ _counter: int = 0 @@ -81,31 +48,16 @@ def _next_repo(self) -> str: def create( self, state: PRState, - tier: TierConfig, number: Optional[int] = None, earned_score: float = 100.0, collateral_score: float = 20.0, repo: Optional[str] = None, unique_repo: bool = False, - token_score: Optional[float] = None, # Auto-calculated from tier if None + token_score: float = 10.0, uid: int = 0, merged_at: Optional[datetime] = None, ) -> PullRequest: - """Create a 
mock PullRequest with the given parameters. - - Args: - unique_repo: If True, generates a unique repo name for this PR. - If False and repo is None, uses 'test/repo'. - token_score: Token score for this PR. If None, auto-calculates based on tier - requirements to ensure the PR qualifies. - """ - # Auto-calculate token score if not specified - ensure it meets tier requirements - if token_score is None: - required_repos = tier.required_unique_repos_count or 3 - min_per_repo = tier.required_min_token_score_per_repo or 5.0 - min_total = tier.required_min_token_score or 0.0 - # Each PR should contribute enough to meet both per-repo and total requirements - token_score = max(min_per_repo, min_total / required_repos) + 1.0 + """Create a mock PullRequest with the given parameters.""" if number is None: number = self._next_number() @@ -126,94 +78,44 @@ def create( merged_at=merged_at, created_at=datetime.now(timezone.utc), pr_state=state, - repository_tier_configuration=tier, earned_score=earned_score, collateral_score=collateral_score, token_score=token_score, ) - def merged(self, tier: TierConfig, **kwargs) -> PullRequest: + def merged(self, **kwargs) -> PullRequest: """Create a merged PR.""" - return self.create(state=PRState.MERGED, tier=tier, **kwargs) + return self.create(state=PRState.MERGED, **kwargs) - def closed(self, tier: TierConfig, **kwargs) -> PullRequest: + def closed(self, **kwargs) -> PullRequest: """Create a closed PR.""" - return self.create(state=PRState.CLOSED, tier=tier, **kwargs) + return self.create(state=PRState.CLOSED, **kwargs) - def open(self, tier: TierConfig, **kwargs) -> PullRequest: + def open(self, **kwargs) -> PullRequest: """Create an open PR.""" - return self.create(state=PRState.OPEN, tier=tier, **kwargs) - - def merged_batch(self, tier: TierConfig, count: int, unique_repos: bool = False, **kwargs) -> List[PullRequest]: - """Create multiple merged PRs. - - Args: - unique_repos: If True, each PR gets a unique repo name. 
- """ - return [self.merged(tier=tier, unique_repo=unique_repos, **kwargs) for _ in range(count)] + return self.create(state=PRState.OPEN, **kwargs) - def closed_batch(self, tier: TierConfig, count: int, unique_repos: bool = False, **kwargs) -> List[PullRequest]: - """Create multiple closed PRs. + def merged_batch(self, count: int, unique_repos: bool = False, **kwargs) -> List[PullRequest]: + """Create multiple merged PRs.""" + return [self.merged(unique_repo=unique_repos, **kwargs) for _ in range(count)] - Args: - unique_repos: If True, each PR gets a unique repo name. - """ - return [self.closed(tier=tier, unique_repo=unique_repos, **kwargs) for _ in range(count)] + def closed_batch(self, count: int, unique_repos: bool = False, **kwargs) -> List[PullRequest]: + """Create multiple closed PRs.""" + return [self.closed(unique_repo=unique_repos, **kwargs) for _ in range(count)] - def open_batch(self, tier: TierConfig, count: int, unique_repos: bool = False, **kwargs) -> List[PullRequest]: - """Create multiple open PRs. - - Args: - unique_repos: If True, each PR gets a unique repo name. - """ - return [self.open(tier=tier, unique_repo=unique_repos, **kwargs) for _ in range(count)] + def open_batch(self, count: int, unique_repos: bool = False, **kwargs) -> List[PullRequest]: + """Create multiple open PRs.""" + return [self.open(unique_repo=unique_repos, **kwargs) for _ in range(count)] def reset(self): """Reset the counters (useful between tests).""" self._counter = 0 self._repo_counter = 0 - def create_without_tier( - self, - state: PRState, - number: Optional[int] = None, - repo: str = 'untracked/repo', - ) -> PullRequest: - """Create a PR without tier configuration (simulates untracked repo). - - These PRs should be completely ignored by tier calculations. 
- """ - if number is None: - number = self._next_number() - - return PullRequest( - number=number, - repository_full_name=repo, - uid=0, - hotkey='test_hotkey', - github_id='12345', - title=f'Untracked PR #{number}', - author_login='testuser', - merged_at=datetime.now(timezone.utc) if state == PRState.MERGED else None, - created_at=datetime.now(timezone.utc), - pr_state=state, - repository_tier_configuration=None, # No tier config! - ) - @pytest.fixture def pr_factory() -> PRBuilder: - """ - Factory fixture for creating mock PRs. - - Usage: - def test_something(pr_factory, bronze_config): - merged_pr = pr_factory.merged(tier=bronze_config) - closed_pr = pr_factory.closed(tier=bronze_config) - - # Create batches - merged_prs = pr_factory.merged_batch(tier=bronze_config, count=5) - """ + """Factory fixture for creating mock PRs.""" return PRBuilder() @@ -224,15 +126,7 @@ def test_something(pr_factory, bronze_config): @dataclass class MinerScenario: - """ - Represents a miner's PR history for testing. 
- - Attributes: - merged: List of merged PRs - closed: List of closed PRs - open: List of open PRs - description: Human-readable description of this scenario - """ + """Represents a miner's PR history for testing.""" merged: List[PullRequest] closed: List[PullRequest] @@ -245,411 +139,67 @@ def all_prs(self) -> List[PullRequest]: @pytest.fixture -def new_miner(pr_factory, bronze_config) -> MinerScenario: - """Brand new miner with no PRs (no tiers unlocked).""" +def new_miner(pr_factory) -> MinerScenario: + """Brand new miner with no PRs.""" pr_factory.reset() return MinerScenario(merged=[], closed=[], open=[], description='New miner with no history') @pytest.fixture -def bronze_miner(pr_factory, bronze_config) -> MinerScenario: - """Miner with Bronze unlocked (meets requirements with 100% credibility and qualified unique repos).""" - pr_factory.reset() - bronze_tier_config = TIERS[Tier.BRONZE] - required_repos = bronze_tier_config.required_unique_repos_count or 3 - return MinerScenario( - merged=pr_factory.merged_batch(tier=bronze_config, count=required_repos, unique_repos=True), - closed=[], - open=[], - description=f'Bronze miner: {required_repos} merged to unique repos = 100% credibility', - ) - - -@pytest.fixture -def silver_unlocked_miner(pr_factory, bronze_config, silver_config) -> MinerScenario: - """Miner who has unlocked Silver (Bronze and Silver requirements met with qualified unique repos).""" +def eligible_miner(pr_factory) -> MinerScenario: + """Miner who passes the eligibility gate (5+ valid PRs, 100% credibility).""" pr_factory.reset() - bronze_tier_config = TIERS[Tier.BRONZE] - silver_tier_config = TIERS[Tier.SILVER] - bronze_repos = bronze_tier_config.required_unique_repos_count or 3 - silver_repos = silver_tier_config.required_unique_repos_count or 3 - # Ensure enough token score per PR to meet Silver's total token score requirement - silver_token_per_pr = (silver_tier_config.required_min_token_score or 50.0) / silver_repos + 1.0 return 
MinerScenario( - merged=( - pr_factory.merged_batch(tier=bronze_config, count=bronze_repos, unique_repos=True) - + pr_factory.merged_batch( - tier=silver_config, count=silver_repos, unique_repos=True, token_score=silver_token_per_pr - ) - ), + merged=pr_factory.merged_batch(count=6, unique_repos=True, token_score=10.0), closed=[], open=[], - description='Silver miner: Bronze + Silver unlocked with 100% credibility and qualified repos', + description='Eligible miner: 6 valid merged PRs, 100% credibility', ) @pytest.fixture -def silver_threshold_miner(pr_factory, bronze_config, silver_config) -> MinerScenario: - """Miner exactly at Silver credibility threshold with qualified repos.""" +def ineligible_low_prs(pr_factory) -> MinerScenario: + """Miner with too few valid PRs (below MIN_VALID_MERGED_PRS).""" pr_factory.reset() - bronze_tier_config = TIERS[Tier.BRONZE] - silver_tier_config = TIERS[Tier.SILVER] - bronze_repos = bronze_tier_config.required_unique_repos_count or 3 - silver_repos = silver_tier_config.required_unique_repos_count or 3 - required_credibility = silver_tier_config.required_credibility - - # Calculate closed to be exactly at threshold - closed_count = int(silver_repos * (1 - required_credibility) / required_credibility) - # Ensure enough token score per PR to meet Silver's total token score requirement - silver_token_per_pr = (silver_tier_config.required_min_token_score or 50.0) / silver_repos + 1.0 - - return MinerScenario( - merged=( - pr_factory.merged_batch(tier=bronze_config, count=bronze_repos, unique_repos=True) - + pr_factory.merged_batch( - tier=silver_config, count=silver_repos, unique_repos=True, token_score=silver_token_per_pr - ) - ), - closed=pr_factory.closed_batch(tier=silver_config, count=closed_count, unique_repos=True), - open=[], - description=f'Silver threshold: {silver_repos} merged, {closed_count} closed = ~{required_credibility * 100}%', - ) - - -@pytest.fixture -def gold_unlocked_miner(pr_factory, bronze_config, 
silver_config, gold_config) -> MinerScenario: - """Miner who has unlocked Gold tier (all tiers unlocked with qualified repos).""" - pr_factory.reset() - bronze_tier_config = TIERS[Tier.BRONZE] - silver_tier_config = TIERS[Tier.SILVER] - gold_tier_config = TIERS[Tier.GOLD] - bronze_repos = bronze_tier_config.required_unique_repos_count or 3 - silver_repos = silver_tier_config.required_unique_repos_count or 3 - gold_repos = gold_tier_config.required_unique_repos_count or 3 - # Ensure enough token score per PR to meet each tier's requirements - silver_token_per_pr = (silver_tier_config.required_min_token_score or 50.0) / silver_repos + 1.0 - gold_token_per_pr = (gold_tier_config.required_min_token_score or 150.0) / gold_repos + 1.0 return MinerScenario( - merged=( - pr_factory.merged_batch(tier=bronze_config, count=bronze_repos, unique_repos=True) - + pr_factory.merged_batch( - tier=silver_config, count=silver_repos, unique_repos=True, token_score=silver_token_per_pr - ) - + pr_factory.merged_batch( - tier=gold_config, count=gold_repos, unique_repos=True, token_score=gold_token_per_pr - ) - ), + merged=pr_factory.merged_batch(count=3, unique_repos=True, token_score=10.0), closed=[], open=[], - description='Gold miner: All tiers unlocked with 100% credibility and qualified repos', - ) - - -@pytest.fixture -def gold_threshold_miner(pr_factory, bronze_config, silver_config, gold_config) -> MinerScenario: - """Miner exactly at Gold credibility threshold with qualified repos.""" - pr_factory.reset() - bronze_tier_config = TIERS[Tier.BRONZE] - silver_tier_config = TIERS[Tier.SILVER] - gold_tier_config = TIERS[Tier.GOLD] - bronze_repos = bronze_tier_config.required_unique_repos_count or 3 - silver_repos = silver_tier_config.required_unique_repos_count or 3 - gold_repos = gold_tier_config.required_unique_repos_count or 3 - required_credibility = gold_tier_config.required_credibility - - # Calculate closed to be exactly at threshold - closed_count = int(gold_repos * (1 - 
required_credibility) / required_credibility) - # Ensure enough token score per PR to meet each tier's requirements - silver_token_per_pr = (silver_tier_config.required_min_token_score or 50.0) / silver_repos + 1.0 - gold_token_per_pr = (gold_tier_config.required_min_token_score or 150.0) / gold_repos + 1.0 - - return MinerScenario( - merged=( - pr_factory.merged_batch(tier=bronze_config, count=bronze_repos, unique_repos=True) - + pr_factory.merged_batch( - tier=silver_config, count=silver_repos, unique_repos=True, token_score=silver_token_per_pr - ) - + pr_factory.merged_batch( - tier=gold_config, count=gold_repos, unique_repos=True, token_score=gold_token_per_pr - ) - ), - closed=pr_factory.closed_batch(tier=gold_config, count=closed_count, unique_repos=True), - open=[], - description=f'Gold threshold: {gold_repos} merged, {closed_count} closed = ~{required_credibility * 100}%', + description='Ineligible: only 3 valid merged PRs', ) -# ============================================================================ -# Demotion Scenario Fixtures -# ============================================================================ - - @pytest.fixture -def demoted_from_gold_miner(pr_factory, bronze_config, silver_config, gold_config) -> MinerScenario: - """Miner who was at Gold but got demoted (credibility dropped below requirement).""" +def ineligible_low_credibility(pr_factory) -> MinerScenario: + """Miner with enough PRs but credibility below 75%.""" pr_factory.reset() - bronze_tier_config = TIERS[Tier.BRONZE] - silver_tier_config = TIERS[Tier.SILVER] - gold_tier_config = TIERS[Tier.GOLD] - bronze_repos = bronze_tier_config.required_unique_repos_count or 3 - silver_repos = silver_tier_config.required_unique_repos_count or 3 - gold_repos = gold_tier_config.required_unique_repos_count or 3 - gold_cred_required = gold_tier_config.required_credibility - - # Calculate closed to drop below Gold credibility requirement - closed_count = int(gold_repos * (1 - gold_cred_required) / 
gold_cred_required) + 2 - # Ensure enough token score per PR to meet each tier's requirements - silver_token_per_pr = (silver_tier_config.required_min_token_score or 50.0) / silver_repos + 1.0 - gold_token_per_pr = (gold_tier_config.required_min_token_score or 150.0) / gold_repos + 1.0 - return MinerScenario( - merged=( - pr_factory.merged_batch(tier=bronze_config, count=bronze_repos, unique_repos=True) - + pr_factory.merged_batch( - tier=silver_config, count=silver_repos, unique_repos=True, token_score=silver_token_per_pr - ) - + pr_factory.merged_batch( - tier=gold_config, count=gold_repos, unique_repos=True, token_score=gold_token_per_pr - ) - ), - closed=pr_factory.closed_batch(tier=gold_config, count=closed_count, unique_repos=True), + merged=pr_factory.merged_batch(count=5, unique_repos=True, token_score=10.0), + closed=pr_factory.closed_batch(count=4, unique_repos=True), open=[], - description=f'Demoted from Gold: {gold_repos}/{gold_repos + closed_count} (below {gold_cred_required * 100}% threshold)', + description='Ineligible: 5/9 = 55.6% credibility (after mulligan: 5/8 = 62.5%)', ) @pytest.fixture -def demoted_from_silver_miner(pr_factory, bronze_config, silver_config) -> MinerScenario: - """Miner who was at Silver but got demoted (credibility dropped below requirement).""" +def miner_with_mulligan(pr_factory) -> MinerScenario: + """Miner who benefits from the mulligan (1 closed PR forgiven).""" pr_factory.reset() - bronze_tier_config = TIERS[Tier.BRONZE] - silver_tier_config = TIERS[Tier.SILVER] - bronze_repos = bronze_tier_config.required_unique_repos_count or 3 - silver_repos = silver_tier_config.required_unique_repos_count or 3 - silver_cred_required = silver_tier_config.required_credibility - - # Calculate closed to drop below Silver credibility requirement - closed_count = int(silver_repos * (1 - silver_cred_required) / silver_cred_required) + 2 - # Ensure enough token score per PR to meet Silver's requirements - silver_token_per_pr = 
(silver_tier_config.required_min_token_score or 50.0) / silver_repos + 1.0 - return MinerScenario( - merged=( - pr_factory.merged_batch(tier=bronze_config, count=bronze_repos, unique_repos=True) - + pr_factory.merged_batch( - tier=silver_config, count=silver_repos, unique_repos=True, token_score=silver_token_per_pr - ) - ), - closed=pr_factory.closed_batch(tier=silver_config, count=closed_count, unique_repos=True), + merged=pr_factory.merged_batch(count=5, unique_repos=True, token_score=10.0), + closed=pr_factory.closed_batch(count=1, unique_repos=True), open=[], - description=f'Demoted from Silver: {silver_repos}/{silver_repos + closed_count} (below {silver_cred_required * 100}% threshold)', + description='Miner with mulligan: 5/5 = 100% credibility (1 closed forgiven)', ) @pytest.fixture -def cascade_demoted_miner(pr_factory, bronze_config, silver_config, gold_config) -> MinerScenario: - """Miner with perfect Gold stats but Silver is locked (cascade demotion due to not enough qualified repos).""" +def miner_with_open_prs(pr_factory) -> MinerScenario: + """Miner with open PRs (for collateral testing).""" pr_factory.reset() - bronze_tier_config = TIERS[Tier.BRONZE] - silver_tier_config = TIERS[Tier.SILVER] - gold_tier_config = TIERS[Tier.GOLD] - bronze_repos = bronze_tier_config.required_unique_repos_count or 3 - silver_repos = silver_tier_config.required_unique_repos_count or 3 - gold_repos = gold_tier_config.required_unique_repos_count or 3 - gold_token_per_pr = (gold_tier_config.required_min_token_score or 150.0) / gold_repos + 1.0 - # Silver has low token scores - below the per-repo requirement - silver_low_token = (silver_tier_config.required_min_token_score_per_repo or 10.0) - 5.0 - return MinerScenario( - merged=( - pr_factory.merged_batch(tier=bronze_config, count=bronze_repos, unique_repos=True) - + pr_factory.merged_batch( - tier=silver_config, count=silver_repos, unique_repos=True, token_score=silver_low_token - ) # Low token score - doesn't qualify - + 
pr_factory.merged_batch( - tier=gold_config, count=gold_repos + 5, unique_repos=True, token_score=gold_token_per_pr - ) # Perfect Gold - ), + merged=pr_factory.merged_batch(count=5, unique_repos=True, token_score=10.0), closed=[], - open=[], - description='Cascade demotion: Silver locked (low token score repos) -> Gold locked despite 100%', + open=pr_factory.open_batch(count=3, unique_repos=True), + description='Miner with 3 open PRs', ) - - -# ============================================================================ -# Edge Case Fixtures -# ============================================================================ - - -@pytest.fixture -def spammer_miner(pr_factory, bronze_config, silver_config, gold_config) -> MinerScenario: - """Miner who spammed PRs that mostly got closed.""" - pr_factory.reset() - return MinerScenario( - merged=( - pr_factory.merged_batch(tier=bronze_config, count=5, unique_repos=True) - + pr_factory.merged_batch(tier=silver_config, count=5, unique_repos=True) - + pr_factory.merged_batch(tier=gold_config, count=6, unique_repos=True) - ), - closed=( - pr_factory.closed_batch(tier=bronze_config, count=20, unique_repos=True) - + pr_factory.closed_batch(tier=silver_config, count=20, unique_repos=True) - + pr_factory.closed_batch(tier=gold_config, count=20, unique_repos=True) - ), - open=[], - description='Spammer: lots of closed PRs destroying credibility', - ) - - -@pytest.fixture -def perfect_miner(pr_factory, bronze_config, silver_config, gold_config) -> MinerScenario: - """Miner with 100% credibility across all tiers and unique repos.""" - pr_factory.reset() - return MinerScenario( - merged=( - pr_factory.merged_batch(tier=bronze_config, count=5, unique_repos=True) - + pr_factory.merged_batch(tier=silver_config, count=5, unique_repos=True) - + pr_factory.merged_batch(tier=gold_config, count=10, unique_repos=True) - ), - closed=[], - open=[], - description='Perfect miner: 100% credibility everywhere with unique repos', - ) - - 
-@pytest.fixture -def mixed_performance_miner(pr_factory, bronze_config, silver_config, gold_config) -> MinerScenario: - """Miner with varying performance across tiers.""" - pr_factory.reset() - return MinerScenario( - merged=( - pr_factory.merged_batch(tier=bronze_config, count=9, unique_repos=True) # 90% - + pr_factory.merged_batch(tier=silver_config, count=11, unique_repos=True) # 55% - + pr_factory.merged_batch(tier=gold_config, count=6, unique_repos=True) # 60% - ), - closed=( - pr_factory.closed_batch(tier=bronze_config, count=1, unique_repos=True) - + pr_factory.closed_batch(tier=silver_config, count=9, unique_repos=True) - + pr_factory.closed_batch(tier=gold_config, count=4, unique_repos=True) - ), - open=[], - description='Mixed: Bronze 90%, Silver 55%, Gold 60% (locked)', - ) - - -@pytest.fixture -def miner_with_open_prs(pr_factory, bronze_config, silver_config) -> MinerScenario: - """Miner with some open PRs (for collateral testing) with unique repos.""" - pr_factory.reset() - return MinerScenario( - merged=pr_factory.merged_batch(tier=bronze_config, count=3, unique_repos=True), - closed=pr_factory.closed_batch(tier=bronze_config, count=1, unique_repos=True), - open=( - pr_factory.open_batch(tier=bronze_config, count=2, unique_repos=True) - + pr_factory.open_batch(tier=silver_config, count=3, unique_repos=True) - ), - description='Miner with 5 open PRs (for collateral testing) with unique repos', - ) - - -# ============================================================================ -# TierStats Fixtures -# ============================================================================ - - -@pytest.fixture -def empty_tier_stats() -> dict: - """Empty TierStats for all tiers.""" - return {tier: TierStats() for tier in Tier} - - -def _unlocked_bronze_stats() -> TierStats: - """Helper to create Bronze stats that meet unlock requirements (including qualified repos).""" - bronze_config = TIERS[Tier.BRONZE] - required_repos = 
bronze_config.required_unique_repos_count or 3 - token_per_repo = bronze_config.required_min_token_score_per_repo or 5.0 - total_token_score = required_repos * token_per_repo # Enough token score to meet requirements - return TierStats( - merged_count=required_repos, - closed_count=0, - unique_repo_contribution_count=required_repos, - qualified_unique_repo_count=required_repos, - token_score=total_token_score, - ) - - -def _unlocked_silver_stats() -> TierStats: - """Helper to create Silver stats that meet unlock requirements (including qualified repos).""" - silver_config = TIERS[Tier.SILVER] - required_repos = silver_config.required_unique_repos_count or 3 - token_per_repo = silver_config.required_min_token_score_per_repo or 10.0 - # Silver requires 50.0 total token score, so we ensure that's met - min_total = silver_config.required_min_token_score or 50.0 - total_token_score = max(required_repos * token_per_repo, min_total) - return TierStats( - merged_count=required_repos, - closed_count=0, - unique_repo_contribution_count=required_repos, - qualified_unique_repo_count=required_repos, - token_score=total_token_score, - ) - - -def _unlocked_gold_stats() -> TierStats: - """Helper to create Gold stats that meet unlock requirements (including qualified repos).""" - gold_config = TIERS[Tier.GOLD] - required_repos = gold_config.required_unique_repos_count or 3 - token_per_repo = gold_config.required_min_token_score_per_repo or 25.0 - # Gold requires 150.0 total token score, so we ensure that's met - min_total = gold_config.required_min_token_score or 150.0 - total_token_score = max(required_repos * token_per_repo, min_total) - return TierStats( - merged_count=required_repos, - closed_count=0, - unique_repo_contribution_count=required_repos, - qualified_unique_repo_count=required_repos, - token_score=total_token_score, - ) - - -@pytest.fixture -def silver_unlocked_stats() -> dict: - """TierStats where Silver is unlocked (Bronze must also be unlocked).""" - return { - 
Tier.BRONZE: _unlocked_bronze_stats(), - Tier.SILVER: _unlocked_silver_stats(), - Tier.GOLD: TierStats(), - } - - -@pytest.fixture -def gold_unlocked_stats() -> dict: - """TierStats where Gold is unlocked (Bronze and Silver must also be unlocked).""" - return { - Tier.BRONZE: _unlocked_bronze_stats(), - Tier.SILVER: _unlocked_silver_stats(), - Tier.GOLD: _unlocked_gold_stats(), - } - - -@pytest.fixture -def gold_locked_stats() -> dict: - """TierStats where Gold is locked (below credibility requirement).""" - gold_config = TIERS[Tier.GOLD] - required_repos = gold_config.required_unique_repos_count or 3 - required_credibility = gold_config.required_credibility - - # Calculate closed count to be just below credibility threshold - closed_count = int(required_repos * (1 - required_credibility) / required_credibility) + 1 - - return { - Tier.BRONZE: _unlocked_bronze_stats(), - Tier.SILVER: _unlocked_silver_stats(), - Tier.GOLD: TierStats( - merged_count=required_repos, - closed_count=closed_count, - unique_repo_contribution_count=required_repos, - qualified_unique_repo_count=required_repos, - token_score=gold_config.required_min_token_score or 150.0, - ), - } diff --git a/tests/validator/merge_predictions/conftest.py b/tests/validator/merge_predictions/conftest.py deleted file mode 100644 index 9e4dd65e..00000000 --- a/tests/validator/merge_predictions/conftest.py +++ /dev/null @@ -1,130 +0,0 @@ -# Entrius 2025 - -"""Shared fixtures for merge predictions tests.""" - -from datetime import datetime, timedelta, timezone -from unittest.mock import MagicMock - -import pytest -from bittensor.core.synapse import TerminalInfo - -from gittensor.synapses import PredictionSynapse -from gittensor.validator.merge_predictions.mp_storage import PredictionStorage -from gittensor.validator.merge_predictions.scoring import PrOutcome, PrPrediction - -# ============================================================================ -# Storage -# 
============================================================================ - - -@pytest.fixture -def mp_storage(tmp_path): - """Real SQLite-backed PredictionStorage in a temp directory.""" - return PredictionStorage(db_path=str(tmp_path / 'test.db')) - - -# ============================================================================ -# Time helpers -# ============================================================================ - - -@pytest.fixture -def base_times(): - """Spread of datetimes across a 30-day window for scoring tests.""" - pr_open = datetime(2025, 6, 1, tzinfo=timezone.utc) - return { - 'pr_open': pr_open, - 'peak_variance': pr_open + timedelta(days=10), - 'prediction_early': pr_open + timedelta(days=2), - 'prediction_mid': pr_open + timedelta(days=15), - 'prediction_late': pr_open + timedelta(days=28), - 'settlement': pr_open + timedelta(days=30), - } - - -# ============================================================================ -# Outcomes & Predictions -# ============================================================================ - - -@pytest.fixture -def sample_outcomes(base_times): - """4 PRs: #1 merged, #2-#4 non-merged.""" - pr_open = base_times['pr_open'] - return [ - PrOutcome(pr_number=1, outcome=1.0, pr_open_time=pr_open), - PrOutcome(pr_number=2, outcome=0.0, pr_open_time=pr_open), - PrOutcome(pr_number=3, outcome=0.0, pr_open_time=pr_open), - PrOutcome(pr_number=4, outcome=0.0, pr_open_time=pr_open), - ] - - -@pytest.fixture -def alice_predictions(base_times): - """Early + accurate miner: high on merged PR, low on others.""" - t = base_times['prediction_early'] - return [ - PrPrediction(pr_number=1, prediction=0.70, prediction_time=t, variance_at_prediction=0.05), - PrPrediction(pr_number=2, prediction=0.15, prediction_time=t, variance_at_prediction=0.05), - PrPrediction(pr_number=3, prediction=0.10, prediction_time=t, variance_at_prediction=0.05), - PrPrediction(pr_number=4, prediction=0.05, prediction_time=t, 
variance_at_prediction=0.05), - ] - - -@pytest.fixture -def dave_predictions(base_times): - """Spray-and-pray miner: uniform 0.25 across all PRs.""" - t = base_times['prediction_early'] - return [ - PrPrediction(pr_number=1, prediction=0.25, prediction_time=t, variance_at_prediction=0.05), - PrPrediction(pr_number=2, prediction=0.25, prediction_time=t, variance_at_prediction=0.05), - PrPrediction(pr_number=3, prediction=0.25, prediction_time=t, variance_at_prediction=0.05), - PrPrediction(pr_number=4, prediction=0.25, prediction_time=t, variance_at_prediction=0.05), - ] - - -# ============================================================================ -# Validator mock -# ============================================================================ - - -@pytest.fixture -def mock_validator(mp_storage): - """MagicMock validator with mp_storage, metagraph, and subtensor.""" - v = MagicMock() - v.mp_storage = mp_storage - - # metagraph with 3 registered hotkeys - v.metagraph.hotkeys = ['hk_alice', 'hk_bob', 'hk_charlie'] - v.metagraph.S = [100.0, 50.0, 25.0] - - v.subtensor = MagicMock() - return v - - -# ============================================================================ -# Synapse factory -# ============================================================================ - - -@pytest.fixture -def make_synapse(): - """Factory that builds a PredictionSynapse with configurable fields.""" - - def _make( - predictions=None, - issue_id=1, - repository='test/repo', - github_access_token='ghp_test123', - hotkey='hk_alice', - ): - synapse = PredictionSynapse( - predictions=predictions or {1: 0.5}, - issue_id=issue_id, - repository=repository, - github_access_token=github_access_token, - ) - synapse.dendrite = TerminalInfo(hotkey=hotkey) - return synapse - - return _make diff --git a/tests/validator/merge_predictions/test_merge_predictions.py b/tests/validator/merge_predictions/test_merge_predictions.py deleted file mode 100644 index f96757f6..00000000 --- 
a/tests/validator/merge_predictions/test_merge_predictions.py +++ /dev/null @@ -1,1022 +0,0 @@ -# Entrius 2025 - -"""Merge predictions test suite. - -Covers: storage, handler, scoring, validation, and settlement. - -Run: - pytest tests/validator/merge_predictions/ -v -""" - -import asyncio -from datetime import datetime, timedelta, timezone -from unittest.mock import MagicMock, patch - -import pytest - -from gittensor.constants import ( - PREDICTIONS_COOLDOWN_SECONDS, - PREDICTIONS_CORRECTNESS_EXPONENT, - PREDICTIONS_EMA_BETA, - PREDICTIONS_EMISSIONS_SHARE, - PREDICTIONS_MAX_CONSENSUS_BONUS, - PREDICTIONS_MAX_ORDER_BONUS, - PREDICTIONS_MAX_TIMELINESS_BONUS, - PREDICTIONS_TIMELINESS_EXPONENT, -) -from gittensor.validator.merge_predictions.scoring import ( - MinerIssueScore, - PrPrediction, - compute_merged_pr_order_ranks, - score_consensus_bonus, - score_correctness, - score_miner_issue, - score_order_bonus, - score_timeliness, - update_ema, -) -from gittensor.validator.merge_predictions.validation import validate_prediction_values - - -def _run(coro): - """Run an async coroutine synchronously (no pytest-asyncio needed).""" - return asyncio.run(coro) - - -# ============================================================================= -# 1. 
Storage -# ============================================================================= - - -class TestPredictionStorage: - """Tests for PredictionStorage (real SQLite, no mocking).""" - - def test_tables_created(self, mp_storage): - with mp_storage._get_connection() as conn: - tables = {r[0] for r in conn.execute("SELECT name FROM sqlite_master WHERE type='table'").fetchall()} - assert 'predictions' in tables - assert 'prediction_emas' in tables - assert 'settled_issues' in tables - - def test_store_and_retrieve_prediction(self, mp_storage): - mp_storage.store_prediction( - uid=0, - hotkey='hk', - github_id='gh1', - issue_id=1, - repository='r/r', - issue_number=10, - pr_number=10, - prediction=0.6, - variance_at_prediction=0.1, - ) - rows = mp_storage.get_predictions_for_issue(1) - assert len(rows) == 1 - assert rows[0]['prediction'] == pytest.approx(0.6) - assert rows[0]['pr_number'] == 10 - - def test_upsert_replaces_prediction(self, mp_storage): - kwargs = dict(uid=0, hotkey='hk', github_id='gh1', issue_id=1, repository='r/r', issue_number=10, pr_number=10) - mp_storage.store_prediction(**kwargs, prediction=0.3, variance_at_prediction=0.1) - mp_storage.store_prediction(**kwargs, prediction=0.8, variance_at_prediction=0.2) - rows = mp_storage.get_predictions_for_issue(1) - assert len(rows) == 1 - assert rows[0]['prediction'] == pytest.approx(0.8) - - def test_upsert_preserves_other_prs(self, mp_storage): - base = dict(uid=0, hotkey='hk', github_id='gh1', issue_id=1, repository='r/r', issue_number=10) - mp_storage.store_prediction(**base, pr_number=1, prediction=0.3, variance_at_prediction=0.0) - mp_storage.store_prediction(**base, pr_number=2, prediction=0.4, variance_at_prediction=0.0) - - # Update only PR #1 - mp_storage.store_prediction(**base, pr_number=1, prediction=0.5, variance_at_prediction=0.0) - - rows = mp_storage.get_predictions_for_issue(1) - by_pr = {r['pr_number']: r for r in rows} - assert by_pr[1]['prediction'] == pytest.approx(0.5) - assert 
by_pr[2]['prediction'] == pytest.approx(0.4) - - def test_miner_total_for_issue(self, mp_storage): - base = dict(uid=0, hotkey='hk', github_id='gh1', issue_id=1, repository='r/r', issue_number=10) - mp_storage.store_prediction(**base, pr_number=1, prediction=0.3, variance_at_prediction=0.0) - mp_storage.store_prediction(**base, pr_number=2, prediction=0.4, variance_at_prediction=0.0) - total = mp_storage.get_miner_total_for_issue(0, 'hk', 1) - assert total == pytest.approx(0.7) - - def test_miner_total_excludes_prs(self, mp_storage): - base = dict(uid=0, hotkey='hk', github_id='gh1', issue_id=1, repository='r/r', issue_number=10) - mp_storage.store_prediction(**base, pr_number=1, prediction=0.3, variance_at_prediction=0.0) - mp_storage.store_prediction(**base, pr_number=2, prediction=0.4, variance_at_prediction=0.0) - mp_storage.store_prediction(**base, pr_number=3, prediction=0.2, variance_at_prediction=0.0) - total = mp_storage.get_miner_total_for_issue(0, 'hk', 1, exclude_prs={2, 3}) - assert total == pytest.approx(0.3) - - def test_cooldown_active(self, mp_storage): - mp_storage.store_prediction( - uid=0, - hotkey='hk', - github_id='gh1', - issue_id=1, - repository='r/r', - issue_number=10, - pr_number=1, - prediction=0.5, - variance_at_prediction=0.0, - ) - remaining = mp_storage.check_cooldown(0, 'hk', 1, 1) - assert remaining is not None - assert remaining > 0 - - def test_cooldown_expired(self, mp_storage): - """Store a prediction with a timestamp far in the past, then verify cooldown is None.""" - # Insert directly with an old timestamp to avoid patching datetime - old_ts = (datetime.now(timezone.utc) - timedelta(seconds=PREDICTIONS_COOLDOWN_SECONDS + 60)).isoformat() - with mp_storage._get_connection() as conn: - conn.execute( - 'INSERT INTO predictions (uid, hotkey, github_id, issue_id, repository, issue_number, pr_number, prediction, timestamp, variance_at_prediction) ' - 'VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?)', - (0, 'hk', 'gh1', 1, 'r/r', 10, 1, 0.5, 
old_ts, 0.0), - ) - conn.commit() - - remaining = mp_storage.check_cooldown(0, 'hk', 1, 1) - assert remaining is None - - def test_cooldown_no_prior_prediction(self, mp_storage): - assert mp_storage.check_cooldown(0, 'hk', 1, 1) is None - - def test_compute_variance_single_miner(self, mp_storage): - mp_storage.store_prediction( - uid=0, - hotkey='hk', - github_id='gh1', - issue_id=1, - repository='r/r', - issue_number=10, - pr_number=1, - prediction=0.5, - variance_at_prediction=0.0, - ) - assert mp_storage.compute_current_variance(1) == pytest.approx(0.0) - - def test_compute_variance_disagreement(self, mp_storage): - base = dict(github_id='gh1', issue_id=1, repository='r/r', issue_number=10, pr_number=1) - mp_storage.store_prediction(uid=0, hotkey='hk0', **base, prediction=0.9, variance_at_prediction=0.0) - mp_storage.store_prediction(uid=1, hotkey='hk1', **base, prediction=0.1, variance_at_prediction=0.0) - var = mp_storage.compute_current_variance(1) - # var((0.9,0.1)) = mean(x^2) - mean(x)^2 = 0.41 - 0.25 = 0.16 - assert var > 0 - - def test_peak_variance_time(self, mp_storage): - base = dict(uid=0, hotkey='hk', github_id='gh1', issue_id=1, repository='r/r', issue_number=10) - mp_storage.store_prediction(**base, pr_number=1, prediction=0.5, variance_at_prediction=0.1) - mp_storage.store_prediction(**base, pr_number=2, prediction=0.5, variance_at_prediction=0.9) - peak = mp_storage.get_peak_variance_time(1) - assert peak is not None - - def test_ema_default_zero(self, mp_storage): - assert mp_storage.get_ema('unknown_github_id') == 0.0 - - def test_ema_upsert_increments_rounds(self, mp_storage): - mp_storage.update_ema('gh1', 0.5) - mp_storage.update_ema('gh1', 0.6) - emas = mp_storage.get_all_emas() - by_id = {e['github_id']: e for e in emas} - assert by_id['gh1']['rounds'] == 2 - - def test_get_all_emas(self, mp_storage): - mp_storage.update_ema('gh1', 0.5) - mp_storage.update_ema('gh2', 0.3) - emas = mp_storage.get_all_emas() - ids = {e['github_id'] for e in 
emas} - assert ids == {'gh1', 'gh2'} - - def test_delete_predictions_for_issue(self, mp_storage): - base = dict(uid=0, hotkey='hk', github_id='gh1', issue_id=1, repository='r/r', issue_number=10) - mp_storage.store_prediction(**base, pr_number=1, prediction=0.3, variance_at_prediction=0.0) - mp_storage.store_prediction(**base, pr_number=2, prediction=0.4, variance_at_prediction=0.0) - deleted = mp_storage.delete_predictions_for_issue(1) - assert deleted == 2 - assert mp_storage.get_predictions_for_issue(1) == [] - - def test_delete_predictions_no_rows(self, mp_storage): - deleted = mp_storage.delete_predictions_for_issue(999) - assert deleted == 0 - - def test_mark_and_check_settled(self, mp_storage): - mp_storage.mark_issue_settled(42, 'scored', merged_pr_number=7) - assert mp_storage.is_issue_settled(42) is True - - def test_is_issue_settled_false(self, mp_storage): - assert mp_storage.is_issue_settled(999) is False - - def test_mark_settled_voided(self, mp_storage): - mp_storage.mark_issue_settled(10, 'voided') - assert mp_storage.is_issue_settled(10) is True - - def test_mark_settled_idempotent(self, mp_storage): - mp_storage.mark_issue_settled(42, 'scored', merged_pr_number=7) - mp_storage.mark_issue_settled(42, 'scored', merged_pr_number=7) - assert mp_storage.is_issue_settled(42) is True - - -# ============================================================================= -# 2. 
Handler -# ============================================================================= - - -class TestPredictionHandler: - """Tests for handle_prediction, blacklist_prediction, priority_prediction.""" - - @patch('gittensor.validator.merge_predictions.handler.validate_prediction_values', return_value=None) - @patch('gittensor.validator.merge_predictions.handler.validate_github_credentials', return_value=('gh_alice', None)) - @patch('gittensor.validator.merge_predictions.handler.check_prs_open', return_value=(None, {1})) - @patch( - 'gittensor.validator.merge_predictions.handler.check_issue_active', - return_value=(None, MagicMock(issue_number=10)), - ) - def test_successful_prediction_stored(self, _cia, _cpo, _vgc, _vpv, mock_validator, make_synapse): - from gittensor.validator.merge_predictions.handler import handle_prediction - - synapse = make_synapse(predictions={1: 0.5}, hotkey='hk_alice') - result = _run(handle_prediction(mock_validator, synapse)) - assert result.accepted is True - rows = mock_validator.mp_storage.get_predictions_for_issue(1) - assert len(rows) == 1 - - @patch('gittensor.validator.merge_predictions.handler.check_issue_active', return_value=('Issue not found', None)) - def test_reject_inactive_issue(self, _cia, mock_validator, make_synapse): - from gittensor.validator.merge_predictions.handler import handle_prediction - - synapse = make_synapse(hotkey='hk_alice') - result = _run(handle_prediction(mock_validator, synapse)) - assert result.accepted is False - assert 'Issue not found' in result.rejection_reason - - @patch('gittensor.validator.merge_predictions.handler.check_prs_open', return_value=('PR #1 is not open', set())) - @patch( - 'gittensor.validator.merge_predictions.handler.check_issue_active', - return_value=(None, MagicMock(issue_number=10)), - ) - def test_reject_closed_pr(self, _cia, _cpo, mock_validator, make_synapse): - from gittensor.validator.merge_predictions.handler import handle_prediction - - synapse = 
make_synapse(hotkey='hk_alice') - result = _run(handle_prediction(mock_validator, synapse)) - assert result.accepted is False - assert 'not open' in result.rejection_reason - - @patch('gittensor.validator.merge_predictions.handler.validate_github_credentials', return_value=(None, 'Bad PAT')) - @patch('gittensor.validator.merge_predictions.handler.check_prs_open', return_value=(None, {1})) - @patch( - 'gittensor.validator.merge_predictions.handler.check_issue_active', - return_value=(None, MagicMock(issue_number=10)), - ) - def test_reject_invalid_github_creds(self, _cia, _cpo, _vgc, mock_validator, make_synapse): - from gittensor.validator.merge_predictions.handler import handle_prediction - - synapse = make_synapse(hotkey='hk_alice') - result = _run(handle_prediction(mock_validator, synapse)) - assert result.accepted is False - assert 'Bad PAT' in result.rejection_reason - - @patch('gittensor.validator.merge_predictions.handler.validate_prediction_values', return_value='Values bad') - @patch('gittensor.validator.merge_predictions.handler.validate_github_credentials', return_value=('gh_alice', None)) - @patch('gittensor.validator.merge_predictions.handler.check_prs_open', return_value=(None, {1})) - @patch( - 'gittensor.validator.merge_predictions.handler.check_issue_active', - return_value=(None, MagicMock(issue_number=10)), - ) - def test_reject_invalid_values(self, _cia, _cpo, _vgc, _vpv, mock_validator, make_synapse): - from gittensor.validator.merge_predictions.handler import handle_prediction - - synapse = make_synapse(hotkey='hk_alice') - result = _run(handle_prediction(mock_validator, synapse)) - assert result.accepted is False - assert 'Values bad' in result.rejection_reason - - @patch('gittensor.validator.merge_predictions.handler.validate_prediction_values', return_value=None) - @patch('gittensor.validator.merge_predictions.handler.validate_github_credentials', return_value=('gh_alice', None)) - 
@patch('gittensor.validator.merge_predictions.handler.check_prs_open', return_value=(None, {1})) - @patch( - 'gittensor.validator.merge_predictions.handler.check_issue_active', - return_value=(None, MagicMock(issue_number=10)), - ) - def test_reject_cooldown(self, _cia, _cpo, _vgc, _vpv, mock_validator, make_synapse): - from gittensor.validator.merge_predictions.handler import handle_prediction - - # First prediction succeeds - s1 = make_synapse(predictions={1: 0.3}, hotkey='hk_alice') - _run(handle_prediction(mock_validator, s1)) - - # Immediate re-prediction hits cooldown - s2 = make_synapse(predictions={1: 0.4}, hotkey='hk_alice') - result = _run(handle_prediction(mock_validator, s2)) - assert result.accepted is False - assert 'cooldown' in result.rejection_reason - - @patch('gittensor.validator.merge_predictions.handler.validate_prediction_values', return_value=None) - @patch('gittensor.validator.merge_predictions.handler.validate_github_credentials', return_value=('gh_alice', None)) - @patch('gittensor.validator.merge_predictions.handler.check_prs_open', return_value=(None, {1, 2})) - @patch( - 'gittensor.validator.merge_predictions.handler.check_issue_active', - return_value=(None, MagicMock(issue_number=10)), - ) - def test_reject_total_exceeds_one(self, _cia, _cpo, _vgc, _vpv, mock_validator, make_synapse): - from gittensor.validator.merge_predictions.handler import handle_prediction - - # Seed existing prediction via storage directly to avoid cooldown - mock_validator.mp_storage.store_prediction( - uid=0, - hotkey='hk_alice', - github_id='gh_alice', - issue_id=1, - repository='test/repo', - issue_number=10, - pr_number=1, - prediction=0.8, - variance_at_prediction=0.0, - ) - - # New prediction on different PR would push total > 1.0 - s = make_synapse(predictions={2: 0.5}, hotkey='hk_alice') - result = _run(handle_prediction(mock_validator, s)) - assert result.accepted is False - assert 'exceeds 1.0' in result.rejection_reason - - def 
test_blacklist_unregistered_hotkey(self, mock_validator, make_synapse): - from gittensor.validator.merge_predictions.handler import blacklist_prediction - - synapse = make_synapse(hotkey='hk_unknown') - is_blacklisted, reason = _run(blacklist_prediction(mock_validator, synapse)) - assert is_blacklisted is True - assert 'Unregistered' in reason - - def test_blacklist_allows_registered(self, mock_validator, make_synapse): - from gittensor.validator.merge_predictions.handler import blacklist_prediction - - synapse = make_synapse(hotkey='hk_alice') - is_blacklisted, _ = _run(blacklist_prediction(mock_validator, synapse)) - assert is_blacklisted is False - - def test_priority_by_stake(self, mock_validator, make_synapse): - from gittensor.validator.merge_predictions.handler import priority_prediction - - synapse = make_synapse(hotkey='hk_alice') - priority = _run(priority_prediction(mock_validator, synapse)) - assert priority == pytest.approx(100.0) - - -# ============================================================================= -# 3. 
Scoring -# ============================================================================= - - -class TestPredictionScoring: - """Pure function tests for scoring math.""" - - # -- Correctness -- - - def test_correctness_merged_pr(self): - result = score_correctness(0.9, 1.0) - assert result == pytest.approx(0.9**PREDICTIONS_CORRECTNESS_EXPONENT) - - def test_correctness_non_merged_pr(self): - result = score_correctness(0.1, 0.0) - assert result == pytest.approx(0.9**PREDICTIONS_CORRECTNESS_EXPONENT) - - def test_correctness_wrong_prediction(self): - result = score_correctness(0.3, 1.0) - assert result == pytest.approx(0.3**PREDICTIONS_CORRECTNESS_EXPONENT) - - def test_correctness_uniform_spray(self): - result = score_correctness(0.25, 1.0) - assert result == pytest.approx(0.25**PREDICTIONS_CORRECTNESS_EXPONENT) - - # -- Timeliness -- - - def test_timeliness_at_pr_open(self, base_times): - result = score_timeliness(base_times['pr_open'], base_times['settlement'], base_times['pr_open']) - assert result == pytest.approx(PREDICTIONS_MAX_TIMELINESS_BONUS) - - def test_timeliness_at_settlement(self, base_times): - result = score_timeliness(base_times['settlement'], base_times['settlement'], base_times['pr_open']) - assert result == pytest.approx(0.0) - - def test_timeliness_midpoint(self, base_times): - midpoint = base_times['pr_open'] + timedelta(days=15) - result = score_timeliness(midpoint, base_times['settlement'], base_times['pr_open']) - expected = PREDICTIONS_MAX_TIMELINESS_BONUS * (0.5**PREDICTIONS_TIMELINESS_EXPONENT) - assert result == pytest.approx(expected) - - def test_timeliness_zero_window(self): - t = datetime(2025, 6, 1, tzinfo=timezone.utc) - assert score_timeliness(t, t, t) == 0.0 - - # -- Consensus -- - - def test_consensus_before_peak(self, base_times): - result = score_consensus_bonus( - base_times['prediction_early'], base_times['peak_variance'], base_times['settlement'] - ) - assert result == pytest.approx(PREDICTIONS_MAX_CONSENSUS_BONUS) - - def 
test_consensus_at_peak(self, base_times): - result = score_consensus_bonus( - base_times['peak_variance'], base_times['peak_variance'], base_times['settlement'] - ) - assert result == pytest.approx(PREDICTIONS_MAX_CONSENSUS_BONUS) - - def test_consensus_after_peak_midway(self, base_times): - peak = base_times['peak_variance'] - settle = base_times['settlement'] - mid = peak + (settle - peak) / 2 - result = score_consensus_bonus(mid, peak, settle) - assert result == pytest.approx(PREDICTIONS_MAX_CONSENSUS_BONUS * 0.5) - - def test_consensus_at_settlement(self, base_times): - result = score_consensus_bonus(base_times['settlement'], base_times['peak_variance'], base_times['settlement']) - assert result == pytest.approx(0.0) - - # -- Order -- - - def test_order_rank_1(self): - assert score_order_bonus(1) == pytest.approx(PREDICTIONS_MAX_ORDER_BONUS) - - def test_order_rank_2(self): - assert score_order_bonus(2) == pytest.approx(PREDICTIONS_MAX_ORDER_BONUS / 2) - - def test_order_rank_0_unqualified(self): - assert score_order_bonus(0) == 0.0 - - def test_compute_order_ranks_filters_below_threshold(self): - preds = { - 0: [ - PrPrediction( - pr_number=1, - prediction=0.5, - prediction_time=datetime(2025, 6, 1, tzinfo=timezone.utc), - variance_at_prediction=0.0, - ) - ], - 1: [ - PrPrediction( - pr_number=1, - prediction=0.9, - prediction_time=datetime(2025, 6, 2, tzinfo=timezone.utc), - variance_at_prediction=0.0, - ) - ], - } - ranks = compute_merged_pr_order_ranks(preds, merged_pr_number=1) - assert 0 not in ranks - assert ranks[1] == 1 - - def test_compute_order_ranks_sorts_by_time(self): - t1 = datetime(2025, 6, 1, tzinfo=timezone.utc) - t2 = datetime(2025, 6, 2, tzinfo=timezone.utc) - preds = { - 0: [PrPrediction(pr_number=1, prediction=0.9, prediction_time=t2, variance_at_prediction=0.0)], - 1: [PrPrediction(pr_number=1, prediction=0.8, prediction_time=t1, variance_at_prediction=0.0)], - } - ranks = compute_merged_pr_order_ranks(preds, merged_pr_number=1) - assert 
ranks[1] == 1 # earlier - assert ranks[0] == 2 - - # -- Aggregation: score_miner_issue -- - - def test_score_miner_issue_weighted_mean(self, base_times, sample_outcomes): - """Merged PR gets weight=N in the issue score (N = total PRs).""" - t = base_times['prediction_early'] - preds = [ - PrPrediction(pr_number=1, prediction=0.9, prediction_time=t, variance_at_prediction=0.05), - PrPrediction(pr_number=2, prediction=0.05, prediction_time=t, variance_at_prediction=0.05), - PrPrediction(pr_number=3, prediction=0.03, prediction_time=t, variance_at_prediction=0.05), - PrPrediction(pr_number=4, prediction=0.02, prediction_time=t, variance_at_prediction=0.05), - ] - result = score_miner_issue( - uid=0, - predictions=preds, - outcomes=sample_outcomes, - settlement_time=base_times['settlement'], - peak_variance_time=base_times['peak_variance'], - merged_pr_order_ranks={0: 1}, - ) - assert isinstance(result, MinerIssueScore) - assert result.issue_score > 0 - merged_score = next(ps for ps in result.pr_scores if ps.pr_number == 1) - assert merged_score.score > 0 - - # -- EMA -- - - def test_update_ema(self): - result = update_ema(current_round_score=1.0, previous_ema=0.0) - expected = PREDICTIONS_EMA_BETA * 1.0 + (1.0 - PREDICTIONS_EMA_BETA) * 0.0 - assert result == pytest.approx(expected) - - -# ============================================================================= -# 4. 
Top-K reward distribution (build_prediction_ema_rewards) -# ============================================================================= - - -def _make_mock_validator(ema_records: list[dict]) -> MagicMock: - """Create a mock validator with mp_storage returning given EMA records.""" - validator = MagicMock() - validator.mp_storage.get_all_emas.return_value = ema_records - return validator - - -def _make_evaluations(uid_to_github_id: dict[int, str]) -> dict: - """Create mock miner evaluations mapping uid -> github_id.""" - evaluations = {} - for uid, github_id in uid_to_github_id.items(): - ev = MagicMock() - ev.github_id = github_id - evaluations[uid] = ev - return evaluations - - -class TestBuildPredictionEmaRewards: - """Tests for the top-K reward distribution integrated with validator state.""" - - def _call(self, validator, miner_uids, evaluations): - from gittensor.validator.forward import build_prediction_ema_rewards - - return build_prediction_ema_rewards(validator, miner_uids, evaluations) - - def test_standard_top3_split(self): - """3+ miners with positive EMA -> 50/35/15 split.""" - emas = [ - {'github_id': 'a', 'ema_score': 0.9, 'rounds': 10}, - {'github_id': 'b', 'ema_score': 0.7, 'rounds': 8}, - {'github_id': 'c', 'ema_score': 0.5, 'rounds': 6}, - {'github_id': 'd', 'ema_score': 0.3, 'rounds': 4}, - ] - validator = _make_mock_validator(emas) - uids = {1, 2, 3, 4} - evals = _make_evaluations({1: 'a', 2: 'b', 3: 'c', 4: 'd'}) - - rewards = self._call(validator, uids, evals) - sorted_uids = sorted(uids) - - assert rewards[sorted_uids.index(1)] == pytest.approx(0.50 * PREDICTIONS_EMISSIONS_SHARE) - assert rewards[sorted_uids.index(2)] == pytest.approx(0.35 * PREDICTIONS_EMISSIONS_SHARE) - assert rewards[sorted_uids.index(3)] == pytest.approx(0.15 * PREDICTIONS_EMISSIONS_SHARE) - assert rewards[sorted_uids.index(4)] == 0.0 - - def test_two_miners_only(self): - """Only 2 miners with positive EMA -> 50% and 35%, rest unallocated.""" - emas = [ - {'github_id': 
'a', 'ema_score': 0.8, 'rounds': 5}, - {'github_id': 'b', 'ema_score': 0.4, 'rounds': 3}, - ] - validator = _make_mock_validator(emas) - uids = {1, 2, 3} - evals = _make_evaluations({1: 'a', 2: 'b', 3: '0'}) - - rewards = self._call(validator, uids, evals) - sorted_uids = sorted(uids) - - assert rewards[sorted_uids.index(1)] == pytest.approx(0.50 * PREDICTIONS_EMISSIONS_SHARE) - assert rewards[sorted_uids.index(2)] == pytest.approx(0.35 * PREDICTIONS_EMISSIONS_SHARE) - assert rewards[sorted_uids.index(3)] == 0.0 - assert rewards.sum() < PREDICTIONS_EMISSIONS_SHARE - - def test_single_miner(self): - """Single miner -> receives 50%, rest unallocated.""" - emas = [ - {'github_id': 'a', 'ema_score': 0.6, 'rounds': 2}, - ] - validator = _make_mock_validator(emas) - uids = {1, 2} - evals = _make_evaluations({1: 'a', 2: '0'}) - - rewards = self._call(validator, uids, evals) - sorted_uids = sorted(uids) - - assert rewards[sorted_uids.index(1)] == pytest.approx(0.50 * PREDICTIONS_EMISSIONS_SHARE) - assert rewards[sorted_uids.index(2)] == 0.0 - - def test_no_positive_ema(self): - """No miners with positive EMA -> all zeros.""" - emas = [ - {'github_id': 'a', 'ema_score': 0.0, 'rounds': 1}, - {'github_id': 'b', 'ema_score': -0.1, 'rounds': 1}, - ] - validator = _make_mock_validator(emas) - uids = {1, 2} - evals = _make_evaluations({1: 'a', 2: 'b'}) - - rewards = self._call(validator, uids, evals) - assert rewards.sum() == 0.0 - - def test_no_emas_at_all(self): - """Empty EMA table -> all zeros.""" - validator = _make_mock_validator([]) - uids = {1, 2} - evals = _make_evaluations({1: 'a', 2: 'b'}) - - rewards = self._call(validator, uids, evals) - assert rewards.sum() == 0.0 - - def test_tie_broken_by_rounds(self): - """Equal EMA scores -> higher rounds count wins.""" - emas = [ - {'github_id': 'a', 'ema_score': 0.5, 'rounds': 3}, - {'github_id': 'b', 'ema_score': 0.5, 'rounds': 10}, - {'github_id': 'c', 'ema_score': 0.5, 'rounds': 7}, - ] - validator = 
_make_mock_validator(emas) - uids = {1, 2, 3} - evals = _make_evaluations({1: 'a', 2: 'b', 3: 'c'}) - - rewards = self._call(validator, uids, evals) - sorted_uids = sorted(uids) - - assert rewards[sorted_uids.index(2)] == pytest.approx(0.50 * PREDICTIONS_EMISSIONS_SHARE) - assert rewards[sorted_uids.index(3)] == pytest.approx(0.35 * PREDICTIONS_EMISSIONS_SHARE) - assert rewards[sorted_uids.index(1)] == pytest.approx(0.15 * PREDICTIONS_EMISSIONS_SHARE) - - def test_deregistered_miner_excluded(self): - """Miner with EMA but no evaluation entry (deregistered) is excluded.""" - emas = [ - {'github_id': 'a', 'ema_score': 0.9, 'rounds': 10}, - {'github_id': 'orphan', 'ema_score': 0.8, 'rounds': 8}, - {'github_id': 'c', 'ema_score': 0.5, 'rounds': 6}, - ] - validator = _make_mock_validator(emas) - uids = {1, 3} - evals = _make_evaluations({1: 'a', 3: 'c'}) - - rewards = self._call(validator, uids, evals) - sorted_uids = sorted(uids) - - assert rewards[sorted_uids.index(1)] == pytest.approx(0.50 * PREDICTIONS_EMISSIONS_SHARE) - assert rewards[sorted_uids.index(3)] == pytest.approx(0.35 * PREDICTIONS_EMISSIONS_SHARE) - - def test_total_never_exceeds_emission_share(self): - """Total prediction rewards must never exceed PREDICTIONS_EMISSIONS_SHARE.""" - emas = [{'github_id': str(i), 'ema_score': 1.0 - i * 0.01, 'rounds': 100 - i} for i in range(20)] - validator = _make_mock_validator(emas) - uids = set(range(20)) - evals = _make_evaluations({i: str(i) for i in range(20)}) - - rewards = self._call(validator, uids, evals) - assert rewards.sum() == pytest.approx(PREDICTIONS_EMISSIONS_SHARE) - - -# ============================================================================= -# 5. 
Validation -# ============================================================================= - - -class TestValidation: - """Pure function tests for validate_prediction_values.""" - - def test_valid_predictions(self): - assert validate_prediction_values({1: 0.5, 2: 0.3}) is None - - def test_empty_predictions(self): - result = validate_prediction_values({}) - assert result is not None - assert 'Empty' in result - - def test_negative_pr_number(self): - result = validate_prediction_values({-1: 0.5}) - assert result is not None - assert 'Invalid PR number' in result - - def test_value_out_of_range(self): - result = validate_prediction_values({1: 1.5}) - assert result is not None - assert 'out of range' in result - - def test_total_exceeds_one(self): - result = validate_prediction_values({1: 0.6, 2: 0.5}) - assert result is not None - assert 'exceeds 1.0' in result - - -# ============================================================================= -# 6. Settlement -# ============================================================================= - - -class TestSettlement: - """Tests for merge_predictions() settlement orchestrator. - - Settlement now queries COMPLETED and CANCELLED issues from the contract - (not ACTIVE). Predictions are deleted after settlement as the "settled" marker. 
- """ - - def _seed_predictions(self, mp_storage, uid, hotkey, github_id, issue_id, preds, issue_number=10): - """Helper: store a set of predictions for a miner.""" - for pr_num, value in preds.items(): - mp_storage.store_prediction( - uid=uid, - hotkey=hotkey, - github_id=github_id, - issue_id=issue_id, - repository='test/repo', - issue_number=issue_number, - pr_number=pr_num, - prediction=value, - variance_at_prediction=0.05, - ) - - def _make_contract_issue(self, issue_id=1, repo='test/repo', issue_number=10): - issue = MagicMock() - issue.id = issue_id - issue.repository_full_name = repo - issue.issue_number = issue_number - return issue - - def _setup_contract_mock(self, MockContract, completed=None, cancelled=None): - """Configure the contract mock to return different issues per status.""" - from gittensor.validator.issue_competitions.contract_client import IssueStatus - - def get_issues_side_effect(status): - if status == IssueStatus.COMPLETED: - return completed or [] - elif status == IssueStatus.CANCELLED: - return cancelled or [] - return [] - - MockContract.return_value.get_issues_by_status.side_effect = get_issues_side_effect - - @patch('gittensor.validator.merge_predictions.settlement.get_pr_open_times') - @patch('gittensor.validator.merge_predictions.settlement.check_github_issue_closed') - @patch('gittensor.validator.merge_predictions.settlement.get_contract_address', return_value='5Faddr') - @patch('gittensor.validator.merge_predictions.settlement.GITTENSOR_VALIDATOR_PAT', 'ghp_test') - @patch('gittensor.validator.merge_predictions.settlement.IssueCompetitionContractClient') - def test_settle_completed_issue_updates_ema( - self, MockContract, _gca, mock_check_closed, mock_pr_times, mock_validator - ): - from gittensor.validator.merge_predictions.settlement import merge_predictions - - pr_open_time = datetime(2025, 6, 1, tzinfo=timezone.utc) - - contract_issue = self._make_contract_issue() - self._setup_contract_mock(MockContract, 
completed=[contract_issue]) - - mock_check_closed.return_value = {'is_closed': True, 'pr_number': 1} - mock_pr_times.return_value = {1: pr_open_time, 2: pr_open_time} - - self._seed_predictions( - mock_validator.mp_storage, - uid=0, - hotkey='hk_alice', - github_id='gh_alice', - issue_id=1, - preds={1: 0.7, 2: 0.2}, - ) - - _run(merge_predictions(mock_validator, {})) - - ema = mock_validator.mp_storage.get_ema('gh_alice') - assert ema > 0 - # Predictions deleted after settlement - assert mock_validator.mp_storage.get_predictions_for_issue(1) == [] - - @patch('gittensor.validator.merge_predictions.settlement.get_pr_open_times') - @patch('gittensor.validator.merge_predictions.settlement.check_github_issue_closed') - @patch('gittensor.validator.merge_predictions.settlement.get_contract_address', return_value='5Faddr') - @patch('gittensor.validator.merge_predictions.settlement.GITTENSOR_VALIDATOR_PAT', 'ghp_test') - @patch('gittensor.validator.merge_predictions.settlement.IssueCompetitionContractClient') - def test_settle_multiple_completed_issues( - self, MockContract, _gca, mock_check_closed, mock_pr_times, mock_validator - ): - from gittensor.validator.merge_predictions.settlement import merge_predictions - - pr_open_time = datetime(2025, 6, 1, tzinfo=timezone.utc) - - issue1 = self._make_contract_issue(issue_id=1, issue_number=10) - issue2 = self._make_contract_issue(issue_id=2, issue_number=20) - self._setup_contract_mock(MockContract, completed=[issue1, issue2]) - - mock_check_closed.return_value = {'is_closed': True, 'pr_number': 1} - mock_pr_times.return_value = {1: pr_open_time} - - self._seed_predictions( - mock_validator.mp_storage, uid=0, hotkey='hk_alice', github_id='gh_alice', issue_id=1, preds={1: 0.8} - ) - self._seed_predictions( - mock_validator.mp_storage, uid=0, hotkey='hk_alice', github_id='gh_alice', issue_id=2, preds={1: 0.9} - ) - - _run(merge_predictions(mock_validator, {})) - - emas = mock_validator.mp_storage.get_all_emas() - gh_alice = 
next(e for e in emas if e['github_id'] == 'gh_alice') - assert gh_alice['rounds'] == 2 - # Both issues' predictions deleted - assert mock_validator.mp_storage.get_predictions_for_issue(1) == [] - assert mock_validator.mp_storage.get_predictions_for_issue(2) == [] - - @patch('gittensor.validator.merge_predictions.settlement.check_github_issue_closed') - @patch('gittensor.validator.merge_predictions.settlement.get_contract_address', return_value='5Faddr') - @patch('gittensor.validator.merge_predictions.settlement.GITTENSOR_VALIDATOR_PAT', 'ghp_test') - @patch('gittensor.validator.merge_predictions.settlement.IssueCompetitionContractClient') - def test_cancelled_issue_no_merge_no_ema_impact(self, MockContract, _gca, mock_check_closed, mock_validator): - """Cancelled issue with no merged PR: predictions voided, no EMA impact.""" - from gittensor.validator.merge_predictions.settlement import merge_predictions - - contract_issue = self._make_contract_issue() - self._setup_contract_mock(MockContract, cancelled=[contract_issue]) - - mock_check_closed.return_value = {'is_closed': True, 'pr_number': None} - - self._seed_predictions( - mock_validator.mp_storage, uid=0, hotkey='hk_alice', github_id='gh_alice', issue_id=1, preds={1: 0.8} - ) - - _run(merge_predictions(mock_validator, {})) - - assert mock_validator.mp_storage.get_ema('gh_alice') == 0.0 - # Predictions deleted even though voided - assert mock_validator.mp_storage.get_predictions_for_issue(1) == [] - - @patch('gittensor.validator.merge_predictions.settlement.get_pr_open_times') - @patch('gittensor.validator.merge_predictions.settlement.check_github_issue_closed') - @patch('gittensor.validator.merge_predictions.settlement.get_contract_address', return_value='5Faddr') - @patch('gittensor.validator.merge_predictions.settlement.GITTENSOR_VALIDATOR_PAT', 'ghp_test') - @patch('gittensor.validator.merge_predictions.settlement.IssueCompetitionContractClient') - def test_cancelled_issue_with_merge_still_scored( - self, 
MockContract, _gca, mock_check_closed, mock_pr_times, mock_validator - ): - """Cancelled but PR was merged (solver not in subnet): predictions still scored.""" - from gittensor.validator.merge_predictions.settlement import merge_predictions - - pr_open_time = datetime(2025, 6, 1, tzinfo=timezone.utc) - - contract_issue = self._make_contract_issue() - self._setup_contract_mock(MockContract, cancelled=[contract_issue]) - - mock_check_closed.return_value = {'is_closed': True, 'pr_number': 1} - mock_pr_times.return_value = {1: pr_open_time, 2: pr_open_time} - - self._seed_predictions( - mock_validator.mp_storage, - uid=0, - hotkey='hk_alice', - github_id='gh_alice', - issue_id=1, - preds={1: 0.7, 2: 0.2}, - ) - - _run(merge_predictions(mock_validator, {})) - - ema = mock_validator.mp_storage.get_ema('gh_alice') - assert ema > 0 - # Predictions deleted after scoring - assert mock_validator.mp_storage.get_predictions_for_issue(1) == [] - - @patch('gittensor.validator.merge_predictions.settlement.get_contract_address', return_value='5Faddr') - @patch('gittensor.validator.merge_predictions.settlement.GITTENSOR_VALIDATOR_PAT', 'ghp_test') - @patch('gittensor.validator.merge_predictions.settlement.IssueCompetitionContractClient') - def test_already_settled_skipped(self, MockContract, _gca, mock_validator): - """Already-settled issues are skipped without calling GitHub, even if predictions exist.""" - from gittensor.validator.merge_predictions.settlement import merge_predictions - - contract_issue = self._make_contract_issue() - self._setup_contract_mock(MockContract, completed=[contract_issue]) - - # Pre-mark as settled and seed predictions anyway - mock_validator.mp_storage.mark_issue_settled(contract_issue.id, 'scored', merged_pr_number=1) - self._seed_predictions( - mock_validator.mp_storage, uid=0, hotkey='hk_alice', github_id='gh_alice', issue_id=1, preds={1: 0.8} - ) - - with patch('gittensor.validator.merge_predictions.settlement.check_github_issue_closed') as 
mock_check: - _run(merge_predictions(mock_validator, {})) - # GitHub should NOT be called since issue is already settled - mock_check.assert_not_called() - - # Predictions should be untouched (not deleted by settlement) - assert len(mock_validator.mp_storage.get_predictions_for_issue(1)) == 1 - - @patch('gittensor.validator.merge_predictions.settlement.get_pr_open_times') - @patch('gittensor.validator.merge_predictions.settlement.check_github_issue_closed') - @patch('gittensor.validator.merge_predictions.settlement.get_contract_address', return_value='5Faddr') - @patch('gittensor.validator.merge_predictions.settlement.GITTENSOR_VALIDATOR_PAT', 'ghp_test') - @patch('gittensor.validator.merge_predictions.settlement.IssueCompetitionContractClient') - def test_deregistered_miner_skipped(self, MockContract, _gca, mock_check_closed, mock_pr_times, mock_validator): - from gittensor.validator.merge_predictions.settlement import merge_predictions - - pr_open_time = datetime(2025, 6, 1, tzinfo=timezone.utc) - contract_issue = self._make_contract_issue() - self._setup_contract_mock(MockContract, completed=[contract_issue]) - - mock_check_closed.return_value = {'is_closed': True, 'pr_number': 1} - mock_pr_times.return_value = {1: pr_open_time} - - self._seed_predictions( - mock_validator.mp_storage, uid=0, hotkey='hk_alice', github_id='gh_alice', issue_id=1, preds={1: 0.7} - ) - self._seed_predictions( - mock_validator.mp_storage, uid=5, hotkey='hk_gone', github_id='gh_gone', issue_id=1, preds={1: 0.6} - ) - - _run(merge_predictions(mock_validator, {})) - - assert mock_validator.mp_storage.get_ema('gh_alice') > 0 - assert mock_validator.mp_storage.get_ema('gh_gone') == 0.0 - # Predictions deleted for all miners (including deregistered) - assert mock_validator.mp_storage.get_predictions_for_issue(1) == [] - - @patch('gittensor.validator.merge_predictions.settlement.get_pr_open_times') - @patch('gittensor.validator.merge_predictions.settlement.check_github_issue_closed') - 
@patch('gittensor.validator.merge_predictions.settlement.get_contract_address', return_value='5Faddr') - @patch('gittensor.validator.merge_predictions.settlement.GITTENSOR_VALIDATOR_PAT', 'ghp_test') - @patch('gittensor.validator.merge_predictions.settlement.IssueCompetitionContractClient') - def test_ema_persists_across_settlements( - self, MockContract, _gca, mock_check_closed, mock_pr_times, mock_validator - ): - from gittensor.validator.merge_predictions.settlement import merge_predictions - - pr_open_time = datetime(2025, 6, 1, tzinfo=timezone.utc) - - # First settlement - issue1 = self._make_contract_issue(issue_id=1, issue_number=10) - self._setup_contract_mock(MockContract, completed=[issue1]) - mock_check_closed.return_value = {'is_closed': True, 'pr_number': 1} - mock_pr_times.return_value = {1: pr_open_time} - - self._seed_predictions( - mock_validator.mp_storage, uid=0, hotkey='hk_alice', github_id='gh_alice', issue_id=1, preds={1: 0.8} - ) - - _run(merge_predictions(mock_validator, {})) - ema_after_first = mock_validator.mp_storage.get_ema('gh_alice') - assert ema_after_first > 0 - assert mock_validator.mp_storage.get_predictions_for_issue(1) == [] - - # Second settlement with a new issue - issue2 = self._make_contract_issue(issue_id=2, issue_number=20) - self._setup_contract_mock(MockContract, completed=[issue2]) - mock_pr_times.return_value = {1: pr_open_time} - - self._seed_predictions( - mock_validator.mp_storage, uid=0, hotkey='hk_alice', github_id='gh_alice', issue_id=2, preds={1: 0.9} - ) - - _run(merge_predictions(mock_validator, {})) - ema_after_second = mock_validator.mp_storage.get_ema('gh_alice') - - assert ema_after_second != ema_after_first - - @patch('gittensor.validator.merge_predictions.settlement.get_pr_open_times') - @patch('gittensor.validator.merge_predictions.settlement.check_github_issue_closed') - @patch('gittensor.validator.merge_predictions.settlement.get_contract_address', return_value='5Faddr') - 
@patch('gittensor.validator.merge_predictions.settlement.GITTENSOR_VALIDATOR_PAT', 'ghp_test') - @patch('gittensor.validator.merge_predictions.settlement.IssueCompetitionContractClient') - def test_settled_issue_recorded_after_scoring( - self, MockContract, _gca, mock_check_closed, mock_pr_times, mock_validator - ): - """After completed settlement, issue is recorded in settled_issues.""" - from gittensor.validator.merge_predictions.settlement import merge_predictions - - pr_open_time = datetime(2025, 6, 1, tzinfo=timezone.utc) - contract_issue = self._make_contract_issue() - self._setup_contract_mock(MockContract, completed=[contract_issue]) - - mock_check_closed.return_value = {'is_closed': True, 'pr_number': 1} - mock_pr_times.return_value = {1: pr_open_time} - - self._seed_predictions( - mock_validator.mp_storage, uid=0, hotkey='hk_alice', github_id='gh_alice', issue_id=1, preds={1: 0.8} - ) - - _run(merge_predictions(mock_validator, {})) - - assert mock_validator.mp_storage.is_issue_settled(1) is True - - @patch('gittensor.validator.merge_predictions.settlement.check_github_issue_closed') - @patch('gittensor.validator.merge_predictions.settlement.get_contract_address', return_value='5Faddr') - @patch('gittensor.validator.merge_predictions.settlement.GITTENSOR_VALIDATOR_PAT', 'ghp_test') - @patch('gittensor.validator.merge_predictions.settlement.IssueCompetitionContractClient') - def test_voided_issue_recorded(self, MockContract, _gca, mock_check_closed, mock_validator): - """After voiding a cancelled issue, it is recorded in settled_issues.""" - from gittensor.validator.merge_predictions.settlement import merge_predictions - - contract_issue = self._make_contract_issue() - self._setup_contract_mock(MockContract, cancelled=[contract_issue]) - - mock_check_closed.return_value = {'is_closed': True, 'pr_number': None} - - self._seed_predictions( - mock_validator.mp_storage, uid=0, hotkey='hk_alice', github_id='gh_alice', issue_id=1, preds={1: 0.8} - ) - - 
_run(merge_predictions(mock_validator, {})) - - assert mock_validator.mp_storage.is_issue_settled(1) is True - - @patch('gittensor.validator.merge_predictions.settlement.GITTENSOR_VALIDATOR_PAT', '') - def test_no_validator_pat_skips(self, mock_validator): - from gittensor.validator.merge_predictions.settlement import merge_predictions - - _run(merge_predictions(mock_validator, {})) - - assert mock_validator.mp_storage.get_all_emas() == [] diff --git a/tests/validator/test_base_score.py b/tests/validator/test_base_score.py new file mode 100644 index 00000000..ea579490 --- /dev/null +++ b/tests/validator/test_base_score.py @@ -0,0 +1,616 @@ +# The MIT License (MIT) +# Copyright © 2025 Entrius + +"""Integration tests for calculate_base_score - verifying per-category density +calculation and SOURCE-only contribution bonus using real tree-sitter scoring""" + +from typing import Dict, List, Optional + +import pytest + +from gittensor.classes import FileChange, PullRequest +from gittensor.constants import MIN_TOKEN_SCORE_FOR_BASE_SCORE +from gittensor.utils.github_api_tools import FileContentPair +from gittensor.validator.oss_contributions.scoring import calculate_base_score +from gittensor.validator.utils.load_weights import ( + LanguageConfig, + TokenConfig, + load_programming_language_weights, + load_token_config, +) +from tests.validator.conftest import PRBuilder + +_THRESHOLD = MIN_TOKEN_SCORE_FOR_BASE_SCORE + +_SOURCE_CODE = """\ +def validate_input(value, min_val, max_val): + if not isinstance(value, (int, float)): + raise TypeError("Expected numeric value") + if value < min_val or value > max_val: + raise ValueError(f"Value {value} out of range [{min_val}, {max_val}]") + return True + +def clamp(value, low, high): + return max(low, min(high, value)) + +class Processor: + def __init__(self, name): + self.name = name + self.results = [] + + def process(self, items): + for item in items: + if validate_input(item, 0, 100): + self.results.append(clamp(item, 0, 100)) + 
return self.results +""" + +_TEST_CODE = """\ +def test_validate_input_valid(): + assert validate_input(5, 0, 10) is True + +def test_validate_input_type_error(): + try: + validate_input("abc", 0, 10) + assert False + except TypeError: + pass + +def test_validate_input_range_error(): + try: + validate_input(20, 0, 10) + assert False + except ValueError: + pass + +def test_clamp_within_range(): + assert clamp(5, 0, 10) == 5 + +def test_clamp_below(): + assert clamp(-1, 0, 10) == 0 + +def test_clamp_above(): + assert clamp(15, 0, 10) == 10 +""" + +_LARGE_TEST_CODE = """\ +def test_processor_init(): + p = Processor("test") + assert p.name == "test" + assert p.results == [] + +def test_processor_process_valid(): + p = Processor("test") + result = p.process([1, 50, 99]) + assert result == [1, 50, 99] + +def test_processor_process_clamp(): + p = Processor("test") + result = p.process([150, -10, 50]) + assert result == [100, 0, 50] + +def test_validate_boundary(): + assert validate_input(0, 0, 100) is True + assert validate_input(100, 0, 100) is True + +def test_clamp_boundary(): + assert clamp(0, 0, 100) == 0 + assert clamp(100, 0, 100) == 100 +""" + +# Same logic as _SOURCE_CODE but spread across more lines +_VERBOSE_SOURCE = """\ +def validate_input( + value, + min_val, + max_val, +): + if not isinstance( + value, + (int, float), + ): + raise TypeError( + "Expected numeric value" + ) + if ( + value < min_val + or value > max_val + ): + raise ValueError( + f"Value {value} out of range [{min_val}, {max_val}]" + ) + return True + +def clamp( + value, + low, + high, +): + return max( + low, + min( + high, + value, + ), + ) + +class Processor: + def __init__( + self, + name, + ): + self.name = name + self.results = [] + + def process( + self, + items, + ): + for item in items: + if validate_input( + item, + 0, + 100, + ): + self.results.append( + clamp( + item, + 0, + 100, + ) + ) + return self.results +""" + +_SOURCE_CODE_V1 = """\ +def clamp(value, low, high): + return 
max(low, min(high, value)) +""" + +_SOURCE_CODE_V2 = """\ +def clamp(value, low, high): + if not isinstance(value, (int, float)): + raise TypeError("Expected numeric") + if low > high: + raise ValueError("low must be <= high") + return max(low, min(high, value)) +""" + + +@pytest.fixture +def token_config() -> TokenConfig: + return load_token_config() + + +@pytest.fixture +def programming_languages() -> Dict[str, LanguageConfig]: + return load_programming_language_weights() + + +def _change(filename: str, content: str, status: str = 'added') -> FileChange: + lines: int = content.count('\n') + return FileChange( + pr_number=1, + repository_full_name='test/repo', + filename=filename, + changes=lines, + additions=lines if status != 'removed' else 0, + deletions=lines if status == 'removed' else 0, + status=status, + ) + + +def _contents( + filename: str, new_content: Optional[str], old_content: Optional[str] = None +) -> tuple[str, FileContentPair]: + return filename, FileContentPair(old_content=old_content, new_content=new_content) + + +def _score( + pr: PullRequest, + file_changes: List[FileChange], + file_contents: List[tuple[str, FileContentPair]], + token_config: TokenConfig, + programming_languages: Dict[str, LanguageConfig], +) -> float: + """Set file_changes on PR and call calculate_base_score""" + pr.set_file_changes(file_changes) + return calculate_base_score(pr, programming_languages, token_config, dict(file_contents)) + + +def test_adding_tests_does_not_reduce_score( + pr_factory: PRBuilder, + token_config: TokenConfig, + programming_languages: Dict[str, LanguageConfig], +): + """Adding test files to a source PR must never lower the base score""" + source_change = _change('main.py', _SOURCE_CODE) + source_content = _contents('main.py', _SOURCE_CODE) + test_change = _change('tests/test_main.py', _TEST_CODE) + test_content = _contents('tests/test_main.py', _TEST_CODE) + + pr1 = pr_factory.merged() + score_without = _score(pr1, [source_change], 
[source_content], token_config, programming_languages) + + pr2 = pr_factory.merged() + score_with = _score( + pr2, + [source_change, test_change], + [source_content, test_content], + token_config, + programming_languages, + ) + + assert score_with > score_without + assert score_without > 0 + + +def test_tests_do_not_affect_contribution_bonus( + pr_factory: PRBuilder, + token_config: TokenConfig, + programming_languages: Dict[str, LanguageConfig], +): + """Adding small or large test files should produce the same modest + increase - the difference is only from test density, not from bonus""" + source_change = _change('main.py', _SOURCE_CODE) + source_content = _contents('main.py', _SOURCE_CODE) + small_test_change = _change('tests/test_a.py', _TEST_CODE) + small_test_content = _contents('tests/test_a.py', _TEST_CODE) + big_test = _TEST_CODE + _LARGE_TEST_CODE + big_test_change = _change('tests/test_a.py', big_test) + big_test_content = _contents('tests/test_a.py', big_test) + + pr_base = pr_factory.merged() + score_without = _score(pr_base, [source_change], [source_content], token_config, programming_languages) + + pr_small = pr_factory.merged() + score_small = _score( + pr_small, + [source_change, small_test_change], + [source_content, small_test_content], + token_config, + programming_languages, + ) + + pr_big = pr_factory.merged() + score_big = _score( + pr_big, + [source_change, big_test_change], + [source_content, big_test_content], + token_config, + programming_languages, + ) + + # Both increase over baseline + assert score_small > score_without + assert score_big > score_without + + # Increases are modest relative to baseline (test weight is 0.05x) + assert (score_small - score_without) / score_without < 0.1 + assert (score_big - score_without) / score_without < 0.1 + + +def test_same_code_in_test_path_scores_much_lower( + pr_factory: PRBuilder, + token_config: TokenConfig, + programming_languages: Dict[str, LanguageConfig], +): + """Identical code placed in a 
test directory scores much lower than + in a source path, because test weight is 0.05x and no contribution bonus""" + source_change = _change('main.py', _SOURCE_CODE) + source_content = _contents('main.py', _SOURCE_CODE) + source_as_test_change = _change('tests/test_main.py', _SOURCE_CODE) + source_as_test_content = _contents('tests/test_main.py', _SOURCE_CODE) + + pr_src = pr_factory.merged() + score_as_source = _score(pr_src, [source_change], [source_content], token_config, programming_languages) + + pr_test = pr_factory.merged() + score_as_test = _score( + pr_test, + [source_as_test_change], + [source_as_test_content], + token_config, + programming_languages, + ) + + assert score_as_source > (score_as_test * 10) + + +def test_tests_do_not_affect_threshold( + pr_factory: PRBuilder, + token_config: TokenConfig, + programming_languages: Dict[str, LanguageConfig], +): + """A PR below the token score threshold stays below even if large + test files are added - test scores don't help reach the threshold + because their contribution to the score is low""" + tiny_change = _change('tiny.py', 'x = 1\n') + tiny_content = _contents('tiny.py', 'x = 1\n') + big_test = _TEST_CODE + _LARGE_TEST_CODE + big_test_change = _change('tests/test_a.py', big_test) + big_test_content = _contents('tests/test_a.py', big_test) + + pr_tiny = pr_factory.merged(token_score=0.0) + score_tiny = _score(pr_tiny, [tiny_change], [tiny_content], token_config, programming_languages) + + pr_tiny_with_tests = pr_factory.merged(token_score=0.0) + score_tiny_with_tests = _score( + pr_tiny_with_tests, + [tiny_change, big_test_change], + [tiny_content, big_test_content], + token_config, + programming_languages, + ) + + assert score_tiny == score_tiny_with_tests + + +def test_adding_non_code_files_does_not_reduce_score( + pr_factory: PRBuilder, + token_config: TokenConfig, + programming_languages: Dict[str, LanguageConfig], +): + """Adding non-code files (markdown, yaml) must never lower the base score""" + 
source_change = _change('main.py', _SOURCE_CODE) + source_content = _contents('main.py', _SOURCE_CODE) + readme = '# Project\n\nSome documentation about the project\n' * 10 + readme_change = _change('README.md', readme) + readme_content = _contents('README.md', readme) + + pr1 = pr_factory.merged() + score_without = _score(pr1, [source_change], [source_content], token_config, programming_languages) + + pr2 = pr_factory.merged() + score_with = _score( + pr2, + [source_change, readme_change], + [source_content, readme_content], + token_config, + programming_languages, + ) + + assert score_with > score_without + + +def test_non_code_does_not_affect_contribution_bonus( + pr_factory: PRBuilder, + token_config: TokenConfig, + programming_languages: Dict[str, LanguageConfig], +): + """Adding small or large non-code files should produce the same increase + because line-count density = lang_weight regardless of size""" + source_change = _change('main.py', _SOURCE_CODE) + source_content = _contents('main.py', _SOURCE_CODE) + small_yaml = 'key: value\n' * 5 + small_yaml_change = _change('config.yaml', small_yaml) + small_yaml_content = _contents('config.yaml', small_yaml) + big_yaml = 'key: value\nlist:\n - item1\n - item2\n' * 50 + big_yaml_change = _change('config.yaml', big_yaml) + big_yaml_content = _contents('config.yaml', big_yaml) + + pr_base = pr_factory.merged() + score_without = _score(pr_base, [source_change], [source_content], token_config, programming_languages) + + pr_small = pr_factory.merged() + score_small = _score( + pr_small, + [source_change, small_yaml_change], + [source_content, small_yaml_content], + token_config, + programming_languages, + ) + + pr_big = pr_factory.merged() + score_big = _score( + pr_big, + [source_change, big_yaml_change], + [source_content, big_yaml_content], + token_config, + programming_languages, + ) + + assert score_small > score_without + assert score_big > score_without + assert score_big == score_small + + +def 
test_source_code_scores_much_higher_than_non_code( + pr_factory: PRBuilder, + token_config: TokenConfig, + programming_languages: Dict[str, LanguageConfig], +): + """Tree-diff scored source code produces a much higher base score than + line-count scored non-code files""" + source_change = _change('main.py', _SOURCE_CODE) + source_content = _contents('main.py', _SOURCE_CODE) + big_yaml = 'key: value\nlist:\n - item1\n - item2\n' * 50 + yaml_change = _change('config.yaml', big_yaml) + yaml_content = _contents('config.yaml', big_yaml) + + pr_src = pr_factory.merged() + score_as_source = _score(pr_src, [source_change], [source_content], token_config, programming_languages) + + pr_unc = pr_factory.merged() + score_as_non_code = _score( + pr_unc, + [yaml_change], + [yaml_content], + token_config, + programming_languages, + ) + + assert score_as_source > (score_as_non_code * 10) + + +def test_non_code_does_not_affect_threshold( + pr_factory: PRBuilder, + token_config: TokenConfig, + programming_languages: Dict[str, LanguageConfig], +): + """A PR below the token score threshold stays below even if large + non-code files are added""" + tiny_change = _change('tiny.py', 'x = 1\n') + tiny_content = _contents('tiny.py', 'x = 1\n') + big_yaml = 'key: value\nlist:\n - item1\n - item2\n' * 50 + big_yaml_change = _change('config.yaml', big_yaml) + big_yaml_content = _contents('config.yaml', big_yaml) + + pr_tiny = pr_factory.merged(token_score=0.0) + score_tiny = _score(pr_tiny, [tiny_change], [tiny_content], token_config, programming_languages) + + pr_tiny_with_yaml = pr_factory.merged(token_score=0.0) + score_tiny_with_yaml = _score( + pr_tiny_with_yaml, + [tiny_change, big_yaml_change], + [tiny_content, big_yaml_content], + token_config, + programming_languages, + ) + + assert score_tiny == score_tiny_with_yaml + + +def test_deleted_file_does_not_change_score( + pr_factory: PRBuilder, + token_config: TokenConfig, + programming_languages: Dict[str, LanguageConfig], +): + """A 
deleted file contributes score=0 and must not reduce the base score""" + source_change = _change('main.py', _SOURCE_CODE) + source_content = _contents('main.py', _SOURCE_CODE) + deleted_change = _change('old.py', 'def old(): pass\n', status='removed') + deleted_content = _contents('old.py', None) + + pr1 = pr_factory.merged() + score_without = _score(pr1, [source_change], [source_content], token_config, programming_languages) + + pr2 = pr_factory.merged() + score_with = _score( + pr2, + [source_change, deleted_change], + [source_content, deleted_content], + token_config, + programming_languages, + ) + + assert score_without == score_with + + +def test_unsupported_file_does_not_change_score( + pr_factory: PRBuilder, + token_config: TokenConfig, + programming_languages: Dict[str, LanguageConfig], +): + """A file with an unsupported extension contributes score=0 and must + not reduce the base score""" + source_change = _change('main.py', _SOURCE_CODE) + source_content = _contents('main.py', _SOURCE_CODE) + unknown_change = _change('data.xyz', 'some unknown format\n' * 10) + unknown_content = _contents('data.xyz', 'some unknown format\n' * 10) + + pr1 = pr_factory.merged() + score_without = _score(pr1, [source_change], [source_content], token_config, programming_languages) + + pr2 = pr_factory.merged() + score_with = _score( + pr2, + [source_change, unknown_change], + [source_content, unknown_content], + token_config, + programming_languages, + ) + + assert score_without == score_with + + +def test_adding_test_category_increases_score_beyond_single_cap( + pr_factory: PRBuilder, + token_config: TokenConfig, + programming_languages: Dict[str, LanguageConfig], +): + """Each category has its own density cap, so adding a test category + can push the total score above what a single source category achieves""" + source_change = _change('main.py', _SOURCE_CODE) + source_content = _contents('main.py', _SOURCE_CODE) + test_change = _change('tests/test_main.py', _TEST_CODE) + 
test_content = _contents('tests/test_main.py', _TEST_CODE) + + pr_one = pr_factory.merged() + score_source = _score(pr_one, [source_change], [source_content], token_config, programming_languages) + + pr_two = pr_factory.merged() + score_both = _score( + pr_two, + [source_change, test_change], + [source_content, test_content], + token_config, + programming_languages, + ) + + assert score_both > score_source + + +def test_verbose_formatting_decreases_score( + pr_factory: PRBuilder, + token_config: TokenConfig, + programming_languages: Dict[str, LanguageConfig], +): + """Same logic reformatted across more lines produces a lower score + because density (token_score / lines) drops""" + compact_change = _change('main.py', _SOURCE_CODE) + compact_content = _contents('main.py', _SOURCE_CODE) + verbose_change = _change('main.py', _VERBOSE_SOURCE) + verbose_content = _contents('main.py', _VERBOSE_SOURCE) + + pr_compact = pr_factory.merged() + score_compact = _score(pr_compact, [compact_change], [compact_content], token_config, programming_languages) + + pr_verbose = pr_factory.merged() + score_verbose = _score(pr_verbose, [verbose_change], [verbose_content], token_config, programming_languages) + + assert score_compact > score_verbose + assert score_verbose > 0 + + +def test_modified_file_scores_diff_only( + pr_factory: PRBuilder, + token_config: TokenConfig, + programming_languages: Dict[str, LanguageConfig], +): + """A modified file scores only the AST diff between old and new content, + not the entire new file""" + new_change = _change('main.py', _SOURCE_CODE_V2) + new_content = _contents('main.py', _SOURCE_CODE_V2) + mod_change = _change('main.py', _SOURCE_CODE_V2, status='modified') + mod_content = _contents('main.py', _SOURCE_CODE_V2, old_content=_SOURCE_CODE_V1) + + pr_new = pr_factory.merged() + score_new_file = _score(pr_new, [new_change], [new_content], token_config, programming_languages) + + pr_mod = pr_factory.merged() + score_modified = _score(pr_mod, 
[mod_change], [mod_content], token_config, programming_languages) + + assert score_new_file > score_modified + assert score_modified > 0 + + +def test_below_threshold_scores_less( + pr_factory: PRBuilder, + token_config: TokenConfig, + programming_languages: Dict[str, LanguageConfig], +): + """A trivial change (below token score threshold) scores strictly less + than a substantial change (above threshold)""" + tiny_change = _change('tiny.py', 'x = 1\n') + tiny_content = _contents('tiny.py', 'x = 1\n') + source_change = _change('main.py', _SOURCE_CODE) + source_content = _contents('main.py', _SOURCE_CODE) + + pr_below = pr_factory.merged(token_score=0.0) + score_below = _score(pr_below, [tiny_change], [tiny_content], token_config, programming_languages) + + pr_above = pr_factory.merged() + score_above = _score(pr_above, [source_change], [source_content], token_config, programming_languages) + + assert score_above > score_below diff --git a/tests/validator/test_dynamic_open_pr_threshold.py b/tests/validator/test_dynamic_open_pr_threshold.py index c5e63d18..e503320f 100644 --- a/tests/validator/test_dynamic_open_pr_threshold.py +++ b/tests/validator/test_dynamic_open_pr_threshold.py @@ -1,8 +1,8 @@ """ -Tests for dynamic open PR threshold based on total token score across unlocked tiers. +Tests for dynamic open PR threshold based on total token score. 
-Bonus = floor(total_unlocked_token_score / 500) -Example: 1500 token score across unlocked tiers / 500 = +3 bonus +Bonus = floor(total_token_score / 300) +Example: 900 total token score / 300 = +3 bonus Multiplier is binary: 1.0 if <= threshold, 0.0 otherwise @@ -18,132 +18,38 @@ calculate_open_pr_threshold, calculate_pr_spam_penalty_multiplier, ) -from gittensor.validator.oss_contributions.tier_config import Tier, TierStats - - -def make_tier_stats( - bronze_merged=0, - bronze_closed=0, - bronze_token_score=0.0, - silver_merged=0, - silver_closed=0, - silver_token_score=0.0, - gold_merged=0, - gold_closed=0, - gold_token_score=0.0, -): - """Create tier stats with specified merged/closed counts and token scores.""" - stats = {tier: TierStats() for tier in Tier} - stats[Tier.BRONZE].merged_count = bronze_merged - stats[Tier.BRONZE].closed_count = bronze_closed - stats[Tier.BRONZE].token_score = bronze_token_score - stats[Tier.SILVER].merged_count = silver_merged - stats[Tier.SILVER].closed_count = silver_closed - stats[Tier.SILVER].token_score = silver_token_score - stats[Tier.GOLD].merged_count = gold_merged - stats[Tier.GOLD].closed_count = gold_closed - stats[Tier.GOLD].token_score = gold_token_score - # Set qualified unique repos to meet requirements (3 repos needed per tier) - stats[Tier.BRONZE].qualified_unique_repo_count = 3 - stats[Tier.SILVER].qualified_unique_repo_count = 3 - stats[Tier.GOLD].qualified_unique_repo_count = 3 - return stats class TestCalculateOpenPrThreshold: """Tests for calculate_open_pr_threshold function.""" - def test_no_tier_stats_returns_base_threshold(self): - """Without tier stats, threshold should be the base threshold.""" - assert calculate_open_pr_threshold(tier_stats=None) == EXCESSIVE_PR_PENALTY_BASE_THRESHOLD - def test_no_token_score_returns_base_threshold(self): - """With no token score, threshold should be the base threshold.""" - tier_stats = make_tier_stats(bronze_merged=7, bronze_closed=3) - assert 
calculate_open_pr_threshold(tier_stats) == EXCESSIVE_PR_PENALTY_BASE_THRESHOLD - - def test_below_500_no_bonus(self): - """Token score below 500 doesn't grant bonus.""" - tier_stats = make_tier_stats(bronze_merged=7, bronze_closed=3, bronze_token_score=499.0) - assert calculate_open_pr_threshold(tier_stats) == EXCESSIVE_PR_PENALTY_BASE_THRESHOLD - - def test_500_token_score_gets_bonus(self): - """500 token score grants +1 bonus.""" - tier_stats = make_tier_stats(bronze_merged=7, bronze_closed=3, bronze_token_score=500.0) - assert calculate_open_pr_threshold(tier_stats) == EXCESSIVE_PR_PENALTY_BASE_THRESHOLD + 1 - - def test_1000_token_score_gets_double_bonus(self): - """1000 token score grants +2 bonus.""" - tier_stats = make_tier_stats(bronze_merged=7, bronze_closed=3, bronze_token_score=1000.0) - assert calculate_open_pr_threshold(tier_stats) == EXCESSIVE_PR_PENALTY_BASE_THRESHOLD + 2 - - def test_locked_tier_ignores_token_score(self): - """Token score from locked tiers doesn't count.""" - # Bronze locked: 50% credibility (below 70% requirement) - tier_stats = make_tier_stats(bronze_merged=5, bronze_closed=5, bronze_token_score=1000.0) - assert calculate_open_pr_threshold(tier_stats) == EXCESSIVE_PR_PENALTY_BASE_THRESHOLD - - def test_sum_across_unlocked_tiers(self): - """Token scores sum across all unlocked tiers.""" - # Bronze and Silver unlocked - tier_stats = make_tier_stats( - bronze_merged=7, - bronze_closed=3, - bronze_token_score=300.0, - silver_merged=13, - silver_closed=7, - silver_token_score=700.0, - ) - # Total: 300 + 700 = 1000 -> floor(1000/500) = +2 - assert calculate_open_pr_threshold(tier_stats) == EXCESSIVE_PR_PENALTY_BASE_THRESHOLD + 2 - - def test_locked_tier_excluded_from_sum(self): - """Only unlocked tier token scores are summed.""" - # Bronze unlocked, Silver locked (50% credibility) - tier_stats = make_tier_stats( - bronze_merged=7, - bronze_closed=3, - bronze_token_score=500.0, - silver_merged=5, - silver_closed=5, - 
silver_token_score=1000.0, - ) - # Only Bronze counts: 500 -> +1 bonus - assert calculate_open_pr_threshold(tier_stats) == EXCESSIVE_PR_PENALTY_BASE_THRESHOLD + 1 - - def test_all_tiers_unlocked_sum(self): - """All unlocked tiers contribute to the sum.""" - # All tiers unlocked - tier_stats = make_tier_stats( - bronze_merged=7, - bronze_closed=3, - bronze_token_score=500.0, - silver_merged=13, - silver_closed=7, - silver_token_score=500.0, - gold_merged=6, - gold_closed=4, - gold_token_score=500.0, - ) - # Total: 500 + 500 + 500 = 1500 -> floor(1500/500) = +3 - assert calculate_open_pr_threshold(tier_stats) == EXCESSIVE_PR_PENALTY_BASE_THRESHOLD + 3 + """Without token score, threshold should be the base threshold.""" + assert calculate_open_pr_threshold() == EXCESSIVE_PR_PENALTY_BASE_THRESHOLD + + def test_zero_token_score_returns_base_threshold(self): + """With zero token score, threshold should be the base threshold.""" + assert calculate_open_pr_threshold(0.0) == EXCESSIVE_PR_PENALTY_BASE_THRESHOLD + + def test_below_300_no_bonus(self): + """Token score below 300 doesn't grant bonus.""" + assert calculate_open_pr_threshold(299.0) == EXCESSIVE_PR_PENALTY_BASE_THRESHOLD + + def test_300_token_score_gets_bonus(self): + """300 token score grants +1 bonus.""" + assert calculate_open_pr_threshold(300.0) == EXCESSIVE_PR_PENALTY_BASE_THRESHOLD + 1 + + def test_600_token_score_gets_double_bonus(self): + """600 token score grants +2 bonus.""" + assert calculate_open_pr_threshold(600.0) == EXCESSIVE_PR_PENALTY_BASE_THRESHOLD + 2 + + def test_900_token_score_gets_triple_bonus(self): + """900 token score grants +3 bonus.""" + assert calculate_open_pr_threshold(900.0) == EXCESSIVE_PR_PENALTY_BASE_THRESHOLD + 3 def test_threshold_capped_at_max(self): """Threshold is capped at MAX_OPEN_PR_THRESHOLD.""" - # All tiers unlocked with very high token scores - tier_stats = make_tier_stats( - bronze_merged=7, - bronze_closed=3, - bronze_token_score=5000.0, - silver_merged=13, - 
silver_closed=7, - silver_token_score=5000.0, - gold_merged=6, - gold_closed=4, - gold_token_score=5000.0, - ) - # Total: 15000 -> floor(15000/500) = +30, base + 30 = 40, capped at 30 - assert calculate_open_pr_threshold(tier_stats) == MAX_OPEN_PR_THRESHOLD + assert calculate_open_pr_threshold(50000.0) == MAX_OPEN_PR_THRESHOLD class TestCalculatePrSpamPenaltyMultiplier: @@ -167,38 +73,12 @@ def test_zero_multiplier_well_above_threshold(self): def test_bonus_increases_threshold(self): """Token score bonus increases the threshold.""" - # Bronze unlocked with 1000 token score = +2 bonus - tier_stats = make_tier_stats(bronze_merged=7, bronze_closed=3, bronze_token_score=1000.0) - - # Base (10) + bonus (+2) = 12 threshold - assert calculate_pr_spam_penalty_multiplier(12, tier_stats) == 1.0 - assert calculate_pr_spam_penalty_multiplier(13, tier_stats) == 0.0 - - def test_locked_tier_no_bonus(self): - """Token score in locked tiers doesn't increase threshold.""" - # Bronze locked (below 70% credibility) - tier_stats = make_tier_stats(bronze_merged=5, bronze_closed=5, bronze_token_score=1000.0) - - # No bonus, threshold = 10 - assert calculate_pr_spam_penalty_multiplier(10, tier_stats) == 1.0 - assert calculate_pr_spam_penalty_multiplier(11, tier_stats) == 0.0 + # 600 token score = +2 bonus -> threshold = 12 + assert calculate_pr_spam_penalty_multiplier(12, 600.0) == 1.0 + assert calculate_pr_spam_penalty_multiplier(13, 600.0) == 0.0 def test_high_threshold_for_top_contributor(self): """Top contributor with high token score gets higher threshold.""" - # All tiers unlocked with token scores - tier_stats = make_tier_stats( - bronze_merged=7, - bronze_closed=3, - bronze_token_score=1000.0, - silver_merged=13, - silver_closed=7, - silver_token_score=1000.0, - gold_merged=6, - gold_closed=4, - gold_token_score=1000.0, - ) - # Total: 3000 -> floor(3000/500) = +6 bonus - # Threshold = 10 + 6 = 16 - - assert calculate_pr_spam_penalty_multiplier(16, tier_stats) == 1.0 - assert 
calculate_pr_spam_penalty_multiplier(17, tier_stats) == 0.0 + # 1800 token score -> floor(1800/300) = +6 bonus -> threshold = 16 + assert calculate_pr_spam_penalty_multiplier(16, 1800.0) == 1.0 + assert calculate_pr_spam_penalty_multiplier(17, 1800.0) == 0.0 diff --git a/tests/validator/test_emission_shares.py b/tests/validator/test_emission_shares.py deleted file mode 100644 index ecba39fd..00000000 --- a/tests/validator/test_emission_shares.py +++ /dev/null @@ -1,48 +0,0 @@ -# Entrius 2025 - -""" -Guard-rail tests: emission shares and top-K constant configuration. - -Ensures: -- Combined non-OSS emission shares (treasury + predictions) never reach 100%. -- PREDICTIONS_TOP_K_SHARES sums to exactly 1.0 and has length == PREDICTIONS_TOP_K. - -Run: - pytest tests/validator/test_emission_shares.py -v -""" - -import pytest - -from gittensor.constants import ( - ISSUES_TREASURY_EMISSION_SHARE, - PREDICTIONS_EMISSIONS_SHARE, - PREDICTIONS_TOP_K, - PREDICTIONS_TOP_K_SHARES, -) - - -def test_combined_emission_shares_leave_room_for_oss(): - """Issue bounties + merge predictions must not consume all emissions.""" - combined = ISSUES_TREASURY_EMISSION_SHARE + PREDICTIONS_EMISSIONS_SHARE - oss_share = 1.0 - combined - - assert combined < 1.0, ( - f'Combined non-OSS emission shares ({ISSUES_TREASURY_EMISSION_SHARE} + {PREDICTIONS_EMISSIONS_SHARE} ' - f'= {combined}) must be < 1.0, otherwise OSS contributions get nothing' - ) - assert oss_share > 0.0 - - -def test_top_k_shares_sum_to_one(): - """Top-K shares must sum to exactly 1.0.""" - assert sum(PREDICTIONS_TOP_K_SHARES) == pytest.approx(1.0), ( - f'PREDICTIONS_TOP_K_SHARES must sum to 1.0, got {sum(PREDICTIONS_TOP_K_SHARES)}' - ) - - -def test_top_k_shares_length_matches_top_k(): - """PREDICTIONS_TOP_K_SHARES length must equal PREDICTIONS_TOP_K.""" - assert len(PREDICTIONS_TOP_K_SHARES) == PREDICTIONS_TOP_K, ( - f'PREDICTIONS_TOP_K_SHARES has {len(PREDICTIONS_TOP_K_SHARES)} entries ' - f'but PREDICTIONS_TOP_K is 
{PREDICTIONS_TOP_K}' - ) diff --git a/tests/validator/test_inline_test_detection.py b/tests/validator/test_inline_test_detection.py new file mode 100644 index 00000000..41bd53ea --- /dev/null +++ b/tests/validator/test_inline_test_detection.py @@ -0,0 +1,109 @@ +"""Tests for inline test detection in Rust, Zig, and D source files.""" + +from gittensor.constants import INLINE_TEST_EXTENSIONS +from gittensor.validator.utils.tree_sitter_scoring import has_inline_tests + +# -- Rust ------------------------------------------------------------------ + + +def test_rust_cfg_test_module_detected(): + code = 'fn prod() -> i32 { 42 }\n#[cfg(test)]\nmod tests { fn t() {} }\n' + assert has_inline_tests(code, 'rs') is True + + +def test_rust_test_fn_detected(): + code = 'fn prod() -> i32 { 42 }\n#[test]\nfn test_it() {}\n' + assert has_inline_tests(code, 'rs') is True + + +def test_rust_inner_attribute_cfg_test_detected(): + """#![cfg(test)] inner attribute gates the entire module.""" + code = '#![cfg(test)]\nfn test_helper() {}\n' + assert has_inline_tests(code, 'rs') is True + + +def test_rust_cfg_test_prefix_not_detected(): + """#[cfg(test_utils)] should not be detected as inline test.""" + code = '#[cfg(test_utils)]\nmod helpers { fn h() {} }\n' + assert has_inline_tests(code, 'rs') is False + + +def test_rust_production_only_not_detected(): + code = 'fn prod() -> i32 { 42 }\nfn other() {}\n' + assert has_inline_tests(code, 'rs') is False + + +def test_rust_tokio_test_detected(): + """#[tokio::test] async test attribute should be detected.""" + code = 'async fn helper() {}\n#[tokio::test]\nasync fn test_it() {}\n' + assert has_inline_tests(code, 'rs') is True + + +def test_rust_indented_test_detected(): + """Indented #[test] inside a mod should still be detected.""" + code = ' #[test]\n fn test_it() {}\n' + assert has_inline_tests(code, 'rs') is True + + +def test_rust_test_in_comment_not_detected(): + """#[test] inside a line comment must not trigger detection.""" + code = 
'fn prod() {}\n// Use #[test] to annotate test functions\n' + assert has_inline_tests(code, 'rs') is False + + +def test_rust_test_in_doc_comment_not_detected(): + """#[test] inside a doc comment must not trigger detection.""" + code = '/// Example: #[test]\nfn documented() {}\n' + assert has_inline_tests(code, 'rs') is False + + +def test_rust_test_in_string_not_detected(): + """#[test] inside a string literal must not trigger detection.""" + code = 'fn f() { let s = "#[test]"; }\n' + assert has_inline_tests(code, 'rs') is False + + +# -- Zig ------------------------------------------------------------------ + + +def test_zig_named_test_detected(): + code = 'fn add(a: i32, b: i32) i32 { return a + b; }\ntest "add" { }\n' + assert has_inline_tests(code, 'zig') is True + + +def test_zig_unnamed_test_detected(): + """Zig allows unnamed test blocks: test { ... }""" + code = 'fn add(a: i32, b: i32) i32 { return a + b; }\ntest {\n // ...\n}\n' + assert has_inline_tests(code, 'zig') is True + + +def test_zig_production_only_not_detected(): + code = 'fn add(a: i32, b: i32) i32 { return a + b; }\n' + assert has_inline_tests(code, 'zig') is False + + +# -- D --------------------------------------------------------------------- + + +def test_d_unittest_detected(): + code = 'int add(int a, int b) { return a + b; }\nunittest { assert(add(1,2) == 3); }\n' + assert has_inline_tests(code, 'd') is True + + +def test_d_production_only_not_detected(): + code = 'int add(int a, int b) { return a + b; }\n' + assert has_inline_tests(code, 'd') is False + + +# -- Unsupported / Constants ----------------------------------------------- + + +def test_unsupported_extension_returns_false(): + assert has_inline_tests('def foo(): pass', 'py') is False + + +def test_inline_test_extensions_constant(): + assert 'rs' in INLINE_TEST_EXTENSIONS + assert 'zig' in INLINE_TEST_EXTENSIONS + assert 'd' in INLINE_TEST_EXTENSIONS + assert 'py' not in INLINE_TEST_EXTENSIONS diff --git 
a/tests/validator/test_load_weights.py b/tests/validator/test_load_weights.py index e59d7b4c..35d17a00 100644 --- a/tests/validator/test_load_weights.py +++ b/tests/validator/test_load_weights.py @@ -10,7 +10,6 @@ import pytest -from gittensor.validator.oss_contributions.tier_config import Tier from gittensor.validator.utils.load_weights import ( LanguageConfig, RepositoryConfig, @@ -114,13 +113,6 @@ def test_repo_names_are_lowercase(self): for repo_name in repos.keys(): assert repo_name == repo_name.lower(), f'{repo_name} should be lowercase' - def test_repos_have_valid_tiers(self): - """Repositories should have valid tier assignments.""" - repos = load_master_repo_weights() - valid_tiers = {Tier.BRONZE, Tier.SILVER, Tier.GOLD, None} - for repo_name, config in repos.items(): - assert config.tier in valid_tiers, f'{repo_name} has invalid tier: {config.tier}' - class TestBannedOrganizations: """Tests ensuring banned organizations are not active in the repository list. @@ -131,7 +123,18 @@ class TestBannedOrganizations: # orgs may be banned for: # - exploitative PR manipulation # - explicit removal request - BANNED_ORGS = ['conda', 'conda-incubator', 'conda-archive', 'louislam'] + BANNED_ORGS = [ + 'conda', + 'conda-incubator', + 'conda-archive', + 'louislam', + 'python', + 'fastapi', + 'astral-sh', + 'astropy', + 'numpy', + 'scipy', + ] def test_banned_org_repos_are_inactive(self): """Repositories from banned organizations must be marked as inactive.""" diff --git a/tests/validator/test_pat_handler.py b/tests/validator/test_pat_handler.py new file mode 100644 index 00000000..49839bdb --- /dev/null +++ b/tests/validator/test_pat_handler.py @@ -0,0 +1,225 @@ +# Entrius 2025 + +"""Tests for PAT broadcast and check handlers.""" + +import asyncio +from unittest.mock import MagicMock, patch + +import pytest +from bittensor.core.synapse import TerminalInfo + +from gittensor.synapses import PatBroadcastSynapse, PatCheckSynapse +from gittensor.validator import pat_storage 
+from gittensor.validator.pat_handler import ( + blacklist_pat_broadcast, + blacklist_pat_check, + handle_pat_broadcast, + handle_pat_check, +) + + +def _run(coro): + """Run an async function synchronously.""" + return asyncio.get_event_loop().run_until_complete(coro) + + +@pytest.fixture(autouse=True) +def use_tmp_pats_file(tmp_path, monkeypatch): + """Redirect PAT storage to a temporary file for each test.""" + tmp_file = tmp_path / 'miner_pats.json' + monkeypatch.setattr(pat_storage, 'PATS_FILE', tmp_file) + return tmp_file + + +@pytest.fixture +def mock_validator(): + """Create a mock validator with metagraph.""" + validator = MagicMock() + validator.metagraph.hotkeys = ['hotkey_0', 'hotkey_1', 'hotkey_2'] + validator.metagraph.S = [100.0, 200.0, 300.0] + return validator + + +def _make_dendrite(hotkey: str) -> TerminalInfo: + return TerminalInfo(hotkey=hotkey) + + +def _make_broadcast_synapse(hotkey: str, pat: str = 'ghp_test123') -> PatBroadcastSynapse: + synapse = PatBroadcastSynapse(github_access_token=pat) + synapse.dendrite = _make_dendrite(hotkey) + return synapse + + +def _make_check_synapse(hotkey: str) -> PatCheckSynapse: + synapse = PatCheckSynapse() + synapse.dendrite = _make_dendrite(hotkey) + return synapse + + +# --------------------------------------------------------------------------- +# Blacklist tests +# --------------------------------------------------------------------------- + + +class TestBlacklistPatBroadcast: + def test_registered_hotkey_accepted(self, mock_validator): + synapse = _make_broadcast_synapse('hotkey_1') + blocked, reason = _run(blacklist_pat_broadcast(mock_validator, synapse)) + assert blocked is False + + def test_unregistered_hotkey_rejected(self, mock_validator): + synapse = _make_broadcast_synapse('unknown_hotkey') + blocked, reason = _run(blacklist_pat_broadcast(mock_validator, synapse)) + assert blocked is True + + +class TestBlacklistPatCheck: + def test_registered_hotkey_accepted(self, mock_validator): + synapse = 
_make_check_synapse('hotkey_1') + blocked, reason = _run(blacklist_pat_check(mock_validator, synapse)) + assert blocked is False + + def test_unregistered_hotkey_rejected(self, mock_validator): + synapse = _make_check_synapse('unknown_hotkey') + blocked, reason = _run(blacklist_pat_check(mock_validator, synapse)) + assert blocked is True + + +# --------------------------------------------------------------------------- +# Handler tests +# --------------------------------------------------------------------------- + + +class TestHandlePatBroadcast: + @patch('gittensor.validator.pat_handler._test_pat_against_repo', return_value=None) + @patch('gittensor.validator.pat_handler.validate_github_credentials', return_value=('github_42', None)) + def test_valid_pat_accepted(self, mock_validate, mock_test_query, mock_validator): + synapse = _make_broadcast_synapse('hotkey_1', pat='ghp_valid') + result = _run(handle_pat_broadcast(mock_validator, synapse)) + + assert result.accepted is True + assert result.rejection_reason is None + # PAT should be cleared from response + assert result.github_access_token == '' + + # Verify PAT was stored by UID + entry = pat_storage.get_pat_by_uid(1) + assert entry is not None + assert entry['pat'] == 'ghp_valid' + assert entry['hotkey'] == 'hotkey_1' + assert entry['uid'] == 1 + assert entry['github_id'] == 'github_42' + + def test_unregistered_hotkey_rejected(self, mock_validator): + synapse = _make_broadcast_synapse('unknown_hotkey') + result = _run(handle_pat_broadcast(mock_validator, synapse)) + + assert result.accepted is False + assert 'not registered' in (result.rejection_reason or '') + + @patch('gittensor.validator.pat_handler.validate_github_credentials', return_value=(None, 'PAT invalid')) + def test_invalid_pat_rejected(self, mock_validate, mock_validator): + synapse = _make_broadcast_synapse('hotkey_1', pat='ghp_bad') + result = _run(handle_pat_broadcast(mock_validator, synapse)) + + assert result.accepted is False + assert 'PAT 
invalid' in (result.rejection_reason or '') + + # Verify PAT was NOT stored + assert pat_storage.get_pat_by_uid(1) is None + + @patch('gittensor.validator.pat_handler._test_pat_against_repo', return_value='GitHub API returned 403') + @patch('gittensor.validator.pat_handler.validate_github_credentials', return_value=('github_42', None)) + def test_test_query_failure_rejected(self, mock_validate, mock_test_query, mock_validator): + synapse = _make_broadcast_synapse('hotkey_1') + result = _run(handle_pat_broadcast(mock_validator, synapse)) + + assert result.accepted is False + assert '403' in (result.rejection_reason or '') + + @patch('gittensor.validator.pat_handler._test_pat_against_repo', return_value=None) + @patch('gittensor.validator.pat_handler.validate_github_credentials', return_value=('github_99', None)) + def test_github_identity_change_rejected(self, mock_validate, mock_test_query, mock_validator): + """Same hotkey cannot switch to a different GitHub account.""" + pat_storage.save_pat(1, 'hotkey_1', 'ghp_old', 'github_42') + + synapse = _make_broadcast_synapse('hotkey_1', pat='ghp_new_account') + result = _run(handle_pat_broadcast(mock_validator, synapse)) + + assert result.accepted is False + assert 'locked' in (result.rejection_reason or '').lower() + + # Original entry should be unchanged + entry = pat_storage.get_pat_by_uid(1) + assert entry is not None + assert entry['github_id'] == 'github_42' + + @patch('gittensor.validator.pat_handler._test_pat_against_repo', return_value=None) + @patch('gittensor.validator.pat_handler.validate_github_credentials', return_value=('github_42', None)) + def test_pat_rotation_same_github_accepted(self, mock_validate, mock_test_query, mock_validator): + """Same hotkey can rotate PATs if GitHub identity stays the same.""" + pat_storage.save_pat(1, 'hotkey_1', 'ghp_old', 'github_42') + + synapse = _make_broadcast_synapse('hotkey_1', pat='ghp_refreshed') + result = _run(handle_pat_broadcast(mock_validator, synapse)) + + 
assert result.accepted is True + entry = pat_storage.get_pat_by_uid(1) + assert entry is not None + assert entry['pat'] == 'ghp_refreshed' + assert entry['github_id'] == 'github_42' + + @patch('gittensor.validator.pat_handler._test_pat_against_repo', return_value=None) + @patch('gittensor.validator.pat_handler.validate_github_credentials', return_value=('github_99', None)) + def test_new_miner_on_uid_can_use_any_github(self, mock_validate, mock_test_query, mock_validator): + """A new hotkey on the same UID (new miner) can register any GitHub account.""" + pat_storage.save_pat(1, 'old_hotkey', 'ghp_old', 'github_42') + + synapse = _make_broadcast_synapse('hotkey_1', pat='ghp_new_miner') + result = _run(handle_pat_broadcast(mock_validator, synapse)) + + assert result.accepted is True + entry = pat_storage.get_pat_by_uid(1) + assert entry is not None + assert entry['github_id'] == 'github_99' + assert entry['hotkey'] == 'hotkey_1' + + +class TestHandlePatCheck: + @patch('gittensor.validator.pat_handler._test_pat_against_repo', return_value=None) + @patch('gittensor.validator.pat_handler.validate_github_credentials', return_value=('github_42', None)) + def test_valid_pat(self, mock_validate, mock_test_query, mock_validator): + pat_storage.save_pat(1, 'hotkey_1', 'ghp_test', 'github_42') + + synapse = _make_check_synapse('hotkey_1') + result = _run(handle_pat_check(mock_validator, synapse)) + assert result.has_pat is True + assert result.pat_valid is True + assert result.rejection_reason is None + + def test_missing_pat(self, mock_validator): + synapse = _make_check_synapse('hotkey_1') + result = _run(handle_pat_check(mock_validator, synapse)) + assert result.has_pat is False + assert result.pat_valid is False + + def test_stale_pat_reports_false(self, mock_validator): + """If a different miner now holds this UID, has_pat should be False.""" + pat_storage.save_pat(1, 'old_hotkey', 'ghp_old', 'github_42') + + synapse = _make_check_synapse('hotkey_1') + result = 
_run(handle_pat_check(mock_validator, synapse)) + assert result.has_pat is False + assert result.pat_valid is False + + @patch('gittensor.validator.pat_handler._test_pat_against_repo', return_value=None) + @patch('gittensor.validator.pat_handler.validate_github_credentials', return_value=(None, 'PAT expired')) + def test_stored_but_invalid_pat(self, mock_validate, mock_test_query, mock_validator): + """PAT is stored but fails re-validation.""" + pat_storage.save_pat(1, 'hotkey_1', 'ghp_expired', 'github_42') + + synapse = _make_check_synapse('hotkey_1') + result = _run(handle_pat_check(mock_validator, synapse)) + assert result.has_pat is True + assert result.pat_valid is False + assert 'PAT expired' in (result.rejection_reason or '') diff --git a/tests/validator/test_pat_storage.py b/tests/validator/test_pat_storage.py new file mode 100644 index 00000000..0e6f07d5 --- /dev/null +++ b/tests/validator/test_pat_storage.py @@ -0,0 +1,143 @@ +# Entrius 2025 + +"""Tests for validator PAT storage.""" + +import json +import threading + +import pytest + +from gittensor.validator import pat_storage + + +@pytest.fixture(autouse=True) +def use_tmp_pats_file(tmp_path, monkeypatch): + """Redirect PAT storage to a temporary file for each test.""" + tmp_file = tmp_path / 'miner_pats.json' + monkeypatch.setattr(pat_storage, 'PATS_FILE', tmp_file) + return tmp_file + + +class TestEnsurePatsFile: + def test_creates_file(self, use_tmp_pats_file): + assert not use_tmp_pats_file.exists() + pat_storage.ensure_pats_file() + assert use_tmp_pats_file.exists() + assert json.loads(use_tmp_pats_file.read_text()) == [] + + def test_does_not_overwrite_existing(self, use_tmp_pats_file): + pat_storage.save_pat(1, 'hotkey_1', 'ghp_abc', 'user_1') + pat_storage.ensure_pats_file() + entries = json.loads(use_tmp_pats_file.read_text()) + assert len(entries) == 1 + + +class TestSavePat: + def test_save_creates_file(self, use_tmp_pats_file): + pat_storage.save_pat(1, 'hotkey_1', 'ghp_abc', 'user_1') + 
assert use_tmp_pats_file.exists() + + entries = json.loads(use_tmp_pats_file.read_text()) + assert len(entries) == 1 + assert entries[0]['uid'] == 1 + assert entries[0]['hotkey'] == 'hotkey_1' + assert entries[0]['pat'] == 'ghp_abc' + assert entries[0]['github_id'] == 'user_1' + assert 'stored_at' in entries[0] + + def test_save_upsert_by_uid(self): + pat_storage.save_pat(1, 'hotkey_1', 'ghp_old', 'user_1') + pat_storage.save_pat(1, 'hotkey_1', 'ghp_new', 'user_1') + + entries = pat_storage.load_all_pats() + assert len(entries) == 1 + assert entries[0]['pat'] == 'ghp_new' + + def test_save_upsert_replaces_hotkey_on_uid(self): + """When a new miner takes over a UID, save_pat overwrites the old entry.""" + pat_storage.save_pat(1, 'old_hotkey', 'ghp_old', 'user_old') + pat_storage.save_pat(1, 'new_hotkey', 'ghp_new', 'user_new') + + entries = pat_storage.load_all_pats() + assert len(entries) == 1 + assert entries[0]['hotkey'] == 'new_hotkey' + assert entries[0]['pat'] == 'ghp_new' + + def test_save_multiple_miners(self): + pat_storage.save_pat(1, 'hotkey_1', 'ghp_a', 'user_a') + pat_storage.save_pat(2, 'hotkey_2', 'ghp_b', 'user_b') + pat_storage.save_pat(3, 'hotkey_3', 'ghp_c', 'user_c') + + entries = pat_storage.load_all_pats() + assert len(entries) == 3 + + +class TestLoadAllPats: + def test_load_empty_when_no_file(self): + entries = pat_storage.load_all_pats() + assert entries == [] + + def test_load_returns_all_entries(self): + pat_storage.save_pat(1, 'h1', 'p1', 'user_1') + pat_storage.save_pat(2, 'h2', 'p2', 'user_2') + + entries = pat_storage.load_all_pats() + assert len(entries) == 2 + + def test_load_handles_corrupt_file(self, use_tmp_pats_file): + use_tmp_pats_file.write_text('not json{{{') + entries = pat_storage.load_all_pats() + assert entries == [] + + +class TestGetPatByUid: + def test_get_existing(self): + pat_storage.save_pat(1, 'hotkey_1', 'ghp_abc', 'user_1') + entry = pat_storage.get_pat_by_uid(1) + assert entry is not None + assert entry['pat'] 
== 'ghp_abc' + + def test_get_missing(self): + entry = pat_storage.get_pat_by_uid(999) + assert entry is None + + +class TestRemovePat: + def test_remove_existing(self): + pat_storage.save_pat(1, 'hotkey_1', 'ghp_abc', 'user_1') + assert pat_storage.remove_pat(1) is True + assert pat_storage.get_pat_by_uid(1) is None + + def test_remove_missing(self): + assert pat_storage.remove_pat(999) is False + + def test_remove_preserves_others(self): + pat_storage.save_pat(1, 'h1', 'p1', 'user_1') + pat_storage.save_pat(2, 'h2', 'p2', 'user_2') + pat_storage.remove_pat(1) + + entries = pat_storage.load_all_pats() + assert len(entries) == 1 + assert entries[0]['uid'] == 2 + + +class TestConcurrency: + def test_concurrent_writes(self): + """Multiple threads writing simultaneously should not corrupt the file.""" + errors = [] + + def write_pat(i): + try: + pat_storage.save_pat(i, f'hotkey_{i}', f'ghp_{i}', f'user_{i}') + except Exception as e: + errors.append(e) + + threads = [threading.Thread(target=write_pat, args=(i,)) for i in range(20)] + for t in threads: + t.start() + for t in threads: + t.join() + + assert not errors + entries = pat_storage.load_all_pats() + assert len(entries) == 20 diff --git a/tests/validator/test_pioneer_dividend.py b/tests/validator/test_pioneer_dividend.py index e7b54754..be66f9ab 100644 --- a/tests/validator/test_pioneer_dividend.py +++ b/tests/validator/test_pioneer_dividend.py @@ -13,13 +13,10 @@ PIONEER_DIVIDEND_MAX_RATIO, PIONEER_DIVIDEND_RATE_1ST, PIONEER_DIVIDEND_RATE_2ND, - PIONEER_DIVIDEND_RATE_REST, ) from gittensor.validator.oss_contributions.scoring import ( calculate_pioneer_dividends, - finalize_miner_scores, ) -from gittensor.validator.oss_contributions.tier_config import TIERS, Tier from tests.validator.conftest import PRBuilder # ========================================================================== @@ -32,11 +29,6 @@ def builder(): return PRBuilder() -@pytest.fixture -def bronze(): - return TIERS[Tier.BRONZE] - - # 
========================================================================== # TestPioneerEligibility # ========================================================================== @@ -45,718 +37,195 @@ def bronze(): class TestPioneerEligibility: """Tests for PullRequest.is_pioneer_eligible instance method.""" - def test_eligible_when_merged_with_tier_and_token_score(self, builder, bronze): - pr = builder.create(state=PRState.MERGED, tier=bronze, uid=1) + def test_eligible_when_merged_with_token_score(self, builder): + pr = builder.create(state=PRState.MERGED, uid=1) assert pr.is_pioneer_eligible() - def test_ineligible_without_tier(self, builder, bronze): - pr = builder.create(state=PRState.MERGED, tier=bronze, uid=1) - pr.repository_tier_configuration = None + def test_ineligible_when_below_token_score(self, builder): + pr = builder.create(state=PRState.MERGED, uid=1, token_score=MIN_TOKEN_SCORE_FOR_BASE_SCORE - 1) assert not pr.is_pioneer_eligible() - def test_ineligible_without_merge_timestamp(self, builder, bronze): - pr = builder.create(state=PRState.MERGED, tier=bronze, uid=1) - pr.merged_at = None + def test_ineligible_when_open(self, builder): + pr = builder.create(state=PRState.OPEN, uid=1) assert not pr.is_pioneer_eligible() - def test_ineligible_below_token_score_threshold(self, builder, bronze): - pr = builder.create( - state=PRState.MERGED, - tier=bronze, - uid=1, - token_score=MIN_TOKEN_SCORE_FOR_BASE_SCORE - 1, - ) + def test_ineligible_when_closed(self, builder): + pr = builder.create(state=PRState.CLOSED, uid=1) assert not pr.is_pioneer_eligible() - def test_eligible_at_exact_token_score_threshold(self, builder, bronze): - pr = builder.create( - state=PRState.MERGED, - tier=bronze, - uid=1, - token_score=MIN_TOKEN_SCORE_FOR_BASE_SCORE, - ) - assert pr.is_pioneer_eligible() - # ========================================================================== -# TestCalculatePioneerDividends +# TestPioneerDividendCalculation # 
========================================================================== -class TestCalculatePioneerDividends: +class TestPioneerDividendCalculation: """Tests for calculate_pioneer_dividends function.""" - def test_single_miner_gets_no_dividend(self, builder, bronze): - """A lone pioneer with no followers earns zero dividend.""" + def _make_eval(self, uid, prs): + """Helper to create a MinerEvaluation with given merged PRs.""" + eval_ = MinerEvaluation(uid=uid, hotkey=f'hotkey_{uid}') + eval_.merged_pull_requests = prs + return eval_ + + def test_single_contributor_no_dividend(self, builder): + """Pioneer with no followers gets no dividend.""" now = datetime.now(timezone.utc) pr = builder.create( state=PRState.MERGED, - tier=bronze, - repo='org/repo-a', uid=1, + repo='test/repo', merged_at=now, - earned_score=0.0, - collateral_score=0.0, + earned_score=100.0, ) - pr.base_score = 30.0 - evals = {1: MinerEvaluation(uid=1, hotkey='h1', merged_pull_requests=[pr])} + evals = {1: self._make_eval(1, [pr])} calculate_pioneer_dividends(evals) - assert pr.pioneer_rank == 1 assert pr.pioneer_dividend == 0.0 - def test_pioneer_earns_dividend_from_follower(self, builder, bronze): - """Pioneer earns 30% of first follower's earned_score.""" + def test_two_contributors_pioneer_gets_dividend(self, builder): + """Pioneer gets dividend from the 1st follower.""" now = datetime.now(timezone.utc) pioneer_pr = builder.create( state=PRState.MERGED, - tier=bronze, - repo='org/repo-a', uid=1, + repo='test/repo', merged_at=now - timedelta(days=5), - earned_score=0.0, - collateral_score=0.0, + earned_score=100.0, ) follower_pr = builder.create( state=PRState.MERGED, - tier=bronze, - repo='org/repo-a', uid=2, - merged_at=now, - earned_score=0.0, - collateral_score=0.0, + repo='test/repo', + merged_at=now - timedelta(days=1), + earned_score=80.0, ) - pioneer_pr.base_score = 30.0 - follower_pr.base_score = 20.0 - # Simulate earned_scores (all multipliers = 1.0) - pioneer_pr.earned_score = 30.0 
- follower_pr.earned_score = 20.0 evals = { - 1: MinerEvaluation(uid=1, hotkey='h1', merged_pull_requests=[pioneer_pr]), - 2: MinerEvaluation(uid=2, hotkey='h2', merged_pull_requests=[follower_pr]), + 1: self._make_eval(1, [pioneer_pr]), + 2: self._make_eval(2, [follower_pr]), } calculate_pioneer_dividends(evals) - expected_dividend = round(20.0 * PIONEER_DIVIDEND_RATE_1ST, 2) # 20 * 0.30 = 6.0 + expected_dividend = min(80.0 * PIONEER_DIVIDEND_RATE_1ST, 100.0 * PIONEER_DIVIDEND_MAX_RATIO) + assert pioneer_pr.pioneer_dividend == round(expected_dividend, 2) assert pioneer_pr.pioneer_rank == 1 - assert pioneer_pr.pioneer_dividend == expected_dividend assert follower_pr.pioneer_rank == 2 - assert follower_pr.pioneer_dividend == 0.0 - - def test_dividend_from_multiple_followers(self, builder, bronze): - """Pioneer dividend uses per-position rates: 30%, 20%, 10%, 10%.""" - now = datetime.now(timezone.utc) - pioneer_pr = builder.create( - state=PRState.MERGED, - tier=bronze, - repo='org/repo-a', - uid=1, - merged_at=now - timedelta(days=10), - earned_score=0.0, - collateral_score=0.0, - ) - pioneer_pr.base_score = 30.0 - pioneer_pr.earned_score = 30.0 - follower_prs = [] - for uid in range(2, 6): # 4 followers - pr = builder.create( - state=PRState.MERGED, - tier=bronze, - repo='org/repo-a', - uid=uid, - merged_at=now - timedelta(days=10 - uid), - earned_score=0.0, - collateral_score=0.0, - ) - pr.base_score = 10.0 - pr.earned_score = 10.0 - follower_prs.append(pr) - evals = {1: MinerEvaluation(uid=1, hotkey='h1', merged_pull_requests=[pioneer_pr])} - for pr in follower_prs: - evals[pr.uid] = MinerEvaluation(uid=pr.uid, hotkey=f'h{pr.uid}', merged_pull_requests=[pr]) - calculate_pioneer_dividends(evals) - - # 1st: 10*0.30=3.0, 2nd: 10*0.20=2.0, 3rd: 10*0.10=1.0, 4th: 10*0.10=1.0 - expected_dividend = round( - 10.0 * PIONEER_DIVIDEND_RATE_1ST - + 10.0 * PIONEER_DIVIDEND_RATE_2ND - + 10.0 * PIONEER_DIVIDEND_RATE_REST - + 10.0 * PIONEER_DIVIDEND_RATE_REST, - 2, - ) - assert 
pioneer_pr.pioneer_dividend == expected_dividend - - def test_dividend_grows_with_many_followers(self, builder, bronze): - """Dividend scales with followers but is capped at PIONEER_DIVIDEND_MAX_RATIO × own earned.""" - now = datetime.now(timezone.utc) - pioneer_pr = builder.create( - state=PRState.MERGED, - tier=bronze, - repo='org/repo-a', - uid=1, - merged_at=now - timedelta(days=30), - earned_score=0.0, - collateral_score=0.0, - ) - pioneer_pr.base_score = 30.0 - pioneer_pr.earned_score = 30.0 - - follower_prs = [] - for uid in range(2, 12): # 10 followers - pr = builder.create( - state=PRState.MERGED, - tier=bronze, - repo='org/repo-a', - uid=uid, - merged_at=now - timedelta(days=30 - uid), - earned_score=0.0, - collateral_score=0.0, - ) - pr.base_score = 30.0 - pr.earned_score = 30.0 - follower_prs.append(pr) - evals = {1: MinerEvaluation(uid=1, hotkey='h1', merged_pull_requests=[pioneer_pr])} - for pr in follower_prs: - evals[pr.uid] = MinerEvaluation(uid=pr.uid, hotkey=f'h{pr.uid}', merged_pull_requests=[pr]) - calculate_pioneer_dividends(evals) - # Raw: 30*0.30=9 + 30*0.20=6 + 8*30*0.10=24 → 39.0 - # Cap: min(39.0, 30.0 * 1.0) = 30.0 - max_dividend = round(30.0 * PIONEER_DIVIDEND_MAX_RATIO, 2) - assert pioneer_pr.pioneer_dividend == max_dividend - assert pioneer_pr.earned_score == 30.0 + max_dividend - - def test_dividend_cap_at_max_ratio(self, builder, bronze): - """Dividend is capped at PIONEER_DIVIDEND_MAX_RATIO × pioneer's own earned_score.""" + def test_three_contributors_diminishing_rates(self, builder): + """Pioneer dividend diminishes across follower positions.""" now = datetime.now(timezone.utc) pioneer_pr = builder.create( state=PRState.MERGED, - tier=bronze, - repo='org/repo-a', uid=1, + repo='test/repo', merged_at=now - timedelta(days=10), - earned_score=0.0, - collateral_score=0.0, + earned_score=200.0, ) - pioneer_pr.base_score = 10.0 - pioneer_pr.earned_score = 10.0 - # 1 follower with much higher earned_score - follower_pr = builder.create( 
+ f1_pr = builder.create( state=PRState.MERGED, - tier=bronze, - repo='org/repo-a', - uid=2, - merged_at=now, - earned_score=0.0, - collateral_score=0.0, - ) - follower_pr.base_score = 100.0 - follower_pr.earned_score = 100.0 - evals = { - 1: MinerEvaluation(uid=1, hotkey='h1', merged_pull_requests=[pioneer_pr]), - 2: MinerEvaluation(uid=2, hotkey='h2', merged_pull_requests=[follower_pr]), - } - calculate_pioneer_dividends(evals) - - # Raw: 100*0.30 = 30.0, Cap: min(30.0, 10.0*1.0) = 10.0 - assert pioneer_pr.pioneer_dividend == round(10.0 * PIONEER_DIVIDEND_MAX_RATIO, 2) - assert pioneer_pr.earned_score == 10.0 + pioneer_pr.pioneer_dividend - - def test_multiple_follower_prs_summed(self, builder, bronze): - """A follower with multiple PRs on the same repo contributes all earned_scores to dividend.""" - now = datetime.now(timezone.utc) - pioneer_pr = builder.create( - state=PRState.MERGED, - tier=bronze, - repo='org/repo-a', - uid=1, - merged_at=now - timedelta(days=10), - earned_score=0.0, - collateral_score=0.0, - ) - pioneer_pr.base_score = 30.0 - pioneer_pr.earned_score = 30.0 - # Follower has 3 PRs on the same repo - f_pr1 = builder.create( - state=PRState.MERGED, - tier=bronze, - repo='org/repo-a', uid=2, + repo='test/repo', merged_at=now - timedelta(days=5), - earned_score=0.0, - collateral_score=0.0, + earned_score=100.0, ) - f_pr2 = builder.create( + f2_pr = builder.create( state=PRState.MERGED, - tier=bronze, - repo='org/repo-a', - uid=2, - merged_at=now - timedelta(days=3), - earned_score=0.0, - collateral_score=0.0, - ) - f_pr3 = builder.create( - state=PRState.MERGED, - tier=bronze, - repo='org/repo-a', - uid=2, + uid=3, + repo='test/repo', merged_at=now - timedelta(days=1), - earned_score=0.0, - collateral_score=0.0, + earned_score=80.0, ) - f_pr1.base_score = 5.0 - f_pr1.earned_score = 5.0 - f_pr2.base_score = 5.0 - f_pr2.earned_score = 5.0 - f_pr3.base_score = 5.0 - f_pr3.earned_score = 5.0 evals = { - 1: MinerEvaluation(uid=1, hotkey='h1', 
merged_pull_requests=[pioneer_pr]), - 2: MinerEvaluation(uid=2, hotkey='h2', merged_pull_requests=[f_pr1, f_pr2, f_pr3]), + 1: self._make_eval(1, [pioneer_pr]), + 2: self._make_eval(2, [f1_pr]), + 3: self._make_eval(3, [f2_pr]), } calculate_pioneer_dividends(evals) - # Single follower (position 0 → 30% rate), sum of ALL their earned_scores: (5+5+5) * 0.30 - expected = round((5.0 + 5.0 + 5.0) * PIONEER_DIVIDEND_RATE_1ST, 2) - assert pioneer_pr.pioneer_dividend == expected + expected = 100.0 * PIONEER_DIVIDEND_RATE_1ST + 80.0 * PIONEER_DIVIDEND_RATE_2ND + expected_capped = min(expected, 200.0 * PIONEER_DIVIDEND_MAX_RATIO) + assert pioneer_pr.pioneer_dividend == round(expected_capped, 2) - def test_repos_are_independent(self, builder, bronze): - """Pioneer status and dividends are calculated per repo independently.""" + def test_dividend_capped(self, builder): + """Pioneer dividend is capped at PIONEER_DIVIDEND_MAX_RATIO × pioneer's earned_score.""" now = datetime.now(timezone.utc) - # UID 1 pioneers repo-a, UID 2 pioneers repo-b - pr1a = builder.create( + pioneer_pr = builder.create( state=PRState.MERGED, - tier=bronze, - repo='org/repo-a', uid=1, + repo='test/repo', merged_at=now - timedelta(days=10), - earned_score=0.0, - collateral_score=0.0, - ) - pr2a = builder.create( - state=PRState.MERGED, - tier=bronze, - repo='org/repo-a', - uid=2, - merged_at=now - timedelta(days=5), - earned_score=0.0, - collateral_score=0.0, - ) - pr2b = builder.create( - state=PRState.MERGED, - tier=bronze, - repo='org/repo-b', - uid=2, - merged_at=now - timedelta(days=10), - earned_score=0.0, - collateral_score=0.0, + earned_score=10.0, ) - pr1b = builder.create( - state=PRState.MERGED, - tier=bronze, - repo='org/repo-b', - uid=1, - merged_at=now - timedelta(days=5), - earned_score=0.0, - collateral_score=0.0, - ) - for pr in [pr1a, pr2a, pr2b, pr1b]: - pr.base_score = 30.0 - pr.earned_score = 30.0 - evals = { - 1: MinerEvaluation(uid=1, hotkey='h1', merged_pull_requests=[pr1a, pr1b]), 
- 2: MinerEvaluation(uid=2, hotkey='h2', merged_pull_requests=[pr2a, pr2b]), - } - calculate_pioneer_dividends(evals) + # Large follower scores + followers = [] + for i in range(5): + pr = builder.create( + state=PRState.MERGED, + uid=i + 2, + repo='test/repo', + merged_at=now - timedelta(days=5 - i), + earned_score=500.0, + ) + followers.append(pr) - # UID 1 is pioneer on repo-a - assert pr1a.pioneer_rank == 1 - assert pr1a.pioneer_dividend == round(30.0 * PIONEER_DIVIDEND_RATE_1ST, 2) - # UID 2 is pioneer on repo-b - assert pr2b.pioneer_rank == 1 - assert pr2b.pioneer_dividend == round(30.0 * PIONEER_DIVIDEND_RATE_1ST, 2) + evals = {1: self._make_eval(1, [pioneer_pr])} + for i, fpr in enumerate(followers): + evals[i + 2] = self._make_eval(i + 2, [fpr]) - def test_low_quality_pr_excluded_from_pioneer(self, builder, bronze): - """Low token_score PR cannot be pioneer; quality follower becomes pioneer.""" - now = datetime.now(timezone.utc) - snipe_pr = builder.create( - state=PRState.MERGED, - tier=bronze, - repo='org/repo-a', - uid=1, - merged_at=now - timedelta(days=10), - token_score=MIN_TOKEN_SCORE_FOR_BASE_SCORE - 1, - earned_score=0.0, - collateral_score=0.0, - ) - snipe_pr.base_score = 5.0 - good_pr = builder.create( - state=PRState.MERGED, - tier=bronze, - repo='org/repo-a', - uid=2, - merged_at=now - timedelta(days=5), - earned_score=0.0, - collateral_score=0.0, - ) - good_pr.base_score = 30.0 - evals = { - 1: MinerEvaluation(uid=1, hotkey='h1', merged_pull_requests=[snipe_pr]), - 2: MinerEvaluation(uid=2, hotkey='h2', merged_pull_requests=[good_pr]), - } calculate_pioneer_dividends(evals) - # Snipe PR is not eligible, so it keeps default pioneer_rank=0 - assert snipe_pr.pioneer_rank == 0 - assert snipe_pr.pioneer_dividend == 0.0 - # Good PR becomes the solo pioneer (no followers -> no dividend) - assert good_pr.pioneer_rank == 1 - assert good_pr.pioneer_dividend == 0.0 + max_expected = 10.0 * PIONEER_DIVIDEND_MAX_RATIO + assert pioneer_pr.pioneer_dividend 
== round(max_expected, 2) - def test_ineligible_pr_does_not_receive_rank(self, builder, bronze): - """Ineligible PR from same miner on same repo must not get pioneer_rank.""" + def test_different_repos_independent(self, builder): + """Pioneer dividends are independent per repository.""" now = datetime.now(timezone.utc) - eligible_pr = builder.create( - state=PRState.MERGED, - tier=bronze, - repo='org/repo-a', - uid=1, - merged_at=now - timedelta(days=10), - earned_score=0.0, - collateral_score=0.0, - ) - eligible_pr.base_score = 30.0 - eligible_pr.earned_score = 30.0 - ineligible_pr = builder.create( + pr_a = builder.create( state=PRState.MERGED, - tier=bronze, - repo='org/repo-a', uid=1, + repo='test/repo-a', merged_at=now - timedelta(days=5), - token_score=MIN_TOKEN_SCORE_FOR_BASE_SCORE - 1, - earned_score=0.0, - collateral_score=0.0, + earned_score=100.0, ) - ineligible_pr.base_score = 2.0 - ineligible_pr.earned_score = 2.0 - evals = { - 1: MinerEvaluation(uid=1, hotkey='h1', merged_pull_requests=[eligible_pr, ineligible_pr]), - } - calculate_pioneer_dividends(evals) - - assert eligible_pr.pioneer_rank == 1 - assert ineligible_pr.pioneer_rank == 0 # must stay default - - def test_deterministic_tiebreak_by_pr_number(self, builder, bronze): - """Same merged_at timestamp: lower PR number wins pioneer status.""" - now = datetime.now(timezone.utc) - pr1 = builder.create( + pr_b = builder.create( state=PRState.MERGED, - tier=bronze, - repo='org/repo-a', - uid=1, - merged_at=now, - number=10, - earned_score=0.0, - collateral_score=0.0, - ) - pr2 = builder.create( - state=PRState.MERGED, - tier=bronze, - repo='org/repo-a', uid=2, - merged_at=now, - number=20, - earned_score=0.0, - collateral_score=0.0, - ) - pr1.base_score = 30.0 - pr2.base_score = 30.0 - evals = { - 1: MinerEvaluation(uid=1, hotkey='h1', merged_pull_requests=[pr1]), - 2: MinerEvaluation(uid=2, hotkey='h2', merged_pull_requests=[pr2]), - } - calculate_pioneer_dividends(evals) - - assert pr1.pioneer_rank 
== 1 - assert pr2.pioneer_rank == 2 - - def test_only_pioneering_pr_gets_dividend(self, builder, bronze): - """Follow-up PRs by the pioneer on same repo don't get dividend.""" - now = datetime.now(timezone.utc) - pioneer_pr = builder.create( - state=PRState.MERGED, - tier=bronze, - repo='org/repo-a', - uid=1, - merged_at=now - timedelta(days=10), - earned_score=0.0, - collateral_score=0.0, - ) - followup_pr = builder.create( - state=PRState.MERGED, - tier=bronze, - repo='org/repo-a', - uid=1, - merged_at=now - timedelta(days=2), - earned_score=0.0, - collateral_score=0.0, - ) - follower_pr = builder.create( - state=PRState.MERGED, - tier=bronze, - repo='org/repo-a', - uid=2, - merged_at=now, - earned_score=0.0, - collateral_score=0.0, + repo='test/repo-b', + merged_at=now - timedelta(days=5), + earned_score=100.0, ) - pioneer_pr.base_score = 30.0 - pioneer_pr.earned_score = 30.0 - followup_pr.base_score = 25.0 - followup_pr.earned_score = 25.0 - follower_pr.base_score = 10.0 - follower_pr.earned_score = 10.0 evals = { - 1: MinerEvaluation(uid=1, hotkey='h1', merged_pull_requests=[pioneer_pr, followup_pr]), - 2: MinerEvaluation(uid=2, hotkey='h2', merged_pull_requests=[follower_pr]), + 1: self._make_eval(1, [pr_a]), + 2: self._make_eval(2, [pr_b]), } calculate_pioneer_dividends(evals) - # Only the pioneering PR gets the dividend - assert pioneer_pr.pioneer_dividend == round(10.0 * PIONEER_DIVIDEND_RATE_1ST, 2) - assert followup_pr.pioneer_dividend == 0.0 - - def test_empty_evaluations(self, builder, bronze): - """No crash on empty evaluations.""" - evals = {} - calculate_pioneer_dividends(evals) # Should not raise + # No followers on either repo, so no dividends + assert pr_a.pioneer_dividend == 0.0 + assert pr_b.pioneer_dividend == 0.0 - def test_no_eligible_prs(self, builder, bronze): - """No crash when all PRs are ineligible.""" - now = datetime.now(timezone.utc) - pr = builder.create( - state=PRState.MERGED, - tier=bronze, - repo='org/repo-a', - uid=1, - 
merged_at=now, - token_score=0.0, - earned_score=0.0, - collateral_score=0.0, - ) - evals = {1: MinerEvaluation(uid=1, hotkey='h1', merged_pull_requests=[pr])} - calculate_pioneer_dividends(evals) - assert pr.pioneer_rank == 0 - assert pr.pioneer_dividend == 0.0 - - -# ========================================================================== -# TestFinalizeWithDividend -# ========================================================================== - - -class TestFinalizeWithDividend: - """Integration tests: pioneer dividend flows through finalize_miner_scores.""" - - def test_pioneer_dividend_additive_to_earned_score(self, builder, bronze): - """Pioneer dividend is added on top of earned_score: base × multipliers + dividend.""" + def test_ineligible_prs_excluded(self, builder): + """PRs below token score threshold don't participate in pioneer calculation.""" now = datetime.now(timezone.utc) pioneer_pr = builder.create( state=PRState.MERGED, - tier=bronze, - repo='org/repo-a', uid=1, + repo='test/repo', merged_at=now - timedelta(days=5), - earned_score=0.0, - collateral_score=0.0, + earned_score=100.0, ) - follower_pr = builder.create( + ineligible_pr = builder.create( state=PRState.MERGED, - tier=bronze, - repo='org/repo-a', uid=2, - merged_at=now, - earned_score=0.0, - collateral_score=0.0, + repo='test/repo', + merged_at=now - timedelta(days=1), + earned_score=50.0, + token_score=MIN_TOKEN_SCORE_FOR_BASE_SCORE - 1, ) - pioneer_pr.base_score = 30.0 - follower_pr.base_score = 30.0 - # Compute earned_scores first (base × multipliers) - pioneer_pr.calculate_final_earned_score() - follower_pr.calculate_final_earned_score() - assert pioneer_pr.earned_score == 30.0 # base × 1.0 - assert follower_pr.earned_score == 30.0 - - # Now apply dividend (uses follower earned_score) evals = { - 1: MinerEvaluation(uid=1, hotkey='h1', merged_pull_requests=[pioneer_pr]), - 2: MinerEvaluation(uid=2, hotkey='h2', merged_pull_requests=[follower_pr]), + 1: self._make_eval(1, 
[pioneer_pr]), + 2: self._make_eval(2, [ineligible_pr]), } calculate_pioneer_dividends(evals) - # Dividend = 30% of follower's earned_score - expected_dividend = round(30.0 * PIONEER_DIVIDEND_RATE_1ST, 2) - assert pioneer_pr.pioneer_dividend == expected_dividend - # Pioneer earned_score = base_earned + dividend = 30 + 9 = 39 - assert pioneer_pr.earned_score == 30.0 + expected_dividend - assert pioneer_pr.earned_score > follower_pr.earned_score - - def test_follower_keeps_full_score(self, builder, bronze): - """Follower's score is not reduced — dividend is additive, not zero-sum.""" - now = datetime.now(timezone.utc) - # Create a solo miner scenario for baseline - solo_pr = builder.create( - state=PRState.MERGED, - tier=bronze, - repo='org/solo-repo', - uid=3, - merged_at=now, - earned_score=0.0, - collateral_score=0.0, - ) - solo_pr.base_score = 30.0 - solo_eval = MinerEvaluation(uid=3, hotkey='h3', merged_pull_requests=[solo_pr]) - solo_eval.unique_repos_contributed_to.add('org/solo-repo') - - # Create a follower scenario - pioneer_pr = builder.create( - state=PRState.MERGED, - tier=bronze, - repo='org/repo-a', - uid=1, - merged_at=now - timedelta(days=5), - earned_score=0.0, - collateral_score=0.0, - ) - follower_pr = builder.create( - state=PRState.MERGED, - tier=bronze, - repo='org/repo-a', - uid=2, - merged_at=now, - earned_score=0.0, - collateral_score=0.0, - ) - pioneer_pr.base_score = 30.0 - follower_pr.base_score = 30.0 - eval1 = MinerEvaluation(uid=1, hotkey='h1', merged_pull_requests=[pioneer_pr]) - eval1.unique_repos_contributed_to.add('org/repo-a') - eval2 = MinerEvaluation(uid=2, hotkey='h2', merged_pull_requests=[follower_pr]) - eval2.unique_repos_contributed_to.add('org/repo-a') - - finalize_miner_scores({1: eval1, 2: eval2, 3: solo_eval}) - - # Follower's earned_score should equal solo miner's (no penalty) - assert follower_pr.pioneer_dividend == 0.0 - - -# ========================================================================== -# 
TestPioneerIncentiveEvidence -# ========================================================================== - - -class TestPioneerIncentiveEvidence: - """Evidence tests proving the mechanism rewards exploration over pile-on.""" - - def test_exploration_beats_pile_on(self, builder, bronze): - """5 miners piling on 1 repo: only pioneer gets dividend. Exploring avoids the crowd.""" - now = datetime.now(timezone.utc) - - # Pile-on: 5 miners on 1 repo — only 1 pioneer - builder.reset() - pile_evals = {} - for uid in range(1, 6): - pr = builder.create( - state=PRState.MERGED, - tier=bronze, - repo='org/saturated', - uid=uid, - merged_at=now - timedelta(days=uid), - earned_score=0.0, - collateral_score=0.0, - ) - pr.base_score = 30.0 - pr.earned_score = 30.0 - pile_evals[uid] = MinerEvaluation(uid=uid, hotkey=f'h{uid}', merged_pull_requests=[pr]) - calculate_pioneer_dividends(pile_evals) - pile_total_dividend = sum(pr.pioneer_dividend for ev in pile_evals.values() for pr in ev.merged_pull_requests) - - # With pile-on, only pioneer gets dividend (based on follower earned_scores) - expected = round( - 30.0 * PIONEER_DIVIDEND_RATE_1ST - + 30.0 * PIONEER_DIVIDEND_RATE_2ND - + 30.0 * PIONEER_DIVIDEND_RATE_REST - + 30.0 * PIONEER_DIVIDEND_RATE_REST, - 2, - ) - assert pile_total_dividend == expected - - def test_pioneer_earns_more_with_more_followers(self, builder, bronze): - """Pioneer's reward naturally grows as more miners follow — self-scaling incentive.""" - now = datetime.now(timezone.utc) - - # Scenario 1: 1 follower - builder.reset() - pr1 = builder.create( - state=PRState.MERGED, - tier=bronze, - repo='org/repo-a', - uid=1, - merged_at=now - timedelta(days=10), - earned_score=0.0, - collateral_score=0.0, - ) - pr1.base_score = 30.0 - pr1.earned_score = 30.0 - f1 = builder.create( - state=PRState.MERGED, - tier=bronze, - repo='org/repo-a', - uid=2, - merged_at=now, - earned_score=0.0, - collateral_score=0.0, - ) - f1.base_score = 30.0 - f1.earned_score = 30.0 - evals1 = { 
- 1: MinerEvaluation(uid=1, hotkey='h1', merged_pull_requests=[pr1]), - 2: MinerEvaluation(uid=2, hotkey='h2', merged_pull_requests=[f1]), - } - calculate_pioneer_dividends(evals1) - div_1_follower = pr1.pioneer_dividend - - # Scenario 2: 5 followers - builder.reset() - pr2 = builder.create( - state=PRState.MERGED, - tier=bronze, - repo='org/repo-b', - uid=1, - merged_at=now - timedelta(days=10), - earned_score=0.0, - collateral_score=0.0, - ) - pr2.base_score = 30.0 - pr2.earned_score = 30.0 - followers = [] - for uid in range(2, 7): - f = builder.create( - state=PRState.MERGED, - tier=bronze, - repo='org/repo-b', - uid=uid, - merged_at=now - timedelta(days=10 - uid), - earned_score=0.0, - collateral_score=0.0, - ) - f.base_score = 30.0 - f.earned_score = 30.0 - followers.append(f) - evals2 = {1: MinerEvaluation(uid=1, hotkey='h1', merged_pull_requests=[pr2])} - for f in followers: - evals2[f.uid] = MinerEvaluation(uid=f.uid, hotkey=f'h{f.uid}', merged_pull_requests=[f]) - calculate_pioneer_dividends(evals2) - div_5_followers = pr2.pioneer_dividend - - assert div_5_followers > div_1_follower + # Ineligible follower doesn't count + assert pioneer_pr.pioneer_dividend == 0.0 diff --git a/tests/validator/test_review_quality_multiplier.py b/tests/validator/test_review_quality_multiplier.py index 06bc0ce2..a443aa2e 100644 --- a/tests/validator/test_review_quality_multiplier.py +++ b/tests/validator/test_review_quality_multiplier.py @@ -20,7 +20,6 @@ from gittensor.constants import MAINTAINER_ASSOCIATIONS, REVIEW_PENALTY_RATE from gittensor.utils.github_api_tools import get_pull_request_maintainer_changes_requested_count from gittensor.validator.oss_contributions.scoring import calculate_review_quality_multiplier -from gittensor.validator.oss_contributions.tier_config import TIERS, Tier from tests.validator.conftest import PRBuilder # ============================================================================ @@ -33,11 +32,6 @@ def builder(): return PRBuilder() 
-@pytest.fixture -def bronze(): - return TIERS[Tier.BRONZE] - - # ============================================================================ # Helpers # ============================================================================ @@ -101,16 +95,16 @@ def test_returns_float(self): class TestReviewQualityMultiplierOnPullRequest: """Tests for review_quality_multiplier field on PullRequest and its effect on earned_score.""" - def test_default_multiplier_is_one(self, builder, bronze): - pr = builder.create(state=PRState.MERGED, tier=bronze) + def test_default_multiplier_is_one(self, builder): + pr = builder.create(state=PRState.MERGED) assert pr.review_quality_multiplier == 1.0 - def test_default_changes_requested_count_is_zero(self, builder, bronze): - pr = builder.create(state=PRState.MERGED, tier=bronze) + def test_default_changes_requested_count_is_zero(self, builder): + pr = builder.create(state=PRState.MERGED) assert pr.changes_requested_count == 0 - def test_review_multiplier_reduces_earned_score(self, builder, bronze): - pr = builder.create(state=PRState.MERGED, tier=bronze) + def test_review_multiplier_reduces_earned_score(self, builder): + pr = builder.create(state=PRState.MERGED) pr.base_score = 100.0 pr.repo_weight_multiplier = 1.0 pr.issue_multiplier = 1.0 @@ -126,8 +120,8 @@ def test_review_multiplier_reduces_earned_score(self, builder, bronze): assert score_one_review == pytest.approx(score_no_penalty * 0.88) - def test_zero_multiplier_zeroes_earned_score(self, builder, bronze): - pr = builder.create(state=PRState.MERGED, tier=bronze) + def test_zero_multiplier_zeroes_earned_score(self, builder): + pr = builder.create(state=PRState.MERGED) pr.base_score = 50.0 pr.repo_weight_multiplier = 1.0 pr.issue_multiplier = 1.0 @@ -138,9 +132,9 @@ def test_zero_multiplier_zeroes_earned_score(self, builder, bronze): assert pr.calculate_final_earned_score() == 0.0 - def test_multiplier_participates_in_product(self, builder, bronze): + def 
test_multiplier_participates_in_product(self, builder): """review_quality_multiplier participates in the product of all multipliers.""" - pr = builder.create(state=PRState.MERGED, tier=bronze) + pr = builder.create(state=PRState.MERGED) pr.base_score = 80.0 pr.repo_weight_multiplier = 1.0 pr.issue_multiplier = 1.0 diff --git a/tests/validator/test_tier_credibility.py b/tests/validator/test_tier_credibility.py deleted file mode 100644 index 441ac396..00000000 --- a/tests/validator/test_tier_credibility.py +++ /dev/null @@ -1,1497 +0,0 @@ -# The MIT License (MIT) -# Copyright © 2025 Entrius - -""" -Unit tests for tier credibility and unlocking logic. - -Uses pytest fixtures from conftest.py for clean, reusable test data. - -Run tests: - pytest tests/validator/test_tier_credibility.py -v - -Run specific test class: - pytest tests/validator/test_tier_credibility.py::TestTierUnlocking -v -""" - -import pytest - -from gittensor.classes import PRState -from gittensor.validator.oss_contributions.credibility import ( - calculate_credibility_per_tier, - calculate_tier_stats, - is_tier_unlocked, -) -from gittensor.validator.oss_contributions.tier_config import ( - TIERS, - TIERS_ORDER, - Tier, - TierConfig, - TierStats, - get_next_tier, - get_tier_from_config, -) - - -class TestGetNextTier: - """Test get_next_tier helper function.""" - - def test_bronze_next_is_silver(self): - """Bronze → Silver.""" - assert get_next_tier(Tier.BRONZE) == Tier.SILVER - - def test_silver_next_is_gold(self): - """Silver → Gold.""" - assert get_next_tier(Tier.SILVER) == Tier.GOLD - - def test_gold_next_is_none(self): - """Gold is top tier, no next.""" - assert get_next_tier(Tier.GOLD) is None - - def test_progression_matches_tiers_order(self): - """Verify get_next_tier follows TIERS_ORDER.""" - for i, tier in enumerate(TIERS_ORDER[:-1]): # All except last - expected_next = TIERS_ORDER[i + 1] - assert get_next_tier(tier) == expected_next - - # Last tier has no next - assert 
get_next_tier(TIERS_ORDER[-1]) is None - - -class TestGetTierFromConfig: - """Test get_tier_from_config reverse lookup.""" - - def test_bronze_config_returns_bronze(self, bronze_config): - """Bronze config → Tier.BRONZE.""" - assert get_tier_from_config(bronze_config) == Tier.BRONZE - - def test_silver_config_returns_silver(self, silver_config): - """Silver config → Tier.SILVER.""" - assert get_tier_from_config(silver_config) == Tier.SILVER - - def test_gold_config_returns_gold(self, gold_config): - """Gold config → Tier.GOLD.""" - assert get_tier_from_config(gold_config) == Tier.GOLD - - def test_unknown_config_returns_none(self): - """Unknown config returns None.""" - fake_config = TierConfig( - required_credibility=0.99, - required_min_token_score=999.0, - required_unique_repos_count=999, - required_min_token_score_per_repo=999.0, - credibility_scalar=999, - ) - assert get_tier_from_config(fake_config) is None - - def test_all_tiers_have_reversible_configs(self): - """Every tier in TIERS can be looked up from its config.""" - for tier, config in TIERS.items(): - assert get_tier_from_config(config) == tier - - -class TestTiersOrderIntegrity: - """Test TIERS_ORDER and TIERS dict structural integrity.""" - - def test_tiers_order_starts_with_bronze(self): - """First tier should be Bronze (entry level).""" - assert TIERS_ORDER[0] == Tier.BRONZE - - def test_tiers_order_ends_with_gold(self): - """Last tier should be Gold (highest).""" - assert TIERS_ORDER[-1] == Tier.GOLD - - def test_tiers_order_contains_all_tiers(self): - """TIERS_ORDER should contain all Tier enum values.""" - assert set(TIERS_ORDER) == set(Tier) - - def test_tiers_dict_has_config_for_all_tiers(self): - """Every Tier enum value should have a config in TIERS.""" - for tier in Tier: - assert tier in TIERS - assert isinstance(TIERS[tier], TierConfig) - - def test_all_tiers_have_requirements(self): - """All tiers should have unlock requirements (including Bronze).""" - for tier in TIERS_ORDER: - config 
= TIERS[tier] - assert config.required_credibility is not None - assert config.required_unique_repos_count is not None - assert config.required_unique_repos_count > 0 - assert 0 < config.required_credibility <= 1.0 - - def test_credibility_scalars_increase_with_tier(self): - """Higher tiers should have higher credibility scalars.""" - scalars = [TIERS[tier].credibility_scalar for tier in TIERS_ORDER] - for i in range(len(scalars) - 1): - assert scalars[i] < scalars[i + 1], f'Scalar should increase: {scalars}' - - def test_token_score_requirements_increase_with_tier(self): - """Higher tiers should require more token score per repo.""" - prev_token_per_repo = 0.0 - - for tier in TIERS_ORDER: - config = TIERS[tier] - token_per_repo = config.required_min_token_score_per_repo or 0.0 - assert token_per_repo >= prev_token_per_repo - prev_token_per_repo = token_per_repo - - def test_credibility_requirements_decrease_with_tier(self): - """Higher tiers have lower credibility requirements (harder repos, more lenient).""" - prev_credibility = 1.0 - - for tier in TIERS_ORDER: - config = TIERS[tier] - assert config.required_credibility <= prev_credibility - prev_credibility = config.required_credibility - - -# ============================================================================ -# TierStats Tests -# ============================================================================ - - -class TestTierStats: - """Test TierStats dataclass properties.""" - - def test_total_attempts_calculation(self): - stats = TierStats(merged_count=5, closed_count=3) - assert stats.total_attempts == 8 - - def test_total_attempts_zero(self): - stats = TierStats() - assert stats.total_attempts == 0 - - def test_total_prs_includes_open(self): - stats = TierStats(merged_count=5, closed_count=3, open_count=2) - assert stats.total_prs == 10 - - def test_credibility_formula(self): - stats = TierStats(merged_count=7, closed_count=3) - assert stats.credibility == 0.7 - - def 
test_credibility_100_percent(self): - stats = TierStats(merged_count=10, closed_count=0) - assert stats.credibility == 1.0 - - def test_credibility_0_percent(self): - stats = TierStats(merged_count=0, closed_count=10) - assert stats.credibility == 0.0 - - def test_credibility_no_attempts_is_zero(self): - stats = TierStats() - assert stats.credibility == 0.0 - - def test_open_prs_dont_affect_credibility(self): - stats = TierStats(merged_count=5, closed_count=5, open_count=100) - assert stats.credibility == 0.5 - assert stats.total_attempts == 10 # Excludes open - - -# ============================================================================ -# calculate_tier_stats Tests -# ============================================================================ - - -class TestCalculateTierStats: - """Test calculate_tier_stats function.""" - - def test_empty_lists(self): - stats = calculate_tier_stats([], [], []) - for tier in Tier: - assert stats[tier].merged_count == 0 - assert stats[tier].closed_count == 0 - assert stats[tier].open_count == 0 - - def test_counts_merged_per_tier(self, pr_factory, bronze_config, silver_config, gold_config): - merged = [ - pr_factory.merged(bronze_config), - pr_factory.merged(bronze_config), - pr_factory.merged(silver_config), - pr_factory.merged(gold_config), - ] - - stats = calculate_tier_stats(merged, [], []) - - assert stats[Tier.BRONZE].merged_count == 2 - assert stats[Tier.SILVER].merged_count == 1 - assert stats[Tier.GOLD].merged_count == 1 - - def test_counts_closed_per_tier(self, pr_factory, bronze_config, silver_config): - closed = [ - pr_factory.closed(bronze_config), - pr_factory.closed(silver_config), - pr_factory.closed(silver_config), - ] - - stats = calculate_tier_stats([], closed, []) - - assert stats[Tier.BRONZE].closed_count == 1 - assert stats[Tier.SILVER].closed_count == 2 - assert stats[Tier.GOLD].closed_count == 0 - - def test_counts_open_per_tier(self, pr_factory, bronze_config, gold_config): - open_prs = [ - 
pr_factory.open(bronze_config), - pr_factory.open(bronze_config), - pr_factory.open(gold_config), - ] - - stats = calculate_tier_stats([], [], open_prs) - - assert stats[Tier.BRONZE].open_count == 2 - assert stats[Tier.SILVER].open_count == 0 - assert stats[Tier.GOLD].open_count == 1 - - def test_scoring_details_off_by_default(self, pr_factory, bronze_config): - merged = [pr_factory.merged(bronze_config, earned_score=999.0)] - stats = calculate_tier_stats(merged, [], []) - assert stats[Tier.BRONZE].earned_score == 0.0 - - def test_scoring_details_included_when_requested(self, pr_factory, bronze_config): - merged = [ - pr_factory.merged(bronze_config, earned_score=100.0), - pr_factory.merged(bronze_config, earned_score=150.0), - ] - open_prs = [pr_factory.open(bronze_config, collateral_score=25.0)] - - stats = calculate_tier_stats(merged, [], open_prs, include_scoring_details=True) - - assert stats[Tier.BRONZE].earned_score == 250.0 - assert stats[Tier.BRONZE].collateral_score == 25.0 - - def test_ignores_prs_without_tier_config(self, pr_factory, bronze_config): - from datetime import datetime, timezone - - from gittensor.classes import PullRequest - - pr_no_tier = PullRequest( - number=1, - repository_full_name='test/repo', - uid=0, - hotkey='test', - github_id='123', - title='No tier', - author_login='test', - merged_at=datetime.now(timezone.utc), - created_at=datetime.now(timezone.utc), - pr_state=PRState.MERGED, - repository_tier_configuration=None, - ) - - stats = calculate_tier_stats([pr_no_tier], [], []) - - for tier in Tier: - assert stats[tier].merged_count == 0 - - -# ============================================================================ -# is_tier_unlocked Tests -# ============================================================================ - - -class TestTierUnlocking: - """Test is_tier_unlocked function.""" - - def _unlocked_bronze_stats(self) -> TierStats: - """Helper to create Bronze stats that meet unlock requirements (including qualified 
repos).""" - bronze_config = TIERS[Tier.BRONZE] - required_repos = bronze_config.required_unique_repos_count or 3 - token_per_repo = bronze_config.required_min_token_score_per_repo or 5.0 - return TierStats( - merged_count=required_repos, - closed_count=0, - unique_repo_contribution_count=required_repos, - qualified_unique_repo_count=required_repos, - token_score=required_repos * token_per_repo, - ) - - def _unlocked_silver_stats(self) -> TierStats: - """Helper to create Silver stats that meet unlock requirements (including qualified repos).""" - silver_config = TIERS[Tier.SILVER] - required_repos = silver_config.required_unique_repos_count or 3 - token_per_repo = silver_config.required_min_token_score_per_repo or 10.0 - min_total = silver_config.required_min_token_score or 50.0 - return TierStats( - merged_count=required_repos, - closed_count=0, - unique_repo_contribution_count=required_repos, - qualified_unique_repo_count=required_repos, - token_score=max(required_repos * token_per_repo, min_total), - ) - - def _unlocked_gold_stats(self) -> TierStats: - """Helper to create Gold stats that meet unlock requirements (including qualified repos).""" - gold_config = TIERS[Tier.GOLD] - required_repos = gold_config.required_unique_repos_count or 3 - token_per_repo = gold_config.required_min_token_score_per_repo or 25.0 - min_total = gold_config.required_min_token_score or 150.0 - return TierStats( - merged_count=required_repos, - closed_count=0, - unique_repo_contribution_count=required_repos, - qualified_unique_repo_count=required_repos, - token_score=max(required_repos * token_per_repo, min_total), - ) - - def test_bronze_locked_with_no_activity(self, empty_tier_stats): - """Bronze is locked when miner has no PRs.""" - assert is_tier_unlocked(Tier.BRONZE, empty_tier_stats) is False - - def test_bronze_requires_merges_and_credibility(self): - """Bronze requires meeting merge count, credibility threshold, and unique repos.""" - bronze_config = TIERS[Tier.BRONZE] - 
required_repos = bronze_config.required_unique_repos_count - required_credibility = bronze_config.required_credibility - required_unique_repos = bronze_config.required_unique_repos_count - - closed_count = int(required_repos * (1 - required_credibility) / required_credibility) + 1 - - # Not enough merges - stats = { - Tier.BRONZE: TierStats( - merged_count=required_repos - 1, - closed_count=closed_count, - unique_repo_contribution_count=required_unique_repos, - qualified_unique_repo_count=required_unique_repos, - token_score=required_unique_repos * 30.0, - ), - Tier.SILVER: TierStats(), - Tier.GOLD: TierStats(), - } - assert stats[Tier.BRONZE].credibility < required_credibility - assert is_tier_unlocked(Tier.BRONZE, stats) is False - - # Enough merges, meets credibility, meets unique repos - stats[Tier.BRONZE] = TierStats( - merged_count=required_repos, - closed_count=0, - unique_repo_contribution_count=required_unique_repos, - qualified_unique_repo_count=required_unique_repos, - token_score=required_unique_repos * 30.0, - ) - assert is_tier_unlocked(Tier.BRONZE, stats) is True - - def test_silver_requires_bronze_unlocked(self): - """Silver cannot be unlocked if Bronze is locked.""" - # Perfect Silver stats but Bronze locked - stats = { - Tier.BRONZE: TierStats(), # No Bronze activity - Tier.SILVER: self._unlocked_silver_stats(), - Tier.GOLD: TierStats(), - } - assert is_tier_unlocked(Tier.SILVER, stats) is False - - # Unlock Bronze, Silver should now unlock - stats[Tier.BRONZE] = self._unlocked_bronze_stats() - assert is_tier_unlocked(Tier.SILVER, stats) is True - - def test_silver_requires_qualified_repos(self): - """Silver requires meeting its qualified unique repo count requirement.""" - silver_config = TIERS[Tier.SILVER] - required_repos = silver_config.required_unique_repos_count - required_token_score = silver_config.required_min_token_score or 50.0 - - # Not enough qualified repos (one short) - stats = { - Tier.BRONZE: self._unlocked_bronze_stats(), - 
Tier.SILVER: TierStats( - merged_count=required_repos, - closed_count=0, - unique_repo_contribution_count=required_repos, - qualified_unique_repo_count=required_repos - 1, # One short! - token_score=required_token_score + 10.0, # Token score is fine - ), - Tier.GOLD: TierStats(), - } - assert is_tier_unlocked(Tier.SILVER, stats) is False - - # Enough qualified repos - stats[Tier.SILVER] = TierStats( - merged_count=required_repos, - closed_count=0, - unique_repo_contribution_count=required_repos, - qualified_unique_repo_count=required_repos, - token_score=required_token_score + 10.0, - ) - assert is_tier_unlocked(Tier.SILVER, stats) is True - - def test_silver_requires_credibility(self): - """Silver requires meeting its credibility requirement.""" - silver_config = TIERS[Tier.SILVER] - required_repos = silver_config.required_unique_repos_count - required_credibility = silver_config.required_credibility - required_token_score = silver_config.required_min_token_score or 300.0 - - # Calculate closed count to be just below credibility threshold - closed_count = int(required_repos * (1 - required_credibility) / required_credibility) + 1 - - stats = { - Tier.BRONZE: self._unlocked_bronze_stats(), - Tier.SILVER: TierStats( - merged_count=required_repos, - closed_count=closed_count, - unique_repo_contribution_count=required_repos, - qualified_unique_repo_count=required_repos, - token_score=required_token_score, - ), - Tier.GOLD: TierStats(), - } - assert stats[Tier.SILVER].credibility < required_credibility - assert is_tier_unlocked(Tier.SILVER, stats) is False - - # Fix credibility - use helper method - stats[Tier.SILVER] = self._unlocked_silver_stats() - assert is_tier_unlocked(Tier.SILVER, stats) is True - - def test_silver_requires_all_conditions(self): - """Silver requires credibility, total token score, AND qualified unique repos.""" - silver_config = TIERS[Tier.SILVER] - required_repos = silver_config.required_unique_repos_count - required_credibility = 
silver_config.required_credibility - required_token_score = silver_config.required_min_token_score or 50.0 - - # Calculate closed count for below credibility - closed_count = int(required_repos * (1 - required_credibility) / required_credibility) + 1 - - # Has qualified repos + token score, but low credibility - stats = { - Tier.BRONZE: self._unlocked_bronze_stats(), - Tier.SILVER: TierStats( - merged_count=required_repos, - closed_count=closed_count, - unique_repo_contribution_count=required_repos, - qualified_unique_repo_count=required_repos, - token_score=required_token_score + 10.0, - ), - Tier.GOLD: TierStats(), - } - assert is_tier_unlocked(Tier.SILVER, stats) is False - - # Has credibility + token score, but not enough qualified repos - stats[Tier.SILVER] = TierStats( - merged_count=required_repos, - closed_count=0, - unique_repo_contribution_count=required_repos, - qualified_unique_repo_count=required_repos - 1, # One short! - token_score=required_token_score + 10.0, - ) - assert is_tier_unlocked(Tier.SILVER, stats) is False - - # Has credibility + qualified repos, but low token score - stats[Tier.SILVER] = TierStats( - merged_count=required_repos, - closed_count=0, - unique_repo_contribution_count=required_repos, - qualified_unique_repo_count=required_repos, - token_score=required_token_score - 10.0, # Below threshold! 
- ) - assert is_tier_unlocked(Tier.SILVER, stats) is False - - # Has all three - stats[Tier.SILVER] = TierStats( - merged_count=required_repos, - closed_count=0, - unique_repo_contribution_count=required_repos, - qualified_unique_repo_count=required_repos, - token_score=required_token_score + 10.0, - ) - assert is_tier_unlocked(Tier.SILVER, stats) is True - - def test_gold_requires_qualified_repos(self): - """Gold requires meeting its qualified unique repo count requirement.""" - gold_config = TIERS[Tier.GOLD] - required_repos = gold_config.required_unique_repos_count - required_token_score = gold_config.required_min_token_score or 150.0 - - # Not enough qualified repos (one short) - stats = { - Tier.BRONZE: self._unlocked_bronze_stats(), - Tier.SILVER: self._unlocked_silver_stats(), - Tier.GOLD: TierStats( - merged_count=required_repos, - closed_count=0, - unique_repo_contribution_count=required_repos, - qualified_unique_repo_count=required_repos - 1, # One short! - token_score=required_token_score + 10.0, - ), - } - assert is_tier_unlocked(Tier.GOLD, stats) is False - - # Enough qualified repos - stats[Tier.GOLD] = TierStats( - merged_count=required_repos, - closed_count=0, - unique_repo_contribution_count=required_repos, - qualified_unique_repo_count=required_repos, - token_score=required_token_score + 10.0, - ) - assert is_tier_unlocked(Tier.GOLD, stats) is True - - def test_gold_requires_credibility(self): - """Gold requires meeting its credibility requirement.""" - gold_config = TIERS[Tier.GOLD] - required_repos = gold_config.required_unique_repos_count - required_credibility = gold_config.required_credibility - required_token_score = gold_config.required_min_token_score or 150.0 - - # Calculate closed count to be just below credibility threshold - closed_count = int(required_repos * (1 - required_credibility) / required_credibility) + 1 - - stats = { - Tier.BRONZE: self._unlocked_bronze_stats(), - Tier.SILVER: self._unlocked_silver_stats(), - Tier.GOLD: 
TierStats( - merged_count=required_repos, - closed_count=closed_count, - unique_repo_contribution_count=required_repos, - qualified_unique_repo_count=required_repos, - token_score=required_token_score + 10.0, - ), - } - assert stats[Tier.GOLD].credibility < required_credibility - assert is_tier_unlocked(Tier.GOLD, stats) is False - - # Fix credibility - stats[Tier.GOLD] = TierStats( - merged_count=required_repos, - closed_count=0, - unique_repo_contribution_count=required_repos, - qualified_unique_repo_count=required_repos, - token_score=required_token_score + 10.0, - ) - assert is_tier_unlocked(Tier.GOLD, stats) is True - - def test_gold_requires_silver_unlocked(self): - """Gold cannot be unlocked if Silver is locked.""" - silver_config = TIERS[Tier.SILVER] - gold_config = TIERS[Tier.GOLD] - silver_token_score = silver_config.required_min_token_score or 50.0 - gold_token_score = gold_config.required_min_token_score or 150.0 - - # Gold has perfect stats, but Silver is locked (not enough qualified repos) - stats = { - Tier.BRONZE: self._unlocked_bronze_stats(), - Tier.SILVER: TierStats( - merged_count=silver_config.required_unique_repos_count, - closed_count=0, - unique_repo_contribution_count=silver_config.required_unique_repos_count, - qualified_unique_repo_count=silver_config.required_unique_repos_count - 1, # One short! 
- token_score=silver_token_score + 10.0, - ), - Tier.GOLD: TierStats( - merged_count=gold_config.required_unique_repos_count + 5, - closed_count=0, - unique_repo_contribution_count=gold_config.required_unique_repos_count, - qualified_unique_repo_count=gold_config.required_unique_repos_count, - token_score=gold_token_score + 10.0, - ), - } - assert is_tier_unlocked(Tier.GOLD, stats) is False - - # Unlock Silver - stats[Tier.SILVER] = self._unlocked_silver_stats() - assert is_tier_unlocked(Tier.GOLD, stats) is True - - def test_gold_requires_bronze_unlocked(self): - """Gold cannot be unlocked if Bronze is locked (cascade).""" - gold_config = TIERS[Tier.GOLD] - - # Perfect Silver and Gold stats, but Bronze locked - stats = { - Tier.BRONZE: TierStats(), # No Bronze activity - Tier.SILVER: self._unlocked_silver_stats(), - Tier.GOLD: TierStats( - merged_count=gold_config.required_unique_repos_count + 5, - closed_count=0, - unique_repo_contribution_count=gold_config.required_unique_repos_count, - qualified_unique_repo_count=gold_config.required_unique_repos_count, - token_score=gold_config.required_min_token_score or 150.0, - ), - } - assert is_tier_unlocked(Tier.GOLD, stats) is False - - # Unlock Bronze - stats[Tier.BRONZE] = self._unlocked_bronze_stats() - assert is_tier_unlocked(Tier.GOLD, stats) is True - - def test_cascading_unlock(self): - """All tiers unlock when all requirements are met (including unique repos).""" - gold_config = TIERS[Tier.GOLD] - stats = { - Tier.BRONZE: self._unlocked_bronze_stats(), - Tier.SILVER: self._unlocked_silver_stats(), - Tier.GOLD: TierStats( - merged_count=gold_config.required_unique_repos_count, - closed_count=0, - unique_repo_contribution_count=gold_config.required_unique_repos_count, - qualified_unique_repo_count=gold_config.required_unique_repos_count, - token_score=gold_config.required_min_token_score or 150.0, - ), - } - assert is_tier_unlocked(Tier.BRONZE, stats) is True - assert is_tier_unlocked(Tier.SILVER, stats) is True - 
assert is_tier_unlocked(Tier.GOLD, stats) is True - - -# ============================================================================ -# Bronze-Specific Edge Cases -# ============================================================================ - - -class TestBronzeEdgeCases: - """ - Test Bronze-specific edge cases now that Bronze has unlock requirements. - - Bronze requirements: - - required_repos: 3 - - required_credibility: 80% - """ - - def test_bronze_locked_below_merge_threshold(self, pr_factory, bronze_config): - """Bronze stays locked when merges are below requirement.""" - bronze_tier_config = TIERS[Tier.BRONZE] - required_repos = bronze_tier_config.required_unique_repos_count - - # One below threshold - merged = pr_factory.merged_batch(bronze_config, count=required_repos - 1) - stats = calculate_tier_stats(merged, []) - credibility = calculate_credibility_per_tier(merged, []) - - assert is_tier_unlocked(Tier.BRONZE, stats) is False - assert credibility.get(Tier.BRONZE, 0.0) == 0.0 - - def test_bronze_locked_below_credibility_threshold(self, pr_factory, bronze_config): - """Bronze stays locked when credibility is below requirement.""" - bronze_tier_config = TIERS[Tier.BRONZE] - required_repos = bronze_tier_config.required_unique_repos_count - required_credibility = bronze_tier_config.required_credibility - - # Enough merges but terrible credibility - closed_count = int(required_repos * (1 - required_credibility) / required_credibility) + 2 - merged = pr_factory.merged_batch(bronze_config, count=required_repos) - closed = pr_factory.closed_batch(bronze_config, count=closed_count) - - stats = calculate_tier_stats(merged, closed) - credibility = calculate_credibility_per_tier(merged, closed) - - assert stats[Tier.BRONZE].merged_count >= required_repos - assert stats[Tier.BRONZE].credibility < required_credibility - assert is_tier_unlocked(Tier.BRONZE, stats) is False - assert credibility.get(Tier.BRONZE, 0.0) == 0.0 - - def 
test_bronze_unlocks_at_exact_threshold(self, pr_factory, bronze_config): - """Bronze unlocks when exactly at merge, credibility, and unique repo thresholds.""" - bronze_tier_config = TIERS[Tier.BRONZE] - required_repos = bronze_tier_config.required_unique_repos_count - required_credibility = bronze_tier_config.required_credibility - - # Calculate closed count for exactly at credibility threshold - closed_count = int(required_repos * (1 - required_credibility) / required_credibility) - merged = pr_factory.merged_batch(bronze_config, count=required_repos, unique_repos=True) - closed = pr_factory.closed_batch(bronze_config, count=closed_count, unique_repos=True) - - stats = calculate_tier_stats(merged, closed) - credibility = calculate_credibility_per_tier(merged, closed) - - assert stats[Tier.BRONZE].credibility >= required_credibility - assert is_tier_unlocked(Tier.BRONZE, stats) is True - assert credibility[Tier.BRONZE] >= required_credibility - - def test_bronze_demotion_cascades_to_all_tiers(self, pr_factory, bronze_config, silver_config, gold_config): - """When Bronze locks, Silver and Gold cascade to locked.""" - bronze_tier_config = TIERS[Tier.BRONZE] - silver_tier_config = TIERS[Tier.SILVER] - gold_tier_config = TIERS[Tier.GOLD] - bronze_required_credibility = bronze_tier_config.required_credibility - - # Perfect Silver and Gold stats, but Bronze has terrible credibility - bronze_merged = bronze_tier_config.required_unique_repos_count - bronze_closed = int(bronze_merged * (1 - bronze_required_credibility) / bronze_required_credibility) + 2 - - merged = ( - pr_factory.merged_batch(bronze_config, count=bronze_merged) - + pr_factory.merged_batch(silver_config, count=silver_tier_config.required_unique_repos_count + 5) - + pr_factory.merged_batch(gold_config, count=gold_tier_config.required_unique_repos_count + 5) - ) - closed = pr_factory.closed_batch(bronze_config, count=bronze_closed) - - stats = calculate_tier_stats(merged, closed) - credibility = 
calculate_credibility_per_tier(merged, closed) - - # Bronze locked due to low credibility - assert is_tier_unlocked(Tier.BRONZE, stats) is False - # Silver and Gold cascade to locked - assert is_tier_unlocked(Tier.SILVER, stats) is False - assert is_tier_unlocked(Tier.GOLD, stats) is False - # All credibilities are 0 - assert credibility.get(Tier.BRONZE, 0.0) == 0.0 - assert credibility.get(Tier.SILVER, 0.0) == 0.0 - assert credibility.get(Tier.GOLD, 0.0) == 0.0 - - def test_bronze_recovery_from_low_credibility(self, pr_factory, bronze_config): - """Bronze can recover by adding more merged PRs (to unique repos).""" - bronze_tier_config = TIERS[Tier.BRONZE] - required_repos = bronze_tier_config.required_unique_repos_count - required_credibility = bronze_tier_config.required_credibility - - # Start locked: enough merges but low credibility (all unique repos) - closed_count = int(required_repos * (1 - required_credibility) / required_credibility) + 2 - merged = pr_factory.merged_batch(bronze_config, count=required_repos, unique_repos=True) - closed = pr_factory.closed_batch(bronze_config, count=closed_count, unique_repos=True) - - stats = calculate_tier_stats(merged, closed) - assert is_tier_unlocked(Tier.BRONZE, stats) is False - - # Recovery: add more merged PRs to boost credibility (to unique repos) - extra_needed = ( - int((required_credibility * (required_repos + closed_count) - required_repos) / (1 - required_credibility)) - + 1 - ) - merged.extend(pr_factory.merged_batch(bronze_config, count=extra_needed, unique_repos=True)) - - stats = calculate_tier_stats(merged, closed) - credibility = calculate_credibility_per_tier(merged, closed) - - assert is_tier_unlocked(Tier.BRONZE, stats) is True - assert credibility[Tier.BRONZE] >= required_credibility - - -class TestBronzeLookbackExpiry: - """ - Test Bronze PRs expiring from lookback window. - - Miners must continuously maintain Bronze to keep higher tiers unlocked. 
- """ - - def test_bronze_prs_expire_locks_all_tiers(self, pr_factory, bronze_config, silver_config, gold_config): - """ - All tiers lock when Bronze PRs expire. - - Scenario: - - Miner had all tiers unlocked - - Bronze PRs expire outside lookback window - - All tiers cascade to locked - """ - bronze_tier_config = TIERS[Tier.BRONZE] - silver_tier_config = TIERS[Tier.SILVER] - gold_tier_config = TIERS[Tier.GOLD] - # Calculate token scores needed per PR to meet total requirements - silver_token_per_pr = ( - silver_tier_config.required_min_token_score or 50.0 - ) / silver_tier_config.required_unique_repos_count + 1.0 - gold_token_per_pr = ( - gold_tier_config.required_min_token_score or 150.0 - ) / gold_tier_config.required_unique_repos_count + 1.0 - - # Before: All tiers unlocked (with unique repos) - merged_before = ( - pr_factory.merged_batch( - bronze_config, count=bronze_tier_config.required_unique_repos_count, unique_repos=True - ) - + pr_factory.merged_batch( - silver_config, - count=silver_tier_config.required_unique_repos_count, - unique_repos=True, - token_score=silver_token_per_pr, - ) - + pr_factory.merged_batch( - gold_config, - count=gold_tier_config.required_unique_repos_count, - unique_repos=True, - token_score=gold_token_per_pr, - ) - ) - - stats_before = calculate_tier_stats(merged_before, []) - assert is_tier_unlocked(Tier.BRONZE, stats_before) is True - assert is_tier_unlocked(Tier.GOLD, stats_before) is True - - # After: Bronze PRs expired, only Silver and Gold remain - pr_factory.reset() - merged_after = pr_factory.merged_batch( - silver_config, - count=silver_tier_config.required_unique_repos_count, - unique_repos=True, - token_score=silver_token_per_pr, - ) + pr_factory.merged_batch( - gold_config, - count=gold_tier_config.required_unique_repos_count, - unique_repos=True, - token_score=gold_token_per_pr, - ) - - stats_after = calculate_tier_stats(merged_after, []) - credibility_after = calculate_credibility_per_tier(merged_after, []) - - # All 
tiers locked due to Bronze cascade - assert is_tier_unlocked(Tier.BRONZE, stats_after) is False - assert is_tier_unlocked(Tier.SILVER, stats_after) is False - assert is_tier_unlocked(Tier.GOLD, stats_after) is False - assert credibility_after.get(Tier.GOLD, 0.0) == 0.0 - - def test_partial_bronze_expiry_still_unlocked(self, pr_factory, bronze_config, silver_config): - """ - Partial Bronze expiry doesn't lock if enough PRs remain. - - Scenario: - - Miner had extra Bronze merges - - Some expire → still meets threshold - - Silver stays unlocked - """ - bronze_tier_config = TIERS[Tier.BRONZE] - silver_tier_config = TIERS[Tier.SILVER] - extra_bronze = 2 - - # Before: Bronze with buffer (unique repos) - merged_before = pr_factory.merged_batch( - bronze_config, count=bronze_tier_config.required_unique_repos_count + extra_bronze, unique_repos=True - ) + pr_factory.merged_batch( - silver_config, count=silver_tier_config.required_unique_repos_count, unique_repos=True - ) - - stats_before = calculate_tier_stats(merged_before, []) - assert is_tier_unlocked(Tier.SILVER, stats_before) is True - - # After: Extra Bronze expires, exactly at threshold - pr_factory.reset() - merged_after = pr_factory.merged_batch( - bronze_config, count=bronze_tier_config.required_unique_repos_count, unique_repos=True - ) + pr_factory.merged_batch( - silver_config, count=silver_tier_config.required_unique_repos_count, unique_repos=True - ) - - stats_after = calculate_tier_stats(merged_after, []) - assert is_tier_unlocked(Tier.BRONZE, stats_after) is True - assert is_tier_unlocked(Tier.SILVER, stats_after) is True - - def test_one_bronze_expiry_below_threshold_locks_all(self, pr_factory, bronze_config, silver_config, gold_config): - """ - Losing one Bronze PR when exactly at threshold locks all tiers. 
- """ - bronze_tier_config = TIERS[Tier.BRONZE] - silver_tier_config = TIERS[Tier.SILVER] - gold_tier_config = TIERS[Tier.GOLD] - # Calculate token scores needed per PR to meet total requirements - silver_token_per_pr = ( - silver_tier_config.required_min_token_score or 50.0 - ) / silver_tier_config.required_unique_repos_count + 1.0 - gold_token_per_pr = ( - gold_tier_config.required_min_token_score or 150.0 - ) / gold_tier_config.required_unique_repos_count + 1.0 - - # At threshold: exactly bronze_required merges (unique repos) - merged = ( - pr_factory.merged_batch( - bronze_config, count=bronze_tier_config.required_unique_repos_count, unique_repos=True - ) - + pr_factory.merged_batch( - silver_config, - count=silver_tier_config.required_unique_repos_count, - unique_repos=True, - token_score=silver_token_per_pr, - ) - + pr_factory.merged_batch( - gold_config, - count=gold_tier_config.required_unique_repos_count, - unique_repos=True, - token_score=gold_token_per_pr, - ) - ) - - stats = calculate_tier_stats(merged, []) - assert is_tier_unlocked(Tier.GOLD, stats) is True - - # One Bronze expires - pr_factory.reset() - merged_after = ( - pr_factory.merged_batch( - bronze_config, count=bronze_tier_config.required_unique_repos_count - 1, unique_repos=True - ) - + pr_factory.merged_batch( - silver_config, - count=silver_tier_config.required_unique_repos_count, - unique_repos=True, - token_score=silver_token_per_pr, - ) - + pr_factory.merged_batch( - gold_config, - count=gold_tier_config.required_unique_repos_count, - unique_repos=True, - token_score=gold_token_per_pr, - ) - ) - - stats_after = calculate_tier_stats(merged_after, []) - assert is_tier_unlocked(Tier.BRONZE, stats_after) is False - assert is_tier_unlocked(Tier.SILVER, stats_after) is False - assert is_tier_unlocked(Tier.GOLD, stats_after) is False - - def test_bronze_credibility_drops_on_expiry(self, pr_factory, bronze_config): - """ - Expiring merged Bronze PRs can drop credibility below threshold. 
- """ - bronze_tier_config = TIERS[Tier.BRONZE] - required_repos = bronze_tier_config.required_unique_repos_count - required_credibility = bronze_tier_config.required_credibility - - # Before: At credibility threshold with extra merged PRs (unique repos) - merged_count = required_repos + 2 - closed_count = int(merged_count * (1 - required_credibility) / required_credibility) - - merged = pr_factory.merged_batch(bronze_config, count=merged_count, unique_repos=True) - closed = pr_factory.closed_batch(bronze_config, count=closed_count, unique_repos=True) - - stats_before = calculate_tier_stats(merged, closed) - assert stats_before[Tier.BRONZE].credibility >= required_credibility - assert is_tier_unlocked(Tier.BRONZE, stats_before) is True - - # After: Some merged PRs expire (still need unique repo threshold met) - pr_factory.reset() - merged_after = pr_factory.merged_batch(bronze_config, count=required_repos, unique_repos=True) - - stats_after = calculate_tier_stats(merged_after, closed) - # May drop below threshold now - if stats_after[Tier.BRONZE].credibility < required_credibility: - assert is_tier_unlocked(Tier.BRONZE, stats_after) is False - - def test_bronze_maintenance_required_for_gold(self, pr_factory, bronze_config, silver_config, gold_config): - """ - Demonstrates continuous Bronze maintenance is required for Gold. 
- - Scenario: - - Miner gets Gold, then focuses only on Gold PRs - - Bronze PRs slowly expire - - Eventually Bronze locks → Gold cascades to locked - """ - bronze_tier_config = TIERS[Tier.BRONZE] - silver_tier_config = TIERS[Tier.SILVER] - gold_tier_config = TIERS[Tier.GOLD] - - # Phase 1: Full unlock with buffer (unique repos) - bronze_prs = pr_factory.merged_batch( - bronze_config, count=bronze_tier_config.required_unique_repos_count + 2, unique_repos=True - ) - silver_prs = pr_factory.merged_batch( - silver_config, count=silver_tier_config.required_unique_repos_count, unique_repos=True - ) - gold_prs = pr_factory.merged_batch( - gold_config, count=gold_tier_config.required_unique_repos_count + 5, unique_repos=True - ) - - stats = calculate_tier_stats(bronze_prs + silver_prs + gold_prs, []) - assert is_tier_unlocked(Tier.GOLD, stats) is True - - # Phase 2: Some Bronze expires (still above threshold) - stats = calculate_tier_stats( - bronze_prs[: bronze_tier_config.required_unique_repos_count + 1] + silver_prs + gold_prs, [] - ) - assert is_tier_unlocked(Tier.GOLD, stats) is True - - # Phase 3: More Bronze expires (exactly at threshold) - stats = calculate_tier_stats( - bronze_prs[: bronze_tier_config.required_unique_repos_count] + silver_prs + gold_prs, [] - ) - assert is_tier_unlocked(Tier.GOLD, stats) is True - - # Phase 4: One more Bronze expires (below threshold) - stats = calculate_tier_stats( - bronze_prs[: bronze_tier_config.required_unique_repos_count - 1] + silver_prs + gold_prs, [] - ) - assert is_tier_unlocked(Tier.BRONZE, stats) is False - assert is_tier_unlocked(Tier.GOLD, stats) is False # Cascade! - - def test_refreshing_bronze_restores_all_tiers(self, pr_factory, bronze_config, silver_config, gold_config): - """ - Adding new Bronze PRs restores all tier access. 
- """ - bronze_tier_config = TIERS[Tier.BRONZE] - silver_tier_config = TIERS[Tier.SILVER] - gold_tier_config = TIERS[Tier.GOLD] - # Calculate token scores needed per PR to meet total requirements - silver_token_per_pr = ( - silver_tier_config.required_min_token_score or 50.0 - ) / silver_tier_config.required_unique_repos_count + 1.0 - gold_token_per_pr = ( - gold_tier_config.required_min_token_score or 150.0 - ) / gold_tier_config.required_unique_repos_count + 1.0 - - # Lost access: Bronze one below threshold (unique repos) - old_bronze = pr_factory.merged_batch( - bronze_config, count=bronze_tier_config.required_unique_repos_count - 1, unique_repos=True - ) - silver_prs = pr_factory.merged_batch( - silver_config, - count=silver_tier_config.required_unique_repos_count, - unique_repos=True, - token_score=silver_token_per_pr, - ) - gold_prs = pr_factory.merged_batch( - gold_config, - count=gold_tier_config.required_unique_repos_count, - unique_repos=True, - token_score=gold_token_per_pr, - ) - - stats = calculate_tier_stats(old_bronze + silver_prs + gold_prs, []) - assert is_tier_unlocked(Tier.BRONZE, stats) is False - assert is_tier_unlocked(Tier.GOLD, stats) is False - - # Refresh: add 1 new Bronze PR (to unique repo) - new_bronze = pr_factory.merged_batch(bronze_config, count=1, unique_repos=True) - - stats = calculate_tier_stats(old_bronze + new_bronze + silver_prs + gold_prs, []) - assert is_tier_unlocked(Tier.BRONZE, stats) is True - assert is_tier_unlocked(Tier.SILVER, stats) is True - assert is_tier_unlocked(Tier.GOLD, stats) is True - - -# ============================================================================ -# calculate_credibility_per_tier Tests -# ============================================================================ - - -class TestCredibilityCalculation: - """Test calculate_credibility_per_tier function.""" - - def test_no_activity_returns_empty(self): - result = calculate_credibility_per_tier([], []) - assert result == {} - - def 
test_single_tier_credibility(self, pr_factory, bronze_config): - """Test credibility calculation for an unlocked tier (with unique repos).""" - bronze_tier_config = TIERS[Tier.BRONZE] - required_repos = bronze_tier_config.required_unique_repos_count - - # Create enough merges to unlock Bronze with 100% credibility (unique repos) - merged = pr_factory.merged_batch(bronze_config, count=required_repos, unique_repos=True) - - result = calculate_credibility_per_tier(merged, []) - - assert result[Tier.BRONZE] == 1.0 - - def test_credibility_with_some_closed(self, pr_factory, bronze_config): - """Test credibility when there are closed PRs but tier is still unlocked (unique repos).""" - bronze_tier_config = TIERS[Tier.BRONZE] - required_repos = bronze_tier_config.required_unique_repos_count - required_credibility = bronze_tier_config.required_credibility - - # Calculate max closed to still meet credibility requirement - # credibility = merged / (merged + closed) >= required_credibility - # closed <= merged * (1 - required_credibility) / required_credibility - max_closed = int(required_repos * (1 - required_credibility) / required_credibility) - - merged = pr_factory.merged_batch(bronze_config, count=required_repos, unique_repos=True) - closed = pr_factory.closed_batch(bronze_config, count=max_closed, unique_repos=True) - - result = calculate_credibility_per_tier(merged, closed) - - expected = required_repos / (required_repos + max_closed) - assert result[Tier.BRONZE] == pytest.approx(expected, abs=0.01) - assert result[Tier.BRONZE] >= required_credibility - - def test_locked_tier_returns_zero(self, pr_factory, bronze_config, silver_config): - """Silver returns 0.0 when locked (Bronze not unlocked).""" - silver_tier_config = TIERS[Tier.SILVER] - - # Silver has enough merges but Bronze is not unlocked (unique repos) - merged = pr_factory.merged_batch( - silver_config, count=silver_tier_config.required_unique_repos_count, unique_repos=True - ) - - result = 
calculate_credibility_per_tier(merged, []) - - assert result.get(Tier.SILVER, 0.0) == 0.0 - - def test_tier_locked_due_to_low_credibility(self, pr_factory, bronze_config): - """Tier returns 0.0 when credibility is below requirement.""" - bronze_tier_config = TIERS[Tier.BRONZE] - required_repos = bronze_tier_config.required_unique_repos_count - required_credibility = bronze_tier_config.required_credibility - - # Create PRs that have enough merges but credibility below requirement (unique repos) - closed_count = int(required_repos * (1 - required_credibility) / required_credibility) + 2 - - merged = pr_factory.merged_batch(bronze_config, count=required_repos, unique_repos=True) - closed = pr_factory.closed_batch(bronze_config, count=closed_count, unique_repos=True) - - result = calculate_credibility_per_tier(merged, closed) - - # Bronze is locked due to low credibility - assert result.get(Tier.BRONZE, 0.0) == 0.0 - - def test_100_percent_credibility(self, pr_factory, bronze_config): - """Test 100% credibility with no closed PRs (unique repos).""" - bronze_tier_config = TIERS[Tier.BRONZE] - required_repos = bronze_tier_config.required_unique_repos_count - - merged = pr_factory.merged_batch(bronze_config, count=required_repos + 2, unique_repos=True) - result = calculate_credibility_per_tier(merged, []) - assert result[Tier.BRONZE] == 1.0 - - def test_0_percent_credibility(self, pr_factory, bronze_config): - """No merged PRs means tier is locked (0.0 credibility).""" - closed = pr_factory.closed_batch(bronze_config, count=5, unique_repos=True) - result = calculate_credibility_per_tier([], closed) - # Bronze is locked because no merged PRs (doesn't meet required_repos) - assert result.get(Tier.BRONZE, 0.0) == 0.0 - - -# ============================================================================ -# Tier Demotion Tests -# ============================================================================ - - -class TestTierDemotion: - """Test tier demotion scenarios.""" - - def 
test_gold_demoted_when_credibility_drops(self, demoted_from_gold_miner): - """Gold locks when credibility drops below 70%.""" - stats = calculate_tier_stats(demoted_from_gold_miner.merged, demoted_from_gold_miner.closed) - credibility = calculate_credibility_per_tier(demoted_from_gold_miner.merged, demoted_from_gold_miner.closed) - - # Silver still OK - assert is_tier_unlocked(Tier.SILVER, stats) is True - assert credibility[Tier.SILVER] == 1.0 - - # Gold LOCKED - assert is_tier_unlocked(Tier.GOLD, stats) is False - assert credibility[Tier.GOLD] == 0.0 - - def test_gold_demoted_not_enough_merges(self, pr_factory, bronze_config, silver_config, gold_config): - """Gold locks when merge count drops below requirement (with unique repos).""" - bronze_tier_config = TIERS[Tier.BRONZE] - silver_tier_config = TIERS[Tier.SILVER] - gold_tier_config = TIERS[Tier.GOLD] - - merged = ( - pr_factory.merged_batch( - bronze_config, count=bronze_tier_config.required_unique_repos_count, unique_repos=True - ) - + pr_factory.merged_batch( - silver_config, count=silver_tier_config.required_unique_repos_count, unique_repos=True - ) - + pr_factory.merged_batch( - gold_config, count=gold_tier_config.required_unique_repos_count - 1, unique_repos=True - ) # One short - ) - - stats = calculate_tier_stats(merged, []) - credibility = calculate_credibility_per_tier(merged, []) - - assert is_tier_unlocked(Tier.SILVER, stats) is True - assert is_tier_unlocked(Tier.GOLD, stats) is False - assert credibility[Tier.GOLD] == 0.0 - - def test_silver_demotion_cascades_to_gold(self, cascade_demoted_miner): - """When Silver locks, Gold also locks (even with perfect Gold stats).""" - stats = calculate_tier_stats(cascade_demoted_miner.merged, cascade_demoted_miner.closed) - credibility = calculate_credibility_per_tier(cascade_demoted_miner.merged, cascade_demoted_miner.closed) - - assert is_tier_unlocked(Tier.BRONZE, stats) is True - assert is_tier_unlocked(Tier.SILVER, stats) is False - assert 
is_tier_unlocked(Tier.GOLD, stats) is False - assert credibility[Tier.SILVER] == 0.0 - assert credibility[Tier.GOLD] == 0.0 - - def test_silver_demoted_when_credibility_drops(self, demoted_from_silver_miner): - """Silver locks when credibility drops below 50%.""" - stats = calculate_tier_stats(demoted_from_silver_miner.merged, demoted_from_silver_miner.closed) - - assert is_tier_unlocked(Tier.SILVER, stats) is False - - def test_recovery_from_demotion(self, pr_factory, bronze_config, silver_config, gold_config): - """Miner can recover from demotion by getting more merges (to unique repos).""" - bronze_tier_config = TIERS[Tier.BRONZE] - silver_tier_config = TIERS[Tier.SILVER] - gold_tier_config = TIERS[Tier.GOLD] - gold_required_credibility = gold_tier_config.required_credibility - - # Initially demoted: below gold credibility threshold (unique repos) - gold_merged_count = gold_tier_config.required_unique_repos_count - gold_closed_count = int(gold_merged_count * (1 - gold_required_credibility) / gold_required_credibility) + 2 - - merged = ( - pr_factory.merged_batch( - bronze_config, count=bronze_tier_config.required_unique_repos_count, unique_repos=True - ) - + pr_factory.merged_batch( - silver_config, count=silver_tier_config.required_unique_repos_count, unique_repos=True - ) - + pr_factory.merged_batch(gold_config, count=gold_merged_count, unique_repos=True) - ) - closed = pr_factory.closed_batch(gold_config, count=gold_closed_count, unique_repos=True) - - stats = calculate_tier_stats(merged, closed) - assert is_tier_unlocked(Tier.GOLD, stats) is False - - # Recovery: add more merges to get above credibility threshold (unique repos) - # new_cred = (gold_merged + extra) / (gold_merged + gold_closed + extra) >= gold_required_credibility - # Solve for extra: extra >= (gold_required_credibility * (gold_merged + gold_closed) - gold_merged) / (1 - gold_required_credibility) - extra_needed = ( - int( - (gold_required_credibility * (gold_merged_count + gold_closed_count) 
- gold_merged_count) - / (1 - gold_required_credibility) - ) - + 1 - ) - merged.extend(pr_factory.merged_batch(gold_config, count=extra_needed, unique_repos=True)) - - stats = calculate_tier_stats(merged, closed) - credibility = calculate_credibility_per_tier(merged, closed) - - assert is_tier_unlocked(Tier.GOLD, stats) is True - assert credibility[Tier.GOLD] >= gold_required_credibility - - def test_spam_destroys_all_tiers(self, spammer_miner): - """Massive closed PRs tanks credibility everywhere.""" - stats = calculate_tier_stats(spammer_miner.merged, spammer_miner.closed) - credibility = calculate_credibility_per_tier(spammer_miner.merged, spammer_miner.closed) - - # All tiers locked due to terrible credibility - # Bronze: 5 merged, 20 closed = 20% (needs 80% for unlock) - assert is_tier_unlocked(Tier.BRONZE, stats) is False - assert credibility.get(Tier.BRONZE, 0.0) == 0.0 - - # Silver & Gold: LOCKED (cascade from Bronze) - assert is_tier_unlocked(Tier.SILVER, stats) is False - assert is_tier_unlocked(Tier.GOLD, stats) is False - - def test_gradual_decline(self, pr_factory, bronze_config, silver_config, gold_config): - """Miner starts strong then declines (with unique repos).""" - bronze_tier_config = TIERS[Tier.BRONZE] - silver_tier_config = TIERS[Tier.SILVER] - gold_tier_config = TIERS[Tier.GOLD] - gold_required_credibility = gold_tier_config.required_credibility - - # Phase 1: Strong start - all tiers unlocked (unique repos) - merged = ( - pr_factory.merged_batch( - bronze_config, count=bronze_tier_config.required_unique_repos_count, unique_repos=True - ) - + pr_factory.merged_batch( - silver_config, count=silver_tier_config.required_unique_repos_count, unique_repos=True - ) - + pr_factory.merged_batch( - gold_config, count=gold_tier_config.required_unique_repos_count + 3, unique_repos=True - ) - ) - - stats = calculate_tier_stats(merged, []) - assert is_tier_unlocked(Tier.GOLD, stats) is True - - # Phase 2: Decline - add closed PRs to drop Gold below 
credibility threshold (unique repos) - gold_merged_count = gold_tier_config.required_unique_repos_count + 3 - closed_for_drop = int(gold_merged_count * (1 - gold_required_credibility) / gold_required_credibility) + 2 - closed = pr_factory.closed_batch(gold_config, count=closed_for_drop, unique_repos=True) - - stats = calculate_tier_stats(merged, closed) - credibility = calculate_credibility_per_tier(merged, closed) - - # Gold now LOCKED (below credibility threshold) - assert is_tier_unlocked(Tier.GOLD, stats) is False - assert credibility[Tier.GOLD] == 0.0 - - # Silver still OK (no closed at Silver tier) - assert is_tier_unlocked(Tier.SILVER, stats) is True - assert credibility[Tier.SILVER] == 1.0 - - -# ============================================================================ -# Mixed Performance Tests -# ============================================================================ - - -class TestMixedPerformance: - """Test miners with varying performance across tiers.""" - - def test_mixed_tier_performance(self, mixed_performance_miner): - """Different credibility at each tier.""" - stats = calculate_tier_stats(mixed_performance_miner.merged, mixed_performance_miner.closed) - credibility = calculate_credibility_per_tier(mixed_performance_miner.merged, mixed_performance_miner.closed) - - # Bronze: 9 merged, 1 closed = 90% (above 80% threshold) - assert is_tier_unlocked(Tier.BRONZE, stats) is True - assert credibility[Tier.BRONZE] == pytest.approx(0.9, abs=0.01) - - # Silver: 11 merged, 9 closed = 55% (below 75% threshold) - LOCKED - assert is_tier_unlocked(Tier.SILVER, stats) is False - assert credibility[Tier.SILVER] == 0.0 - - # Gold: 60% (below 70% threshold) - LOCKED (cascade from Silver) - assert is_tier_unlocked(Tier.GOLD, stats) is False - assert credibility[Tier.GOLD] == 0.0 - - -# ============================================================================ -# Edge Cases & Boundary Tests -# 
============================================================================ - - -class TestEdgeCases: - """Test edge cases and boundary conditions.""" - - def test_exactly_at_silver_threshold(self, silver_threshold_miner): - """Test exactly at Silver credibility requirement.""" - silver_tier_config = TIERS[Tier.SILVER] - required_credibility = silver_tier_config.required_credibility - - stats = calculate_tier_stats(silver_threshold_miner.merged, silver_threshold_miner.closed) - credibility = calculate_credibility_per_tier(silver_threshold_miner.merged, silver_threshold_miner.closed) - - assert is_tier_unlocked(Tier.SILVER, stats) is True - assert credibility[Tier.SILVER] >= required_credibility - - def test_exactly_at_gold_threshold(self, gold_threshold_miner): - """Test exactly at Gold credibility requirement.""" - gold_tier_config = TIERS[Tier.GOLD] - required_credibility = gold_tier_config.required_credibility - - stats = calculate_tier_stats(gold_threshold_miner.merged, gold_threshold_miner.closed) - credibility = calculate_credibility_per_tier(gold_threshold_miner.merged, gold_threshold_miner.closed) - - assert is_tier_unlocked(Tier.GOLD, stats) is True - assert credibility[Tier.GOLD] >= required_credibility - - def test_one_below_merge_threshold(self, pr_factory, silver_config, gold_config): - """Just one merge short at each tier.""" - merged = [ - *pr_factory.merged_batch(silver_config, count=2), # Need 3 - *pr_factory.merged_batch(gold_config, count=4), # Need 5 - ] - - stats = calculate_tier_stats(merged, []) - - assert is_tier_unlocked(Tier.SILVER, stats) is False - assert is_tier_unlocked(Tier.GOLD, stats) is False - - def test_credibility_just_below_threshold(self, pr_factory, silver_config): - """49.9% credibility (just below 50%).""" - merged = pr_factory.merged_batch(silver_config, count=499) - closed = pr_factory.closed_batch(silver_config, count=501) - - stats = calculate_tier_stats(merged, closed) - - assert stats[Tier.SILVER].credibility == 
pytest.approx(0.499, abs=0.001) - assert is_tier_unlocked(Tier.SILVER, stats) is False - - def test_single_pr_at_each_tier(self, pr_factory, bronze_config, silver_config, gold_config): - """Single PR is not enough to unlock any tier.""" - bronze_required = TIERS[Tier.BRONZE].required_unique_repos_count - - # Bronze: NOT unlocked with just 1 PR (needs required_repos) - merged = [pr_factory.merged(bronze_config)] - credibility = calculate_credibility_per_tier(merged, []) - # With 1 PR and Bronze requiring more merges, Bronze is locked - if bronze_required > 1: - assert credibility.get(Tier.BRONZE, 0.0) == 0.0 - else: - # If Bronze only needs 1 merge, it would be unlocked - assert credibility[Tier.BRONZE] == 1.0 - - # Silver: NOT unlocked (need Bronze unlocked + Silver requirements) - merged = [pr_factory.merged(silver_config)] - credibility = calculate_credibility_per_tier(merged, []) - assert credibility.get(Tier.SILVER, 0.0) == 0.0 - - # Gold: NOT unlocked (need Bronze + Silver unlocked + Gold requirements) - merged = [pr_factory.merged(gold_config)] - credibility = calculate_credibility_per_tier(merged, []) - assert credibility.get(Tier.GOLD, 0.0) == 0.0 - - def test_activation_threshold_boundary(self, pr_factory, bronze_config): - """Test activation threshold behavior. - - When required_repos >= activation_threshold (which is true for Bronze: 3 >= 2), - the "below activation threshold with tier unlocked" scenario can't happen. - - This test verifies that: - 1. Below required_repos = tier locked (credibility = 0) - 2. 
At required_repos with perfect record and unique repos = tier unlocked (credibility = 1.0) - """ - bronze_tier_config = TIERS[Tier.BRONZE] - required_repos = bronze_tier_config.required_unique_repos_count - - # Below required_repos: tier locked (even with unique repos) - merged = pr_factory.merged_batch(bronze_config, count=required_repos - 1, unique_repos=True) - cred = calculate_credibility_per_tier(merged, []) - assert cred.get(Tier.BRONZE, 0.0) == 0.0 # Locked due to not enough merges - - # At required_repos with perfect record and unique repos: tier unlocked - pr_factory.reset() - merged = pr_factory.merged_batch(bronze_config, count=required_repos, unique_repos=True) - cred = calculate_credibility_per_tier(merged, []) - assert cred[Tier.BRONZE] == 1.0 # 100% credibility - - def test_large_numbers(self, pr_factory, silver_config, gold_config): - """Large PR counts for precision testing.""" - merged = pr_factory.merged_batch(silver_config, count=100) + pr_factory.merged_batch(gold_config, count=1000) - closed = pr_factory.closed_batch(gold_config, count=429) - - stats = calculate_tier_stats(merged, closed) - - assert stats[Tier.GOLD].merged_count == 1000 - assert stats[Tier.GOLD].closed_count == 429 - # 1000/1429 = 69.98% - just below 70% - assert stats[Tier.GOLD].credibility == pytest.approx(0.6998, abs=0.001) - - -# ============================================================================ -# Integration Tests -# ============================================================================ - - -class TestIntegration: - """Integration tests using pre-built miner scenarios.""" - - def test_new_miner_no_tiers(self, new_miner): - """New miner has no tiers unlocked.""" - stats = calculate_tier_stats(new_miner.merged, new_miner.closed) - - assert is_tier_unlocked(Tier.BRONZE, stats) is False - assert is_tier_unlocked(Tier.SILVER, stats) is False - assert is_tier_unlocked(Tier.GOLD, stats) is False - - def test_bronze_miner_scenario(self, bronze_miner): - 
"""Bronze-only miner with Bronze unlocked.""" - stats = calculate_tier_stats(bronze_miner.merged, bronze_miner.closed) - cred = calculate_credibility_per_tier(bronze_miner.merged, bronze_miner.closed) - - assert is_tier_unlocked(Tier.BRONZE, stats) is True - assert is_tier_unlocked(Tier.SILVER, stats) is False - assert cred[Tier.BRONZE] == 1.0 # 100% with no closed PRs - - def test_silver_miner_scenario(self, silver_unlocked_miner): - """Silver miner with 100% credibility (no closed PRs).""" - stats = calculate_tier_stats(silver_unlocked_miner.merged, silver_unlocked_miner.closed) - cred = calculate_credibility_per_tier(silver_unlocked_miner.merged, silver_unlocked_miner.closed) - - assert is_tier_unlocked(Tier.SILVER, stats) is True - assert is_tier_unlocked(Tier.GOLD, stats) is False - assert cred[Tier.SILVER] == 1.0 # 100% credibility with no closed PRs - - def test_gold_miner_scenario(self, gold_unlocked_miner): - """Gold miner with 100% credibility (no closed PRs).""" - stats = calculate_tier_stats(gold_unlocked_miner.merged, gold_unlocked_miner.closed) - cred = calculate_credibility_per_tier(gold_unlocked_miner.merged, gold_unlocked_miner.closed) - - assert is_tier_unlocked(Tier.GOLD, stats) is True - assert cred[Tier.GOLD] == 1.0 # 100% credibility with no closed PRs - - def test_open_prs_tracked_separately(self, miner_with_open_prs): - """Open PRs are counted but don't affect credibility.""" - stats = calculate_tier_stats(miner_with_open_prs.merged, miner_with_open_prs.closed, miner_with_open_prs.open) - - # Open PRs are counted - assert stats[Tier.BRONZE].open_count == 2 - assert stats[Tier.SILVER].open_count == 3 - - # But don't affect credibility calculation - # miner_with_open_prs fixture: 3 merged, 1 closed at Bronze - cred = calculate_credibility_per_tier(miner_with_open_prs.merged, miner_with_open_prs.closed) - # Bronze: 3 merged, 1 closed = 75% credibility - # Bronze requires 70% credibility, so Bronze is unlocked - assert cred.get(Tier.BRONZE, 0.0) 
== 0.75 # Unlocked with 75% credibility - - -if __name__ == '__main__': - pytest.main([__file__, '-v']) diff --git a/tests/validator/test_tier_emissions.py b/tests/validator/test_tier_emissions.py deleted file mode 100644 index 8ad0f399..00000000 --- a/tests/validator/test_tier_emissions.py +++ /dev/null @@ -1,508 +0,0 @@ -# The MIT License (MIT) -# Copyright © 2025 Entrius - -""" -Unit tests for tier-based emission allocation. - -Tests the allocate_emissions_by_tier() function which replaces total_score -with tier-weighted emission allocations. - -Run tests: - pytest tests/validator/test_tier_emissions.py -v -""" - -import pytest - -from gittensor.classes import MinerEvaluation -from gittensor.constants import TIER_EMISSION_SPLITS -from gittensor.validator.oss_contributions.tier_config import Tier, TierStats, allocate_emissions_by_tier - - -class TestTierEmissionSplitsConstant: - """Test the TIER_EMISSION_SPLITS constant configuration.""" - - def test_splits_sum_to_one(self): - """Emission splits must sum to 1.0.""" - total = sum(TIER_EMISSION_SPLITS.values()) - assert total == pytest.approx(1.0) - - def test_bronze_allocation(self): - """Bronze should get 15%.""" - assert TIER_EMISSION_SPLITS['Bronze'] == 0.15 - - def test_silver_allocation(self): - """Silver should get 35%.""" - assert TIER_EMISSION_SPLITS['Silver'] == 0.35 - - def test_gold_allocation(self): - """Gold should get 50%.""" - assert TIER_EMISSION_SPLITS['Gold'] == 0.50 - - def test_all_tiers_have_splits(self): - """All tier names should have emission splits defined.""" - for tier in Tier: - assert tier.value in TIER_EMISSION_SPLITS - - -class TestAllocateEmissionsByTierBasic: - """Test basic tier emission allocation scenarios.""" - - def _create_miner_eval( - self, - uid: int, - current_tier: Tier, - bronze_earned: float = 0.0, - bronze_collateral: float = 0.0, - silver_earned: float = 0.0, - silver_collateral: float = 0.0, - gold_earned: float = 0.0, - gold_collateral: float = 0.0, - ) -> 
MinerEvaluation: - """Helper to create a MinerEvaluation with tier stats.""" - eval = MinerEvaluation(uid=uid, hotkey=f'hotkey_{uid}') - eval.current_tier = current_tier - eval.stats_by_tier = { - Tier.BRONZE: TierStats(earned_score=bronze_earned, collateral_score=bronze_collateral), - Tier.SILVER: TierStats(earned_score=silver_earned, collateral_score=silver_collateral), - Tier.GOLD: TierStats(earned_score=gold_earned, collateral_score=gold_collateral), - } - eval.total_score = bronze_earned + silver_earned + gold_earned # Original before allocation - return eval - - def test_single_miner_gold_tier(self): - """Single Gold tier miner gets all emissions.""" - miner = self._create_miner_eval( - uid=1, - current_tier=Tier.GOLD, - bronze_earned=100.0, - silver_earned=200.0, - gold_earned=300.0, - ) - evaluations = {1: miner} - - allocate_emissions_by_tier(evaluations) - - # Single miner gets 100% of each tier's allocation - # Bronze: 100% of 15% = 0.15 - # Silver: 100% of 35% = 0.35 - # Gold: 100% of 50% = 0.50 - assert miner.total_score == pytest.approx(1.0) - - def test_two_miners_same_tier_equal_scores(self): - """Two miners with equal scores split tier allocations evenly.""" - miner_a = self._create_miner_eval(uid=1, current_tier=Tier.GOLD, bronze_earned=100.0) - miner_b = self._create_miner_eval(uid=2, current_tier=Tier.GOLD, bronze_earned=100.0) - evaluations = {1: miner_a, 2: miner_b} - - allocate_emissions_by_tier(evaluations) - - # Each miner gets 50% of Bronze allocation (15% / 2 = 7.5%) - # Total for each = 0.075 (both only have Bronze scores) - assert miner_a.total_score == pytest.approx(0.075) - assert miner_b.total_score == pytest.approx(0.075) - assert miner_a.total_score + miner_b.total_score == pytest.approx(0.15) - - def test_two_miners_different_scores(self): - """Two miners with different scores get proportional allocations.""" - # Miner A: 100 Bronze, Miner B: 10 Bronze - miner_a = self._create_miner_eval( - uid=1, - current_tier=Tier.GOLD, - 
bronze_earned=100.0, - silver_earned=600.0, - gold_earned=300.0, - ) - miner_b = self._create_miner_eval( - uid=2, - current_tier=Tier.GOLD, - bronze_earned=10.0, - ) - evaluations = {1: miner_a, 2: miner_b} - - allocate_emissions_by_tier(evaluations) - - # Bronze (15%): A gets 100/110, B gets 10/110 - # Silver (35%): A gets 100% (600/600) - # Gold (50%): A gets 100% (300/300) - bronze_a = (100.0 / 110.0) * 0.15 - bronze_b = (10.0 / 110.0) * 0.15 - silver_a = 0.35 - gold_a = 0.50 - - assert miner_a.total_score == pytest.approx(bronze_a + silver_a + gold_a) - assert miner_b.total_score == pytest.approx(bronze_b) - - def test_allocations_sum_to_one(self): - """All miner allocations should sum to 1.0 when max tier is Gold.""" - miner_a = self._create_miner_eval( - uid=1, - current_tier=Tier.GOLD, - bronze_earned=50.0, - silver_earned=100.0, - gold_earned=200.0, - ) - miner_b = self._create_miner_eval( - uid=2, - current_tier=Tier.SILVER, - bronze_earned=25.0, - silver_earned=50.0, - ) - miner_c = self._create_miner_eval( - uid=3, - current_tier=Tier.BRONZE, - bronze_earned=25.0, - ) - evaluations = {1: miner_a, 2: miner_b, 3: miner_c} - - allocate_emissions_by_tier(evaluations) - - total = miner_a.total_score + miner_b.total_score + miner_c.total_score - assert total == pytest.approx(1.0) - - -class TestAllMinersUntiered: - """Test edge case where no miners have a tier unlocked.""" - - def test_all_untiered_get_zero(self): - """When all miners are untiered, all get total_score = 0.""" - miner_a = MinerEvaluation(uid=1, hotkey='hotkey_1') - miner_a.current_tier = None - miner_a.total_score = 100.0 # Had some score before - - miner_b = MinerEvaluation(uid=2, hotkey='hotkey_2') - miner_b.current_tier = None - miner_b.total_score = 50.0 - - evaluations = {1: miner_a, 2: miner_b} - - allocate_emissions_by_tier(evaluations) - - assert miner_a.total_score == 0.0 - assert miner_b.total_score == 0.0 - - def test_empty_evaluations(self): - """Empty evaluations dict should not 
cause errors.""" - evaluations = {} - allocate_emissions_by_tier(evaluations) - assert len(evaluations) == 0 - - def test_none_evaluation_skipped(self): - """None evaluations in the dict should be skipped.""" - evaluations = {1: None, 2: None} - allocate_emissions_by_tier(evaluations) - # Should not raise any errors - - -class TestMaxTierRedistribution: - """Test tier redistribution when max tier is below Gold.""" - - def _create_miner_eval( - self, - uid: int, - current_tier: Tier, - bronze_earned: float = 0.0, - silver_earned: float = 0.0, - gold_earned: float = 0.0, - ) -> MinerEvaluation: - """Helper to create a MinerEvaluation with tier stats.""" - eval = MinerEvaluation(uid=uid, hotkey=f'hotkey_{uid}') - eval.current_tier = current_tier - eval.stats_by_tier = { - Tier.BRONZE: TierStats(earned_score=bronze_earned), - Tier.SILVER: TierStats(earned_score=silver_earned), - Tier.GOLD: TierStats(earned_score=gold_earned), - } - return eval - - def test_max_tier_bronze_redistribution(self): - """When max tier is Bronze, Bronze gets 100% of emissions.""" - miner = self._create_miner_eval(uid=1, current_tier=Tier.BRONZE, bronze_earned=100.0) - evaluations = {1: miner} - - allocate_emissions_by_tier(evaluations) - - # Bronze gets 100% when it's the only active tier - assert miner.total_score == pytest.approx(1.0) - - def test_max_tier_silver_redistribution(self): - """When max tier is Silver, Bronze and Silver split emissions proportionally.""" - # Original: Bronze 15%, Silver 35%, Gold 50% - # After redistribution: Bronze 15/(15+35) = 30%, Silver 35/(15+35) = 70% - miner = self._create_miner_eval(uid=1, current_tier=Tier.SILVER, bronze_earned=100.0, silver_earned=100.0) - evaluations = {1: miner} - - allocate_emissions_by_tier(evaluations) - - # Single miner gets 100% of active tiers - # 30% + 70% = 100% - assert miner.total_score == pytest.approx(1.0) - - def test_two_miners_max_tier_silver(self): - """Two miners with max tier Silver split redistributed emissions.""" 
- miner_a = self._create_miner_eval(uid=1, current_tier=Tier.SILVER, bronze_earned=75.0, silver_earned=100.0) - miner_b = self._create_miner_eval(uid=2, current_tier=Tier.BRONZE, bronze_earned=25.0) - evaluations = {1: miner_a, 2: miner_b} - - allocate_emissions_by_tier(evaluations) - - # Bronze total: 100, Silver total: 100 - # Redistributed: Bronze 30%, Silver 70% - # A gets: 75/100 * 30% + 100/100 * 70% = 0.225 + 0.70 = 0.925 - # B gets: 25/100 * 30% = 0.075 - assert miner_a.total_score == pytest.approx(0.225 + 0.70) - assert miner_b.total_score == pytest.approx(0.075) - assert miner_a.total_score + miner_b.total_score == pytest.approx(1.0) - - -class TestNegativeNetScore: - """Test that negative net scores in one tier don't affect others.""" - - def _create_miner_eval( - self, - uid: int, - current_tier: Tier, - bronze_earned: float = 0.0, - bronze_collateral: float = 0.0, - silver_earned: float = 0.0, - silver_collateral: float = 0.0, - ) -> MinerEvaluation: - """Helper to create a MinerEvaluation with tier stats.""" - eval = MinerEvaluation(uid=uid, hotkey=f'hotkey_{uid}') - eval.current_tier = current_tier - eval.stats_by_tier = { - Tier.BRONZE: TierStats(earned_score=bronze_earned, collateral_score=bronze_collateral), - Tier.SILVER: TierStats(earned_score=silver_earned, collateral_score=silver_collateral), - Tier.GOLD: TierStats(), - } - return eval - - def test_negative_net_score_floors_to_zero(self): - """Negative net score in one tier floors to 0, doesn't affect other tiers.""" - # Bronze: 50 earned - 100 collateral = -50 -> floors to 0 - # Silver: 100 earned - 20 collateral = 80 - miner = self._create_miner_eval( - uid=1, - current_tier=Tier.SILVER, - bronze_earned=50.0, - bronze_collateral=100.0, # More collateral than earned - silver_earned=100.0, - silver_collateral=20.0, - ) - evaluations = {1: miner} - - allocate_emissions_by_tier(evaluations) - - # Bronze net = 0 (floored from -50) - # Silver net = 80 - # Single miner gets 100% of Silver 
allocation (70% after redistribution from max=Silver) - # Bronze allocation is 0% (no positive scores) - # Total = 0 + 70% = 70%... but wait, Bronze has 0 net so it contributes nothing - # Actually with redistribution: Silver gets 70%, but Bronze allocation is 0 for this miner - # The miner's Bronze allocation = 0 (0 net score / 0 total = undefined, treated as 0) - # The miner's Silver allocation = 80/80 * 70% = 70% - # Since Bronze total is 0, there's no Bronze allocation to distribute - # So only Silver 70% goes to this miner - # Wait - we need to reconsider: if network Bronze total is 0, Bronze allocation can't be distributed - # This miner should get 70% from Silver only - assert miner.total_score == pytest.approx(0.70) - - def test_mixed_miners_with_negative_scores(self): - """Mixed miners where one has negative net in a tier.""" - # Miner A: Bronze 50 earned - 100 collateral = -50 -> 0 - # Miner B: Bronze 100 earned - 0 collateral = 100 - miner_a = self._create_miner_eval( - uid=1, - current_tier=Tier.SILVER, - bronze_earned=50.0, - bronze_collateral=100.0, - silver_earned=50.0, - ) - miner_b = self._create_miner_eval( - uid=2, - current_tier=Tier.SILVER, - bronze_earned=100.0, - silver_earned=50.0, - ) - evaluations = {1: miner_a, 2: miner_b} - - allocate_emissions_by_tier(evaluations) - - # Bronze net: A=0 (floored), B=100 -> total=100 - # Silver net: A=50, B=50 -> total=100 - # Redistributed: Bronze 30%, Silver 70% - # A gets: 0/100 * 30% + 50/100 * 70% = 0 + 0.35 = 0.35 - # B gets: 100/100 * 30% + 50/100 * 70% = 0.30 + 0.35 = 0.65 - assert miner_a.total_score == pytest.approx(0.35) - assert miner_b.total_score == pytest.approx(0.65) - assert miner_a.total_score + miner_b.total_score == pytest.approx(1.0) - - -class TestSingleMinerEdgeCases: - """Test edge cases with single miners.""" - - def _create_miner_eval(self, uid: int, current_tier: Tier, **tier_scores) -> MinerEvaluation: - """Helper to create a MinerEvaluation with tier stats.""" - eval = 
MinerEvaluation(uid=uid, hotkey=f'hotkey_{uid}') - eval.current_tier = current_tier - eval.stats_by_tier = { - Tier.BRONZE: TierStats(earned_score=tier_scores.get('bronze', 0.0)), - Tier.SILVER: TierStats(earned_score=tier_scores.get('silver', 0.0)), - Tier.GOLD: TierStats(earned_score=tier_scores.get('gold', 0.0)), - } - return eval - - def test_single_point_in_gold_gets_50_percent(self): - """Single miner with 1 point in Gold takes full 50% Gold allocation.""" - miner = self._create_miner_eval(uid=1, current_tier=Tier.GOLD, gold=1.0) - evaluations = {1: miner} - - allocate_emissions_by_tier(evaluations) - - # Only Gold tier has score, so miner gets 50% (just Gold allocation) - # Bronze and Silver have 0 network totals, so those allocations don't apply - assert miner.total_score == pytest.approx(0.50) - - def test_scores_only_in_lower_tiers(self): - """Miner with Gold tier but scores only in Bronze and Silver.""" - miner = self._create_miner_eval(uid=1, current_tier=Tier.GOLD, bronze=100.0, silver=100.0, gold=0.0) - evaluations = {1: miner} - - allocate_emissions_by_tier(evaluations) - - # Bronze: 15%, Silver: 35%, Gold: 0 (no score) - # Total = 15% + 35% = 50% - assert miner.total_score == pytest.approx(0.50) - - -class TestTierProgression: - """Test tier progression scenarios from the plan.""" - - def _create_miner_eval( - self, uid: int, current_tier: Tier, bronze: float, silver: float = 0.0, gold: float = 0.0 - ) -> MinerEvaluation: - """Helper to create a MinerEvaluation with tier stats.""" - eval = MinerEvaluation(uid=uid, hotkey=f'hotkey_{uid}') - eval.current_tier = current_tier - eval.stats_by_tier = { - Tier.BRONZE: TierStats(earned_score=bronze), - Tier.SILVER: TierStats(earned_score=silver), - Tier.GOLD: TierStats(earned_score=gold), - } - return eval - - def test_pioneer_scenario(self): - """ - Pioneer (first Gold miner) scenario from plan. 
- - Before: All miners at Bronze, compete for 100% of emissions - After: Pioneer unlocks Gold, gets 50% Gold + share of Bronze/Silver - """ - # Before: Two Bronze miners - miner_a = self._create_miner_eval(uid=1, current_tier=Tier.BRONZE, bronze=50.0) - miner_b = self._create_miner_eval(uid=2, current_tier=Tier.BRONZE, bronze=50.0) - evaluations = {1: miner_a, 2: miner_b} - - allocate_emissions_by_tier(evaluations) - - # Max tier = Bronze, so Bronze gets 100% - assert miner_a.total_score == pytest.approx(0.50) - assert miner_b.total_score == pytest.approx(0.50) - - def test_pioneer_unlocks_gold(self): - """When pioneer unlocks Gold, allocations change dramatically.""" - # Miner A (pioneer): Has all tiers - # Miner B: Still at Bronze only - miner_a = self._create_miner_eval(uid=1, current_tier=Tier.GOLD, bronze=100.0, silver=200.0, gold=300.0) - miner_b = self._create_miner_eval(uid=2, current_tier=Tier.BRONZE, bronze=100.0) - evaluations = {1: miner_a, 2: miner_b} - - allocate_emissions_by_tier(evaluations) - - # Bronze: 15% -> A: 50%, B: 50% - # Silver: 35% -> A: 100% - # Gold: 50% -> A: 100% - bronze_a = 0.5 * 0.15 - bronze_b = 0.5 * 0.15 - silver_a = 0.35 - gold_a = 0.50 - - assert miner_a.total_score == pytest.approx(bronze_a + silver_a + gold_a) - assert miner_b.total_score == pytest.approx(bronze_b) - # B is protected: still gets 7.5% instead of 0% - - -class TestExampleFromPlan: - """Test the example calculation from the plan.""" - - def test_plan_example(self): - """ - From plan: - Network state: - - Bronze total: 110 (Miner A: 100, Miner B: 10) - - Silver total: 600 (Miner A: 600) - - Gold total: 300 (Miner A: 300) - - Expected: - - Bronze (15%): A gets 100/110 × 0.15 = 0.1364, B gets 10/110 × 0.15 = 0.0136 - - Silver (35%): A gets 600/600 × 0.35 = 0.35 - - Gold (50%): A gets 300/300 × 0.50 = 0.50 - - Miner A: 0.1364 + 0.35 + 0.50 = 0.9864 - - Miner B: 0.0136 - """ - miner_a = MinerEvaluation(uid=1, hotkey='hotkey_1') - miner_a.current_tier = Tier.GOLD - 
miner_a.stats_by_tier = { - Tier.BRONZE: TierStats(earned_score=100.0), - Tier.SILVER: TierStats(earned_score=600.0), - Tier.GOLD: TierStats(earned_score=300.0), - } - - miner_b = MinerEvaluation(uid=2, hotkey='hotkey_2') - miner_b.current_tier = Tier.GOLD - miner_b.stats_by_tier = { - Tier.BRONZE: TierStats(earned_score=10.0), - Tier.SILVER: TierStats(earned_score=0.0), - Tier.GOLD: TierStats(earned_score=0.0), - } - - evaluations = {1: miner_a, 2: miner_b} - - allocate_emissions_by_tier(evaluations) - - # Expected calculations from plan - bronze_a = (100.0 / 110.0) * 0.15 - bronze_b = (10.0 / 110.0) * 0.15 - silver_a = 0.35 - gold_a = 0.50 - - assert miner_a.total_score == pytest.approx(bronze_a + silver_a + gold_a, abs=0.0001) - assert miner_b.total_score == pytest.approx(bronze_b, abs=0.0001) - - # Verify sum equals 1.0 - total = miner_a.total_score + miner_b.total_score - assert total == pytest.approx(1.0, abs=0.0001) - - -class TestMissingTierStats: - """Test handling of missing or incomplete tier stats.""" - - def test_missing_tier_stats_treated_as_zero(self): - """Missing tier stats should be treated as zero contribution.""" - miner = MinerEvaluation(uid=1, hotkey='hotkey_1') - miner.current_tier = Tier.BRONZE - # Only Bronze has stats - miner.stats_by_tier = { - Tier.BRONZE: TierStats(earned_score=100.0), - # Silver and Gold not present - } - - evaluations = {1: miner} - - allocate_emissions_by_tier(evaluations) - - # Max tier = Bronze, so Bronze gets 100% - assert miner.total_score == pytest.approx(1.0) - - -if __name__ == '__main__': - pytest.main([__file__, '-v']) diff --git a/tests/validator/test_tier_requirements.py b/tests/validator/test_tier_requirements.py deleted file mode 100644 index 86495d4b..00000000 --- a/tests/validator/test_tier_requirements.py +++ /dev/null @@ -1,1237 +0,0 @@ -# The MIT License (MIT) -# Copyright © 2025 Entrius - -""" -Unit tests for advanced tier requirements including: -- Credibility threshold behavior -- Lower tier 
credibility cascade -- Lookback expiry scenarios -- Unique repository requirements -- PRs without tier configuration -- Open PRs and unique repos -- Scoring details - -Uses pytest fixtures from conftest.py for clean, reusable test data. - -Run tests: - pytest tests/validator/test_tier_requirements.py -v - -Run specific test class: - pytest tests/validator/test_tier_requirements.py::TestUniqueRepoRequirement -v -""" - -import pytest - -from gittensor.validator.oss_contributions.credibility import ( - calculate_credibility_per_tier, - calculate_tier_stats, - is_tier_unlocked, -) -from gittensor.validator.oss_contributions.tier_config import ( - TIERS, - Tier, -) - - -class TestCredibilityThresholdBehavior: - """ - Test credibility behavior around activation threshold and tier requirements. - - Key behaviors: - - Below activation threshold: credibility = 1.0 (benefit of the doubt) - - At/above activation threshold: actual credibility is calculated - - Tier unlock requires both merge count AND credibility threshold - """ - - def test_at_activation_threshold_calculates_actual_credibility(self, pr_factory, bronze_config): - """ - When tier is unlocked and at/above activation threshold, actual credibility is calculated. 
- """ - bronze_tier_config = TIERS[Tier.BRONZE] - required_repos = bronze_tier_config.required_unique_repos_count - required_credibility = bronze_tier_config.required_credibility - - # Create PRs that unlock Bronze at exactly the credibility requirement (unique repos) - # merged / (merged + closed) = required_credibility - # For required_repos merged, closed = merged * (1 - required_credibility) / required_credibility - closed_count = int(required_repos * (1 - required_credibility) / required_credibility) - - merged = pr_factory.merged_batch(bronze_config, count=required_repos, unique_repos=True) - closed = pr_factory.closed_batch(bronze_config, count=closed_count, unique_repos=True) - - credibility = calculate_credibility_per_tier(merged, closed) - - expected = required_repos / (required_repos + closed_count) - assert credibility[Tier.BRONZE] == pytest.approx(expected, abs=0.01) - assert credibility[Tier.BRONZE] >= required_credibility - - def test_above_activation_threshold_calculates_actual_credibility(self, pr_factory, bronze_config): - """ - Above activation threshold, actual credibility is calculated. - """ - bronze_tier_config = TIERS[Tier.BRONZE] - required_repos = bronze_tier_config.required_unique_repos_count - - # Unlock Bronze with perfect credibility (no closed PRs, unique repos) - merged = pr_factory.merged_batch(bronze_config, count=required_repos, unique_repos=True) - - credibility = calculate_credibility_per_tier(merged, []) - - # 100% credibility since no closed PRs - assert credibility[Tier.BRONZE] == 1.0 - - def test_lower_tier_credibility_below_requirement_locks_higher_tiers(self, pr_factory, silver_config, gold_config): - """ - When lower tier credibility drops below its requirement, higher tiers lock. 
- - Scenario: - - Silver requires X% credibility (from config) - - Miner has enough Silver merges but credibility below requirement - - Silver locks → Gold cascades to locked - """ - silver_tier_config = TIERS[Tier.SILVER] - required_repos = silver_tier_config.required_unique_repos_count - required_credibility = silver_tier_config.required_credibility - - # Calculate closed count to drop just below required credibility - # credibility = merged / (merged + closed) - # We want: merged / (merged + closed) < required_credibility - # With required_repos merged, we need enough closed to drop below threshold - merged_count = required_repos - # To get credibility just below threshold: - # merged / total < required_credibility - # merged < required_credibility * total - # merged < required_credibility * (merged + closed) - # merged - required_credibility * merged < required_credibility * closed - # merged * (1 - required_credibility) < required_credibility * closed - # closed > merged * (1 - required_credibility) / required_credibility - closed_count = int(merged_count * (1 - required_credibility) / required_credibility) + 1 - - silver_merged = pr_factory.merged_batch(silver_config, count=merged_count) - silver_closed = pr_factory.closed_batch(silver_config, count=closed_count) - - # Add perfect Gold stats - gold_merged = pr_factory.merged_batch(gold_config, count=10) - - stats = calculate_tier_stats(silver_merged + gold_merged, silver_closed) - - # Verify Silver credibility is below requirement - assert stats[Tier.SILVER].credibility < required_credibility - # Verify Silver has enough merges - assert stats[Tier.SILVER].merged_count >= required_repos - - # Silver should be locked (credibility too low) - assert is_tier_unlocked(Tier.SILVER, stats) is False - # Gold cascades to locked - assert is_tier_unlocked(Tier.GOLD, stats) is False - - def test_tier_unlocked_when_credibility_exactly_at_requirement(self, pr_factory, bronze_config, silver_config): - """ - Tier unlocks when 
credibility is exactly at the requirement (with unique repos). - """ - bronze_tier_config = TIERS[Tier.BRONZE] - silver_tier_config = TIERS[Tier.SILVER] - required_repos = silver_tier_config.required_unique_repos_count - required_credibility = silver_tier_config.required_credibility - - # Calculate exact counts for required_credibility - # closed = merged * (1 - required_credibility) / required_credibility - merged_count = required_repos - closed_count = int(merged_count * (1 - required_credibility) / required_credibility) - - # Verify our math: merged / (merged + closed) should equal required_credibility - expected_credibility = merged_count / (merged_count + closed_count) - - # Need Bronze unlocked first (with unique repos) - bronze_merged = pr_factory.merged_batch( - bronze_config, count=bronze_tier_config.required_unique_repos_count, unique_repos=True - ) - silver_merged = pr_factory.merged_batch(silver_config, count=merged_count, unique_repos=True) - silver_closed = pr_factory.closed_batch(silver_config, count=closed_count, unique_repos=True) - - stats = calculate_tier_stats(bronze_merged + silver_merged, silver_closed) - - assert stats[Tier.SILVER].credibility == pytest.approx(expected_credibility, abs=0.01) - assert stats[Tier.SILVER].credibility >= required_credibility - assert is_tier_unlocked(Tier.SILVER, stats) is True - - def test_tier_unlocked_when_credibility_above_requirement( - self, pr_factory, bronze_config, silver_config, gold_config - ): - """ - Tier unlocks when credibility is above the requirement (with unique repos). 
- """ - bronze_tier_config = TIERS[Tier.BRONZE] - silver_tier_config = TIERS[Tier.SILVER] - gold_tier_config = TIERS[Tier.GOLD] - required_repos = gold_tier_config.required_unique_repos_count - required_credibility = gold_tier_config.required_credibility - - # Get well above threshold - merged_count = required_repos + 5 # Extra buffer - # For 90% credibility with merged_count merges: - # 0.9 = merged / (merged + closed) - # closed = merged * (1 - 0.9) / 0.9 = merged / 9 - closed_count = merged_count // 9 - - # Unlock Bronze and Silver first (with unique repos) - bronze_merged = pr_factory.merged_batch( - bronze_config, count=bronze_tier_config.required_unique_repos_count, unique_repos=True - ) - silver_merged = pr_factory.merged_batch( - silver_config, count=silver_tier_config.required_unique_repos_count, unique_repos=True - ) - - gold_merged = pr_factory.merged_batch(gold_config, count=merged_count, unique_repos=True) - gold_closed = pr_factory.closed_batch(gold_config, count=closed_count, unique_repos=True) - - stats = calculate_tier_stats(bronze_merged + silver_merged + gold_merged, gold_closed) - - assert stats[Tier.GOLD].credibility > required_credibility - assert is_tier_unlocked(Tier.GOLD, stats) is True - - def test_high_merges_low_credibility_still_locks(self, pr_factory, silver_config): - """ - Having many merges doesn't help if credibility is below requirement. 
- """ - silver_tier_config = TIERS[Tier.SILVER] - required_repos = silver_tier_config.required_unique_repos_count - required_credibility = silver_tier_config.required_credibility - - # Way more merges than required, but terrible credibility - merged_count = required_repos * 5 - # Calculate closed to get credibility just below requirement - closed_count = int(merged_count * (1 - required_credibility) / required_credibility) + 2 - - merged = pr_factory.merged_batch(silver_config, count=merged_count) - closed = pr_factory.closed_batch(silver_config, count=closed_count) - - stats = calculate_tier_stats(merged, closed) - - # Plenty of merges - assert stats[Tier.SILVER].merged_count > required_repos - # But credibility below threshold - assert stats[Tier.SILVER].credibility < required_credibility - # Still locked - assert is_tier_unlocked(Tier.SILVER, stats) is False - - -class TestLowerTierCredibilityCascade: - """ - Test cascade locking when lower tier credibility falls below requirements. - """ - - def test_silver_credibility_drop_locks_gold(self, pr_factory, silver_config, gold_config): - """ - Gold locks when Silver credibility drops below Silver's requirement. 
- """ - silver_tier_config = TIERS[Tier.SILVER] - gold_tier_config = TIERS[Tier.GOLD] - - silver_required_repos = silver_tier_config.required_unique_repos_count - silver_required_credibility = silver_tier_config.required_credibility - gold_required_repos = gold_tier_config.required_unique_repos_count - - # Silver: enough merges but terrible credibility - silver_merged_count = silver_required_repos - silver_closed_count = ( - int(silver_merged_count * (1 - silver_required_credibility) / silver_required_credibility) + 2 - ) - - silver_merged = pr_factory.merged_batch(silver_config, count=silver_merged_count) - silver_closed = pr_factory.closed_batch(silver_config, count=silver_closed_count) - - # Gold: perfect stats - gold_merged = pr_factory.merged_batch(gold_config, count=gold_required_repos + 5) - - stats = calculate_tier_stats(silver_merged + gold_merged, silver_closed) - credibility = calculate_credibility_per_tier(silver_merged + gold_merged, silver_closed) - - # Silver credibility below requirement - assert stats[Tier.SILVER].credibility < silver_required_credibility - - # Gold has perfect stats - assert stats[Tier.GOLD].merged_count >= gold_required_repos - assert stats[Tier.GOLD].credibility == 1.0 - - # But Gold is locked because Silver is locked - assert is_tier_unlocked(Tier.SILVER, stats) is False - assert is_tier_unlocked(Tier.GOLD, stats) is False - assert credibility.get(Tier.GOLD, 0.0) == 0.0 - - def test_recovering_lower_tier_credibility_unlocks_higher( - self, pr_factory, bronze_config, silver_config, gold_config - ): - """ - Improving lower tier credibility can restore higher tier access (with unique repos). 
- """ - bronze_tier_config = TIERS[Tier.BRONZE] - silver_tier_config = TIERS[Tier.SILVER] - gold_tier_config = TIERS[Tier.GOLD] - - silver_required_repos = silver_tier_config.required_unique_repos_count - silver_required_credibility = silver_tier_config.required_credibility - gold_required_repos = gold_tier_config.required_unique_repos_count - - # Need Bronze unlocked first (with unique repos) - bronze_merged = pr_factory.merged_batch( - bronze_config, count=bronze_tier_config.required_unique_repos_count, unique_repos=True - ) - - # Initial state: Silver below credibility threshold (unique repos) - silver_merged_count = silver_required_repos - silver_closed_count = ( - int(silver_merged_count * (1 - silver_required_credibility) / silver_required_credibility) + 2 - ) - - silver_merged = pr_factory.merged_batch(silver_config, count=silver_merged_count, unique_repos=True) - silver_closed = pr_factory.closed_batch(silver_config, count=silver_closed_count, unique_repos=True) - gold_merged = pr_factory.merged_batch(gold_config, count=gold_required_repos + 5, unique_repos=True) - - stats = calculate_tier_stats(bronze_merged + silver_merged + gold_merged, silver_closed) - assert is_tier_unlocked(Tier.GOLD, stats) is False - - # Recovery: add more Silver merges to boost credibility above threshold (unique repos) - # New credibility = (old_merged + new) / (old_merged + old_closed + new) - # We need enough new merges to get above silver_required_credibility - # Let's add enough to double our merged count - additional_silver = pr_factory.merged_batch(silver_config, count=silver_merged_count * 2, unique_repos=True) - - stats_after = calculate_tier_stats( - bronze_merged + silver_merged + additional_silver + gold_merged, silver_closed - ) - - # Should now be above threshold - assert stats_after[Tier.SILVER].credibility >= silver_required_credibility - assert is_tier_unlocked(Tier.SILVER, stats_after) is True - assert is_tier_unlocked(Tier.GOLD, stats_after) is True - - -# 
============================================================================ -# Lookback Expiry Tests -# ============================================================================ - - -class TestLookbackExpiry: - """ - Test scenarios where PRs expire outside the lookback window. - - Miners must continuously maintain lower tiers to keep higher tiers unlocked. - When lower-tier PRs expire (fall outside 90-day window), the miner loses - those counts, potentially causing cascade lock failures. - """ - - def _bronze_prs(self, pr_factory, bronze_config): - """Helper to create Bronze PRs that unlock Bronze (with unique repos).""" - bronze_tier_config = TIERS[Tier.BRONZE] - return pr_factory.merged_batch( - bronze_config, count=bronze_tier_config.required_unique_repos_count, unique_repos=True - ) - - def test_silver_prs_expire_locks_gold(self, pr_factory, bronze_config, silver_config, gold_config): - """ - Gold miner loses Gold access when Silver PRs expire (with unique repos). - - Scenario: - - Miner had Bronze + Silver + Gold all unlocked - - Time passes, Silver PRs fall outside lookback - - Now has 0 Silver merges → Silver locks → Gold cascades to locked - """ - bronze_tier_config = TIERS[Tier.BRONZE] - silver_tier_config = TIERS[Tier.SILVER] - gold_tier_config = TIERS[Tier.GOLD] - - bronze_required = bronze_tier_config.required_unique_repos_count - silver_required = silver_tier_config.required_unique_repos_count - gold_required = gold_tier_config.required_unique_repos_count - - # Before expiry: Gold unlocked (Bronze + Silver + Gold PRs, unique repos) - bronze_prs = pr_factory.merged_batch(bronze_config, count=bronze_required, unique_repos=True) - silver_prs = pr_factory.merged_batch(silver_config, count=silver_required, unique_repos=True) - gold_prs = pr_factory.merged_batch(gold_config, count=gold_required + 2, unique_repos=True) - - stats_before = calculate_tier_stats(bronze_prs + silver_prs + gold_prs, []) - assert is_tier_unlocked(Tier.SILVER, stats_before) is 
True - assert is_tier_unlocked(Tier.GOLD, stats_before) is True - - # After expiry: Silver PRs gone (simulating lookback filter), Bronze stays - pr_factory.reset() - bronze_prs_after = pr_factory.merged_batch(bronze_config, count=bronze_required, unique_repos=True) - gold_prs_after = pr_factory.merged_batch(gold_config, count=gold_required + 2, unique_repos=True) - - stats_after = calculate_tier_stats(bronze_prs_after + gold_prs_after, []) - credibility_after = calculate_credibility_per_tier(bronze_prs_after + gold_prs_after, []) - - # Bronze still unlocked - assert is_tier_unlocked(Tier.BRONZE, stats_after) is True - # Silver now locked (no merges) - assert is_tier_unlocked(Tier.SILVER, stats_after) is False - # Gold cascades to locked despite perfect Gold stats - assert is_tier_unlocked(Tier.GOLD, stats_after) is False - assert credibility_after.get(Tier.GOLD, 0.0) == 0.0 - - def test_partial_silver_expiry_still_unlocked(self, pr_factory, bronze_config, silver_config, gold_config): - """ - Partial Silver expiry doesn't lock if enough PRs remain (with unique repos). 
- - Scenario: - - Miner had extra Silver merges + Gold unlocked - - Some Silver PRs expire → still meets threshold - - Gold stays unlocked - """ - bronze_tier_config = TIERS[Tier.BRONZE] - silver_tier_config = TIERS[Tier.SILVER] - gold_tier_config = TIERS[Tier.GOLD] - - bronze_required = bronze_tier_config.required_unique_repos_count - silver_required = silver_tier_config.required_unique_repos_count - gold_required = gold_tier_config.required_unique_repos_count - extra_silver = 2 # Buffer above requirement - - # Before: all tiers unlocked with extra Silver merges (unique repos) - bronze_prs = pr_factory.merged_batch(bronze_config, count=bronze_required, unique_repos=True) - merged_before = ( - bronze_prs - + pr_factory.merged_batch(silver_config, count=silver_required + extra_silver, unique_repos=True) - + pr_factory.merged_batch(gold_config, count=gold_required + 2, unique_repos=True) - ) - - stats_before = calculate_tier_stats(merged_before, []) - assert stats_before[Tier.SILVER].merged_count == silver_required + extra_silver - assert is_tier_unlocked(Tier.GOLD, stats_before) is True - - # After: extra Silver merges expire, exactly at threshold remains (unique repos) - pr_factory.reset() - merged_after = ( - pr_factory.merged_batch(bronze_config, count=bronze_required, unique_repos=True) - + pr_factory.merged_batch(silver_config, count=silver_required, unique_repos=True) - + pr_factory.merged_batch(gold_config, count=gold_required + 2, unique_repos=True) - ) - - stats_after = calculate_tier_stats(merged_after, []) - assert stats_after[Tier.SILVER].merged_count == silver_required - # Still unlocked - exactly at threshold - assert is_tier_unlocked(Tier.SILVER, stats_after) is True - assert is_tier_unlocked(Tier.GOLD, stats_after) is True - - def test_one_silver_expiry_below_threshold_locks(self, pr_factory, bronze_config, silver_config, gold_config): - """ - When exactly at threshold, losing one PR locks the tier (with unique repos). 
- - Scenario: - - Miner has exactly minimum Silver merges - - 1 Silver PR expires → below threshold - - Silver locks → Gold cascades - """ - bronze_tier_config = TIERS[Tier.BRONZE] - silver_tier_config = TIERS[Tier.SILVER] - gold_tier_config = TIERS[Tier.GOLD] - - bronze_required = bronze_tier_config.required_unique_repos_count - silver_required = silver_tier_config.required_unique_repos_count - gold_required = gold_tier_config.required_unique_repos_count - - # At threshold: exactly silver_required (with Bronze unlocked, unique repos) - merged = ( - pr_factory.merged_batch(bronze_config, count=bronze_required, unique_repos=True) - + pr_factory.merged_batch(silver_config, count=silver_required, unique_repos=True) - + pr_factory.merged_batch(gold_config, count=gold_required + 5, unique_repos=True) - ) - - stats = calculate_tier_stats(merged, []) - assert is_tier_unlocked(Tier.SILVER, stats) is True - assert is_tier_unlocked(Tier.GOLD, stats) is True - - # One Silver expires: now silver_required - 1 (Bronze still unlocked, unique repos) - pr_factory.reset() - merged_after = ( - pr_factory.merged_batch(bronze_config, count=bronze_required, unique_repos=True) - + pr_factory.merged_batch(silver_config, count=silver_required - 1, unique_repos=True) - + pr_factory.merged_batch(gold_config, count=gold_required + 5, unique_repos=True) - ) - - stats_after = calculate_tier_stats(merged_after, []) - assert is_tier_unlocked(Tier.BRONZE, stats_after) is True - assert is_tier_unlocked(Tier.SILVER, stats_after) is False - assert is_tier_unlocked(Tier.GOLD, stats_after) is False - - def test_credibility_drops_as_merges_expire(self, pr_factory, bronze_config, silver_config): - """ - Credibility changes as PRs expire from the lookback window (with unique repos). 
- - Scenario: - - Miner has good credibility at Silver - - Some merged PRs expire → credibility drops - - Still above requirement threshold - """ - bronze_tier_config = TIERS[Tier.BRONZE] - silver_tier_config = TIERS[Tier.SILVER] - - bronze_required = bronze_tier_config.required_unique_repos_count - silver_required = silver_tier_config.required_unique_repos_count - silver_cred_required = silver_tier_config.required_credibility - - # Before: high credibility (well above threshold, unique repos) - merged_count = silver_required + 5 - closed_count = 1 # Keep low to stay above 75% threshold - bronze_prs = pr_factory.merged_batch(bronze_config, count=bronze_required, unique_repos=True) - silver_merged_before = pr_factory.merged_batch(silver_config, count=merged_count, unique_repos=True) - closed = pr_factory.closed_batch(silver_config, count=closed_count, unique_repos=True) - - stats_before = calculate_tier_stats(bronze_prs + silver_merged_before, closed) - assert stats_before[Tier.SILVER].credibility == merged_count / (merged_count + closed_count) - assert is_tier_unlocked(Tier.SILVER, stats_before) is True - - # After: some merged PRs expire but still above threshold (unique repos) - pr_factory.reset() - remaining_merged = silver_required # Keep at minimum - bronze_prs_after = pr_factory.merged_batch(bronze_config, count=bronze_required, unique_repos=True) - silver_merged_after = pr_factory.merged_batch(silver_config, count=remaining_merged, unique_repos=True) - - stats_after = calculate_tier_stats(bronze_prs_after + silver_merged_after, closed) - new_credibility = remaining_merged / (remaining_merged + closed_count) - assert stats_after[Tier.SILVER].credibility == pytest.approx(new_credibility, abs=0.01) - # Should still be above required credibility - if new_credibility >= silver_cred_required: - assert is_tier_unlocked(Tier.SILVER, stats_after) is True - - def test_credibility_drops_below_threshold_on_expiry(self, pr_factory, bronze_config, silver_config, 
gold_config): - """ - Expiring merged PRs can drop credibility below threshold (with unique repos). - - Scenario: - - Gold miner: exactly at credibility threshold - - 1 merged PR expires → credibility drops below threshold - - Gold locks - """ - bronze_tier_config = TIERS[Tier.BRONZE] - silver_tier_config = TIERS[Tier.SILVER] - gold_tier_config = TIERS[Tier.GOLD] - - bronze_required = bronze_tier_config.required_unique_repos_count - silver_required = silver_tier_config.required_unique_repos_count - gold_required = gold_tier_config.required_unique_repos_count - gold_cred_required = gold_tier_config.required_credibility - - # Calculate counts so losing 1 merged PR drops credibility below threshold - # Use +3 instead of +2 to ensure enough margin for the math to work - gold_merged_count = gold_required + 3 - # Calculate closed count based on (merged-1) to ensure "after" is below threshold - gold_closed_count = int((gold_merged_count - 1) * (1 - gold_cred_required) / gold_cred_required) + 1 - - # Before: at or above threshold (all tiers unlocked, unique repos) - merged_before = ( - pr_factory.merged_batch(bronze_config, count=bronze_required, unique_repos=True) - + pr_factory.merged_batch(silver_config, count=silver_required, unique_repos=True) - + pr_factory.merged_batch(gold_config, count=gold_merged_count, unique_repos=True) - ) - closed = pr_factory.closed_batch(gold_config, count=gold_closed_count, unique_repos=True) - - stats_before = calculate_tier_stats(merged_before, closed) - assert stats_before[Tier.GOLD].credibility >= gold_cred_required - assert is_tier_unlocked(Tier.GOLD, stats_before) is True - - # After: 1 merged Gold PR expires (unique repos) - pr_factory.reset() - merged_after = ( - pr_factory.merged_batch(bronze_config, count=bronze_required, unique_repos=True) - + pr_factory.merged_batch(silver_config, count=silver_required, unique_repos=True) - + pr_factory.merged_batch(gold_config, count=gold_merged_count - 1, unique_repos=True) - ) - - 
stats_after = calculate_tier_stats(merged_after, closed) - # Credibility should drop below threshold - assert stats_after[Tier.GOLD].credibility < gold_cred_required - assert is_tier_unlocked(Tier.GOLD, stats_after) is False - - def test_closed_prs_expiring_improves_credibility(self, pr_factory, bronze_config, silver_config, gold_config): - """ - Expiring closed PRs can improve credibility (with unique repos). - - Scenario: - - Gold below credibility threshold (locked) - - Old closed PRs expire → credibility rises above threshold - - Gold unlocks - """ - bronze_tier_config = TIERS[Tier.BRONZE] - silver_tier_config = TIERS[Tier.SILVER] - gold_tier_config = TIERS[Tier.GOLD] - - bronze_required = bronze_tier_config.required_unique_repos_count - silver_required = silver_tier_config.required_unique_repos_count - gold_required = gold_tier_config.required_unique_repos_count - gold_cred_required = gold_tier_config.required_credibility - - # Before: below threshold (unique repos) - gold_merged_count = gold_required + 8 - gold_closed_count = int(gold_merged_count * (1 - gold_cred_required) / gold_cred_required) + 3 - - merged = ( - pr_factory.merged_batch(bronze_config, count=bronze_required, unique_repos=True) - + pr_factory.merged_batch(silver_config, count=silver_required, unique_repos=True) - + pr_factory.merged_batch(gold_config, count=gold_merged_count, unique_repos=True) - ) - closed_before = pr_factory.closed_batch(gold_config, count=gold_closed_count, unique_repos=True) - - stats_before = calculate_tier_stats(merged, closed_before) - assert stats_before[Tier.GOLD].credibility < gold_cred_required - assert is_tier_unlocked(Tier.GOLD, stats_before) is False - - # After: some closed PRs expire, improving credibility (unique repos) - pr_factory.reset() - remaining_closed = int(gold_merged_count * (1 - gold_cred_required) / gold_cred_required) - 1 - remaining_closed = max(0, remaining_closed) - closed_after = pr_factory.closed_batch(gold_config, count=remaining_closed, 
unique_repos=True) - - stats_after = calculate_tier_stats(merged, closed_after) - assert stats_after[Tier.GOLD].credibility >= gold_cred_required - assert is_tier_unlocked(Tier.GOLD, stats_after) is True - - def test_all_tier_activity_expires(self, pr_factory, bronze_config, silver_config): - """ - When all PRs at a tier expire, it's like starting fresh (with unique repos). - - Scenario: - - Miner had Bronze + Silver unlocked - - All Silver PRs expire (Bronze still active) - - Silver now has no activity (locked due to 0 merges) - """ - bronze_tier_config = TIERS[Tier.BRONZE] - silver_tier_config = TIERS[Tier.SILVER] - - bronze_required = bronze_tier_config.required_unique_repos_count - silver_required = silver_tier_config.required_unique_repos_count - - # Before: Silver unlocked (unique repos) - bronze_prs = pr_factory.merged_batch(bronze_config, count=bronze_required, unique_repos=True) - silver_prs = pr_factory.merged_batch(silver_config, count=silver_required + 2, unique_repos=True) - stats_before = calculate_tier_stats(bronze_prs + silver_prs, []) - assert is_tier_unlocked(Tier.SILVER, stats_before) is True - - # After: Silver PRs expired, only Bronze remains (unique repos) - pr_factory.reset() - bronze_prs_after = pr_factory.merged_batch(bronze_config, count=bronze_required, unique_repos=True) - stats_after = calculate_tier_stats(bronze_prs_after, []) - assert stats_after[Tier.SILVER].merged_count == 0 - assert is_tier_unlocked(Tier.BRONZE, stats_after) is True - assert is_tier_unlocked(Tier.SILVER, stats_after) is False - - def test_continuous_maintenance_required(self, pr_factory, bronze_config, silver_config, gold_config): - """ - Miners must continuously contribute to lower tiers (with unique repos). 
- - Scenario demonstrates the "tending garden" requirement: - - Miner gets Gold, then focuses only on Gold PRs - - Old Silver PRs expire one by one - - Eventually Silver locks → Gold cascades - """ - bronze_tier_config = TIERS[Tier.BRONZE] - silver_tier_config = TIERS[Tier.SILVER] - gold_tier_config = TIERS[Tier.GOLD] - - bronze_required = bronze_tier_config.required_unique_repos_count - silver_required = silver_tier_config.required_unique_repos_count - gold_required = gold_tier_config.required_unique_repos_count - - # Phase 1: Full unlock with buffer (unique repos) - bronze_prs = pr_factory.merged_batch(bronze_config, count=bronze_required, unique_repos=True) - silver_prs = pr_factory.merged_batch(silver_config, count=silver_required + 2, unique_repos=True) - gold_prs = pr_factory.merged_batch(gold_config, count=gold_required + 5, unique_repos=True) - - stats = calculate_tier_stats(bronze_prs + silver_prs + gold_prs, []) - assert is_tier_unlocked(Tier.GOLD, stats) is True - - # Phase 2: Some Silver expires (still above threshold) - stats = calculate_tier_stats(bronze_prs + silver_prs[: silver_required + 1] + gold_prs, []) - assert is_tier_unlocked(Tier.GOLD, stats) is True - - # Phase 3: More Silver expires (exactly at threshold) - stats = calculate_tier_stats(bronze_prs + silver_prs[:silver_required] + gold_prs, []) - assert is_tier_unlocked(Tier.GOLD, stats) is True - - # Phase 4: One more expires (below threshold) - stats = calculate_tier_stats(bronze_prs + silver_prs[: silver_required - 1] + gold_prs, []) - assert is_tier_unlocked(Tier.BRONZE, stats) is True - assert is_tier_unlocked(Tier.SILVER, stats) is False - assert is_tier_unlocked(Tier.GOLD, stats) is False # Cascade! - - def test_refreshing_lower_tier_restores_access(self, pr_factory, bronze_config, silver_config, gold_config): - """ - Adding new lower-tier PRs restores higher tier access (with unique repos). 
- - Scenario: - - Miner lost Gold due to Silver expiry (Bronze still active) - - Gets new Silver PRs merged - - Gold access restored - """ - bronze_tier_config = TIERS[Tier.BRONZE] - silver_tier_config = TIERS[Tier.SILVER] - gold_tier_config = TIERS[Tier.GOLD] - - bronze_required = bronze_tier_config.required_unique_repos_count - silver_required = silver_tier_config.required_unique_repos_count - gold_required = gold_tier_config.required_unique_repos_count - - # Lost access: Bronze unlocked, but one below Silver threshold (unique repos) - bronze_prs = pr_factory.merged_batch(bronze_config, count=bronze_required, unique_repos=True) - old_silver = pr_factory.merged_batch(silver_config, count=silver_required - 1, unique_repos=True) - gold_prs = pr_factory.merged_batch(gold_config, count=gold_required + 5, unique_repos=True) - - stats = calculate_tier_stats(bronze_prs + old_silver + gold_prs, []) - assert is_tier_unlocked(Tier.BRONZE, stats) is True - assert is_tier_unlocked(Tier.GOLD, stats) is False - - # Refresh: add 1 new Silver PR to meet threshold (unique repo) - new_silver = pr_factory.merged_batch(silver_config, count=1, unique_repos=True) - - stats = calculate_tier_stats(bronze_prs + old_silver + new_silver + gold_prs, []) - assert is_tier_unlocked(Tier.SILVER, stats) is True - assert is_tier_unlocked(Tier.GOLD, stats) is True - - -# ============================================================================ -# Unique Repository Requirement Tests -# ============================================================================ - - -class TestUniqueRepoRequirement: - """ - Test the unique repo contribution requirement for tier unlocking. - - This new requirement prevents same-repo spam by requiring miners to contribute - to a minimum number of unique repositories within each tier to unlock it. - """ - - def test_same_repo_spam_blocks_tier_unlock(self, pr_factory, bronze_config): - """ - Multiple PRs to the same repo don't count as unique repo contributions. 
- - Scenario: - - Miner creates multiple merged PRs to same repo - - Meets merge count and credibility requirements - - But only has 1 unique repo → tier locked - """ - bronze_tier_config = TIERS[Tier.BRONZE] - required_repos = bronze_tier_config.required_unique_repos_count - required_unique_repos = bronze_tier_config.required_unique_repos_count - - # Create PRs all to the same repo (default behavior without unique_repos=True) - merged = pr_factory.merged_batch(bronze_config, count=required_repos) - - stats = calculate_tier_stats(merged, []) - - # Has enough merges - assert stats[Tier.BRONZE].merged_count >= required_repos - # But only 1 unique repo - assert stats[Tier.BRONZE].unique_repo_contribution_count == 1 - # Required unique repos is 3 - assert required_unique_repos == 3 - # Tier is locked - assert is_tier_unlocked(Tier.BRONZE, stats) is False - - def test_unique_repos_unlock_tier(self, pr_factory, bronze_config): - """ - PRs to different repos count as unique repo contributions. - - Scenario: - - Miner creates PRs to unique repos - - Meets merge count, credibility, and unique repo requirements - - Tier unlocks - """ - bronze_tier_config = TIERS[Tier.BRONZE] - required_repos = bronze_tier_config.required_unique_repos_count - required_unique_repos = bronze_tier_config.required_unique_repos_count - - # Create PRs to unique repos - merged = pr_factory.merged_batch(bronze_config, count=required_repos, unique_repos=True) - - stats = calculate_tier_stats(merged, []) - - # Has enough merges - assert stats[Tier.BRONZE].merged_count >= required_repos - # Has enough unique repos - assert stats[Tier.BRONZE].unique_repo_contribution_count >= required_unique_repos - # Tier is unlocked - assert is_tier_unlocked(Tier.BRONZE, stats) is True - - def test_unique_repo_count_per_tier(self, pr_factory, bronze_config, silver_config, gold_config): - """ - Unique repo counts are tracked per tier. 
- - Scenario: - - Miner has PRs in multiple tiers - - Each tier tracks its own unique repo count - """ - # Create PRs with unique repos for each tier - bronze_prs = pr_factory.merged_batch(bronze_config, count=3, unique_repos=True) - silver_prs = pr_factory.merged_batch(silver_config, count=3, unique_repos=True) - - stats = calculate_tier_stats(bronze_prs + silver_prs, []) - - # Each tier has its own unique repo count - assert stats[Tier.BRONZE].unique_repo_contribution_count == 3 - assert stats[Tier.SILVER].unique_repo_contribution_count == 3 - - def test_same_repo_multiple_prs_counts_once(self, pr_factory, bronze_config): - """ - Multiple PRs to the same repo count as only 1 unique repo contribution. - - Scenario: - - Miner creates 5 PRs to repo-1 - - And 1 PR to repo-2 - - Unique repo count is 2, not 6 - """ - # Create 5 PRs to the same repo - prs_same_repo = [pr_factory.merged(bronze_config, repo='owner/repo-1') for _ in range(5)] - # Create 1 PR to a different repo - pr_different_repo = pr_factory.merged(bronze_config, repo='owner/repo-2') - - merged = prs_same_repo + [pr_different_repo] - stats = calculate_tier_stats(merged, []) - - assert stats[Tier.BRONZE].merged_count == 6 - assert stats[Tier.BRONZE].unique_repo_contribution_count == 2 - - def test_unique_repo_requirement_per_tier_config(self): - """ - Verify each tier has the expected unique repo requirement (all are 3). - """ - assert TIERS[Tier.BRONZE].required_unique_repos_count == 3 - assert TIERS[Tier.SILVER].required_unique_repos_count == 3 - assert TIERS[Tier.GOLD].required_unique_repos_count == 3 - - def test_exactly_at_unique_repo_threshold(self, pr_factory, bronze_config): - """ - Tier unlocks when exactly at unique repo requirement. 
- """ - bronze_tier_config = TIERS[Tier.BRONZE] - required_unique_repos = bronze_tier_config.required_unique_repos_count - - # Create exactly required number of unique repos - merged = pr_factory.merged_batch(bronze_config, count=required_unique_repos, unique_repos=True) - - stats = calculate_tier_stats(merged, []) - - assert stats[Tier.BRONZE].unique_repo_contribution_count == required_unique_repos - assert is_tier_unlocked(Tier.BRONZE, stats) is True - - def test_one_below_unique_repo_threshold(self, pr_factory, bronze_config): - """ - Tier stays locked when one below unique repo requirement. - """ - bronze_tier_config = TIERS[Tier.BRONZE] - required_unique_repos = bronze_tier_config.required_unique_repos_count - - # Create one less than required unique repos - merged = pr_factory.merged_batch(bronze_config, count=required_unique_repos - 1, unique_repos=True) - - stats = calculate_tier_stats(merged, []) - - # Has unique repos but not enough - assert stats[Tier.BRONZE].unique_repo_contribution_count == required_unique_repos - 1 - # Tier is locked (even if we had enough merges) - assert is_tier_unlocked(Tier.BRONZE, stats) is False - - def test_closed_prs_dont_count_for_unique_repos(self, pr_factory, bronze_config): - """ - Closed PRs don't count towards unique repo requirements. 
- - Scenario: - - Miner has 2 merged PRs to unique repos - - And 5 closed PRs to unique repos - - Only 2 unique repo contributions counted - """ - # 2 merged PRs to unique repos - merged = pr_factory.merged_batch(bronze_config, count=2, unique_repos=True) - # 5 closed PRs to unique repos - closed = pr_factory.closed_batch(bronze_config, count=5, unique_repos=True) - - stats = calculate_tier_stats(merged, closed) - - # Only merged PRs count towards unique repos - assert stats[Tier.BRONZE].unique_repo_contribution_count == 2 - assert is_tier_unlocked(Tier.BRONZE, stats) is False - - def test_unique_repo_with_mixed_same_repo_prs(self, pr_factory, bronze_config): - """ - Mix of unique and same-repo PRs correctly counts unique repos. - - Scenario: - - 3 PRs to 3 unique repos (meets requirement) - - Plus 5 more PRs to those same repos - - Total 8 merged PRs, 3 unique repos - """ - # Create PRs to 3 unique repos with multiple PRs each - repo1_prs = [pr_factory.merged(bronze_config, repo='owner/repo-1') for _ in range(3)] - repo2_prs = [pr_factory.merged(bronze_config, repo='owner/repo-2') for _ in range(3)] - repo3_prs = [pr_factory.merged(bronze_config, repo='owner/repo-3') for _ in range(2)] - - merged = repo1_prs + repo2_prs + repo3_prs - stats = calculate_tier_stats(merged, []) - - assert stats[Tier.BRONZE].merged_count == 8 - assert stats[Tier.BRONZE].unique_repo_contribution_count == 3 - assert is_tier_unlocked(Tier.BRONZE, stats) is True - - def test_tier_stats_tracks_unique_repos_correctly(self, pr_factory, bronze_config): - """ - TierStats unique_repo_contribution_count is calculated correctly. 
- """ - # 5 PRs to 2 unique repos - prs_repo_a = [pr_factory.merged(bronze_config, repo='owner/repo-a') for _ in range(3)] - prs_repo_b = [pr_factory.merged(bronze_config, repo='owner/repo-b') for _ in range(2)] - - merged = prs_repo_a + prs_repo_b - stats = calculate_tier_stats(merged, []) - - assert stats[Tier.BRONZE].unique_repo_contribution_count == 2 - - def test_silver_unique_repo_with_bronze_unlocked(self, pr_factory, bronze_config, silver_config): - """ - Silver tier also requires unique repos (with Bronze unlocked first). - - Scenario: - - Bronze unlocked with unique repos - - Silver has enough merges but same repo spam - - Silver stays locked - """ - bronze_tier_config = TIERS[Tier.BRONZE] - silver_tier_config = TIERS[Tier.SILVER] - - # Bronze unlocked with unique repos - bronze_prs = pr_factory.merged_batch( - bronze_config, count=bronze_tier_config.required_unique_repos_count, unique_repos=True - ) - - # Silver has enough merges but all to same repo - silver_prs = pr_factory.merged_batch( - silver_config, count=silver_tier_config.required_unique_repos_count - ) # No unique_repos=True - - stats = calculate_tier_stats(bronze_prs + silver_prs, []) - - assert is_tier_unlocked(Tier.BRONZE, stats) is True - assert stats[Tier.SILVER].merged_count >= silver_tier_config.required_unique_repos_count - assert stats[Tier.SILVER].unique_repo_contribution_count == 1 - assert is_tier_unlocked(Tier.SILVER, stats) is False - - def test_gold_unique_repo_requirement(self, pr_factory, bronze_config, silver_config, gold_config): - """ - Gold tier requires unique repos across Bronze, Silver, and Gold. 
- """ - bronze_tier_config = TIERS[Tier.BRONZE] - silver_tier_config = TIERS[Tier.SILVER] - gold_tier_config = TIERS[Tier.GOLD] - # Calculate token scores needed per PR to meet total requirements - silver_token_per_pr = ( - silver_tier_config.required_min_token_score or 50.0 - ) / silver_tier_config.required_unique_repos_count + 1.0 - gold_token_per_pr = ( - gold_tier_config.required_min_token_score or 150.0 - ) / gold_tier_config.required_unique_repos_count + 1.0 - - # All tiers with unique repos (with sufficient token scores) - bronze_prs = pr_factory.merged_batch( - bronze_config, count=bronze_tier_config.required_unique_repos_count, unique_repos=True - ) - silver_prs = pr_factory.merged_batch( - silver_config, - count=silver_tier_config.required_unique_repos_count, - unique_repos=True, - token_score=silver_token_per_pr, - ) - gold_prs = pr_factory.merged_batch( - gold_config, - count=gold_tier_config.required_unique_repos_count, - unique_repos=True, - token_score=gold_token_per_pr, - ) - - stats = calculate_tier_stats(bronze_prs + silver_prs + gold_prs, []) - - assert is_tier_unlocked(Tier.BRONZE, stats) is True - assert is_tier_unlocked(Tier.SILVER, stats) is True - assert is_tier_unlocked(Tier.GOLD, stats) is True - - def test_unique_repos_not_shared_across_tiers(self, pr_factory, bronze_config, silver_config): - """ - Unique repos in one tier don't count towards another tier's requirement. - - Each tier tracks its own unique repo contributions independently. 
- """ - bronze_tier_config = TIERS[Tier.BRONZE] - silver_tier_config = TIERS[Tier.SILVER] - - # Bronze with unique repos - bronze_prs = pr_factory.merged_batch( - bronze_config, count=bronze_tier_config.required_unique_repos_count, unique_repos=True - ) - - # Silver with same repo spam (using default repo which is 'test/repo') - # Reset to ensure we're using the default repo - silver_prs = [ - pr_factory.merged(silver_config, repo='test/shared-repo') - for _ in range(silver_tier_config.required_unique_repos_count) - ] - - stats = calculate_tier_stats(bronze_prs + silver_prs, []) - - # Bronze has its unique repos - assert stats[Tier.BRONZE].unique_repo_contribution_count == bronze_tier_config.required_unique_repos_count - # Silver only has 1 unique repo (all to same repo) - assert stats[Tier.SILVER].unique_repo_contribution_count == 1 - # Bronze unlocked, Silver locked - assert is_tier_unlocked(Tier.BRONZE, stats) is True - assert is_tier_unlocked(Tier.SILVER, stats) is False - - -class TestUniqueRepoEdgeCases: - """ - Edge cases for unique repo requirement. - """ - - def test_empty_repo_name_handling(self, pr_factory, bronze_config): - """ - PRs should always have a repo name in real scenarios. - """ - # All PRs have repository_full_name set by the factory - merged = pr_factory.merged_batch(bronze_config, count=3, unique_repos=True) - - for pr in merged: - assert pr.repository_full_name is not None - assert len(pr.repository_full_name) > 0 - - def test_zero_unique_repos_locks_tier(self, pr_factory, bronze_config): - """ - Zero unique repos (no PRs) means tier is locked. - """ - stats = calculate_tier_stats([], []) - - assert stats[Tier.BRONZE].unique_repo_contribution_count == 0 - assert is_tier_unlocked(Tier.BRONZE, stats) is False - - def test_many_unique_repos_above_requirement(self, pr_factory, bronze_config): - """ - Having more unique repos than required still unlocks the tier. 
- """ - bronze_tier_config = TIERS[Tier.BRONZE] - required_unique_repos = bronze_tier_config.required_unique_repos_count - - # Create many more unique repos than required - merged = pr_factory.merged_batch(bronze_config, count=10, unique_repos=True) - - stats = calculate_tier_stats(merged, []) - - assert stats[Tier.BRONZE].unique_repo_contribution_count == 10 - assert stats[Tier.BRONZE].unique_repo_contribution_count > required_unique_repos - assert is_tier_unlocked(Tier.BRONZE, stats) is True - - -# ============================================================================ -# PRs Without Tier Configuration Tests -# ============================================================================ - - -class TestPRsWithoutTierConfig: - """ - Test behavior of PRs that have no tier configuration. - - These represent PRs from repositories not enrolled in gittensor. - They should be completely ignored in all tier calculations. - """ - - def test_merged_pr_without_tier_not_counted(self, pr_factory, bronze_config): - """ - Merged PRs without tier config are completely ignored. 
- """ - from gittensor.classes import PRState - - bronze_tier_config = TIERS[Tier.BRONZE] - - # Normal PRs that meet requirements - normal_prs = pr_factory.merged_batch( - bronze_config, count=bronze_tier_config.required_unique_repos_count, unique_repos=True - ) - - # PRs without tier config (should be ignored) - untracked_prs = [ - pr_factory.create_without_tier(state=PRState.MERGED, repo=f'untracked/repo-{i}') for i in range(10) - ] - - stats = calculate_tier_stats(normal_prs + untracked_prs, []) - - # Only normal PRs counted - assert stats[Tier.BRONZE].merged_count == bronze_tier_config.required_unique_repos_count - # Untracked repos don't add to unique count - assert stats[Tier.BRONZE].unique_repo_contribution_count == bronze_tier_config.required_unique_repos_count - - def test_closed_pr_without_tier_not_counted(self, pr_factory, bronze_config): - """ - Closed PRs without tier config don't affect credibility. - """ - from gittensor.classes import PRState - - bronze_tier_config = TIERS[Tier.BRONZE] - - # Normal merged PRs - merged = pr_factory.merged_batch( - bronze_config, count=bronze_tier_config.required_unique_repos_count, unique_repos=True - ) - - # Lots of closed PRs without tier config (should be ignored) - untracked_closed = [ - pr_factory.create_without_tier(state=PRState.CLOSED, repo=f'untracked/repo-{i}') for i in range(50) - ] - - stats = calculate_tier_stats(merged, untracked_closed) - - # No closed PRs counted - assert stats[Tier.BRONZE].closed_count == 0 - # 100% credibility - assert stats[Tier.BRONZE].credibility == 1.0 - # Tier unlocked - assert is_tier_unlocked(Tier.BRONZE, stats) is True - - def test_open_pr_without_tier_not_counted(self, pr_factory, bronze_config): - """ - Open PRs without tier config are ignored. 
- """ - from gittensor.classes import PRState - - bronze_tier_config = TIERS[Tier.BRONZE] - - merged = pr_factory.merged_batch( - bronze_config, count=bronze_tier_config.required_unique_repos_count, unique_repos=True - ) - - # Open PRs without tier config - untracked_open = [ - pr_factory.create_without_tier(state=PRState.OPEN, repo=f'untracked/repo-{i}') for i in range(10) - ] - - stats = calculate_tier_stats(merged, [], untracked_open) - - # No open PRs counted - assert stats[Tier.BRONZE].open_count == 0 - assert stats[Tier.SILVER].open_count == 0 - assert stats[Tier.GOLD].open_count == 0 - - -# ============================================================================ -# Open PRs and Unique Repos Tests -# ============================================================================ - - -class TestOpenPRsAndUniqueRepos: - """ - Test that open PRs don't affect unique repo calculations. - - Only merged PRs contribute to unique_repo_contribution_count. - """ - - def test_open_prs_dont_count_for_unique_repos(self, pr_factory, bronze_config): - """ - Open PRs should not count toward unique repo requirement. - """ - # Create merged PRs to 2 unique repos - merged = [ - pr_factory.merged(bronze_config, repo='owner/repo-1'), - pr_factory.merged(bronze_config, repo='owner/repo-2'), - ] - - # Create open PRs to 5 different unique repos - open_prs = [pr_factory.open(bronze_config, repo=f'owner/open-repo-{i}') for i in range(5)] - - stats = calculate_tier_stats(merged, [], open_prs) - - # Only merged repos counted for unique - assert stats[Tier.BRONZE].unique_repo_contribution_count == 2 - # Open PRs tracked separately - assert stats[Tier.BRONZE].open_count == 5 - # Tier locked due to insufficient unique repos (need 3) - assert is_tier_unlocked(Tier.BRONZE, stats) is False - - def test_open_prs_dont_affect_credibility(self, pr_factory, bronze_config): - """ - Open PRs don't affect credibility calculation (only merged and closed). 
- """ - # 3 merged PRs (unique repos) - merged = pr_factory.merged_batch(bronze_config, count=3, unique_repos=True) - - # 1 closed PR - closed = [pr_factory.closed(bronze_config)] - - # 100 open PRs (should not affect credibility) - open_prs = pr_factory.open_batch(bronze_config, count=100, unique_repos=True) - - stats = calculate_tier_stats(merged, closed, open_prs) - - # Credibility is 3 / (3 + 1) = 75% (open PRs ignored) - assert stats[Tier.BRONZE].credibility == pytest.approx(0.75, abs=0.01) - # Open PRs tracked - assert stats[Tier.BRONZE].open_count == 100 - - -# ============================================================================ -# Scoring Details Tests -# ============================================================================ - - -class TestScoringDetails: - """ - Test include_scoring_details=True behavior. - - When enabled, earned_score and collateral_score are accumulated. - """ - - def test_earned_score_accumulated_for_merged_prs(self, pr_factory, bronze_config): - """ - Earned scores from merged PRs are summed when include_scoring_details=True. - """ - # Create merged PRs with different earned scores - merged = [ - pr_factory.merged(bronze_config, repo='owner/repo-1', earned_score=100.0), - pr_factory.merged(bronze_config, repo='owner/repo-2', earned_score=150.0), - pr_factory.merged(bronze_config, repo='owner/repo-3', earned_score=75.0), - ] - - stats = calculate_tier_stats(merged, [], [], include_scoring_details=True) - - # Total earned score should be 100 + 150 + 75 = 325 - assert stats[Tier.BRONZE].earned_score == pytest.approx(325.0, abs=0.01) - - def test_earned_score_not_accumulated_without_flag(self, pr_factory, bronze_config): - """ - Earned scores are NOT accumulated when include_scoring_details=False (default). 
- """ - merged = [ - pr_factory.merged(bronze_config, repo='owner/repo-1', earned_score=100.0), - pr_factory.merged(bronze_config, repo='owner/repo-2', earned_score=150.0), - ] - - stats = calculate_tier_stats(merged, []) # Default: include_scoring_details=False - - # Earned score stays at default (0.0) - assert stats[Tier.BRONZE].earned_score == 0.0 - - def test_collateral_score_accumulated_for_open_prs(self, pr_factory, bronze_config): - """ - Collateral scores from open PRs are summed when include_scoring_details=True. - """ - merged = pr_factory.merged_batch(bronze_config, count=3, unique_repos=True) - - # Create open PRs with different collateral scores - open_prs = [ - pr_factory.open(bronze_config, repo='owner/open-1', collateral_score=20.0), - pr_factory.open(bronze_config, repo='owner/open-2', collateral_score=35.0), - pr_factory.open(bronze_config, repo='owner/open-3', collateral_score=15.0), - ] - - stats = calculate_tier_stats(merged, [], open_prs, include_scoring_details=True) - - # Total collateral score should be 20 + 35 + 15 = 70 - assert stats[Tier.BRONZE].collateral_score == pytest.approx(70.0, abs=0.01) - - -if __name__ == '__main__': - pytest.main([__file__, '-v']) diff --git a/uv.lock b/uv.lock index 7518fc90..9b88a53a 100644 --- a/uv.lock +++ b/uv.lock @@ -1,3 +1,2696 @@ version = 1 revision = 3 requires-python = ">=3.12" +resolution-markers = [ + "sys_platform == 'linux'", + "sys_platform != 'linux'", +] + +[[package]] +name = "aiohappyeyeballs" +version = "2.6.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/26/30/f84a107a9c4331c14b2b586036f40965c128aa4fee4dda5d3d51cb14ad54/aiohappyeyeballs-2.6.1.tar.gz", hash = "sha256:c3f9d0113123803ccadfdf3f0faa505bc78e6a72d1cc4806cbd719826e943558", size = 22760, upload-time = "2025-03-12T01:42:48.764Z" } +wheels = [ + { url = 
"https://files.pythonhosted.org/packages/0f/15/5bf3b99495fb160b63f95972b81750f18f7f4e02ad051373b669d17d44f2/aiohappyeyeballs-2.6.1-py3-none-any.whl", hash = "sha256:f349ba8f4b75cb25c99c5c2d84e997e485204d2902a9597802b0371f09331fb8", size = 15265, upload-time = "2025-03-12T01:42:47.083Z" }, +] + +[[package]] +name = "aiohttp" +version = "3.13.5" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "aiohappyeyeballs" }, + { name = "aiosignal" }, + { name = "attrs" }, + { name = "frozenlist" }, + { name = "multidict" }, + { name = "propcache" }, + { name = "yarl" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/77/9a/152096d4808df8e4268befa55fba462f440f14beab85e8ad9bf990516918/aiohttp-3.13.5.tar.gz", hash = "sha256:9d98cc980ecc96be6eb4c1994ce35d28d8b1f5e5208a23b421187d1209dbb7d1", size = 7858271, upload-time = "2026-03-31T22:01:03.343Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/be/6f/353954c29e7dcce7cf00280a02c75f30e133c00793c7a2ed3776d7b2f426/aiohttp-3.13.5-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:023ecba036ddd840b0b19bf195bfae970083fd7024ce1ac22e9bba90464620e9", size = 748876, upload-time = "2026-03-31T21:57:36.319Z" }, + { url = "https://files.pythonhosted.org/packages/f5/1b/428a7c64687b3b2e9cd293186695affc0e1e54a445d0361743b231f11066/aiohttp-3.13.5-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:15c933ad7920b7d9a20de151efcd05a6e38302cbf0e10c9b2acb9a42210a2416", size = 499557, upload-time = "2026-03-31T21:57:38.236Z" }, + { url = "https://files.pythonhosted.org/packages/29/47/7be41556bfbb6917069d6a6634bb7dd5e163ba445b783a90d40f5ac7e3a7/aiohttp-3.13.5-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:ab2899f9fa2f9f741896ebb6fa07c4c883bfa5c7f2ddd8cf2aafa86fa981b2d2", size = 500258, upload-time = "2026-03-31T21:57:39.923Z" }, + { url = 
"https://files.pythonhosted.org/packages/67/84/c9ecc5828cb0b3695856c07c0a6817a99d51e2473400f705275a2b3d9239/aiohttp-3.13.5-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:a60eaa2d440cd4707696b52e40ed3e2b0f73f65be07fd0ef23b6b539c9c0b0b4", size = 1749199, upload-time = "2026-03-31T21:57:41.938Z" }, + { url = "https://files.pythonhosted.org/packages/f0/d3/3c6d610e66b495657622edb6ae7c7fd31b2e9086b4ec50b47897ad6042a9/aiohttp-3.13.5-cp312-cp312-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:55b3bdd3292283295774ab585160c4004f4f2f203946997f49aac032c84649e9", size = 1721013, upload-time = "2026-03-31T21:57:43.904Z" }, + { url = "https://files.pythonhosted.org/packages/49/a0/24409c12217456df0bae7babe3b014e460b0b38a8e60753d6cb339f6556d/aiohttp-3.13.5-cp312-cp312-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:c2b2355dc094e5f7d45a7bb262fe7207aa0460b37a0d87027dcf21b5d890e7d5", size = 1781501, upload-time = "2026-03-31T21:57:46.285Z" }, + { url = "https://files.pythonhosted.org/packages/98/9d/b65ec649adc5bccc008b0957a9a9c691070aeac4e41cea18559fef49958b/aiohttp-3.13.5-cp312-cp312-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:b38765950832f7d728297689ad78f5f2cf79ff82487131c4d26fe6ceecdc5f8e", size = 1878981, upload-time = "2026-03-31T21:57:48.734Z" }, + { url = "https://files.pythonhosted.org/packages/57/d8/8d44036d7eb7b6a8ec4c5494ea0c8c8b94fbc0ed3991c1a7adf230df03bf/aiohttp-3.13.5-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:b18f31b80d5a33661e08c89e202edabf1986e9b49c42b4504371daeaa11b47c1", size = 1767934, upload-time = "2026-03-31T21:57:51.171Z" }, + { url = "https://files.pythonhosted.org/packages/31/04/d3f8211f273356f158e3464e9e45484d3fb8c4ce5eb2f6fe9405c3273983/aiohttp-3.13.5-cp312-cp312-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = 
"sha256:33add2463dde55c4f2d9635c6ab33ce154e5ecf322bd26d09af95c5f81cfa286", size = 1566671, upload-time = "2026-03-31T21:57:53.326Z" }, + { url = "https://files.pythonhosted.org/packages/41/db/073e4ebe00b78e2dfcacff734291651729a62953b48933d765dc513bf798/aiohttp-3.13.5-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:327cc432fdf1356fb4fbc6fe833ad4e9f6aacb71a8acaa5f1855e4b25910e4a9", size = 1705219, upload-time = "2026-03-31T21:57:55.385Z" }, + { url = "https://files.pythonhosted.org/packages/48/45/7dfba71a2f9fd97b15c95c06819de7eb38113d2cdb6319669195a7d64270/aiohttp-3.13.5-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:7c35b0bf0b48a70b4cb4fc5d7bed9b932532728e124874355de1a0af8ec4bc88", size = 1743049, upload-time = "2026-03-31T21:57:57.341Z" }, + { url = "https://files.pythonhosted.org/packages/18/71/901db0061e0f717d226386a7f471bb59b19566f2cae5f0d93874b017271f/aiohttp-3.13.5-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:df23d57718f24badef8656c49743e11a89fd6f5358fa8a7b96e728fda2abf7d3", size = 1749557, upload-time = "2026-03-31T21:57:59.626Z" }, + { url = "https://files.pythonhosted.org/packages/08/d5/41eebd16066e59cd43728fe74bce953d7402f2b4ddfdfef2c0e9f17ca274/aiohttp-3.13.5-cp312-cp312-musllinux_1_2_riscv64.whl", hash = "sha256:02e048037a6501a5ec1f6fc9736135aec6eb8a004ce48838cb951c515f32c80b", size = 1558931, upload-time = "2026-03-31T21:58:01.972Z" }, + { url = "https://files.pythonhosted.org/packages/30/e6/4a799798bf05740e66c3a1161079bda7a3dd8e22ca392481d7a7f9af82a6/aiohttp-3.13.5-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:31cebae8b26f8a615d2b546fee45d5ffb76852ae6450e2a03f42c9102260d6fe", size = 1774125, upload-time = "2026-03-31T21:58:04.007Z" }, + { url = "https://files.pythonhosted.org/packages/84/63/7749337c90f92bc2cb18f9560d67aa6258c7060d1397d21529b8004fcf6f/aiohttp-3.13.5-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:888e78eb5ca55a615d285c3c09a7a91b42e9dd6fc699b166ebd5dee87c9ccf14", size = 1732427, upload-time = 
"2026-03-31T21:58:06.337Z" }, + { url = "https://files.pythonhosted.org/packages/98/de/cf2f44ff98d307e72fb97d5f5bbae3bfcb442f0ea9790c0bf5c5c2331404/aiohttp-3.13.5-cp312-cp312-win32.whl", hash = "sha256:8bd3ec6376e68a41f9f95f5ed170e2fcf22d4eb27a1f8cb361d0508f6e0557f3", size = 433534, upload-time = "2026-03-31T21:58:08.712Z" }, + { url = "https://files.pythonhosted.org/packages/aa/ca/eadf6f9c8fa5e31d40993e3db153fb5ed0b11008ad5d9de98a95045bed84/aiohttp-3.13.5-cp312-cp312-win_amd64.whl", hash = "sha256:110e448e02c729bcebb18c60b9214a87ba33bac4a9fa5e9a5f139938b56c6cb1", size = 460446, upload-time = "2026-03-31T21:58:10.945Z" }, + { url = "https://files.pythonhosted.org/packages/78/e9/d76bf503005709e390122d34e15256b88f7008e246c4bdbe915cd4f1adce/aiohttp-3.13.5-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:a5029cc80718bbd545123cd8fe5d15025eccaaaace5d0eeec6bd556ad6163d61", size = 742930, upload-time = "2026-03-31T21:58:13.155Z" }, + { url = "https://files.pythonhosted.org/packages/57/00/4b7b70223deaebd9bb85984d01a764b0d7bd6526fcdc73cca83bcbe7243e/aiohttp-3.13.5-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:4bb6bf5811620003614076bdc807ef3b5e38244f9d25ca5fe888eaccea2a9832", size = 496927, upload-time = "2026-03-31T21:58:15.073Z" }, + { url = "https://files.pythonhosted.org/packages/9c/f5/0fb20fb49f8efdcdce6cd8127604ad2c503e754a8f139f5e02b01626523f/aiohttp-3.13.5-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:a84792f8631bf5a94e52d9cc881c0b824ab42717165a5579c760b830d9392ac9", size = 497141, upload-time = "2026-03-31T21:58:17.009Z" }, + { url = "https://files.pythonhosted.org/packages/3b/86/b7c870053e36a94e8951b803cb5b909bfbc9b90ca941527f5fcafbf6b0fa/aiohttp-3.13.5-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:57653eac22c6a4c13eb22ecf4d673d64a12f266e72785ab1c8b8e5940d0e8090", size = 1732476, upload-time = "2026-03-31T21:58:18.925Z" }, + { url = 
"https://files.pythonhosted.org/packages/b5/e5/4e161f84f98d80c03a238671b4136e6530453d65262867d989bbe78244d0/aiohttp-3.13.5-cp313-cp313-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:e5e5f7debc7a57af53fdf5c5009f9391d9f4c12867049d509bf7bb164a6e295b", size = 1706507, upload-time = "2026-03-31T21:58:21.094Z" }, + { url = "https://files.pythonhosted.org/packages/d4/56/ea11a9f01518bd5a2a2fcee869d248c4b8a0cfa0bb13401574fa31adf4d4/aiohttp-3.13.5-cp313-cp313-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:c719f65bebcdf6716f10e9eff80d27567f7892d8988c06de12bbbd39307c6e3a", size = 1773465, upload-time = "2026-03-31T21:58:23.159Z" }, + { url = "https://files.pythonhosted.org/packages/eb/40/333ca27fb74b0383f17c90570c748f7582501507307350a79d9f9f3c6eb1/aiohttp-3.13.5-cp313-cp313-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:d97f93fdae594d886c5a866636397e2bcab146fd7a132fd6bb9ce182224452f8", size = 1873523, upload-time = "2026-03-31T21:58:25.59Z" }, + { url = "https://files.pythonhosted.org/packages/f0/d2/e2f77eef1acb7111405433c707dc735e63f67a56e176e72e9e7a2cd3f493/aiohttp-3.13.5-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:3df334e39d4c2f899a914f1dba283c1aadc311790733f705182998c6f7cae665", size = 1754113, upload-time = "2026-03-31T21:58:27.624Z" }, + { url = "https://files.pythonhosted.org/packages/fb/56/3f653d7f53c89669301ec9e42c95233e2a0c0a6dd051269e6e678db4fdb0/aiohttp-3.13.5-cp313-cp313-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:fe6970addfea9e5e081401bcbadf865d2b6da045472f58af08427e108d618540", size = 1562351, upload-time = "2026-03-31T21:58:29.918Z" }, + { url = "https://files.pythonhosted.org/packages/ec/a6/9b3e91eb8ae791cce4ee736da02211c85c6f835f1bdfac0594a8a3b7018c/aiohttp-3.13.5-cp313-cp313-musllinux_1_2_aarch64.whl", hash = 
"sha256:7becdf835feff2f4f335d7477f121af787e3504b48b449ff737afb35869ba7bb", size = 1693205, upload-time = "2026-03-31T21:58:32.214Z" }, + { url = "https://files.pythonhosted.org/packages/98/fc/bfb437a99a2fcebd6b6eaec609571954de2ed424f01c352f4b5504371dd3/aiohttp-3.13.5-cp313-cp313-musllinux_1_2_armv7l.whl", hash = "sha256:676e5651705ad5d8a70aeb8eb6936c436d8ebbd56e63436cb7dd9bb36d2a9a46", size = 1730618, upload-time = "2026-03-31T21:58:34.728Z" }, + { url = "https://files.pythonhosted.org/packages/e4/b6/c8534862126191a034f68153194c389addc285a0f1347d85096d349bbc15/aiohttp-3.13.5-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:9b16c653d38eb1a611cc898c41e76859ca27f119d25b53c12875fd0474ae31a8", size = 1745185, upload-time = "2026-03-31T21:58:36.909Z" }, + { url = "https://files.pythonhosted.org/packages/0b/93/4ca8ee2ef5236e2707e0fd5fecb10ce214aee1ff4ab307af9c558bda3b37/aiohttp-3.13.5-cp313-cp313-musllinux_1_2_riscv64.whl", hash = "sha256:999802d5fa0389f58decd24b537c54aa63c01c3219ce17d1214cbda3c2b22d2d", size = 1557311, upload-time = "2026-03-31T21:58:39.38Z" }, + { url = "https://files.pythonhosted.org/packages/57/ae/76177b15f18c5f5d094f19901d284025db28eccc5ae374d1d254181d33f4/aiohttp-3.13.5-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:ec707059ee75732b1ba130ed5f9580fe10ff75180c812bc267ded039db5128c6", size = 1773147, upload-time = "2026-03-31T21:58:41.476Z" }, + { url = "https://files.pythonhosted.org/packages/01/a4/62f05a0a98d88af59d93b7fcac564e5f18f513cb7471696ac286db970d6a/aiohttp-3.13.5-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:2d6d44a5b48132053c2f6cd5c8cb14bc67e99a63594e336b0f2af81e94d5530c", size = 1730356, upload-time = "2026-03-31T21:58:44.049Z" }, + { url = "https://files.pythonhosted.org/packages/e4/85/fc8601f59dfa8c9523808281f2da571f8b4699685f9809a228adcc90838d/aiohttp-3.13.5-cp313-cp313-win32.whl", hash = "sha256:329f292ed14d38a6c4c435e465f48bebb47479fd676a0411936cc371643225cc", size = 432637, upload-time = "2026-03-31T21:58:46.167Z" 
}, + { url = "https://files.pythonhosted.org/packages/c0/1b/ac685a8882896acf0f6b31d689e3792199cfe7aba37969fa91da63a7fa27/aiohttp-3.13.5-cp313-cp313-win_amd64.whl", hash = "sha256:69f571de7500e0557801c0b51f4780482c0ec5fe2ac851af5a92cfce1af1cb83", size = 458896, upload-time = "2026-03-31T21:58:48.119Z" }, + { url = "https://files.pythonhosted.org/packages/5d/ce/46572759afc859e867a5bc8ec3487315869013f59281ce61764f76d879de/aiohttp-3.13.5-cp314-cp314-macosx_10_13_universal2.whl", hash = "sha256:eb4639f32fd4a9904ab8fb45bf3383ba71137f3d9d4ba25b3b3f3109977c5b8c", size = 745721, upload-time = "2026-03-31T21:58:50.229Z" }, + { url = "https://files.pythonhosted.org/packages/13/fe/8a2efd7626dbe6049b2ef8ace18ffda8a4dfcbe1bcff3ac30c0c7575c20b/aiohttp-3.13.5-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:7e5dc4311bd5ac493886c63cbf76ab579dbe4641268e7c74e48e774c74b6f2be", size = 497663, upload-time = "2026-03-31T21:58:52.232Z" }, + { url = "https://files.pythonhosted.org/packages/9b/91/cc8cc78a111826c54743d88651e1687008133c37e5ee615fee9b57990fac/aiohttp-3.13.5-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:756c3c304d394977519824449600adaf2be0ccee76d206ee339c5e76b70ded25", size = 499094, upload-time = "2026-03-31T21:58:54.566Z" }, + { url = "https://files.pythonhosted.org/packages/0a/33/a8362cb15cf16a3af7e86ed11962d5cd7d59b449202dc576cdc731310bde/aiohttp-3.13.5-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:ecc26751323224cf8186efcf7fbcbc30f4e1d8c7970659daf25ad995e4032a56", size = 1726701, upload-time = "2026-03-31T21:58:56.864Z" }, + { url = "https://files.pythonhosted.org/packages/45/0c/c091ac5c3a17114bd76cbf85d674650969ddf93387876cf67f754204bd77/aiohttp-3.13.5-cp314-cp314-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:10a75acfcf794edf9d8db50e5a7ec5fc818b2a8d3f591ce93bc7b1210df016d2", size = 1683360, upload-time = "2026-03-31T21:58:59.072Z" }, + { url = 
"https://files.pythonhosted.org/packages/23/73/bcee1c2b79bc275e964d1446c55c54441a461938e70267c86afaae6fba27/aiohttp-3.13.5-cp314-cp314-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:0f7a18f258d124cd678c5fe072fe4432a4d5232b0657fca7c1847f599233c83a", size = 1773023, upload-time = "2026-03-31T21:59:01.776Z" }, + { url = "https://files.pythonhosted.org/packages/c7/ef/720e639df03004fee2d869f771799d8c23046dec47d5b81e396c7cda583a/aiohttp-3.13.5-cp314-cp314-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:df6104c009713d3a89621096f3e3e88cc323fd269dbd7c20afe18535094320be", size = 1853795, upload-time = "2026-03-31T21:59:04.568Z" }, + { url = "https://files.pythonhosted.org/packages/bd/c9/989f4034fb46841208de7aeeac2c6d8300745ab4f28c42f629ba77c2d916/aiohttp-3.13.5-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:241a94f7de7c0c3b616627aaad530fe2cb620084a8b144d3be7b6ecfe95bae3b", size = 1730405, upload-time = "2026-03-31T21:59:07.221Z" }, + { url = "https://files.pythonhosted.org/packages/ce/75/ee1fd286ca7dc599d824b5651dad7b3be7ff8d9a7e7b3fe9820d9180f7db/aiohttp-3.13.5-cp314-cp314-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:c974fb66180e58709b6fc402846f13791240d180b74de81d23913abe48e96d94", size = 1558082, upload-time = "2026-03-31T21:59:09.484Z" }, + { url = "https://files.pythonhosted.org/packages/c3/20/1e9e6650dfc436340116b7aa89ff8cb2bbdf0abc11dfaceaad8f74273a10/aiohttp-3.13.5-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:6e27ea05d184afac78aabbac667450c75e54e35f62238d44463131bd3f96753d", size = 1692346, upload-time = "2026-03-31T21:59:12.068Z" }, + { url = "https://files.pythonhosted.org/packages/d8/40/8ebc6658d48ea630ac7903912fe0dd4e262f0e16825aa4c833c56c9f1f56/aiohttp-3.13.5-cp314-cp314-musllinux_1_2_armv7l.whl", hash = "sha256:a79a6d399cef33a11b6f004c67bb07741d91f2be01b8d712d52c75711b1e07c7", size = 1698891, upload-time 
= "2026-03-31T21:59:14.552Z" }, + { url = "https://files.pythonhosted.org/packages/d8/78/ea0ae5ec8ba7a5c10bdd6e318f1ba5e76fcde17db8275188772afc7917a4/aiohttp-3.13.5-cp314-cp314-musllinux_1_2_ppc64le.whl", hash = "sha256:c632ce9c0b534fbe25b52c974515ed674937c5b99f549a92127c85f771a78772", size = 1742113, upload-time = "2026-03-31T21:59:17.068Z" }, + { url = "https://files.pythonhosted.org/packages/8a/66/9d308ed71e3f2491be1acb8769d96c6f0c47d92099f3bc9119cada27b357/aiohttp-3.13.5-cp314-cp314-musllinux_1_2_riscv64.whl", hash = "sha256:fceedde51fbd67ee2bcc8c0b33d0126cc8b51ef3bbde2f86662bd6d5a6f10ec5", size = 1553088, upload-time = "2026-03-31T21:59:19.541Z" }, + { url = "https://files.pythonhosted.org/packages/da/a6/6cc25ed8dfc6e00c90f5c6d126a98e2cf28957ad06fa1036bd34b6f24a2c/aiohttp-3.13.5-cp314-cp314-musllinux_1_2_s390x.whl", hash = "sha256:f92995dfec9420bb69ae629abf422e516923ba79ba4403bc750d94fb4a6c68c1", size = 1757976, upload-time = "2026-03-31T21:59:22.311Z" }, + { url = "https://files.pythonhosted.org/packages/c1/2b/cce5b0ffe0de99c83e5e36d8f828e4161e415660a9f3e58339d07cce3006/aiohttp-3.13.5-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:20ae0ff08b1f2c8788d6fb85afcb798654ae6ba0b747575f8562de738078457b", size = 1712444, upload-time = "2026-03-31T21:59:24.635Z" }, + { url = "https://files.pythonhosted.org/packages/6c/cf/9e1795b4160c58d29421eafd1a69c6ce351e2f7c8d3c6b7e4ca44aea1a5b/aiohttp-3.13.5-cp314-cp314-win32.whl", hash = "sha256:b20df693de16f42b2472a9c485e1c948ee55524786a0a34345511afdd22246f3", size = 438128, upload-time = "2026-03-31T21:59:27.291Z" }, + { url = "https://files.pythonhosted.org/packages/22/4d/eaedff67fc805aeba4ba746aec891b4b24cebb1a7d078084b6300f79d063/aiohttp-3.13.5-cp314-cp314-win_amd64.whl", hash = "sha256:f85c6f327bf0b8c29da7d93b1cabb6363fb5e4e160a32fa241ed2dce21b73162", size = 464029, upload-time = "2026-03-31T21:59:29.429Z" }, + { url = 
"https://files.pythonhosted.org/packages/79/11/c27d9332ee20d68dd164dc12a6ecdef2e2e35ecc97ed6cf0d2442844624b/aiohttp-3.13.5-cp314-cp314t-macosx_10_13_universal2.whl", hash = "sha256:1efb06900858bb618ff5cee184ae2de5828896c448403d51fb633f09e109be0a", size = 778758, upload-time = "2026-03-31T21:59:31.547Z" }, + { url = "https://files.pythonhosted.org/packages/04/fb/377aead2e0a3ba5f09b7624f702a964bdf4f08b5b6728a9799830c80041e/aiohttp-3.13.5-cp314-cp314t-macosx_10_13_x86_64.whl", hash = "sha256:fee86b7c4bd29bdaf0d53d14739b08a106fdda809ca5fe032a15f52fae5fe254", size = 512883, upload-time = "2026-03-31T21:59:34.098Z" }, + { url = "https://files.pythonhosted.org/packages/bb/a6/aa109a33671f7a5d3bd78b46da9d852797c5e665bfda7d6b373f56bff2ec/aiohttp-3.13.5-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:20058e23909b9e65f9da62b396b77dfa95965cbe840f8def6e572538b1d32e36", size = 516668, upload-time = "2026-03-31T21:59:36.497Z" }, + { url = "https://files.pythonhosted.org/packages/79/b3/ca078f9f2fa9563c36fb8ef89053ea2bb146d6f792c5104574d49d8acb63/aiohttp-3.13.5-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:8cf20a8d6868cb15a73cab329ffc07291ba8c22b1b88176026106ae39aa6df0f", size = 1883461, upload-time = "2026-03-31T21:59:38.723Z" }, + { url = "https://files.pythonhosted.org/packages/b7/e3/a7ad633ca1ca497b852233a3cce6906a56c3225fb6d9217b5e5e60b7419d/aiohttp-3.13.5-cp314-cp314t-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:330f5da04c987f1d5bdb8ae189137c77139f36bd1cb23779ca1a354a4b027800", size = 1747661, upload-time = "2026-03-31T21:59:41.187Z" }, + { url = "https://files.pythonhosted.org/packages/33/b9/cd6fe579bed34a906d3d783fe60f2fa297ef55b27bb4538438ee49d4dc41/aiohttp-3.13.5-cp314-cp314t-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:6f1cbf0c7926d315c3c26c2da41fd2b5d2fe01ac0e157b78caefc51a782196cf", size = 1863800, upload-time = 
"2026-03-31T21:59:43.84Z" }, + { url = "https://files.pythonhosted.org/packages/c0/3f/2c1e2f5144cefa889c8afd5cf431994c32f3b29da9961698ff4e3811b79a/aiohttp-3.13.5-cp314-cp314t-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:53fc049ed6390d05423ba33103ded7281fe897cf97878f369a527070bd95795b", size = 1958382, upload-time = "2026-03-31T21:59:46.187Z" }, + { url = "https://files.pythonhosted.org/packages/66/1d/f31ec3f1013723b3babe3609e7f119c2c2fb6ef33da90061a705ef3e1bc8/aiohttp-3.13.5-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:898703aa2667e3c5ca4c54ca36cd73f58b7a38ef87a5606414799ebce4d3fd3a", size = 1803724, upload-time = "2026-03-31T21:59:48.656Z" }, + { url = "https://files.pythonhosted.org/packages/0e/b4/57712dfc6f1542f067daa81eb61da282fab3e6f1966fca25db06c4fc62d5/aiohttp-3.13.5-cp314-cp314t-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:0494a01ca9584eea1e5fbd6d748e61ecff218c51b576ee1999c23db7066417d8", size = 1640027, upload-time = "2026-03-31T21:59:51.284Z" }, + { url = "https://files.pythonhosted.org/packages/25/3c/734c878fb43ec083d8e31bf029daae1beafeae582d1b35da234739e82ee7/aiohttp-3.13.5-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:6cf81fe010b8c17b09495cbd15c1d35afbc8fb405c0c9cf4738e5ae3af1d65be", size = 1806644, upload-time = "2026-03-31T21:59:53.753Z" }, + { url = "https://files.pythonhosted.org/packages/20/a5/f671e5cbec1c21d044ff3078223f949748f3a7f86b14e34a365d74a5d21f/aiohttp-3.13.5-cp314-cp314t-musllinux_1_2_armv7l.whl", hash = "sha256:c564dd5f09ddc9d8f2c2d0a301cd30a79a2cc1b46dd1a73bef8f0038863d016b", size = 1791630, upload-time = "2026-03-31T21:59:56.239Z" }, + { url = "https://files.pythonhosted.org/packages/0b/63/fb8d0ad63a0b8a99be97deac8c04dacf0785721c158bdf23d679a87aa99e/aiohttp-3.13.5-cp314-cp314t-musllinux_1_2_ppc64le.whl", hash = "sha256:2994be9f6e51046c4f864598fd9abeb4fba6e88f0b2152422c9666dcd4aea9c6", size = 1809403, upload-time 
= "2026-03-31T21:59:59.103Z" }, + { url = "https://files.pythonhosted.org/packages/59/0c/bfed7f30662fcf12206481c2aac57dedee43fe1c49275e85b3a1e1742294/aiohttp-3.13.5-cp314-cp314t-musllinux_1_2_riscv64.whl", hash = "sha256:157826e2fa245d2ef46c83ea8a5faf77ca19355d278d425c29fda0beb3318037", size = 1634924, upload-time = "2026-03-31T22:00:02.116Z" }, + { url = "https://files.pythonhosted.org/packages/17/d6/fd518d668a09fd5a3319ae5e984d4d80b9a4b3df4e21c52f02251ef5a32e/aiohttp-3.13.5-cp314-cp314t-musllinux_1_2_s390x.whl", hash = "sha256:a8aca50daa9493e9e13c0f566201a9006f080e7c50e5e90d0b06f53146a54500", size = 1836119, upload-time = "2026-03-31T22:00:04.756Z" }, + { url = "https://files.pythonhosted.org/packages/78/b7/15fb7a9d52e112a25b621c67b69c167805cb1f2ab8f1708a5c490d1b52fe/aiohttp-3.13.5-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:3b13560160d07e047a93f23aaa30718606493036253d5430887514715b67c9d9", size = 1772072, upload-time = "2026-03-31T22:00:07.494Z" }, + { url = "https://files.pythonhosted.org/packages/7e/df/57ba7f0c4a553fc2bd8b6321df236870ec6fd64a2a473a8a13d4f733214e/aiohttp-3.13.5-cp314-cp314t-win32.whl", hash = "sha256:9a0f4474b6ea6818b41f82172d799e4b3d29e22c2c520ce4357856fced9af2f8", size = 471819, upload-time = "2026-03-31T22:00:10.277Z" }, + { url = "https://files.pythonhosted.org/packages/62/29/2f8418269e46454a26171bfdd6a055d74febf32234e474930f2f60a17145/aiohttp-3.13.5-cp314-cp314t-win_amd64.whl", hash = "sha256:18a2f6c1182c51baa1d28d68fea51513cb2a76612f038853c0ad3c145423d3d9", size = 505441, upload-time = "2026-03-31T22:00:12.791Z" }, +] + +[[package]] +name = "aiosignal" +version = "1.4.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "frozenlist" }, + { name = "typing-extensions", marker = "python_full_version < '3.13'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/61/62/06741b579156360248d1ec624842ad0edf697050bbaf7c3e46394e106ad1/aiosignal-1.4.0.tar.gz", hash = 
"sha256:f47eecd9468083c2029cc99945502cb7708b082c232f9aca65da147157b251c7", size = 25007, upload-time = "2025-07-03T22:54:43.528Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/fb/76/641ae371508676492379f16e2fa48f4e2c11741bd63c48be4b12a6b09cba/aiosignal-1.4.0-py3-none-any.whl", hash = "sha256:053243f8b92b990551949e63930a839ff0cf0b0ebbe0597b0f3fb19e1a0fe82e", size = 7490, upload-time = "2025-07-03T22:54:42.156Z" }, +] + +[[package]] +name = "aiosqlite" +version = "0.22.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/4e/8a/64761f4005f17809769d23e518d915db74e6310474e733e3593cfc854ef1/aiosqlite-0.22.1.tar.gz", hash = "sha256:043e0bd78d32888c0a9ca90fc788b38796843360c855a7262a532813133a0650", size = 14821, upload-time = "2025-12-23T19:25:43.997Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/00/b7/e3bf5133d697a08128598c8d0abc5e16377b51465a33756de24fa7dee953/aiosqlite-0.22.1-py3-none-any.whl", hash = "sha256:21c002eb13823fad740196c5a2e9d8e62f6243bd9e7e4a1f87fb5e44ecb4fceb", size = 17405, upload-time = "2025-12-23T19:25:42.139Z" }, +] + +[[package]] +name = "annotated-doc" +version = "0.0.4" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/57/ba/046ceea27344560984e26a590f90bc7f4a75b06701f653222458922b558c/annotated_doc-0.0.4.tar.gz", hash = "sha256:fbcda96e87e9c92ad167c2e53839e57503ecfda18804ea28102353485033faa4", size = 7288, upload-time = "2025-11-10T22:07:42.062Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/1e/d3/26bf1008eb3d2daa8ef4cacc7f3bfdc11818d111f7e2d0201bc6e3b49d45/annotated_doc-0.0.4-py3-none-any.whl", hash = "sha256:571ac1dc6991c450b25a9c2d84a3705e2ae7a53467b5d111c24fa8baabbed320", size = 5303, upload-time = "2025-11-10T22:07:40.673Z" }, +] + +[[package]] +name = "annotated-types" +version = "0.7.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = 
"https://files.pythonhosted.org/packages/ee/67/531ea369ba64dcff5ec9c3402f9f51bf748cec26dde048a2f973a4eea7f5/annotated_types-0.7.0.tar.gz", hash = "sha256:aff07c09a53a08bc8cfccb9c85b05f1aa9a2a6f23728d790723543408344ce89", size = 16081, upload-time = "2024-05-20T21:33:25.928Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/78/b6/6307fbef88d9b5ee7421e68d78a9f162e0da4900bc5f5793f6d3d0e34fb8/annotated_types-0.7.0-py3-none-any.whl", hash = "sha256:1f02e8b43a8fbbc3f3e0d4f0f4bfc8131bcb4eebe8849b8e5c773f3a1c582a53", size = 13643, upload-time = "2024-05-20T21:33:24.1Z" }, +] + +[[package]] +name = "anyio" +version = "4.13.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "idna" }, + { name = "typing-extensions", marker = "python_full_version < '3.13'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/19/14/2c5dd9f512b66549ae92767a9c7b330ae88e1932ca57876909410251fe13/anyio-4.13.0.tar.gz", hash = "sha256:334b70e641fd2221c1505b3890c69882fe4a2df910cba14d97019b90b24439dc", size = 231622, upload-time = "2026-03-24T12:59:09.671Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/da/42/e921fccf5015463e32a3cf6ee7f980a6ed0f395ceeaa45060b61d86486c2/anyio-4.13.0-py3-none-any.whl", hash = "sha256:08b310f9e24a9594186fd75b4f73f4a4152069e3853f1ed8bfbf58369f4ad708", size = 114353, upload-time = "2026-03-24T12:59:08.246Z" }, +] + +[[package]] +name = "async-substrate-interface" +version = "1.6.4" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "aiosqlite" }, + { name = "bt-decode" }, + { name = "scalecodec" }, + { name = "websockets" }, + { name = "wheel" }, + { name = "xxhash" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/bb/48/b53f7aa0f4e63ea8f33378e0d6bafb457de3b40b6dd4e426286159438af5/async_substrate_interface-1.6.4.tar.gz", hash = "sha256:982fd9c7102176d509a5bc31a1cbee0ba6c6dff7629328a94b08cad155520aad", size = 93715, upload-time = "2026-04-02T17:48:51.916Z" } 
+wheels = [ + { url = "https://files.pythonhosted.org/packages/de/f0/bc336a092bfbece8379d28571452d3a46936a87c75603cbdd2b09fe13be8/async_substrate_interface-1.6.4-py3-none-any.whl", hash = "sha256:7f127f5fc2a66cfd0b9bd232809f5af7ef36f545679d35b7fb71b476887dafd4", size = 97176, upload-time = "2026-04-02T17:48:50.287Z" }, +] + +[[package]] +name = "asyncstdlib" +version = "3.13.3" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/ce/87/11ce6ea0917205df34e9c05d85ff05b7405d3c9639b67118ed5d9daadbc3/asyncstdlib-3.13.3.tar.gz", hash = "sha256:17d2af4c43365cf684e0c640d9e6eaf893d08092f873d5c4ea54219eb5826348", size = 50854, upload-time = "2026-03-10T08:12:32.569Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/1f/d8/55d924a6391a8bfca251f59c06431888c3c8ef43a97af6112001506173a4/asyncstdlib-3.13.3-py3-none-any.whl", hash = "sha256:5aac5438e0c6a60e279667ba545ea011f4dca061e9e7517957488c4dfa8bcf0d", size = 44229, upload-time = "2026-03-10T08:12:31.306Z" }, +] + +[[package]] +name = "attrs" +version = "26.1.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/9a/8e/82a0fe20a541c03148528be8cac2408564a6c9a0cc7e9171802bc1d26985/attrs-26.1.0.tar.gz", hash = "sha256:d03ceb89cb322a8fd706d4fb91940737b6642aa36998fe130a9bc96c985eff32", size = 952055, upload-time = "2026-03-19T14:22:25.026Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/64/b4/17d4b0b2a2dc85a6df63d1157e028ed19f90d4cd97c36717afef2bc2f395/attrs-26.1.0-py3-none-any.whl", hash = "sha256:c647aa4a12dfbad9333ca4e71fe62ddc36f4e63b2d260a37a8b83d2f043ac309", size = 67548, upload-time = "2026-03-19T14:22:23.645Z" }, +] + +[[package]] +name = "backoff" +version = "2.2.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/47/d7/5bbeb12c44d7c4f2fb5b56abce497eb5ed9f34d85701de869acedd602619/backoff-2.2.1.tar.gz", hash = 
"sha256:03f829f5bb1923180821643f8753b0502c3b682293992485b0eef2807afa5cba", size = 17001, upload-time = "2022-10-05T19:19:32.061Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/df/73/b6e24bd22e6720ca8ee9a85a0c4a2971af8497d8f3193fa05390cbd46e09/backoff-2.2.1-py3-none-any.whl", hash = "sha256:63579f9a0628e06278f7e47b7d7d5b6ce20dc65c5e96a6f3ca99a6adca0396e8", size = 15148, upload-time = "2022-10-05T19:19:30.546Z" }, +] + +[[package]] +name = "base58" +version = "2.1.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/7f/45/8ae61209bb9015f516102fa559a2914178da1d5868428bd86a1b4421141d/base58-2.1.1.tar.gz", hash = "sha256:c5d0cb3f5b6e81e8e35da5754388ddcc6d0d14b6c6a132cb93d69ed580a7278c", size = 6528, upload-time = "2021-10-30T22:12:17.858Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/4a/45/ec96b29162a402fc4c1c5512d114d7b3787b9d1c2ec241d9568b4816ee23/base58-2.1.1-py3-none-any.whl", hash = "sha256:11a36f4d3ce51dfc1043f3218591ac4eb1ceb172919cebe05b52a5bcc8d245c2", size = 5621, upload-time = "2021-10-30T22:12:16.658Z" }, +] + +[[package]] +name = "bittensor" +version = "10.0.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "aiohttp" }, + { name = "async-substrate-interface" }, + { name = "asyncstdlib" }, + { name = "bittensor-drand" }, + { name = "bittensor-wallet" }, + { name = "colorama" }, + { name = "fastapi" }, + { name = "msgpack-numpy-opentensor" }, + { name = "munch" }, + { name = "netaddr" }, + { name = "numpy" }, + { name = "packaging" }, + { name = "pycryptodome" }, + { name = "pydantic" }, + { name = "python-statemachine" }, + { name = "pyyaml" }, + { name = "requests" }, + { name = "retry" }, + { name = "scalecodec" }, + { name = "setuptools" }, + { name = "uvicorn" }, + { name = "wheel" }, +] +sdist = { url = 
"https://files.pythonhosted.org/packages/3b/6d/cb6d019386fabcd28449889a290fe93f2781ebc637604917984ca1dc4c69/bittensor-10.0.1.tar.gz", hash = "sha256:900697ba9ccaeb8a22419560631132dbb3578bff0a9d8d1e19ae48d352d85328", size = 381830, upload-time = "2025-12-22T19:07:04.465Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/f0/8c/af29be209e01f3dbaf2517c943118764018157f4fe236f29a737f09537a0/bittensor-10.0.1-py3-none-any.whl", hash = "sha256:cb80262ff9ff43386ebb1a15ba0a17b94be8966121f852d7fe9bfebc83fad052", size = 452691, upload-time = "2025-12-22T19:07:02.804Z" }, +] + +[[package]] +name = "bittensor-cli" +version = "9.17.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "aiohttp" }, + { name = "async-substrate-interface" }, + { name = "backoff" }, + { name = "bittensor-drand" }, + { name = "bittensor-wallet" }, + { name = "gitpython" }, + { name = "jinja2" }, + { name = "netaddr" }, + { name = "numpy" }, + { name = "packaging" }, + { name = "plotille" }, + { name = "plotly" }, + { name = "pycryptodome" }, + { name = "pyyaml" }, + { name = "rich" }, + { name = "scalecodec" }, + { name = "typer" }, + { name = "wheel" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/d8/89/75a59ca4db8776ed12ed6d6cf63185f65bd256ed7caa23834157bce5aa9a/bittensor_cli-9.17.0.tar.gz", hash = "sha256:ca0525d4d0c720a9550705dd24e3fa7ac11bf83ed568932865d5bddf5fcecea4", size = 305888, upload-time = "2025-12-22T18:47:03.859Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/c0/2f/e46745ae93cd9de865d418722f299cc6d93855f77264346728df25d23ecf/bittensor_cli-9.17.0-py3-none-any.whl", hash = "sha256:aedb82033e32b61908a558514ae973047c926ca8dd63fdf446eadef549697147", size = 340114, upload-time = "2025-12-22T18:47:02.14Z" }, +] + +[[package]] +name = "bittensor-commit-reveal" +version = "0.4.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = 
"https://files.pythonhosted.org/packages/cd/57/e7d5458731413f7d1fa00be9ab4acfb9833c375efb67ac175b4c78254f67/bittensor_commit_reveal-0.4.0.tar.gz", hash = "sha256:583aa4311b2db9bed293a830f02762e187fba393ceeb0bb7306bb8fddfba1614", size = 38816, upload-time = "2025-04-14T23:06:11.639Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/8d/c9/2778a776389d610adacef11a6ddb1e16403604da9a0c5ada3ad0a8aaa3ef/bittensor_commit_reveal-0.4.0-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:8b2aaa3545932adcf944d5fe5da93f4b5fe98779a96d46b014b428d559d47d40", size = 1492585, upload-time = "2025-04-14T23:06:05.938Z" }, + { url = "https://files.pythonhosted.org/packages/9e/7c/fa51563504de64487567e81bc4ea657c7fd0bcafb7e0f7e62537595ed4eb/bittensor_commit_reveal-0.4.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:94640d6f2b9c634f74370a0f83002160382e0671cca60d31deb946f36d1831a8", size = 1453134, upload-time = "2025-04-14T23:05:55.888Z" }, + { url = "https://files.pythonhosted.org/packages/39/f3/fd40de0bb3b654c9f3fcfdc672ddeb7057771f7d95a92c8848f4228f4f6a/bittensor_commit_reveal-0.4.0-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0efed917c10bce3fa886535ba38db41e33e1776685db0f63788330dded83706a", size = 3797839, upload-time = "2025-04-14T23:05:31.053Z" }, + { url = "https://files.pythonhosted.org/packages/ab/68/b63fa04a41eabf0b83a20a38f05322dac2e66c6b176e426db5bebbdeb4b7/bittensor_commit_reveal-0.4.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5cc6cdaf4efd1c612ff2f1391ffbbdf4b0f1f23836fc1cad5fc1fbcc92b1810d", size = 3833882, upload-time = "2025-04-14T23:05:44.034Z" }, + { url = "https://files.pythonhosted.org/packages/34/fd/e2e7dd06065fc9bd38bf97b72b76667bfb2ddc8032756ef174db8bb8124f/bittensor_commit_reveal-0.4.0-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:eff8c112b56ac661e278374c1e3ba813658144f08be26ee7cbd7ca3eb58b5a5f", size = 1492341, upload-time = "2025-04-14T23:06:07.675Z" }, + { url = 
"https://files.pythonhosted.org/packages/61/7d/8f0046a80f7ada28c1547fb9404dca940f15089d815ccd293bb49a31fb4f/bittensor_commit_reveal-0.4.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:c000d2b46c710fd8b7a9760f937a8d0c63683ae253fd8f89f5643943ceea78ef", size = 1453092, upload-time = "2025-04-14T23:05:57.981Z" }, + { url = "https://files.pythonhosted.org/packages/71/7b/0bf879b63ef50cfef7b45b4589f3f52a5e63a3090fa2d1d2ca7563a78112/bittensor_commit_reveal-0.4.0-cp313-cp313-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:672630ac1e2837af3315cbdf1c0b161162df69b69a567603e81a2839816f133c", size = 3797427, upload-time = "2025-04-14T23:05:33.22Z" }, + { url = "https://files.pythonhosted.org/packages/4d/45/f1c14d88a2fbc722128b4ddb20e78af64d3a5d173dc67cefc77eb4e66bdb/bittensor_commit_reveal-0.4.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:275543f6d0e98d36a8f60eb8a8a76a97746f6c0bf83acf4f5a6cf3624b45a0bf", size = 3833454, upload-time = "2025-04-14T23:05:46.368Z" }, +] + +[[package]] +name = "bittensor-drand" +version = "1.3.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/36/13/36a587abc84cfa5a855879e247c3a763fe05cae02ff007f71f895ec933e2/bittensor_drand-1.3.0.tar.gz", hash = "sha256:ec3694c2226d66e2637168c8b31082d5cbbf991e350c254e340e1eb0255142fd", size = 52052, upload-time = "2026-02-19T20:54:55.05Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/b0/f8/2bcfb2aecdd98e9bfc7d2f2e2fef4f340d71779645f4ab39206a85d2b009/bittensor_drand-1.3.0-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:e573ad16ebe12c218f5ad7d00a1919fa3602b3527e6bd2cd419255e374584abf", size = 1988663, upload-time = "2026-02-19T20:54:51.173Z" }, + { url = "https://files.pythonhosted.org/packages/19/40/6569a37da607a63519ea19f020034ecab3a3d3631389e829c6ccc9e98178/bittensor_drand-1.3.0-cp312-cp312-macosx_11_0_arm64.whl", hash = 
"sha256:b2e8351e53e20b299b6c03c26ea82be5e0480e5fe043b4c29fd64fba233c46be", size = 1912005, upload-time = "2026-02-19T20:54:44.515Z" }, + { url = "https://files.pythonhosted.org/packages/4b/24/46030b9ec766eee279f5eb95050ec91b212f2ab8469b26b17f654657ecf1/bittensor_drand-1.3.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7f4ba5e553248c2fbc61b6c240260e7dd75b8a655006a30307e07a4038526e07", size = 2146672, upload-time = "2026-02-19T20:54:14.512Z" }, + { url = "https://files.pythonhosted.org/packages/34/1d/4582fb3b27c4689408c2209d4a69c910e64634438104c45ae9060f4ea2e2/bittensor_drand-1.3.0-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:69f7d246c6bb85089b6829ce08836de00856ff3954290dcd35cb238b06a610f5", size = 2244072, upload-time = "2026-02-19T20:54:25.142Z" }, + { url = "https://files.pythonhosted.org/packages/0d/23/bb35315766d82d063a57ce9123b9ed778630571dd2fb11ffb540de45784c/bittensor_drand-1.3.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dd32683bf035f6122782fe77d7ed6c7c99319f33252248feff7b466f468962fd", size = 2160988, upload-time = "2026-02-19T20:54:33.734Z" }, + { url = "https://files.pythonhosted.org/packages/c1/da/78033f58af1df4669b7537434f34f462bd09822e7f67d9b5a0bbc1dbbd7b/bittensor_drand-1.3.0-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:db664c3d4923e66df5cfab4469e21a923ce5402df7bc09b1b1492fc05539b6ac", size = 1988394, upload-time = "2026-02-19T20:54:52.688Z" }, + { url = "https://files.pythonhosted.org/packages/8e/e6/f0f1b4b0ccc071b674ce8f99ff087e9a8bedd491fd07f7a0bd86c8632395/bittensor_drand-1.3.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:45330eca12ff79be137b7cae75cd2647e8accdd8215417bf6b29419575b31b3b", size = 1912081, upload-time = "2026-02-19T20:54:46.258Z" }, + { url = 
"https://files.pythonhosted.org/packages/bd/3f/15f4e1dec69f8279a7f11b13093f1bc4272ab84d9ec1bb587b7f639e4d0a/bittensor_drand-1.3.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:166b9d8f5139006368d4f31692e92c08689e88bc5e8a56b5ca408324e48c69fb", size = 2145656, upload-time = "2026-02-19T20:54:16.447Z" }, + { url = "https://files.pythonhosted.org/packages/7d/51/f17a345024313b871be74db17d2d8cba6f6fdbb7347d1edb4cb8fa092db7/bittensor_drand-1.3.0-cp313-cp313-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:eb8ad08bc1123addbe5e9ac4238829f779b61117cc5a27b16a08d4fdc5660376", size = 2243517, upload-time = "2026-02-19T20:54:27.005Z" }, + { url = "https://files.pythonhosted.org/packages/68/3f/8bbd8a1268fdfdf01da335e177ea47b8eaa10f909941fe429b8f093d03e0/bittensor_drand-1.3.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f239c8b7be222cfbc752050fe609d5653a913f3a8b62484bd7b3da616c61ba00", size = 2160560, upload-time = "2026-02-19T20:54:35.516Z" }, + { url = "https://files.pythonhosted.org/packages/f6/3c/be9a7159e400e175d2bc5657579edeef1620bc5311a126930566f9a2613c/bittensor_drand-1.3.0-cp314-cp314-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:11a1dace30891e1cffe39533a36630e696b9b8c6f67d2d1c03f5f434e259ec9c", size = 2149890, upload-time = "2026-02-19T20:54:19.004Z" }, + { url = "https://files.pythonhosted.org/packages/bc/b5/0e99beea96403881895ccc313ece930d4453bcb8a56f82c5b50067a90413/bittensor_drand-1.3.0-cp314-cp314-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:068595a2cc1ac1bc192da55e369eebf12e6796561128e9848449df9e936d41cd", size = 2243052, upload-time = "2026-02-19T20:54:29.184Z" }, + { url = "https://files.pythonhosted.org/packages/92/84/e1914df2f0d909a60b779538bf16e21f924aa8db2b536de143dff8659f42/bittensor_drand-1.3.0-cp314-cp314-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:98e9aaece037c42688953f74f8d967e5b0f2aab6f32a2f661aee5ee899807b87", size = 2160452, 
upload-time = "2026-02-19T20:54:38.848Z" }, +] + +[[package]] +name = "bittensor-wallet" +version = "4.0.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/02/94/d1473ad79490b16ccb3cd9f13c48f4db72907d0287636a40c038683f7afc/bittensor_wallet-4.0.0.tar.gz", hash = "sha256:9f990eac74b08897f9fc9a8af460d566b5cfb6e542f0313e4896a809449beb47", size = 82800, upload-time = "2025-08-06T20:11:07.446Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/a5/e3/3bff8a61a00fb567521296dd5e138238622d0e07bb9ab6c777132e0d9381/bittensor_wallet-4.0.0-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:5ff0b15c41604b1adb035cbe8fcdfd8b840f60355817a1c4dd8ec257f9b63b33", size = 827433, upload-time = "2025-08-06T20:11:02.565Z" }, + { url = "https://files.pythonhosted.org/packages/50/f1/5496aba00b3322d76f4e8b71d9cbafcf704d01f3f82a3c8f53a164446b3a/bittensor_wallet-4.0.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:5ba1953a05b4ae220f84ddaf885b4dc8c800db33d7ac70b77d3b60b951009a08", size = 775007, upload-time = "2025-08-06T20:10:53.929Z" }, + { url = "https://files.pythonhosted.org/packages/77/19/c575bf7d541d581f2761294e2f48cd96096ce2a22f81f7d70f7985a4086d/bittensor_wallet-4.0.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5ead2bc9423f908d94dda6deb7f9d8f18c9be057f999952274b7158214ff0790", size = 2758947, upload-time = "2025-08-06T20:10:20.643Z" }, + { url = "https://files.pythonhosted.org/packages/cd/4e/841553284d19e9f9456d8339b78f11132cd91c656c0d07506722ade1ef8c/bittensor_wallet-4.0.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c1f53313b653eb0b67bf606a4f9e24ece503ba32025628b568da89ba7d7c4e82", size = 3171671, upload-time = "2025-08-06T20:10:41.239Z" }, + { url = "https://files.pythonhosted.org/packages/79/73/e88207b7ad2f619d1888a529ba53ac3edf48313c8ac9f62706336630ce62/bittensor_wallet-4.0.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = 
"sha256:3c6b8d58f3b3c8c0b01ff8fd6260fad6acfa74506e4a38ce2df8f7ecd33f4d73", size = 2973485, upload-time = "2025-08-06T20:10:30.723Z" }, + { url = "https://files.pythonhosted.org/packages/55/e2/6ddb2c5459e87fa1ede13153732cf3ff84eec45b469a2c0720baf5a5310a/bittensor_wallet-4.0.0-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:cb0098d85056d7e7786ae7ccfdf85c5d278835013fc8a7d68dfd2c632558a4bb", size = 827767, upload-time = "2025-08-06T20:11:04.149Z" }, + { url = "https://files.pythonhosted.org/packages/8e/6c/b2139d92defeca3b89b9cf857883058329df11913742e381f3698859cc84/bittensor_wallet-4.0.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:30ea5e57bae98a186aec7cb87dcf0b04f0b3d25eef6c528c7dc4afde34048717", size = 774865, upload-time = "2025-08-06T20:10:55.847Z" }, + { url = "https://files.pythonhosted.org/packages/f3/d8/55873e20289af14b66a7c923d2d73cc8d550bd58badab7d7ca16a2b4dbc2/bittensor_wallet-4.0.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:406fb7c64d9c43ac07ee8c04ad9412c8678610522976fe7f2069c87d56e17055", size = 2758477, upload-time = "2025-08-06T20:10:22.985Z" }, + { url = "https://files.pythonhosted.org/packages/7f/04/9973d78726ff544c71421ba198503c19e2521516ebf6d7e134679796211f/bittensor_wallet-4.0.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f2708ef7d4e5be030829d3a55d7a96bd0b98daace82f2b05f8ae45bdb2a66c3c", size = 3171597, upload-time = "2025-08-06T20:10:45.702Z" }, + { url = "https://files.pythonhosted.org/packages/c2/94/55bdfadb99080dd631c0eba45e485b96a2dbf77453d74bcdbd6c34a6b0da/bittensor_wallet-4.0.0-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:fb493b2534abcab87c30488bdec5e0dfd02af6c12741d2c6ddcb765ce9321dbb", size = 2972794, upload-time = "2025-08-06T20:10:32.594Z" }, +] + +[[package]] +name = "bt-decode" +version = "0.8.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "toml" }, +] +sdist = { url = 
"https://files.pythonhosted.org/packages/9d/d6/f30b65454ff3f78b698ec9e0b18fcd22299b43c5581f1e913f77657761db/bt_decode-0.8.0.tar.gz", hash = "sha256:deb6b798bea703c9b9e40267f6cddcfb45f7f4c884bbb3d2280143b18095eb09", size = 1200411, upload-time = "2025-10-28T21:07:11.869Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/38/3d/53c6ff30b5cc63d269aaba8a68eae9c06f71b92affadde1d93446e8c155f/bt_decode-0.8.0-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:4ca5c01d2b1d3edfe2430f45b9e13c5f0ac78af7047d3b702d0bcf6307348a93", size = 596467, upload-time = "2025-10-28T21:07:07.199Z" }, + { url = "https://files.pythonhosted.org/packages/78/cd/186857054f12796f13b614921599750979c59636f34afa186ff76c257106/bt_decode-0.8.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:30e2c68dbcc69da901c3bc3a20ece66c0e90867fcb54ff46a90b506d92a81143", size = 579138, upload-time = "2025-10-28T21:06:59.367Z" }, + { url = "https://files.pythonhosted.org/packages/68/ac/f4df2de63c5f90bea084ddbcff02c0a7f8ea8018cbd952e5368c8170c39f/bt_decode-0.8.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ad5c36325d0ad7e597b0f3f4e28ea7a49d5587123224d1c07c7e705d366563df", size = 638952, upload-time = "2025-10-28T21:06:36.583Z" }, + { url = "https://files.pythonhosted.org/packages/38/98/65e2ed447369a6a5f2597dbec79b0fcb7e2516c4b053d49f12894cfec557/bt_decode-0.8.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dfcff566afd5083ca6091ece4aef00728de25871d8e5499fa03669e15cf0625a", size = 648694, upload-time = "2025-10-28T21:06:52.463Z" }, + { url = "https://files.pythonhosted.org/packages/e2/bf/7b9e6feb4c282f6af29e6932926da237a09353c7424802e9e67059d5b717/bt_decode-0.8.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:46519b293d1338660b0d12c5bf0cc6442204d0e3129f16d450bff66de55b4a70", size = 714298, upload-time = "2025-10-28T21:06:44.819Z" }, + { url = 
"https://files.pythonhosted.org/packages/e8/78/74de03c3f964234e8fda67b98f8bd928be3bb8179d51a6be5b3f730140ec/bt_decode-0.8.0-cp312-cp312-win32.whl", hash = "sha256:202a28a42bd972c701850a8bbbb197fcf370ea11c85a265319036503c8584425", size = 420339, upload-time = "2025-10-28T21:07:21.599Z" }, + { url = "https://files.pythonhosted.org/packages/e5/2f/49865c7a45e20f0b71f7c80c57354e883eccb7daa711b4c0d100b6621c3b/bt_decode-0.8.0-cp312-cp312-win_amd64.whl", hash = "sha256:fd7201a9ddd4c44d27023f4aa9174f4a7a1ea94fed310294020d2638e8976b86", size = 439667, upload-time = "2025-10-28T21:07:15.126Z" }, + { url = "https://files.pythonhosted.org/packages/f2/01/b6eab67d288f52b0c732194db85e8787bb2994690f0f0d1744cf873e12ec/bt_decode-0.8.0-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:13fbbfa4ebe60df27bc4b4bb32de8969182d24239b56a2cf56b0a933e88b2529", size = 596597, upload-time = "2025-10-28T21:07:08.228Z" }, + { url = "https://files.pythonhosted.org/packages/6c/fd/938ace0d01136ca4bc800b746a9a8ec58b908f1db20fa0233b6095362e92/bt_decode-0.8.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:8f7bb0b887531a560a71c761bbc8032b4bc44e1d456ac1ae693daed78d40c4de", size = 579396, upload-time = "2025-10-28T21:07:00.352Z" }, + { url = "https://files.pythonhosted.org/packages/43/78/7cfa3eb15ab5174e8c929519e4d6b139a903d3ba9c5e24cf3ec8b11d7160/bt_decode-0.8.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:44b6670e6c7f3278dd7a7df237feec632849c40b525cb16d68468de04d88a332", size = 638695, upload-time = "2025-10-28T21:06:37.966Z" }, + { url = "https://files.pythonhosted.org/packages/78/1f/199195c6589142dfa317f4c112525ed32de251f47792f6eb27166c30fb89/bt_decode-0.8.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9473338d99c339d84175f957177b49976b67aa1e50fa67ffe652e7fff4d3621e", size = 648005, upload-time = "2025-10-28T21:06:53.452Z" }, + { url = 
"https://files.pythonhosted.org/packages/07/39/13140ea0f97acc1c4e7eadd0eeeac5eb2a92c53e39bd345f1e4fafd5c2f7/bt_decode-0.8.0-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:0fb6709709faf753110c19b22f44a6ffe64e95de2c435cec0f41f3fa54b81a4f", size = 714488, upload-time = "2025-10-28T21:06:46.015Z" }, + { url = "https://files.pythonhosted.org/packages/50/9d/a0993816e12cba61a86008a4926d77693dabac84d86040687c8130587aa3/bt_decode-0.8.0-cp313-cp313-win32.whl", hash = "sha256:dbddd1d2e393467d01d708454944733030b449cfc0d40ef6ac5a3b726ea2bffe", size = 420244, upload-time = "2025-10-28T21:07:22.642Z" }, + { url = "https://files.pythonhosted.org/packages/88/da/9c36a3ba0afe61874a525ca922fda952cde4975a2eda4a9234ce925734c7/bt_decode-0.8.0-cp313-cp313-win_amd64.whl", hash = "sha256:14a1a57eae0ad31c4e9ac0b1c862484225577d361082d0ae0a6054a7fca0f4cb", size = 439647, upload-time = "2025-10-28T21:07:16.137Z" }, + { url = "https://files.pythonhosted.org/packages/80/39/64ce41ba66b1a9225277c2ac8b7bc71ff6a81b80d9452b88133a078654c0/bt_decode-0.8.0-cp314-cp314-macosx_10_12_x86_64.whl", hash = "sha256:3d502724d3d2bf411607062eec1dbad13e2eccb9aac102c79d604e5645e8881d", size = 597200, upload-time = "2025-10-28T21:07:09.362Z" }, + { url = "https://files.pythonhosted.org/packages/43/b6/4cae000fa7823eec7998f980eba8aa1a4f5bdec5063b0fa17e2c26e6a66d/bt_decode-0.8.0-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:3284eb05f0d7727af482010192015acfc96b3fde7c4fffe3ddc1ec4d3b8f1c42", size = 579730, upload-time = "2025-10-28T21:07:01.32Z" }, + { url = "https://files.pythonhosted.org/packages/1e/85/c7e20828128bbb7163069cbf7eb4b577399aabc06dc83086107e5b5a601c/bt_decode-0.8.0-cp314-cp314-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2c0101661f87a17e4202fe1ab0923909bf6481c2c7d5dbcbc6ec6f6dc44c68a5", size = 639186, upload-time = "2025-10-28T21:06:39.551Z" }, + { url = 
"https://files.pythonhosted.org/packages/ee/4d/6c62a3e4f96703afcfc3a236bff91d18ba95dc235e33bae6f24de60844ef/bt_decode-0.8.0-cp314-cp314-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e789607cf3f48ea4832b7edf288782416a36274a525b306442e1c1ce9a7ac872", size = 645591, upload-time = "2025-10-28T21:06:54.497Z" }, + { url = "https://files.pythonhosted.org/packages/ca/40/86f6a3affcd115cd551c6224a9f560ce4208b5ca9dd4a459d531e191a429/bt_decode-0.8.0-cp314-cp314-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:d76c4736ec3940bdaf20ef2b511a45f6bd6b564b20496a91a3b61ade9435f73a", size = 714809, upload-time = "2025-10-28T21:06:47.342Z" }, + { url = "https://files.pythonhosted.org/packages/57/a1/34359b713dc5b0aa5d8211ff6e8b18d61ef34f5c66bd0da697376dbc73e5/bt_decode-0.8.0-cp314-cp314-win32.whl", hash = "sha256:8dd101c5e00e521e3448cef375aa242515768aae641bb3711b490baa40d7b2f1", size = 420805, upload-time = "2025-10-28T21:07:23.735Z" }, + { url = "https://files.pythonhosted.org/packages/68/86/60a4cccfbe05f42863eef8920875de5b35e802f0710a60578fce835db9bc/bt_decode-0.8.0-cp314-cp314-win_amd64.whl", hash = "sha256:77721d8e494dd4510e13c2afde1b75a36ed44b42cf502efc125aa853ed84b293", size = 439857, upload-time = "2025-10-28T21:07:17.526Z" }, +] + +[[package]] +name = "certifi" +version = "2026.2.25" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/af/2d/7bf41579a8986e348fa033a31cdd0e4121114f6bce2457e8876010b092dd/certifi-2026.2.25.tar.gz", hash = "sha256:e887ab5cee78ea814d3472169153c2d12cd43b14bd03329a39a9c6e2e80bfba7", size = 155029, upload-time = "2026-02-25T02:54:17.342Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/9a/3c/c17fb3ca2d9c3acff52e30b309f538586f9f5b9c9cf454f3845fc9af4881/certifi-2026.2.25-py3-none-any.whl", hash = "sha256:027692e4402ad994f1c42e52a4997a9763c646b73e4096e4d5d6db8af1d6f0fa", size = 153684, upload-time = "2026-02-25T02:54:15.766Z" }, +] + +[[package]] +name = "cffi" 
+version = "2.0.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "pycparser", marker = "implementation_name != 'PyPy'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/eb/56/b1ba7935a17738ae8453301356628e8147c79dbb825bcbc73dc7401f9846/cffi-2.0.0.tar.gz", hash = "sha256:44d1b5909021139fe36001ae048dbdde8214afa20200eda0f64c068cac5d5529", size = 523588, upload-time = "2025-09-08T23:24:04.541Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/ea/47/4f61023ea636104d4f16ab488e268b93008c3d0bb76893b1b31db1f96802/cffi-2.0.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:6d02d6655b0e54f54c4ef0b94eb6be0607b70853c45ce98bd278dc7de718be5d", size = 185271, upload-time = "2025-09-08T23:22:44.795Z" }, + { url = "https://files.pythonhosted.org/packages/df/a2/781b623f57358e360d62cdd7a8c681f074a71d445418a776eef0aadb4ab4/cffi-2.0.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:8eca2a813c1cb7ad4fb74d368c2ffbbb4789d377ee5bb8df98373c2cc0dee76c", size = 181048, upload-time = "2025-09-08T23:22:45.938Z" }, + { url = "https://files.pythonhosted.org/packages/ff/df/a4f0fbd47331ceeba3d37c2e51e9dfc9722498becbeec2bd8bc856c9538a/cffi-2.0.0-cp312-cp312-manylinux1_i686.manylinux2014_i686.manylinux_2_17_i686.manylinux_2_5_i686.whl", hash = "sha256:21d1152871b019407d8ac3985f6775c079416c282e431a4da6afe7aefd2bccbe", size = 212529, upload-time = "2025-09-08T23:22:47.349Z" }, + { url = "https://files.pythonhosted.org/packages/d5/72/12b5f8d3865bf0f87cf1404d8c374e7487dcf097a1c91c436e72e6badd83/cffi-2.0.0-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:b21e08af67b8a103c71a250401c78d5e0893beff75e28c53c98f4de42f774062", size = 220097, upload-time = "2025-09-08T23:22:48.677Z" }, + { url = "https://files.pythonhosted.org/packages/c2/95/7a135d52a50dfa7c882ab0ac17e8dc11cec9d55d2c18dda414c051c5e69e/cffi-2.0.0-cp312-cp312-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = 
"sha256:1e3a615586f05fc4065a8b22b8152f0c1b00cdbc60596d187c2a74f9e3036e4e", size = 207983, upload-time = "2025-09-08T23:22:50.06Z" }, + { url = "https://files.pythonhosted.org/packages/3a/c8/15cb9ada8895957ea171c62dc78ff3e99159ee7adb13c0123c001a2546c1/cffi-2.0.0-cp312-cp312-manylinux2014_s390x.manylinux_2_17_s390x.whl", hash = "sha256:81afed14892743bbe14dacb9e36d9e0e504cd204e0b165062c488942b9718037", size = 206519, upload-time = "2025-09-08T23:22:51.364Z" }, + { url = "https://files.pythonhosted.org/packages/78/2d/7fa73dfa841b5ac06c7b8855cfc18622132e365f5b81d02230333ff26e9e/cffi-2.0.0-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:3e17ed538242334bf70832644a32a7aae3d83b57567f9fd60a26257e992b79ba", size = 219572, upload-time = "2025-09-08T23:22:52.902Z" }, + { url = "https://files.pythonhosted.org/packages/07/e0/267e57e387b4ca276b90f0434ff88b2c2241ad72b16d31836adddfd6031b/cffi-2.0.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:3925dd22fa2b7699ed2617149842d2e6adde22b262fcbfada50e3d195e4b3a94", size = 222963, upload-time = "2025-09-08T23:22:54.518Z" }, + { url = "https://files.pythonhosted.org/packages/b6/75/1f2747525e06f53efbd878f4d03bac5b859cbc11c633d0fb81432d98a795/cffi-2.0.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:2c8f814d84194c9ea681642fd164267891702542f028a15fc97d4674b6206187", size = 221361, upload-time = "2025-09-08T23:22:55.867Z" }, + { url = "https://files.pythonhosted.org/packages/7b/2b/2b6435f76bfeb6bbf055596976da087377ede68df465419d192acf00c437/cffi-2.0.0-cp312-cp312-win32.whl", hash = "sha256:da902562c3e9c550df360bfa53c035b2f241fed6d9aef119048073680ace4a18", size = 172932, upload-time = "2025-09-08T23:22:57.188Z" }, + { url = "https://files.pythonhosted.org/packages/f8/ed/13bd4418627013bec4ed6e54283b1959cf6db888048c7cf4b4c3b5b36002/cffi-2.0.0-cp312-cp312-win_amd64.whl", hash = "sha256:da68248800ad6320861f129cd9c1bf96ca849a2771a59e0344e88681905916f5", size = 183557, upload-time = 
"2025-09-08T23:22:58.351Z" }, + { url = "https://files.pythonhosted.org/packages/95/31/9f7f93ad2f8eff1dbc1c3656d7ca5bfd8fb52c9d786b4dcf19b2d02217fa/cffi-2.0.0-cp312-cp312-win_arm64.whl", hash = "sha256:4671d9dd5ec934cb9a73e7ee9676f9362aba54f7f34910956b84d727b0d73fb6", size = 177762, upload-time = "2025-09-08T23:22:59.668Z" }, + { url = "https://files.pythonhosted.org/packages/4b/8d/a0a47a0c9e413a658623d014e91e74a50cdd2c423f7ccfd44086ef767f90/cffi-2.0.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:00bdf7acc5f795150faa6957054fbbca2439db2f775ce831222b66f192f03beb", size = 185230, upload-time = "2025-09-08T23:23:00.879Z" }, + { url = "https://files.pythonhosted.org/packages/4a/d2/a6c0296814556c68ee32009d9c2ad4f85f2707cdecfd7727951ec228005d/cffi-2.0.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:45d5e886156860dc35862657e1494b9bae8dfa63bf56796f2fb56e1679fc0bca", size = 181043, upload-time = "2025-09-08T23:23:02.231Z" }, + { url = "https://files.pythonhosted.org/packages/b0/1e/d22cc63332bd59b06481ceaac49d6c507598642e2230f201649058a7e704/cffi-2.0.0-cp313-cp313-manylinux1_i686.manylinux2014_i686.manylinux_2_17_i686.manylinux_2_5_i686.whl", hash = "sha256:07b271772c100085dd28b74fa0cd81c8fb1a3ba18b21e03d7c27f3436a10606b", size = 212446, upload-time = "2025-09-08T23:23:03.472Z" }, + { url = "https://files.pythonhosted.org/packages/a9/f5/a2c23eb03b61a0b8747f211eb716446c826ad66818ddc7810cc2cc19b3f2/cffi-2.0.0-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:d48a880098c96020b02d5a1f7d9251308510ce8858940e6fa99ece33f610838b", size = 220101, upload-time = "2025-09-08T23:23:04.792Z" }, + { url = "https://files.pythonhosted.org/packages/f2/7f/e6647792fc5850d634695bc0e6ab4111ae88e89981d35ac269956605feba/cffi-2.0.0-cp313-cp313-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:f93fd8e5c8c0a4aa1f424d6173f14a892044054871c771f8566e4008eaa359d2", size = 207948, upload-time = "2025-09-08T23:23:06.127Z" }, + { url = 
"https://files.pythonhosted.org/packages/cb/1e/a5a1bd6f1fb30f22573f76533de12a00bf274abcdc55c8edab639078abb6/cffi-2.0.0-cp313-cp313-manylinux2014_s390x.manylinux_2_17_s390x.whl", hash = "sha256:dd4f05f54a52fb558f1ba9f528228066954fee3ebe629fc1660d874d040ae5a3", size = 206422, upload-time = "2025-09-08T23:23:07.753Z" }, + { url = "https://files.pythonhosted.org/packages/98/df/0a1755e750013a2081e863e7cd37e0cdd02664372c754e5560099eb7aa44/cffi-2.0.0-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:c8d3b5532fc71b7a77c09192b4a5a200ea992702734a2e9279a37f2478236f26", size = 219499, upload-time = "2025-09-08T23:23:09.648Z" }, + { url = "https://files.pythonhosted.org/packages/50/e1/a969e687fcf9ea58e6e2a928ad5e2dd88cc12f6f0ab477e9971f2309b57c/cffi-2.0.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:d9b29c1f0ae438d5ee9acb31cadee00a58c46cc9c0b2f9038c6b0b3470877a8c", size = 222928, upload-time = "2025-09-08T23:23:10.928Z" }, + { url = "https://files.pythonhosted.org/packages/36/54/0362578dd2c9e557a28ac77698ed67323ed5b9775ca9d3fe73fe191bb5d8/cffi-2.0.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:6d50360be4546678fc1b79ffe7a66265e28667840010348dd69a314145807a1b", size = 221302, upload-time = "2025-09-08T23:23:12.42Z" }, + { url = "https://files.pythonhosted.org/packages/eb/6d/bf9bda840d5f1dfdbf0feca87fbdb64a918a69bca42cfa0ba7b137c48cb8/cffi-2.0.0-cp313-cp313-win32.whl", hash = "sha256:74a03b9698e198d47562765773b4a8309919089150a0bb17d829ad7b44b60d27", size = 172909, upload-time = "2025-09-08T23:23:14.32Z" }, + { url = "https://files.pythonhosted.org/packages/37/18/6519e1ee6f5a1e579e04b9ddb6f1676c17368a7aba48299c3759bbc3c8b3/cffi-2.0.0-cp313-cp313-win_amd64.whl", hash = "sha256:19f705ada2530c1167abacb171925dd886168931e0a7b78f5bffcae5c6b5be75", size = 183402, upload-time = "2025-09-08T23:23:15.535Z" }, + { url = 
"https://files.pythonhosted.org/packages/cb/0e/02ceeec9a7d6ee63bb596121c2c8e9b3a9e150936f4fbef6ca1943e6137c/cffi-2.0.0-cp313-cp313-win_arm64.whl", hash = "sha256:256f80b80ca3853f90c21b23ee78cd008713787b1b1e93eae9f3d6a7134abd91", size = 177780, upload-time = "2025-09-08T23:23:16.761Z" }, + { url = "https://files.pythonhosted.org/packages/92/c4/3ce07396253a83250ee98564f8d7e9789fab8e58858f35d07a9a2c78de9f/cffi-2.0.0-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:fc33c5141b55ed366cfaad382df24fe7dcbc686de5be719b207bb248e3053dc5", size = 185320, upload-time = "2025-09-08T23:23:18.087Z" }, + { url = "https://files.pythonhosted.org/packages/59/dd/27e9fa567a23931c838c6b02d0764611c62290062a6d4e8ff7863daf9730/cffi-2.0.0-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:c654de545946e0db659b3400168c9ad31b5d29593291482c43e3564effbcee13", size = 181487, upload-time = "2025-09-08T23:23:19.622Z" }, + { url = "https://files.pythonhosted.org/packages/d6/43/0e822876f87ea8a4ef95442c3d766a06a51fc5298823f884ef87aaad168c/cffi-2.0.0-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:24b6f81f1983e6df8db3adc38562c83f7d4a0c36162885ec7f7b77c7dcbec97b", size = 220049, upload-time = "2025-09-08T23:23:20.853Z" }, + { url = "https://files.pythonhosted.org/packages/b4/89/76799151d9c2d2d1ead63c2429da9ea9d7aac304603de0c6e8764e6e8e70/cffi-2.0.0-cp314-cp314-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:12873ca6cb9b0f0d3a0da705d6086fe911591737a59f28b7936bdfed27c0d47c", size = 207793, upload-time = "2025-09-08T23:23:22.08Z" }, + { url = "https://files.pythonhosted.org/packages/bb/dd/3465b14bb9e24ee24cb88c9e3730f6de63111fffe513492bf8c808a3547e/cffi-2.0.0-cp314-cp314-manylinux2014_s390x.manylinux_2_17_s390x.whl", hash = "sha256:d9b97165e8aed9272a6bb17c01e3cc5871a594a446ebedc996e2397a1c1ea8ef", size = 206300, upload-time = "2025-09-08T23:23:23.314Z" }, + { url = 
"https://files.pythonhosted.org/packages/47/d9/d83e293854571c877a92da46fdec39158f8d7e68da75bf73581225d28e90/cffi-2.0.0-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:afb8db5439b81cf9c9d0c80404b60c3cc9c3add93e114dcae767f1477cb53775", size = 219244, upload-time = "2025-09-08T23:23:24.541Z" }, + { url = "https://files.pythonhosted.org/packages/2b/0f/1f177e3683aead2bb00f7679a16451d302c436b5cbf2505f0ea8146ef59e/cffi-2.0.0-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:737fe7d37e1a1bffe70bd5754ea763a62a066dc5913ca57e957824b72a85e205", size = 222828, upload-time = "2025-09-08T23:23:26.143Z" }, + { url = "https://files.pythonhosted.org/packages/c6/0f/cafacebd4b040e3119dcb32fed8bdef8dfe94da653155f9d0b9dc660166e/cffi-2.0.0-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:38100abb9d1b1435bc4cc340bb4489635dc2f0da7456590877030c9b3d40b0c1", size = 220926, upload-time = "2025-09-08T23:23:27.873Z" }, + { url = "https://files.pythonhosted.org/packages/3e/aa/df335faa45b395396fcbc03de2dfcab242cd61a9900e914fe682a59170b1/cffi-2.0.0-cp314-cp314-win32.whl", hash = "sha256:087067fa8953339c723661eda6b54bc98c5625757ea62e95eb4898ad5e776e9f", size = 175328, upload-time = "2025-09-08T23:23:44.61Z" }, + { url = "https://files.pythonhosted.org/packages/bb/92/882c2d30831744296ce713f0feb4c1cd30f346ef747b530b5318715cc367/cffi-2.0.0-cp314-cp314-win_amd64.whl", hash = "sha256:203a48d1fb583fc7d78a4c6655692963b860a417c0528492a6bc21f1aaefab25", size = 185650, upload-time = "2025-09-08T23:23:45.848Z" }, + { url = "https://files.pythonhosted.org/packages/9f/2c/98ece204b9d35a7366b5b2c6539c350313ca13932143e79dc133ba757104/cffi-2.0.0-cp314-cp314-win_arm64.whl", hash = "sha256:dbd5c7a25a7cb98f5ca55d258b103a2054f859a46ae11aaf23134f9cc0d356ad", size = 180687, upload-time = "2025-09-08T23:23:47.105Z" }, + { url = "https://files.pythonhosted.org/packages/3e/61/c768e4d548bfa607abcda77423448df8c471f25dbe64fb2ef6d555eae006/cffi-2.0.0-cp314-cp314t-macosx_10_13_x86_64.whl", 
hash = "sha256:9a67fc9e8eb39039280526379fb3a70023d77caec1852002b4da7e8b270c4dd9", size = 188773, upload-time = "2025-09-08T23:23:29.347Z" }, + { url = "https://files.pythonhosted.org/packages/2c/ea/5f76bce7cf6fcd0ab1a1058b5af899bfbef198bea4d5686da88471ea0336/cffi-2.0.0-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:7a66c7204d8869299919db4d5069a82f1561581af12b11b3c9f48c584eb8743d", size = 185013, upload-time = "2025-09-08T23:23:30.63Z" }, + { url = "https://files.pythonhosted.org/packages/be/b4/c56878d0d1755cf9caa54ba71e5d049479c52f9e4afc230f06822162ab2f/cffi-2.0.0-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:7cc09976e8b56f8cebd752f7113ad07752461f48a58cbba644139015ac24954c", size = 221593, upload-time = "2025-09-08T23:23:31.91Z" }, + { url = "https://files.pythonhosted.org/packages/e0/0d/eb704606dfe8033e7128df5e90fee946bbcb64a04fcdaa97321309004000/cffi-2.0.0-cp314-cp314t-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:92b68146a71df78564e4ef48af17551a5ddd142e5190cdf2c5624d0c3ff5b2e8", size = 209354, upload-time = "2025-09-08T23:23:33.214Z" }, + { url = "https://files.pythonhosted.org/packages/d8/19/3c435d727b368ca475fb8742ab97c9cb13a0de600ce86f62eab7fa3eea60/cffi-2.0.0-cp314-cp314t-manylinux2014_s390x.manylinux_2_17_s390x.whl", hash = "sha256:b1e74d11748e7e98e2f426ab176d4ed720a64412b6a15054378afdb71e0f37dc", size = 208480, upload-time = "2025-09-08T23:23:34.495Z" }, + { url = "https://files.pythonhosted.org/packages/d0/44/681604464ed9541673e486521497406fadcc15b5217c3e326b061696899a/cffi-2.0.0-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:28a3a209b96630bca57cce802da70c266eb08c6e97e5afd61a75611ee6c64592", size = 221584, upload-time = "2025-09-08T23:23:36.096Z" }, + { url = "https://files.pythonhosted.org/packages/25/8e/342a504ff018a2825d395d44d63a767dd8ebc927ebda557fecdaca3ac33a/cffi-2.0.0-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = 
"sha256:7553fb2090d71822f02c629afe6042c299edf91ba1bf94951165613553984512", size = 224443, upload-time = "2025-09-08T23:23:37.328Z" }, + { url = "https://files.pythonhosted.org/packages/e1/5e/b666bacbbc60fbf415ba9988324a132c9a7a0448a9a8f125074671c0f2c3/cffi-2.0.0-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:6c6c373cfc5c83a975506110d17457138c8c63016b563cc9ed6e056a82f13ce4", size = 223437, upload-time = "2025-09-08T23:23:38.945Z" }, + { url = "https://files.pythonhosted.org/packages/a0/1d/ec1a60bd1a10daa292d3cd6bb0b359a81607154fb8165f3ec95fe003b85c/cffi-2.0.0-cp314-cp314t-win32.whl", hash = "sha256:1fc9ea04857caf665289b7a75923f2c6ed559b8298a1b8c49e59f7dd95c8481e", size = 180487, upload-time = "2025-09-08T23:23:40.423Z" }, + { url = "https://files.pythonhosted.org/packages/bf/41/4c1168c74fac325c0c8156f04b6749c8b6a8f405bbf91413ba088359f60d/cffi-2.0.0-cp314-cp314t-win_amd64.whl", hash = "sha256:d68b6cef7827e8641e8ef16f4494edda8b36104d79773a334beaa1e3521430f6", size = 191726, upload-time = "2025-09-08T23:23:41.742Z" }, + { url = "https://files.pythonhosted.org/packages/ae/3a/dbeec9d1ee0844c679f6bb5d6ad4e9f198b1224f4e7a32825f47f6192b0c/cffi-2.0.0-cp314-cp314t-win_arm64.whl", hash = "sha256:0a1527a803f0a659de1af2e1fd700213caba79377e27e4693648c2923da066f9", size = 184195, upload-time = "2025-09-08T23:23:43.004Z" }, +] + +[[package]] +name = "charset-normalizer" +version = "3.4.7" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/e7/a1/67fe25fac3c7642725500a3f6cfe5821ad557c3abb11c9d20d12c7008d3e/charset_normalizer-3.4.7.tar.gz", hash = "sha256:ae89db9e5f98a11a4bf50407d4363e7b09b31e55bc117b4f7d80aab97ba009e5", size = 144271, upload-time = "2026-04-02T09:28:39.342Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/0c/eb/4fc8d0a7110eb5fc9cc161723a34a8a6c200ce3b4fbf681bc86feee22308/charset_normalizer-3.4.7-cp312-cp312-macosx_10_13_universal2.whl", hash = 
"sha256:eca9705049ad3c7345d574e3510665cb2cf844c2f2dcfe675332677f081cbd46", size = 311328, upload-time = "2026-04-02T09:26:24.331Z" }, + { url = "https://files.pythonhosted.org/packages/f8/e3/0fadc706008ac9d7b9b5be6dc767c05f9d3e5df51744ce4cc9605de7b9f4/charset_normalizer-3.4.7-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:6178f72c5508bfc5fd446a5905e698c6212932f25bcdd4b47a757a50605a90e2", size = 208061, upload-time = "2026-04-02T09:26:25.568Z" }, + { url = "https://files.pythonhosted.org/packages/42/f0/3dd1045c47f4a4604df85ec18ad093912ae1344ac706993aff91d38773a2/charset_normalizer-3.4.7-cp312-cp312-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:e1421b502d83040e6d7fb2fb18dff63957f720da3d77b2fbd3187ceb63755d7b", size = 229031, upload-time = "2026-04-02T09:26:26.865Z" }, + { url = "https://files.pythonhosted.org/packages/dc/67/675a46eb016118a2fbde5a277a5d15f4f69d5f3f5f338e5ee2f8948fcf43/charset_normalizer-3.4.7-cp312-cp312-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:edac0f1ab77644605be2cbba52e6b7f630731fc42b34cb0f634be1a6eface56a", size = 225239, upload-time = "2026-04-02T09:26:28.044Z" }, + { url = "https://files.pythonhosted.org/packages/4b/f8/d0118a2f5f23b02cd166fa385c60f9b0d4f9194f574e2b31cef350ad7223/charset_normalizer-3.4.7-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:5649fd1c7bade02f320a462fdefd0b4bd3ce036065836d4f42e0de958038e116", size = 216589, upload-time = "2026-04-02T09:26:29.239Z" }, + { url = "https://files.pythonhosted.org/packages/b1/f1/6d2b0b261b6c4ceef0fcb0d17a01cc5bc53586c2d4796fa04b5c540bc13d/charset_normalizer-3.4.7-cp312-cp312-manylinux_2_31_armv7l.whl", hash = "sha256:203104ed3e428044fd943bc4bf45fa73c0730391f9621e37fe39ecf477b128cb", size = 202733, upload-time = "2026-04-02T09:26:30.5Z" }, + { url = 
"https://files.pythonhosted.org/packages/6f/c0/7b1f943f7e87cc3db9626ba17807d042c38645f0a1d4415c7a14afb5591f/charset_normalizer-3.4.7-cp312-cp312-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:298930cec56029e05497a76988377cbd7457ba864beeea92ad7e844fe74cd1f1", size = 212652, upload-time = "2026-04-02T09:26:31.709Z" }, + { url = "https://files.pythonhosted.org/packages/38/dd/5a9ab159fe45c6e72079398f277b7d2b523e7f716acc489726115a910097/charset_normalizer-3.4.7-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:708838739abf24b2ceb208d0e22403dd018faeef86ddac04319a62ae884c4f15", size = 211229, upload-time = "2026-04-02T09:26:33.282Z" }, + { url = "https://files.pythonhosted.org/packages/d5/ff/531a1cad5ca855d1c1a8b69cb71abfd6d85c0291580146fda7c82857caa1/charset_normalizer-3.4.7-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:0f7eb884681e3938906ed0434f20c63046eacd0111c4ba96f27b76084cd679f5", size = 203552, upload-time = "2026-04-02T09:26:34.845Z" }, + { url = "https://files.pythonhosted.org/packages/c1/4c/a5fb52d528a8ca41f7598cb619409ece30a169fbdf9cdce592e53b46c3a6/charset_normalizer-3.4.7-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:4dc1e73c36828f982bfe79fadf5919923f8a6f4df2860804db9a98c48824ce8d", size = 230806, upload-time = "2026-04-02T09:26:36.152Z" }, + { url = "https://files.pythonhosted.org/packages/59/7a/071feed8124111a32b316b33ae4de83d36923039ef8cf48120266844285b/charset_normalizer-3.4.7-cp312-cp312-musllinux_1_2_riscv64.whl", hash = "sha256:aed52fea0513bac0ccde438c188c8a471c4e0f457c2dd20cdbf6ea7a450046c7", size = 212316, upload-time = "2026-04-02T09:26:37.672Z" }, + { url = "https://files.pythonhosted.org/packages/fd/35/f7dba3994312d7ba508e041eaac39a36b120f32d4c8662b8814dab876431/charset_normalizer-3.4.7-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:fea24543955a6a729c45a73fe90e08c743f0b3334bbf3201e6c4bc1b0c7fa464", size = 227274, upload-time = "2026-04-02T09:26:38.93Z" }, + { url = 
"https://files.pythonhosted.org/packages/8a/2d/a572df5c9204ab7688ec1edc895a73ebded3b023bb07364710b05dd1c9be/charset_normalizer-3.4.7-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:bb6d88045545b26da47aa879dd4a89a71d1dce0f0e549b1abcb31dfe4a8eac49", size = 218468, upload-time = "2026-04-02T09:26:40.17Z" }, + { url = "https://files.pythonhosted.org/packages/86/eb/890922a8b03a568ca2f336c36585a4713c55d4d67bf0f0c78924be6315ca/charset_normalizer-3.4.7-cp312-cp312-win32.whl", hash = "sha256:2257141f39fe65a3fdf38aeccae4b953e5f3b3324f4ff0daf9f15b8518666a2c", size = 148460, upload-time = "2026-04-02T09:26:41.416Z" }, + { url = "https://files.pythonhosted.org/packages/35/d9/0e7dffa06c5ab081f75b1b786f0aefc88365825dfcd0ac544bdb7b2b6853/charset_normalizer-3.4.7-cp312-cp312-win_amd64.whl", hash = "sha256:5ed6ab538499c8644b8a3e18debabcd7ce684f3fa91cf867521a7a0279cab2d6", size = 159330, upload-time = "2026-04-02T09:26:42.554Z" }, + { url = "https://files.pythonhosted.org/packages/9e/5d/481bcc2a7c88ea6b0878c299547843b2521ccbc40980cb406267088bc701/charset_normalizer-3.4.7-cp312-cp312-win_arm64.whl", hash = "sha256:56be790f86bfb2c98fb742ce566dfb4816e5a83384616ab59c49e0604d49c51d", size = 147828, upload-time = "2026-04-02T09:26:44.075Z" }, + { url = "https://files.pythonhosted.org/packages/c1/3b/66777e39d3ae1ddc77ee606be4ec6d8cbd4c801f65e5a1b6f2b11b8346dd/charset_normalizer-3.4.7-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:f496c9c3cc02230093d8330875c4c3cdfc3b73612a5fd921c65d39cbcef08063", size = 309627, upload-time = "2026-04-02T09:26:45.198Z" }, + { url = "https://files.pythonhosted.org/packages/2e/4e/b7f84e617b4854ade48a1b7915c8ccfadeba444d2a18c291f696e37f0d3b/charset_normalizer-3.4.7-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:0ea948db76d31190bf08bd371623927ee1339d5f2a0b4b1b4a4439a65298703c", size = 207008, upload-time = "2026-04-02T09:26:46.824Z" }, + { url = 
"https://files.pythonhosted.org/packages/c4/bb/ec73c0257c9e11b268f018f068f5d00aa0ef8c8b09f7753ebd5f2880e248/charset_normalizer-3.4.7-cp313-cp313-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:a277ab8928b9f299723bc1a2dabb1265911b1a76341f90a510368ca44ad9ab66", size = 228303, upload-time = "2026-04-02T09:26:48.397Z" }, + { url = "https://files.pythonhosted.org/packages/85/fb/32d1f5033484494619f701e719429c69b766bfc4dbc61aa9e9c8c166528b/charset_normalizer-3.4.7-cp313-cp313-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:3bec022aec2c514d9cf199522a802bd007cd588ab17ab2525f20f9c34d067c18", size = 224282, upload-time = "2026-04-02T09:26:49.684Z" }, + { url = "https://files.pythonhosted.org/packages/fa/07/330e3a0dda4c404d6da83b327270906e9654a24f6c546dc886a0eb0ffb23/charset_normalizer-3.4.7-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:e044c39e41b92c845bc815e5ae4230804e8e7bc29e399b0437d64222d92809dd", size = 215595, upload-time = "2026-04-02T09:26:50.915Z" }, + { url = "https://files.pythonhosted.org/packages/e3/7c/fc890655786e423f02556e0216d4b8c6bcb6bdfa890160dc66bf52dee468/charset_normalizer-3.4.7-cp313-cp313-manylinux_2_31_armv7l.whl", hash = "sha256:f495a1652cf3fbab2eb0639776dad966c2fb874d79d87ca07f9d5f059b8bd215", size = 201986, upload-time = "2026-04-02T09:26:52.197Z" }, + { url = "https://files.pythonhosted.org/packages/d8/97/bfb18b3db2aed3b90cf54dc292ad79fdd5ad65c4eae454099475cbeadd0d/charset_normalizer-3.4.7-cp313-cp313-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:e712b419df8ba5e42b226c510472b37bd57b38e897d3eca5e8cfd410a29fa859", size = 211711, upload-time = "2026-04-02T09:26:53.49Z" }, + { url = "https://files.pythonhosted.org/packages/6f/a5/a581c13798546a7fd557c82614a5c65a13df2157e9ad6373166d2a3e645d/charset_normalizer-3.4.7-cp313-cp313-musllinux_1_2_aarch64.whl", hash = 
"sha256:7804338df6fcc08105c7745f1502ba68d900f45fd770d5bdd5288ddccb8a42d8", size = 210036, upload-time = "2026-04-02T09:26:54.975Z" }, + { url = "https://files.pythonhosted.org/packages/8c/bf/b3ab5bcb478e4193d517644b0fb2bf5497fbceeaa7a1bc0f4d5b50953861/charset_normalizer-3.4.7-cp313-cp313-musllinux_1_2_armv7l.whl", hash = "sha256:481551899c856c704d58119b5025793fa6730adda3571971af568f66d2424bb5", size = 202998, upload-time = "2026-04-02T09:26:56.303Z" }, + { url = "https://files.pythonhosted.org/packages/e7/4e/23efd79b65d314fa320ec6017b4b5834d5c12a58ba4610aa353af2e2f577/charset_normalizer-3.4.7-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:f59099f9b66f0d7145115e6f80dd8b1d847176df89b234a5a6b3f00437aa0832", size = 230056, upload-time = "2026-04-02T09:26:57.554Z" }, + { url = "https://files.pythonhosted.org/packages/b9/9f/1e1941bc3f0e01df116e68dc37a55c4d249df5e6fa77f008841aef68264f/charset_normalizer-3.4.7-cp313-cp313-musllinux_1_2_riscv64.whl", hash = "sha256:f59ad4c0e8f6bba240a9bb85504faa1ab438237199d4cce5f622761507b8f6a6", size = 211537, upload-time = "2026-04-02T09:26:58.843Z" }, + { url = "https://files.pythonhosted.org/packages/80/0f/088cbb3020d44428964a6c97fe1edfb1b9550396bf6d278330281e8b709c/charset_normalizer-3.4.7-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:3dedcc22d73ec993f42055eff4fcfed9318d1eeb9a6606c55892a26964964e48", size = 226176, upload-time = "2026-04-02T09:27:00.437Z" }, + { url = "https://files.pythonhosted.org/packages/6a/9f/130394f9bbe06f4f63e22641d32fc9b202b7e251c9aef4db044324dac493/charset_normalizer-3.4.7-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:64f02c6841d7d83f832cd97ccf8eb8a906d06eb95d5276069175c696b024b60a", size = 217723, upload-time = "2026-04-02T09:27:02.021Z" }, + { url = "https://files.pythonhosted.org/packages/73/55/c469897448a06e49f8fa03f6caae97074fde823f432a98f979cc42b90e69/charset_normalizer-3.4.7-cp313-cp313-win32.whl", hash = "sha256:4042d5c8f957e15221d423ba781e85d553722fc4113f523f2feb7b188cc34c5e", 
size = 148085, upload-time = "2026-04-02T09:27:03.192Z" }, + { url = "https://files.pythonhosted.org/packages/5d/78/1b74c5bbb3f99b77a1715c91b3e0b5bdb6fe302d95ace4f5b1bec37b0167/charset_normalizer-3.4.7-cp313-cp313-win_amd64.whl", hash = "sha256:3946fa46a0cf3e4c8cb1cc52f56bb536310d34f25f01ca9b6c16afa767dab110", size = 158819, upload-time = "2026-04-02T09:27:04.454Z" }, + { url = "https://files.pythonhosted.org/packages/68/86/46bd42279d323deb8687c4a5a811fd548cb7d1de10cf6535d099877a9a9f/charset_normalizer-3.4.7-cp313-cp313-win_arm64.whl", hash = "sha256:80d04837f55fc81da168b98de4f4b797ef007fc8a79ab71c6ec9bc4dd662b15b", size = 147915, upload-time = "2026-04-02T09:27:05.971Z" }, + { url = "https://files.pythonhosted.org/packages/97/c8/c67cb8c70e19ef1960b97b22ed2a1567711de46c4ddf19799923adc836c2/charset_normalizer-3.4.7-cp314-cp314-macosx_10_15_universal2.whl", hash = "sha256:c36c333c39be2dbca264d7803333c896ab8fa7d4d6f0ab7edb7dfd7aea6e98c0", size = 309234, upload-time = "2026-04-02T09:27:07.194Z" }, + { url = "https://files.pythonhosted.org/packages/99/85/c091fdee33f20de70d6c8b522743b6f831a2f1cd3ff86de4c6a827c48a76/charset_normalizer-3.4.7-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:1c2aed2e5e41f24ea8ef1590b8e848a79b56f3a5564a65ceec43c9d692dc7d8a", size = 208042, upload-time = "2026-04-02T09:27:08.749Z" }, + { url = "https://files.pythonhosted.org/packages/87/1c/ab2ce611b984d2fd5d86a5a8a19c1ae26acac6bad967da4967562c75114d/charset_normalizer-3.4.7-cp314-cp314-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:54523e136b8948060c0fa0bc7b1b50c32c186f2fceee897a495406bb6e311d2b", size = 228706, upload-time = "2026-04-02T09:27:09.951Z" }, + { url = "https://files.pythonhosted.org/packages/a8/29/2b1d2cb00bf085f59d29eb773ce58ec2d325430f8c216804a0a5cd83cbca/charset_normalizer-3.4.7-cp314-cp314-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = 
"sha256:715479b9a2802ecac752a3b0efa2b0b60285cf962ee38414211abdfccc233b41", size = 224727, upload-time = "2026-04-02T09:27:11.175Z" }, + { url = "https://files.pythonhosted.org/packages/47/5c/032c2d5a07fe4d4855fea851209cca2b6f03ebeb6d4e3afdb3358386a684/charset_normalizer-3.4.7-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:bd6c2a1c7573c64738d716488d2cdd3c00e340e4835707d8fdb8dc1a66ef164e", size = 215882, upload-time = "2026-04-02T09:27:12.446Z" }, + { url = "https://files.pythonhosted.org/packages/2c/c2/356065d5a8b78ed04499cae5f339f091946a6a74f91e03476c33f0ab7100/charset_normalizer-3.4.7-cp314-cp314-manylinux_2_31_armv7l.whl", hash = "sha256:c45e9440fb78f8ddabcf714b68f936737a121355bf59f3907f4e17721b9d1aae", size = 200860, upload-time = "2026-04-02T09:27:13.721Z" }, + { url = "https://files.pythonhosted.org/packages/0c/cd/a32a84217ced5039f53b29f460962abb2d4420def55afabe45b1c3c7483d/charset_normalizer-3.4.7-cp314-cp314-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:3534e7dcbdcf757da6b85a0bbf5b6868786d5982dd959b065e65481644817a18", size = 211564, upload-time = "2026-04-02T09:27:15.272Z" }, + { url = "https://files.pythonhosted.org/packages/44/86/58e6f13ce26cc3b8f4a36b94a0f22ae2f00a72534520f4ae6857c4b81f89/charset_normalizer-3.4.7-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:e8ac484bf18ce6975760921bb6148041faa8fef0547200386ea0b52b5d27bf7b", size = 211276, upload-time = "2026-04-02T09:27:16.834Z" }, + { url = "https://files.pythonhosted.org/packages/8f/fe/d17c32dc72e17e155e06883efa84514ca375f8a528ba2546bee73fc4df81/charset_normalizer-3.4.7-cp314-cp314-musllinux_1_2_armv7l.whl", hash = "sha256:a5fe03b42827c13cdccd08e6c0247b6a6d4b5e3cdc53fd1749f5896adcdc2356", size = 201238, upload-time = "2026-04-02T09:27:18.229Z" }, + { url = 
"https://files.pythonhosted.org/packages/6a/29/f33daa50b06525a237451cdb6c69da366c381a3dadcd833fa5676bc468b3/charset_normalizer-3.4.7-cp314-cp314-musllinux_1_2_ppc64le.whl", hash = "sha256:2d6eb928e13016cea4f1f21d1e10c1cebd5a421bc57ddf5b1142ae3f86824fab", size = 230189, upload-time = "2026-04-02T09:27:19.445Z" }, + { url = "https://files.pythonhosted.org/packages/b6/6e/52c84015394a6a0bdcd435210a7e944c5f94ea1055f5cc5d56c5fe368e7b/charset_normalizer-3.4.7-cp314-cp314-musllinux_1_2_riscv64.whl", hash = "sha256:e74327fb75de8986940def6e8dee4f127cc9752bee7355bb323cc5b2659b6d46", size = 211352, upload-time = "2026-04-02T09:27:20.79Z" }, + { url = "https://files.pythonhosted.org/packages/8c/d7/4353be581b373033fb9198bf1da3cf8f09c1082561e8e922aa7b39bf9fe8/charset_normalizer-3.4.7-cp314-cp314-musllinux_1_2_s390x.whl", hash = "sha256:d6038d37043bced98a66e68d3aa2b6a35505dc01328cd65217cefe82f25def44", size = 227024, upload-time = "2026-04-02T09:27:22.063Z" }, + { url = "https://files.pythonhosted.org/packages/30/45/99d18aa925bd1740098ccd3060e238e21115fffbfdcb8f3ece837d0ace6c/charset_normalizer-3.4.7-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:7579e913a5339fb8fa133f6bbcfd8e6749696206cf05acdbdca71a1b436d8e72", size = 217869, upload-time = "2026-04-02T09:27:23.486Z" }, + { url = "https://files.pythonhosted.org/packages/5c/05/5ee478aa53f4bb7996482153d4bfe1b89e0f087f0ab6b294fcf92d595873/charset_normalizer-3.4.7-cp314-cp314-win32.whl", hash = "sha256:5b77459df20e08151cd6f8b9ef8ef1f961ef73d85c21a555c7eed5b79410ec10", size = 148541, upload-time = "2026-04-02T09:27:25.146Z" }, + { url = "https://files.pythonhosted.org/packages/48/77/72dcb0921b2ce86420b2d79d454c7022bf5be40202a2a07906b9f2a35c97/charset_normalizer-3.4.7-cp314-cp314-win_amd64.whl", hash = "sha256:92a0a01ead5e668468e952e4238cccd7c537364eb7d851ab144ab6627dbbe12f", size = 159634, upload-time = "2026-04-02T09:27:26.642Z" }, + { url = 
"https://files.pythonhosted.org/packages/c6/a3/c2369911cd72f02386e4e340770f6e158c7980267da16af8f668217abaa0/charset_normalizer-3.4.7-cp314-cp314-win_arm64.whl", hash = "sha256:67f6279d125ca0046a7fd386d01b311c6363844deac3e5b069b514ba3e63c246", size = 148384, upload-time = "2026-04-02T09:27:28.271Z" }, + { url = "https://files.pythonhosted.org/packages/94/09/7e8a7f73d24dba1f0035fbbf014d2c36828fc1bf9c88f84093e57d315935/charset_normalizer-3.4.7-cp314-cp314t-macosx_10_15_universal2.whl", hash = "sha256:effc3f449787117233702311a1b7d8f59cba9ced946ba727bdc329ec69028e24", size = 330133, upload-time = "2026-04-02T09:27:29.474Z" }, + { url = "https://files.pythonhosted.org/packages/8d/da/96975ddb11f8e977f706f45cddd8540fd8242f71ecdb5d18a80723dcf62c/charset_normalizer-3.4.7-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:fbccdc05410c9ee21bbf16a35f4c1d16123dcdeb8a1d38f33654fa21d0234f79", size = 216257, upload-time = "2026-04-02T09:27:30.793Z" }, + { url = "https://files.pythonhosted.org/packages/e5/e8/1d63bf8ef2d388e95c64b2098f45f84758f6d102a087552da1485912637b/charset_normalizer-3.4.7-cp314-cp314t-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:733784b6d6def852c814bce5f318d25da2ee65dd4839a0718641c696e09a2960", size = 234851, upload-time = "2026-04-02T09:27:32.44Z" }, + { url = "https://files.pythonhosted.org/packages/9b/40/e5ff04233e70da2681fa43969ad6f66ca5611d7e669be0246c4c7aaf6dc8/charset_normalizer-3.4.7-cp314-cp314t-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:a89c23ef8d2c6b27fd200a42aa4ac72786e7c60d40efdc76e6011260b6e949c4", size = 233393, upload-time = "2026-04-02T09:27:34.03Z" }, + { url = "https://files.pythonhosted.org/packages/be/c1/06c6c49d5a5450f76899992f1ee40b41d076aee9279b49cf9974d2f313d5/charset_normalizer-3.4.7-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = 
"sha256:6c114670c45346afedc0d947faf3c7f701051d2518b943679c8ff88befe14f8e", size = 223251, upload-time = "2026-04-02T09:27:35.369Z" }, + { url = "https://files.pythonhosted.org/packages/2b/9f/f2ff16fb050946169e3e1f82134d107e5d4ae72647ec8a1b1446c148480f/charset_normalizer-3.4.7-cp314-cp314t-manylinux_2_31_armv7l.whl", hash = "sha256:a180c5e59792af262bf263b21a3c49353f25945d8d9f70628e73de370d55e1e1", size = 206609, upload-time = "2026-04-02T09:27:36.661Z" }, + { url = "https://files.pythonhosted.org/packages/69/d5/a527c0cd8d64d2eab7459784fb4169a0ac76e5a6fc5237337982fd61347e/charset_normalizer-3.4.7-cp314-cp314t-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:3c9a494bc5ec77d43cea229c4f6db1e4d8fe7e1bbffa8b6f0f0032430ff8ab44", size = 220014, upload-time = "2026-04-02T09:27:38.019Z" }, + { url = "https://files.pythonhosted.org/packages/7e/80/8a7b8104a3e203074dc9aa2c613d4b726c0e136bad1cc734594b02867972/charset_normalizer-3.4.7-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:8d828b6667a32a728a1ad1d93957cdf37489c57b97ae6c4de2860fa749b8fc1e", size = 218979, upload-time = "2026-04-02T09:27:39.37Z" }, + { url = "https://files.pythonhosted.org/packages/02/9a/b759b503d507f375b2b5c153e4d2ee0a75aa215b7f2489cf314f4541f2c0/charset_normalizer-3.4.7-cp314-cp314t-musllinux_1_2_armv7l.whl", hash = "sha256:cf1493cd8607bec4d8a7b9b004e699fcf8f9103a9284cc94962cb73d20f9d4a3", size = 209238, upload-time = "2026-04-02T09:27:40.722Z" }, + { url = "https://files.pythonhosted.org/packages/c2/4e/0f3f5d47b86bdb79256e7290b26ac847a2832d9a4033f7eb2cd4bcf4bb5b/charset_normalizer-3.4.7-cp314-cp314t-musllinux_1_2_ppc64le.whl", hash = "sha256:0c96c3b819b5c3e9e165495db84d41914d6894d55181d2d108cc1a69bfc9cce0", size = 236110, upload-time = "2026-04-02T09:27:42.33Z" }, + { url = "https://files.pythonhosted.org/packages/96/23/bce28734eb3ed2c91dcf93abeb8a5cf393a7b2749725030bb630e554fdd8/charset_normalizer-3.4.7-cp314-cp314t-musllinux_1_2_riscv64.whl", hash = 
"sha256:752a45dc4a6934060b3b0dab47e04edc3326575f82be64bc4fc293914566503e", size = 219824, upload-time = "2026-04-02T09:27:43.924Z" }, + { url = "https://files.pythonhosted.org/packages/2c/6f/6e897c6984cc4d41af319b077f2f600fc8214eb2fe2d6bcb79141b882400/charset_normalizer-3.4.7-cp314-cp314t-musllinux_1_2_s390x.whl", hash = "sha256:8778f0c7a52e56f75d12dae53ae320fae900a8b9b4164b981b9c5ce059cd1fcb", size = 233103, upload-time = "2026-04-02T09:27:45.348Z" }, + { url = "https://files.pythonhosted.org/packages/76/22/ef7bd0fe480a0ae9b656189ec00744b60933f68b4f42a7bb06589f6f576a/charset_normalizer-3.4.7-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:ce3412fbe1e31eb81ea42f4169ed94861c56e643189e1e75f0041f3fe7020abe", size = 225194, upload-time = "2026-04-02T09:27:46.706Z" }, + { url = "https://files.pythonhosted.org/packages/c5/a7/0e0ab3e0b5bc1219bd80a6a0d4d72ca74d9250cb2382b7c699c147e06017/charset_normalizer-3.4.7-cp314-cp314t-win32.whl", hash = "sha256:c03a41a8784091e67a39648f70c5f97b5b6a37f216896d44d2cdcb82615339a0", size = 159827, upload-time = "2026-04-02T09:27:48.053Z" }, + { url = "https://files.pythonhosted.org/packages/7a/1d/29d32e0fb40864b1f878c7f5a0b343ae676c6e2b271a2d55cc3a152391da/charset_normalizer-3.4.7-cp314-cp314t-win_amd64.whl", hash = "sha256:03853ed82eeebbce3c2abfdbc98c96dc205f32a79627688ac9a27370ea61a49c", size = 174168, upload-time = "2026-04-02T09:27:49.795Z" }, + { url = "https://files.pythonhosted.org/packages/de/32/d92444ad05c7a6e41fb2036749777c163baf7a0301a040cb672d6b2b1ae9/charset_normalizer-3.4.7-cp314-cp314t-win_arm64.whl", hash = "sha256:c35abb8bfff0185efac5878da64c45dafd2b37fb0383add1be155a763c1f083d", size = 153018, upload-time = "2026-04-02T09:27:51.116Z" }, + { url = "https://files.pythonhosted.org/packages/db/8f/61959034484a4a7c527811f4721e75d02d653a35afb0b6054474d8185d4c/charset_normalizer-3.4.7-py3-none-any.whl", hash = "sha256:3dce51d0f5e7951f8bb4900c257dad282f49190fdbebecd4ba99bcc41fef404d", size = 61958, upload-time = 
"2026-04-02T09:28:37.794Z" }, +] + +[[package]] +name = "click" +version = "8.3.2" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "colorama", marker = "sys_platform == 'win32'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/57/75/31212c6bf2503fdf920d87fee5d7a86a2e3bcf444984126f13d8e4016804/click-8.3.2.tar.gz", hash = "sha256:14162b8b3b3550a7d479eafa77dfd3c38d9dc8951f6f69c78913a8f9a7540fd5", size = 302856, upload-time = "2026-04-03T19:14:45.118Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/e4/20/71885d8b97d4f3dde17b1fdb92dbd4908b00541c5a3379787137285f602e/click-8.3.2-py3-none-any.whl", hash = "sha256:1924d2c27c5653561cd2cae4548d1406039cb79b858b747cfea24924bbc1616d", size = 108379, upload-time = "2026-04-03T19:14:43.505Z" }, +] + +[[package]] +name = "colorama" +version = "0.4.6" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/d8/53/6f443c9a4a8358a93a6792e2acffb9d9d5cb0a5cfd8802644b7b1c9a02e4/colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44", size = 27697, upload-time = "2022-10-25T02:36:22.414Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/d1/d6/3965ed04c63042e047cb6a3e6ed1a63a35087b6a609aa3a15ed8ac56c221/colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6", size = 25335, upload-time = "2022-10-25T02:36:20.889Z" }, +] + +[[package]] +name = "cytoolz" +version = "1.1.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "toolz" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/bd/d4/16916f3dc20a3f5455b63c35dcb260b3716f59ce27a93586804e70e431d5/cytoolz-1.1.0.tar.gz", hash = "sha256:13a7bf254c3c0d28b12e2290b82aed0f0977a4c2a2bf84854fcdc7796a29f3b0", size = 642510, upload-time = "2025-10-19T00:44:56.174Z" } +wheels = [ + { url = 
"https://files.pythonhosted.org/packages/c6/ec/01426224f7acf60183d3921b25e1a8e71713d3d39cb464d64ac7aace6ea6/cytoolz-1.1.0-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:99f8e134c9be11649342853ec8c90837af4089fc8ff1e8f9a024a57d1fa08514", size = 1327800, upload-time = "2025-10-19T00:40:48.674Z" }, + { url = "https://files.pythonhosted.org/packages/b4/07/e07e8fedd332ac9626ad58bea31416dda19bfd14310731fa38b16a97e15f/cytoolz-1.1.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:0a6f44cf9319c30feb9a50aa513d777ef51efec16f31c404409e7deb8063df64", size = 997118, upload-time = "2025-10-19T00:40:50.919Z" }, + { url = "https://files.pythonhosted.org/packages/ab/72/c0f766d63ed2f9ea8dc8e1628d385d99b41fb834ce17ac3669e3f91e115d/cytoolz-1.1.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:945580dc158c557172fca899a35a99a16fbcebf6db0c77cb6621084bc82189f9", size = 991169, upload-time = "2025-10-19T00:40:52.887Z" }, + { url = "https://files.pythonhosted.org/packages/df/4b/1f757353d1bf33e56a7391ecc9bc49c1e529803b93a9d2f67fe5f92906fe/cytoolz-1.1.0-cp312-cp312-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:257905ec050d04f2f856854620d1e25556fd735064cebd81b460f54939b9f9d5", size = 2700680, upload-time = "2025-10-19T00:40:54.597Z" }, + { url = "https://files.pythonhosted.org/packages/25/73/9b25bb7ed8d419b9d6ff2ae0b3d06694de79a3f98f5169a1293ff7ad3a3f/cytoolz-1.1.0-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:82779049f352fb3ab5e8c993ab45edbb6e02efb1f17f0b50f4972c706cc51d76", size = 2824951, upload-time = "2025-10-19T00:40:56.137Z" }, + { url = "https://files.pythonhosted.org/packages/0c/93/9c787f7c909e75670fff467f2504725d06d8c3f51d6dfe22c55a08c8ccd4/cytoolz-1.1.0-cp312-cp312-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:7d3e405e435320e08c5a1633afaf285a392e2d9cef35c925d91e2a31dfd7a688", size = 2679635, upload-time = "2025-10-19T00:40:57.799Z" }, + { url = 
"https://files.pythonhosted.org/packages/50/aa/9ee92c302cccf7a41a7311b325b51ebeff25d36c1f82bdc1bbe3f58dc947/cytoolz-1.1.0-cp312-cp312-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:923df8f5591e0d20543060c29909c149ab1963a7267037b39eee03a83dbc50a8", size = 2938352, upload-time = "2025-10-19T00:40:59.49Z" }, + { url = "https://files.pythonhosted.org/packages/6a/a3/3b58c5c1692c3bacd65640d0d5c7267a7ebb76204f7507aec29de7063d2f/cytoolz-1.1.0-cp312-cp312-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:25db9e4862f22ea0ae2e56c8bec9fc9fd756b655ae13e8c7b5625d7ed1c582d4", size = 3022121, upload-time = "2025-10-19T00:41:01.209Z" }, + { url = "https://files.pythonhosted.org/packages/e1/93/c647bc3334355088c57351a536c2d4a83dd45f7de591fab383975e45bff9/cytoolz-1.1.0-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:c7a98deb11ccd8e5d9f9441ef2ff3352aab52226a2b7d04756caaa53cd612363", size = 2857656, upload-time = "2025-10-19T00:41:03.456Z" }, + { url = "https://files.pythonhosted.org/packages/b2/c2/43fea146bf4141deea959e19dcddf268c5ed759dec5c2ed4a6941d711933/cytoolz-1.1.0-cp312-cp312-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:dce4ee9fc99104bc77efdea80f32ca5a650cd653bcc8a1d984a931153d3d9b58", size = 2551284, upload-time = "2025-10-19T00:41:05.347Z" }, + { url = "https://files.pythonhosted.org/packages/6f/df/cdc7a81ce5cfcde7ef523143d545635fc37e80ccacce140ae58483a21da3/cytoolz-1.1.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:80d6da158f7d20c15819701bbda1c041f0944ede2f564f5c739b1bc80a9ffb8b", size = 2721673, upload-time = "2025-10-19T00:41:07.528Z" }, + { url = "https://files.pythonhosted.org/packages/45/be/f8524bb9ad8812ad375e61238dcaa3177628234d1b908ad0b74e3657cafd/cytoolz-1.1.0-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:3b5c5a192abda123ad45ef716ec9082b4cf7d95e9ada8291c5c2cc5558be858b", size = 2722884, upload-time = 
"2025-10-19T00:41:09.698Z" }, + { url = "https://files.pythonhosted.org/packages/23/e6/6bb8e4f9c267ad42d1ff77b6d2e4984665505afae50a216290e1d7311431/cytoolz-1.1.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:5b399ce7d967b1cb6280250818b786be652aa8ddffd3c0bb5c48c6220d945ab5", size = 2685486, upload-time = "2025-10-19T00:41:11.349Z" }, + { url = "https://files.pythonhosted.org/packages/d7/dd/88619f9c8d2b682562c0c886bbb7c35720cb83fda2ac9a41bdd14073d9bd/cytoolz-1.1.0-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:e7e29a1a03f00b4322196cfe8e2c38da9a6c8d573566052c586df83aacc5663c", size = 2839661, upload-time = "2025-10-19T00:41:13.053Z" }, + { url = "https://files.pythonhosted.org/packages/b8/8d/4478ebf471ee78dd496d254dc0f4ad729cd8e6ba8257de4f0a98a2838ef2/cytoolz-1.1.0-cp312-cp312-musllinux_1_2_riscv64.whl", hash = "sha256:5291b117d71652a817ec164e7011f18e6a51f8a352cc9a70ed5b976c51102fda", size = 2547095, upload-time = "2025-10-19T00:41:16.054Z" }, + { url = "https://files.pythonhosted.org/packages/e6/68/f1dea33367b0b3f64e199c230a14a6b6f243c189020effafd31e970ca527/cytoolz-1.1.0-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:8caef62f846a9011676c51bda9189ae394cdd6bb17f2946ecaedc23243268320", size = 2870901, upload-time = "2025-10-19T00:41:17.727Z" }, + { url = "https://files.pythonhosted.org/packages/4a/9a/33591c09dfe799b8fb692cf2ad383e2c41ab6593cc960b00d1fc8a145655/cytoolz-1.1.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:de425c5a8e3be7bb3a195e19191d28d9eb3c2038046064a92edc4505033ec9cb", size = 2765422, upload-time = "2025-10-19T00:41:20.075Z" }, + { url = "https://files.pythonhosted.org/packages/60/2b/a8aa233c9416df87f004e57ae4280bd5e1f389b4943d179f01020c6ec629/cytoolz-1.1.0-cp312-cp312-win32.whl", hash = "sha256:296440a870e8d1f2e1d1edf98f60f1532b9d3ab8dfbd4b25ec08cd76311e79e5", size = 901933, upload-time = "2025-10-19T00:41:21.646Z" }, + { url = 
"https://files.pythonhosted.org/packages/ad/33/4c9bdf8390dc01d2617c7f11930697157164a52259b6818ddfa2f94f89f4/cytoolz-1.1.0-cp312-cp312-win_amd64.whl", hash = "sha256:07156987f224c6dac59aa18fb8bf91e1412f5463961862716a3381bf429c8699", size = 947989, upload-time = "2025-10-19T00:41:23.288Z" }, + { url = "https://files.pythonhosted.org/packages/35/ac/6e2708835875f5acb52318462ed296bf94ed0cb8c7cb70e62fbd03f709e3/cytoolz-1.1.0-cp312-cp312-win_arm64.whl", hash = "sha256:23e616b38f5b3160c7bb45b0f84a8f3deb4bd26b29fb2dfc716f241c738e27b8", size = 903913, upload-time = "2025-10-19T00:41:24.992Z" }, + { url = "https://files.pythonhosted.org/packages/71/4a/b3ddb3ee44fe0045e95dd973746f93f033b6f92cce1fc3cbbe24b329943c/cytoolz-1.1.0-cp313-cp313-ios_13_0_arm64_iphoneos.whl", hash = "sha256:76c9b58555300be6dde87a41faf1f97966d79b9a678b7a526fcff75d28ef4945", size = 976728, upload-time = "2025-10-19T00:41:26.5Z" }, + { url = "https://files.pythonhosted.org/packages/42/21/a3681434aa425875dd828bb515924b0f12c37a55c7d2bc5c0c5de3aeb0b4/cytoolz-1.1.0-cp313-cp313-ios_13_0_arm64_iphonesimulator.whl", hash = "sha256:d1d638b10d3144795655e9395566ce35807df09219fd7cacd9e6acbdef67946a", size = 986057, upload-time = "2025-10-19T00:41:28.911Z" }, + { url = "https://files.pythonhosted.org/packages/d9/cb/efc1b29e211e0670a6953222afaac84dcbba5cb940b130c0e49858978040/cytoolz-1.1.0-cp313-cp313-ios_13_0_x86_64_iphonesimulator.whl", hash = "sha256:26801c1a165e84786a99e03c9c9973356caaca002d66727b761fb1042878ef06", size = 992632, upload-time = "2025-10-19T00:41:30.612Z" }, + { url = "https://files.pythonhosted.org/packages/be/b0/e50621d21e939338c97faab651f58ea7fa32101226a91de79ecfb89d71e1/cytoolz-1.1.0-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:2a9a464542912d3272f6dccc5142df057c71c6a5cbd30439389a732df401afb7", size = 1317534, upload-time = "2025-10-19T00:41:32.625Z" }, + { url = 
"https://files.pythonhosted.org/packages/0d/6b/25aa9739b0235a5bc4c1ea293186bc6822a4c6607acfe1422423287e7400/cytoolz-1.1.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:ed6104fa942aa5784bf54f339563de637557e3443b105760bc4de8f16a7fc79b", size = 992336, upload-time = "2025-10-19T00:41:34.073Z" }, + { url = "https://files.pythonhosted.org/packages/e1/53/5f4deb0ff958805309d135d899c764364c1e8a632ce4994bd7c45fb98df2/cytoolz-1.1.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:56161f0ab60dc4159ec343509abaf809dc88e85c7e420e354442c62e3e7cbb77", size = 986118, upload-time = "2025-10-19T00:41:35.7Z" }, + { url = "https://files.pythonhosted.org/packages/1c/e3/f6255b76c8cc0debbe1c0779130777dc0434da6d9b28a90d9f76f8cb67cd/cytoolz-1.1.0-cp313-cp313-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:832bd36cc9123535f1945acf6921f8a2a15acc19cfe4065b1c9b985a28671886", size = 2679563, upload-time = "2025-10-19T00:41:37.926Z" }, + { url = "https://files.pythonhosted.org/packages/59/8a/acc6e39a84e930522b965586ad3a36694f9bf247b23188ee0eb47b1c9ed1/cytoolz-1.1.0-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:1842636b6e034f229bf084c2bcdcfd36c8437e752eefd2c74ce9e2f10415cb6e", size = 2813020, upload-time = "2025-10-19T00:41:39.935Z" }, + { url = "https://files.pythonhosted.org/packages/db/f5/0083608286ad1716eda7c41f868e85ac549f6fd6b7646993109fa0bdfd98/cytoolz-1.1.0-cp313-cp313-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:823df012ab90d2f2a0f92fea453528539bf71ac1879e518524cd0c86aa6df7b9", size = 2669312, upload-time = "2025-10-19T00:41:41.55Z" }, + { url = "https://files.pythonhosted.org/packages/47/a8/d16080b575520fe5da00cede1ece4e0a4180ec23f88dcdc6a2f5a90a7f7f/cytoolz-1.1.0-cp313-cp313-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:2f1fcf9e7e7b3487883ff3f815abc35b89dcc45c4cf81c72b7ee457aa72d197b", size = 2922147, upload-time 
= "2025-10-19T00:41:43.252Z" }, + { url = "https://files.pythonhosted.org/packages/7e/bc/716c9c1243701e58cad511eb3937fd550e645293c5ed1907639c5d66f194/cytoolz-1.1.0-cp313-cp313-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:4cdb3fa1772116827f263f25b0cdd44c663b6701346a56411960534a06c082de", size = 2981602, upload-time = "2025-10-19T00:41:45.354Z" }, + { url = "https://files.pythonhosted.org/packages/14/bc/571b232996846b27f4ac0c957dc8bf60261e9b4d0d01c8d955e82329544e/cytoolz-1.1.0-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:d1b5c95041741b81430454db65183e133976f45ac3c03454cfa8147952568529", size = 2830103, upload-time = "2025-10-19T00:41:47.959Z" }, + { url = "https://files.pythonhosted.org/packages/5b/55/c594afb46ecd78e4b7e1fb92c947ed041807875661ceda73baaf61baba4f/cytoolz-1.1.0-cp313-cp313-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:b2079fd9f1a65f4c61e6278c8a6d4f85edf30c606df8d5b32f1add88cbbe2286", size = 2533802, upload-time = "2025-10-19T00:41:49.683Z" }, + { url = "https://files.pythonhosted.org/packages/93/83/1edcf95832555a78fc43b975f3ebe8ceadcc9664dd47fd33747a14df5069/cytoolz-1.1.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:a92a320d72bef1c7e2d4c6d875125cf57fc38be45feb3fac1bfa64ea401f54a4", size = 2706071, upload-time = "2025-10-19T00:41:51.386Z" }, + { url = "https://files.pythonhosted.org/packages/e2/df/035a408df87f25cfe3611557818b250126cd2281b2104cd88395de205583/cytoolz-1.1.0-cp313-cp313-musllinux_1_2_armv7l.whl", hash = "sha256:06d1c79aa51e6a92a90b0e456ebce2288f03dd6a76c7f582bfaa3eda7692e8a5", size = 2707575, upload-time = "2025-10-19T00:41:53.305Z" }, + { url = "https://files.pythonhosted.org/packages/7a/a4/ef78e13e16e93bf695a9331321d75fbc834a088d941f1c19e6b63314e257/cytoolz-1.1.0-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:e1d7be25f6971e986a52b6d3a0da28e1941850985417c35528f6823aef2cfec5", size = 2660486, upload-time = 
"2025-10-19T00:41:55.542Z" }, + { url = "https://files.pythonhosted.org/packages/30/7a/2c3d60682b26058d435416c4e90d4a94db854de5be944dfd069ed1be648a/cytoolz-1.1.0-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:964b248edc31efc50a65e9eaa0c845718503823439d2fa5f8d2c7e974c2b5409", size = 2819605, upload-time = "2025-10-19T00:41:58.257Z" }, + { url = "https://files.pythonhosted.org/packages/45/92/19b722a1d83cc443fbc0c16e0dc376f8a451437890d3d9ee370358cf0709/cytoolz-1.1.0-cp313-cp313-musllinux_1_2_riscv64.whl", hash = "sha256:c9ff2b3c57c79b65cb5be14a18c6fd4a06d5036fb3f33e973a9f70e9ac13ca28", size = 2533559, upload-time = "2025-10-19T00:42:00.324Z" }, + { url = "https://files.pythonhosted.org/packages/1d/15/fa3b7891da51115204416f14192081d3dea0eaee091f123fdc1347de8dd1/cytoolz-1.1.0-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:22290b73086af600042d99f5ce52a43d4ad9872c382610413176e19fc1d4fd2d", size = 2839171, upload-time = "2025-10-19T00:42:01.881Z" }, + { url = "https://files.pythonhosted.org/packages/46/40/d3519d5cd86eebebf1e8b7174ec32dfb6ecec67b48b0cfb92bf226659b5a/cytoolz-1.1.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:a2ade74fccd080ea793382968913ee38d7a35c921df435bbf0a6aeecf0d17574", size = 2743379, upload-time = "2025-10-19T00:42:03.809Z" }, + { url = "https://files.pythonhosted.org/packages/93/e2/a9e7511f0a13fdbefa5bf73cf8e4763878140de9453fd3e50d6ac57b6be7/cytoolz-1.1.0-cp313-cp313-win32.whl", hash = "sha256:db5dbcfda1c00e937426cbf9bdc63c24ebbc358c3263bfcbc1ab4a88dc52aa8e", size = 900844, upload-time = "2025-10-19T00:42:05.967Z" }, + { url = "https://files.pythonhosted.org/packages/d6/a4/fb7eb403c6a4c81e5a30363f34a71adcc8bf5292dc8ea32e2440aa5668f2/cytoolz-1.1.0-cp313-cp313-win_amd64.whl", hash = "sha256:9e2d3fe3b45c3eb7233746f7aca37789be3dceec3e07dcc406d3e045ea0f7bdc", size = 946461, upload-time = "2025-10-19T00:42:07.983Z" }, + { url = 
"https://files.pythonhosted.org/packages/93/bb/1c8c33d353548d240bc6e8677ee8c3560ce5fa2f084e928facf7c35a6dcf/cytoolz-1.1.0-cp313-cp313-win_arm64.whl", hash = "sha256:32c559f95ff44a9ebcbd934acaa1e6dc8f3e6ffce4762a79a88528064873d6d5", size = 902673, upload-time = "2025-10-19T00:42:09.982Z" }, + { url = "https://files.pythonhosted.org/packages/c4/ba/4a53acc60f59030fcaf48c7766e3c4c81bd997379425aa45b129396557b5/cytoolz-1.1.0-cp313-cp313t-macosx_10_13_universal2.whl", hash = "sha256:9e2cd93b28f667c5870a070ab2b8bb4397470a85c4b204f2454b0ad001cd1ca3", size = 1372336, upload-time = "2025-10-19T00:42:12.104Z" }, + { url = "https://files.pythonhosted.org/packages/ac/90/f28fd8ad8319d8f5c8da69a2c29b8cf52a6d2c0161602d92b366d58926ab/cytoolz-1.1.0-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:f494124e141a9361f31d79875fe7ea459a3be2b9dadd90480427c0c52a0943d4", size = 1011930, upload-time = "2025-10-19T00:42:14.231Z" }, + { url = "https://files.pythonhosted.org/packages/c9/95/4561c4e0ad1c944f7673d6d916405d68080f10552cfc5d69a1cf2475a9a1/cytoolz-1.1.0-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:53a3262bf221f19437ed544bf8c0e1980c81ac8e2a53d87a9bc075dba943d36f", size = 1020610, upload-time = "2025-10-19T00:42:15.877Z" }, + { url = "https://files.pythonhosted.org/packages/c3/14/b2e1ffa4995ec36e1372e243411ff36325e4e6d7ffa34eb4098f5357d176/cytoolz-1.1.0-cp313-cp313t-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:47663e57d3f3f124921f38055e86a1022d0844c444ede2e8f090d3bbf80deb65", size = 2917327, upload-time = "2025-10-19T00:42:17.706Z" }, + { url = "https://files.pythonhosted.org/packages/4a/29/7cab6c609b4514ac84cca2f7dca6c509977a8fc16d27c3a50e97f105fa6a/cytoolz-1.1.0-cp313-cp313t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:a5a8755c4104ee4e3d5ba434c543b5f85fdee6a1f1df33d93f518294da793a60", size = 3108951, upload-time = "2025-10-19T00:42:19.363Z" }, + { url = 
"https://files.pythonhosted.org/packages/9a/71/1d1103b819458679277206ad07d78ca6b31c4bb88d6463fd193e19bfb270/cytoolz-1.1.0-cp313-cp313t-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:4d96ff3d381423af1b105295f97de86d1db51732c9566eb37378bab6670c5010", size = 2807149, upload-time = "2025-10-19T00:42:20.964Z" }, + { url = "https://files.pythonhosted.org/packages/1a/d4/3d83a05a21e7d2ed2b9e6daf489999c29934b005de9190272b8a2e3735d0/cytoolz-1.1.0-cp313-cp313t-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:0ec96b3d537cdf47d4e76ded199f7440715f4c71029b45445cff92c1248808c2", size = 3111608, upload-time = "2025-10-19T00:42:22.684Z" }, + { url = "https://files.pythonhosted.org/packages/51/88/96f68354c3d4af68de41f0db4fe41a23b96a50a4a416636cea325490cfeb/cytoolz-1.1.0-cp313-cp313t-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:208e2f2ef90a32b0acbff3303d90d89b13570a228d491d2e622a7883a3c68148", size = 3179373, upload-time = "2025-10-19T00:42:24.395Z" }, + { url = "https://files.pythonhosted.org/packages/ce/50/ed87a5cd8e6f27ffbb64c39e9730e18ec66c37631db2888ae711909f10c9/cytoolz-1.1.0-cp313-cp313t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:0d416a81bb0bd517558668e49d30a7475b5445f9bbafaab7dcf066f1e9adba36", size = 3003120, upload-time = "2025-10-19T00:42:26.18Z" }, + { url = "https://files.pythonhosted.org/packages/d3/a7/acde155b050d6eaa8e9c7845c98fc5fb28501568e78e83ebbf44f8855274/cytoolz-1.1.0-cp313-cp313t-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:f32e94c91ffe49af04835ee713ebd8e005c85ebe83e7e1fdcc00f27164c2d636", size = 2703225, upload-time = "2025-10-19T00:42:27.93Z" }, + { url = "https://files.pythonhosted.org/packages/1b/b6/9d518597c5bdea626b61101e8d2ff94124787a42259dafd9f5fc396f346a/cytoolz-1.1.0-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = 
"sha256:15d0c6405efc040499c46df44056a5c382f551a7624a41cf3e4c84a96b988a15", size = 2956033, upload-time = "2025-10-19T00:42:29.993Z" }, + { url = "https://files.pythonhosted.org/packages/89/7a/93e5f860926165538c85e1c5e1670ad3424f158df810f8ccd269da652138/cytoolz-1.1.0-cp313-cp313t-musllinux_1_2_armv7l.whl", hash = "sha256:bf069c5381d757debae891401b88b3a346ba3a28ca45ba9251103b282463fad8", size = 2862950, upload-time = "2025-10-19T00:42:31.803Z" }, + { url = "https://files.pythonhosted.org/packages/76/e6/99d6af00487bedc27597b54c9fcbfd5c833a69c6b7a9b9f0fff777bfc7aa/cytoolz-1.1.0-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:7d5cf15892e63411ec1bd67deff0e84317d974e6ab2cdfefdd4a7cea2989df66", size = 2861757, upload-time = "2025-10-19T00:42:33.625Z" }, + { url = "https://files.pythonhosted.org/packages/71/ca/adfa1fb7949478135a37755cb8e88c20cd6b75c22a05f1128f05f3ab2c60/cytoolz-1.1.0-cp313-cp313t-musllinux_1_2_ppc64le.whl", hash = "sha256:3e3872c21170f8341656f8692f8939e8800dcee6549ad2474d4c817bdefd62cd", size = 2979049, upload-time = "2025-10-19T00:42:35.377Z" }, + { url = "https://files.pythonhosted.org/packages/70/4c/7bf47a03a4497d500bc73d4204e2d907771a017fa4457741b2a1d7c09319/cytoolz-1.1.0-cp313-cp313t-musllinux_1_2_riscv64.whl", hash = "sha256:b9ddeff8e8fd65eb1fcefa61018100b2b627e759ea6ad275d2e2a93ffac147bf", size = 2699492, upload-time = "2025-10-19T00:42:37.133Z" }, + { url = "https://files.pythonhosted.org/packages/7e/e7/3d034b0e4817314f07aa465d5864e9b8df9d25cb260a53dd84583e491558/cytoolz-1.1.0-cp313-cp313t-musllinux_1_2_s390x.whl", hash = "sha256:02feeeda93e1fa3b33414eb57c2b0aefd1db8f558dd33fdfcce664a0f86056e4", size = 2995646, upload-time = "2025-10-19T00:42:38.912Z" }, + { url = "https://files.pythonhosted.org/packages/c1/62/be357181c71648d9fe1d1ce91cd42c63457dcf3c158e144416fd51dced83/cytoolz-1.1.0-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:d08154ad45349162b6c37f12d5d1b2e6eef338e657b85e1621e4e6a4a69d64cb", size = 2919481, upload-time = 
"2025-10-19T00:42:40.85Z" }, + { url = "https://files.pythonhosted.org/packages/62/d5/bf5434fde726c4f80cb99912b2d8e0afa1587557e2a2d7e0315eb942f2de/cytoolz-1.1.0-cp313-cp313t-win32.whl", hash = "sha256:10ae4718a056948d73ca3e1bb9ab1f95f897ec1e362f829b9d37cc29ab566c60", size = 951595, upload-time = "2025-10-19T00:42:42.877Z" }, + { url = "https://files.pythonhosted.org/packages/64/29/39c161e9204a9715321ddea698cbd0abc317e78522c7c642363c20589e71/cytoolz-1.1.0-cp313-cp313t-win_amd64.whl", hash = "sha256:1bb77bc6197e5cb19784b6a42bb0f8427e81737a630d9d7dda62ed31733f9e6c", size = 1004445, upload-time = "2025-10-19T00:42:44.855Z" }, + { url = "https://files.pythonhosted.org/packages/e2/5a/7cbff5e9a689f558cb0bdf277f9562b2ac51acf7cd15e055b8c3efb0e1ef/cytoolz-1.1.0-cp313-cp313t-win_arm64.whl", hash = "sha256:563dda652c6ff52d215704fbe6b491879b78d7bbbb3a9524ec8e763483cb459f", size = 926207, upload-time = "2025-10-19T00:42:46.456Z" }, + { url = "https://files.pythonhosted.org/packages/b7/e8/297a85ba700f437c01eba962428e6ab4572f6c3e68e8ff442ce5c9d3a496/cytoolz-1.1.0-cp314-cp314-ios_13_0_arm64_iphoneos.whl", hash = "sha256:d542cee7c7882d2a914a33dec4d3600416fb336734df979473249d4c53d207a1", size = 980613, upload-time = "2025-10-19T00:42:47.988Z" }, + { url = "https://files.pythonhosted.org/packages/e8/d7/2b02c9d18e9cc263a0e22690f78080809f1eafe72f26b29ccc115d3bf5c8/cytoolz-1.1.0-cp314-cp314-ios_13_0_arm64_iphonesimulator.whl", hash = "sha256:31922849b701b0f24bb62e56eb2488dcd3aa6ae3057694bd6b3b7c4c2bc27c2f", size = 990476, upload-time = "2025-10-19T00:42:49.653Z" }, + { url = "https://files.pythonhosted.org/packages/89/26/b6b159d2929310fca0eff8a4989cd4b1ecbdf7c46fdff46c7a20fcae55c8/cytoolz-1.1.0-cp314-cp314-ios_13_0_x86_64_iphonesimulator.whl", hash = "sha256:e68308d32afd31943314735c1335e4ab5696110e96b405f6bdb8f2a8dc771a16", size = 992712, upload-time = "2025-10-19T00:42:51.306Z" }, + { url = 
"https://files.pythonhosted.org/packages/42/a0/f7c572aa151ed466b0fce4a327c3cc916d3ef3c82e341be59ea4b9bee9e4/cytoolz-1.1.0-cp314-cp314-macosx_10_15_universal2.whl", hash = "sha256:fc4bb48b3b866e1867f7c6411a4229e5b44be3989060663713e10efc24c9bd5f", size = 1322596, upload-time = "2025-10-19T00:42:52.978Z" }, + { url = "https://files.pythonhosted.org/packages/72/7c/a55d035e20b77b6725e85c8f1a418b3a4c23967288b8b0c2d1a40f158cbe/cytoolz-1.1.0-cp314-cp314-macosx_10_15_x86_64.whl", hash = "sha256:456f77207d1445025d7ef262b8370a05492dcb1490cb428b0f3bf1bd744a89b0", size = 992825, upload-time = "2025-10-19T00:42:55.026Z" }, + { url = "https://files.pythonhosted.org/packages/03/af/39d2d3db322136e12e9336a1f13bab51eab88b386bfb11f91d3faff8ba34/cytoolz-1.1.0-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:174ebc71ebb20a9baeffce6ee07ee2cd913754325c93f99d767380d8317930f7", size = 990525, upload-time = "2025-10-19T00:42:56.666Z" }, + { url = "https://files.pythonhosted.org/packages/a6/bd/65d7a869d307f9b10ad45c2c1cbb40b81a8d0ed1138fa17fd904f5c83298/cytoolz-1.1.0-cp314-cp314-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:8b3604fef602bcd53415055a4f68468339192fd17be39e687ae24f476d23d56e", size = 2672409, upload-time = "2025-10-19T00:42:58.81Z" }, + { url = "https://files.pythonhosted.org/packages/2d/fb/74dfd844bfd67e810bd36e8e3903a143035447245828e7fcd7c81351d775/cytoolz-1.1.0-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:3604b959a01f64c366e7d10ec7634d5f5cfe10301e27a8f090f6eb3b2a628a18", size = 2808477, upload-time = "2025-10-19T00:43:00.577Z" }, + { url = "https://files.pythonhosted.org/packages/d6/1f/587686c43e31c19241ec317da66438d093523921ea7749bbc65558a30df9/cytoolz-1.1.0-cp314-cp314-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:6db2127a3c1bc2f59f08010d2ae53a760771a9de2f67423ad8d400e9ba4276e8", size = 2636881, upload-time = "2025-10-19T00:43:02.24Z" }, + { url = 
"https://files.pythonhosted.org/packages/bc/6d/90468cd34f77cb38a11af52c4dc6199efcc97a486395a21bef72e9b7602e/cytoolz-1.1.0-cp314-cp314-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:56584745ac647993a016a21bc76399113b7595e312f8d0a1b140c9fcf9b58a27", size = 2937315, upload-time = "2025-10-19T00:43:03.954Z" }, + { url = "https://files.pythonhosted.org/packages/d9/50/7b92cd78c613b92e3509e6291d3fb7e0d72ebda999a8df806a96c40ca9ab/cytoolz-1.1.0-cp314-cp314-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:db2c4c3a7f7bd7e03bb1a236a125c8feb86c75802f4ecda6ecfaf946610b2930", size = 2959988, upload-time = "2025-10-19T00:43:05.758Z" }, + { url = "https://files.pythonhosted.org/packages/44/d5/34b5a28a8d9bb329f984b4c2259407ca3f501d1abeb01bacea07937d85d1/cytoolz-1.1.0-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:48cb8a692111a285d2b9acd16d185428176bfbffa8a7c274308525fccd01dd42", size = 2795116, upload-time = "2025-10-19T00:43:07.411Z" }, + { url = "https://files.pythonhosted.org/packages/f5/d9/5dd829e33273ec03bdc3c812e6c3281987ae2c5c91645582f6c331544a64/cytoolz-1.1.0-cp314-cp314-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:d2f344ba5eb17dcf38ee37fdde726f69053f54927db8f8a1bed6ac61e5b1890d", size = 2535390, upload-time = "2025-10-19T00:43:09.104Z" }, + { url = "https://files.pythonhosted.org/packages/87/1f/7f9c58068a8eec2183110df051bc6b69dd621143f84473eeb6dc1b32905a/cytoolz-1.1.0-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:abf76b1c1abd031f098f293b6d90ee08bdaa45f8b5678430e331d991b82684b1", size = 2704834, upload-time = "2025-10-19T00:43:10.942Z" }, + { url = "https://files.pythonhosted.org/packages/d2/90/667def5665333575d01a65fe3ec0ca31b897895f6e3bc1a42d6ea3659369/cytoolz-1.1.0-cp314-cp314-musllinux_1_2_armv7l.whl", hash = "sha256:ddf9a38a5b686091265ff45b53d142e44a538cd6c2e70610d3bc6be094219032", size = 2658441, upload-time = 
"2025-10-19T00:43:12.655Z" }, + { url = "https://files.pythonhosted.org/packages/23/79/6615f9a14960bd29ac98b823777b6589357833f65cf1a11b5abc1587c120/cytoolz-1.1.0-cp314-cp314-musllinux_1_2_i686.whl", hash = "sha256:946786755274f07bb2be0400f28adb31d7d85a7c7001873c0a8e24a503428fb3", size = 2654766, upload-time = "2025-10-19T00:43:14.325Z" }, + { url = "https://files.pythonhosted.org/packages/b0/99/be59c6e0ae02153ef10ae1ff0f380fb19d973c651b50cf829a731f6c9e79/cytoolz-1.1.0-cp314-cp314-musllinux_1_2_ppc64le.whl", hash = "sha256:d5b8f78b9fed79cf185ad4ddec099abeef45951bdcb416c5835ba05f0a1242c7", size = 2827649, upload-time = "2025-10-19T00:43:16.132Z" }, + { url = "https://files.pythonhosted.org/packages/19/b7/854ddcf9f9618844108677c20d48f4611b5c636956adea0f0e85e027608f/cytoolz-1.1.0-cp314-cp314-musllinux_1_2_riscv64.whl", hash = "sha256:fccde6efefdbc02e676ccb352a2ccc8a8e929f59a1c6d3d60bb78e923a49ca44", size = 2533456, upload-time = "2025-10-19T00:43:17.764Z" }, + { url = "https://files.pythonhosted.org/packages/45/66/bfe6fbb2bdcf03c8377c8c2f542576e15f3340c905a09d78a6cb3badd39a/cytoolz-1.1.0-cp314-cp314-musllinux_1_2_s390x.whl", hash = "sha256:717b7775313da5f51b0fbf50d865aa9c39cb241bd4cb605df3cf2246d6567397", size = 2826455, upload-time = "2025-10-19T00:43:19.561Z" }, + { url = "https://files.pythonhosted.org/packages/c3/0c/cce4047bd927e95f59e73319c02c9bc86bd3d76392e0eb9e41a1147a479c/cytoolz-1.1.0-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:5158744a09d0e0e4a4f82225e3a3c4ebf38f9ae74467aaa905467270e52f2794", size = 2714897, upload-time = "2025-10-19T00:43:21.291Z" }, + { url = "https://files.pythonhosted.org/packages/ac/9a/061323bb289b565802bad14fb7ab59fcd8713105df142bcf4dd9ff64f8ac/cytoolz-1.1.0-cp314-cp314-win32.whl", hash = "sha256:1ed534bdbbf063b2bb28fca7d0f6723a3e5a72b086e7c7fe6d74ae8c3e4d00e2", size = 901490, upload-time = "2025-10-19T00:43:22.895Z" }, + { url = 
"https://files.pythonhosted.org/packages/a3/20/1f3a733d710d2a25d6f10b463bef55ada52fe6392a5d233c8d770191f48a/cytoolz-1.1.0-cp314-cp314-win_amd64.whl", hash = "sha256:472c1c9a085f5ad973ec0ad7f0b9ba0969faea6f96c9e397f6293d386f3a25ec", size = 946730, upload-time = "2025-10-19T00:43:24.838Z" }, + { url = "https://files.pythonhosted.org/packages/f2/22/2d657db4a5d1c10a152061800f812caba9ef20d7bd2406f51a5fd800c180/cytoolz-1.1.0-cp314-cp314-win_arm64.whl", hash = "sha256:a7ad7ca3386fa86bd301be3fa36e7f0acb024f412f665937955acfc8eb42deff", size = 905722, upload-time = "2025-10-19T00:43:26.439Z" }, + { url = "https://files.pythonhosted.org/packages/19/97/b4a8c76796a9a8b9bc90c7992840fa1589a1af8e0426562dea4ce9b384a7/cytoolz-1.1.0-cp314-cp314t-macosx_10_15_universal2.whl", hash = "sha256:64b63ed4b71b1ba813300ad0f06b8aff19a12cf51116e0e4f1ed837cea4debcf", size = 1372606, upload-time = "2025-10-19T00:43:28.491Z" }, + { url = "https://files.pythonhosted.org/packages/08/d4/a1bb1a32b454a2d650db8374ff3bf875ba0fc1c36e6446ec02a83b9140a1/cytoolz-1.1.0-cp314-cp314t-macosx_10_15_x86_64.whl", hash = "sha256:a60ba6f2ed9eb0003a737e1ee1e9fa2258e749da6477946008d4324efa25149f", size = 1012189, upload-time = "2025-10-19T00:43:30.177Z" }, + { url = "https://files.pythonhosted.org/packages/21/4b/2f5cbbd81588918ee7dd70cffb66731608f578a9b72166aafa991071af7d/cytoolz-1.1.0-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:1aa58e2434d732241f7f051e6f17657e969a89971025e24578b5cbc6f1346485", size = 1020624, upload-time = "2025-10-19T00:43:31.712Z" }, + { url = "https://files.pythonhosted.org/packages/f5/99/c4954dd86cd593cd776a038b36795a259b8b5c12cbab6363edf5f6d9c909/cytoolz-1.1.0-cp314-cp314t-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:6965af3fc7214645970e312deb9bd35a213a1eaabcfef4f39115e60bf2f76867", size = 2917016, upload-time = "2025-10-19T00:43:33.531Z" }, + { url = 
"https://files.pythonhosted.org/packages/b2/7c/f1f70a17e272b433232bc8a27df97e46b202d6cc07e3b0d63f7f41ba0f2d/cytoolz-1.1.0-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:ddd2863f321d67527d3b67a93000a378ad6f967056f68c06467fe011278a6d0e", size = 3107634, upload-time = "2025-10-19T00:43:35.57Z" }, + { url = "https://files.pythonhosted.org/packages/8f/bd/c3226a57474b4aef1f90040510cba30d0decd3515fed48dc229b37c2f898/cytoolz-1.1.0-cp314-cp314t-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:4e6b428e9eb5126053c2ae0efa62512ff4b38ed3951f4d0888ca7005d63e56f5", size = 2806221, upload-time = "2025-10-19T00:43:37.707Z" }, + { url = "https://files.pythonhosted.org/packages/c3/47/2f7bfe4aaa1e07dc9828bea228ed744faf73b26aee0c1bdf3b5520bf1909/cytoolz-1.1.0-cp314-cp314t-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:d758e5ef311d2671e0ae8c214c52e44617cf1e58bef8f022b547b9802a5a7f30", size = 3107671, upload-time = "2025-10-19T00:43:39.401Z" }, + { url = "https://files.pythonhosted.org/packages/4d/12/6ff3b04fbd1369d0fcd5f8b5910ba6e427e33bf113754c4c35ec3f747924/cytoolz-1.1.0-cp314-cp314t-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:a95416eca473e6c1179b48d86adcf528b59c63ce78f4cb9934f2e413afa9b56b", size = 3176350, upload-time = "2025-10-19T00:43:41.148Z" }, + { url = "https://files.pythonhosted.org/packages/e6/8c/6691d986b728e77b5d2872743ebcd962d37a2d0f7e9ad95a81b284fbf905/cytoolz-1.1.0-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:36c8ede93525cf11e2cc787b7156e5cecd7340193ef800b816a16f1404a8dc6d", size = 3001173, upload-time = "2025-10-19T00:43:42.923Z" }, + { url = "https://files.pythonhosted.org/packages/7a/cb/f59d83a5058e1198db5a1f04e4a124c94d60390e4fa89b6d2e38ee8288a0/cytoolz-1.1.0-cp314-cp314t-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = 
"sha256:c0c949755b6d8a649c5fbc888bc30915926f1b09fe42fea9f289e297c2f6ddd3", size = 2701374, upload-time = "2025-10-19T00:43:44.716Z" }, + { url = "https://files.pythonhosted.org/packages/b7/f0/1ae6d28df503b0bdae094879da2072b8ba13db5919cd3798918761578411/cytoolz-1.1.0-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:e1b6d37545816905a76d9ed59fa4e332f929e879f062a39ea0f6f620405cdc27", size = 2953081, upload-time = "2025-10-19T00:43:47.103Z" }, + { url = "https://files.pythonhosted.org/packages/f4/06/d86fe811c6222dc32d3e08f5d88d2be598a6055b4d0590e7c1428d55c386/cytoolz-1.1.0-cp314-cp314t-musllinux_1_2_armv7l.whl", hash = "sha256:05332112d4087904842b36954cd1d3fc0e463a2f4a7ef9477bd241427c593c3b", size = 2862228, upload-time = "2025-10-19T00:43:49.353Z" }, + { url = "https://files.pythonhosted.org/packages/ae/32/978ef6f42623be44a0a03ae9de875ab54aa26c7e38c5c4cd505460b0927d/cytoolz-1.1.0-cp314-cp314t-musllinux_1_2_i686.whl", hash = "sha256:31538ca2fad2d688cbd962ccc3f1da847329e2258a52940f10a2ac0719e526be", size = 2861971, upload-time = "2025-10-19T00:43:51.028Z" }, + { url = "https://files.pythonhosted.org/packages/ee/f7/74c69497e756b752b359925d1feef68b91df024a4124a823740f675dacd3/cytoolz-1.1.0-cp314-cp314t-musllinux_1_2_ppc64le.whl", hash = "sha256:747562aa70abf219ea16f07d50ac0157db856d447f7f498f592e097cbc77df0b", size = 2975304, upload-time = "2025-10-19T00:43:52.99Z" }, + { url = "https://files.pythonhosted.org/packages/5b/2b/3ce0e6889a6491f3418ad4d84ae407b8456b02169a5a1f87990dbba7433b/cytoolz-1.1.0-cp314-cp314t-musllinux_1_2_riscv64.whl", hash = "sha256:3dc15c48b20c0f467e15e341e102896c8422dccf8efc6322def5c1b02f074629", size = 2697371, upload-time = "2025-10-19T00:43:55.312Z" }, + { url = "https://files.pythonhosted.org/packages/15/87/c616577f0891d97860643c845f7221e95240aa589586de727e28a5eb6e52/cytoolz-1.1.0-cp314-cp314t-musllinux_1_2_s390x.whl", hash = "sha256:3c03137ee6103ba92d5d6ad6a510e86fded69cd67050bd8a1843f15283be17ac", size = 2992436, upload-time = 
"2025-10-19T00:43:57.253Z" }, + { url = "https://files.pythonhosted.org/packages/e7/9f/490c81bffb3428ab1fa114051fbb5ba18aaa2e2fe4da5bf4170ca524e6b3/cytoolz-1.1.0-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:be8e298d88f88bd172b59912240558be3b7a04959375646e7fd4996401452941", size = 2917612, upload-time = "2025-10-19T00:43:59.423Z" }, + { url = "https://files.pythonhosted.org/packages/66/35/0fec2769660ca6472bbf3317ab634675827bb706d193e3240aaf20eab961/cytoolz-1.1.0-cp314-cp314t-win32.whl", hash = "sha256:3d407140f5604a89578285d4aac7b18b8eafa055cf776e781aabb89c48738fad", size = 960842, upload-time = "2025-10-19T00:44:01.143Z" }, + { url = "https://files.pythonhosted.org/packages/46/b4/b7ce3d3cd20337becfec978ecfa6d0ef64884d0cf32d44edfed8700914b9/cytoolz-1.1.0-cp314-cp314t-win_amd64.whl", hash = "sha256:56e5afb69eb6e1b3ffc34716ee5f92ffbdb5cb003b3a5ca4d4b0fe700e217162", size = 1020835, upload-time = "2025-10-19T00:44:03.246Z" }, + { url = "https://files.pythonhosted.org/packages/2c/1f/0498009aa563a9c5d04f520aadc6e1c0942434d089d0b2f51ea986470f55/cytoolz-1.1.0-cp314-cp314t-win_arm64.whl", hash = "sha256:27b19b4a286b3ff52040efa42dbe403730aebe5fdfd2def704eb285e2125c63e", size = 927963, upload-time = "2025-10-19T00:44:04.85Z" }, +] + +[[package]] +name = "debugpy" +version = "1.8.11" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/bc/e7/666f4c9b0e24796af50aadc28d36d21c2e01e831a934535f956e09b3650c/debugpy-1.8.11.tar.gz", hash = "sha256:6ad2688b69235c43b020e04fecccdf6a96c8943ca9c2fb340b8adc103c655e57", size = 1640124, upload-time = "2024-12-13T17:21:07.233Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/c6/ae/2cf26f3111e9d94384d9c01e9d6170188b0aeda15b60a4ac6457f7c8a26f/debugpy-1.8.11-cp312-cp312-macosx_14_0_universal2.whl", hash = "sha256:84e511a7545d11683d32cdb8f809ef63fc17ea2a00455cc62d0a4dbb4ed1c308", size = 2498756, upload-time = "2024-12-13T17:21:35.856Z" }, + { url = 
"https://files.pythonhosted.org/packages/b0/16/ec551789d547541a46831a19aa15c147741133da188e7e6acf77510545a7/debugpy-1.8.11-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ce291a5aca4985d82875d6779f61375e959208cdf09fcec40001e65fb0a54768", size = 4219136, upload-time = "2024-12-13T17:21:37.526Z" }, + { url = "https://files.pythonhosted.org/packages/72/6f/b2b3ce673c55f882d27a6eb04a5f0c68bcad6b742ac08a86d8392ae58030/debugpy-1.8.11-cp312-cp312-win32.whl", hash = "sha256:28e45b3f827d3bf2592f3cf7ae63282e859f3259db44ed2b129093ca0ac7940b", size = 5224440, upload-time = "2024-12-13T17:21:41.033Z" }, + { url = "https://files.pythonhosted.org/packages/77/09/b1f05be802c1caef5b3efc042fc6a7cadd13d8118b072afd04a9b9e91e06/debugpy-1.8.11-cp312-cp312-win_amd64.whl", hash = "sha256:44b1b8e6253bceada11f714acf4309ffb98bfa9ac55e4fce14f9e5d4484287a1", size = 5264578, upload-time = "2024-12-13T17:21:44.242Z" }, + { url = "https://files.pythonhosted.org/packages/2e/66/931dc2479aa8fbf362dc6dcee707d895a84b0b2d7b64020135f20b8db1ed/debugpy-1.8.11-cp313-cp313-macosx_14_0_universal2.whl", hash = "sha256:8988f7163e4381b0da7696f37eec7aca19deb02e500245df68a7159739bbd0d3", size = 2483651, upload-time = "2024-12-13T17:21:47.315Z" }, + { url = "https://files.pythonhosted.org/packages/10/07/6c171d0fe6b8d237e35598b742f20ba062511b3a4631938cc78eefbbf847/debugpy-1.8.11-cp313-cp313-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6c1f6a173d1140e557347419767d2b14ac1c9cd847e0b4c5444c7f3144697e4e", size = 4213770, upload-time = "2024-12-13T17:21:49.073Z" }, + { url = "https://files.pythonhosted.org/packages/89/f1/0711da6ac250d4fe3bf7b3e9b14b4a86e82a98b7825075c07e19bab8da3d/debugpy-1.8.11-cp313-cp313-win32.whl", hash = "sha256:bb3b15e25891f38da3ca0740271e63ab9db61f41d4d8541745cfc1824252cb28", size = 5223911, upload-time = "2024-12-13T17:21:51.534Z" }, + { url = 
"https://files.pythonhosted.org/packages/56/98/5e27fa39050749ed460025bcd0034a0a5e78a580a14079b164cc3abdeb98/debugpy-1.8.11-cp313-cp313-win_amd64.whl", hash = "sha256:d8768edcbeb34da9e11bcb8b5c2e0958d25218df7a6e56adf415ef262cd7b6d1", size = 5264166, upload-time = "2024-12-13T17:21:53.504Z" }, + { url = "https://files.pythonhosted.org/packages/77/0a/d29a5aacf47b4383ed569b8478c02d59ee3a01ad91224d2cff8562410e43/debugpy-1.8.11-py2.py3-none-any.whl", hash = "sha256:0e22f846f4211383e6a416d04b4c13ed174d24cc5d43f5fd52e7821d0ebc8920", size = 5226874, upload-time = "2024-12-13T17:22:15.097Z" }, +] + +[[package]] +name = "decorator" +version = "5.2.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/43/fa/6d96a0978d19e17b68d634497769987b16c8f4cd0a7a05048bec693caa6b/decorator-5.2.1.tar.gz", hash = "sha256:65f266143752f734b0a7cc83c46f4618af75b8c5911b00ccb61d0ac9b6da0360", size = 56711, upload-time = "2025-02-24T04:41:34.073Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/4e/8c/f3147f5c4b73e7550fe5f9352eaa956ae838d5c51eb58e7a25b9f3e2643b/decorator-5.2.1-py3-none-any.whl", hash = "sha256:d316bb415a2d9e2d2b3abcc4084c6502fc09240e292cd76a76afc106a1c8e04a", size = 9190, upload-time = "2025-02-24T04:41:32.565Z" }, +] + +[[package]] +name = "ecdsa" +version = "0.19.2" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "six" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/25/ca/8de7744cb3bc966c85430ca2d0fcaeea872507c6a4cf6e007f7fe269ed9d/ecdsa-0.19.2.tar.gz", hash = "sha256:62635b0ac1ca2e027f82122b5b81cb706edc38cd91c63dda28e4f3455a2bf930", size = 202432, upload-time = "2026-03-26T09:58:17.675Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/51/79/119091c98e2bf49e24ed9f3ae69f816d715d2904aefa6a2baa039a2ba0b0/ecdsa-0.19.2-py2.py3-none-any.whl", hash = "sha256:840f5dc5e375c68f36c1a7a5b9caad28f95daa65185c9253c0c08dd952bb7399", size = 150818, upload-time 
= "2026-03-26T09:58:15.808Z" }, +] + +[[package]] +name = "eth-hash" +version = "0.8.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/3c/f5/c67fc24f2f676aa9b7ab29679d44f113f314c817207cd4319353356f62da/eth_hash-0.8.0.tar.gz", hash = "sha256:b009752b620da2e9c7668014849d1f5fadbe4f138603f1871cc5d4ca706896b1", size = 12225, upload-time = "2026-03-25T16:36:55.099Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/87/87/b36792150ca0b28e4df683a34be15a61461ca0e349e5b5cf3ec8f694edb9/eth_hash-0.8.0-py3-none-any.whl", hash = "sha256:523718a51b369ab89866b929a5c93c52978cd866ea309192ad980dd8271f9fac", size = 7965, upload-time = "2026-03-25T16:36:54.205Z" }, +] + +[[package]] +name = "eth-keys" +version = "0.7.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "eth-typing" }, + { name = "eth-utils" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/58/11/1ed831c50bd74f57829aa06e58bd82a809c37e070ee501c953b9ac1f1552/eth_keys-0.7.0.tar.gz", hash = "sha256:79d24fd876201df67741de3e3fefb3f4dbcbb6ace66e47e6fe662851a4547814", size = 30166, upload-time = "2025-04-07T17:40:21.697Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/4d/25/0ae00f2b0095e559d61ad3dc32171bd5a29dfd95ab04b4edd641f7c75f72/eth_keys-0.7.0-py3-none-any.whl", hash = "sha256:b0cdda8ffe8e5ba69c7c5ca33f153828edcace844f67aabd4542d7de38b159cf", size = 20656, upload-time = "2025-04-07T17:40:20.441Z" }, +] + +[[package]] +name = "eth-typing" +version = "6.0.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "typing-extensions" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/37/e7/06c5af99ad40494f6d10126a9030ff4eb14c5b773f2a4076017efb0a163a/eth_typing-6.0.0.tar.gz", hash = "sha256:315dd460dc0b71c15a6cd51e3c0b70d237eec8771beb844144f3a1fb4adb2392", size = 21852, upload-time = "2026-03-25T16:41:57.444Z" } +wheels = [ + { url = 
"https://files.pythonhosted.org/packages/aa/0d/e756622fab29f404d846d7464f929d642a7ee6eff5b38bcc79e7c64ac630/eth_typing-6.0.0-py3-none-any.whl", hash = "sha256:ee74fb641eb36dd885e1c42c2a3055314efa532b3e71480816df70a94d35cfb9", size = 19191, upload-time = "2026-03-25T16:41:55.544Z" }, +] + +[[package]] +name = "eth-utils" +version = "5.3.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "cytoolz", marker = "implementation_name == 'cpython'" }, + { name = "eth-hash" }, + { name = "eth-typing" }, + { name = "pydantic" }, + { name = "toolz", marker = "implementation_name == 'pypy'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/e6/e1/ee3a8728227c3558853e63ff35bd4c449abdf5022a19601369400deacd39/eth_utils-5.3.1.tar.gz", hash = "sha256:c94e2d2abd024a9a42023b4ddc1c645814ff3d6a737b33d5cfd890ebf159c2d1", size = 123506, upload-time = "2025-08-27T16:37:17.378Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/bf/4d/257cdc01ada430b8e84b9f2385c2553f33218f5b47da9adf0a616308d4b7/eth_utils-5.3.1-py3-none-any.whl", hash = "sha256:1f5476d8f29588d25b8ae4987e1ffdfae6d4c09026e476c4aad13b32dda3ead0", size = 102529, upload-time = "2025-08-27T16:37:15.449Z" }, +] + +[[package]] +name = "fastapi" +version = "0.110.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "pydantic" }, + { name = "starlette" }, + { name = "typing-extensions" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/6d/f7/c6a62efe7f0ff70721392592bde14c69651868dca198d7ace51a1019b479/fastapi-0.110.1.tar.gz", hash = "sha256:6feac43ec359dfe4f45b2c18ec8c94edb8dc2dfc461d417d9e626590c071baad", size = 11977267, upload-time = "2024-04-02T03:22:01.022Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/c0/c1/2dc286475c8e2e455e431a1cf1cf29662c9f9290434161088ba039d77481/fastapi-0.110.1-py3-none-any.whl", hash = "sha256:5df913203c482f820d31f48e635e022f8cbfe7350e4830ef05a3163925b1addc", size = 91876, upload-time = 
"2024-04-02T03:21:56.886Z" }, +] + +[[package]] +name = "frozenlist" +version = "1.8.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/2d/f5/c831fac6cc817d26fd54c7eaccd04ef7e0288806943f7cc5bbf69f3ac1f0/frozenlist-1.8.0.tar.gz", hash = "sha256:3ede829ed8d842f6cd48fc7081d7a41001a56f1f38603f9d49bf3020d59a31ad", size = 45875, upload-time = "2025-10-06T05:38:17.865Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/69/29/948b9aa87e75820a38650af445d2ef2b6b8a6fab1a23b6bb9e4ef0be2d59/frozenlist-1.8.0-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:78f7b9e5d6f2fdb88cdde9440dc147259b62b9d3b019924def9f6478be254ac1", size = 87782, upload-time = "2025-10-06T05:36:06.649Z" }, + { url = "https://files.pythonhosted.org/packages/64/80/4f6e318ee2a7c0750ed724fa33a4bdf1eacdc5a39a7a24e818a773cd91af/frozenlist-1.8.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:229bf37d2e4acdaf808fd3f06e854a4a7a3661e871b10dc1f8f1896a3b05f18b", size = 50594, upload-time = "2025-10-06T05:36:07.69Z" }, + { url = "https://files.pythonhosted.org/packages/2b/94/5c8a2b50a496b11dd519f4a24cb5496cf125681dd99e94c604ccdea9419a/frozenlist-1.8.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:f833670942247a14eafbb675458b4e61c82e002a148f49e68257b79296e865c4", size = 50448, upload-time = "2025-10-06T05:36:08.78Z" }, + { url = "https://files.pythonhosted.org/packages/6a/bd/d91c5e39f490a49df14320f4e8c80161cfcce09f1e2cde1edd16a551abb3/frozenlist-1.8.0-cp312-cp312-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:494a5952b1c597ba44e0e78113a7266e656b9794eec897b19ead706bd7074383", size = 242411, upload-time = "2025-10-06T05:36:09.801Z" }, + { url = "https://files.pythonhosted.org/packages/8f/83/f61505a05109ef3293dfb1ff594d13d64a2324ac3482be2cedc2be818256/frozenlist-1.8.0-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = 
"sha256:96f423a119f4777a4a056b66ce11527366a8bb92f54e541ade21f2374433f6d4", size = 243014, upload-time = "2025-10-06T05:36:11.394Z" }, + { url = "https://files.pythonhosted.org/packages/d8/cb/cb6c7b0f7d4023ddda30cf56b8b17494eb3a79e3fda666bf735f63118b35/frozenlist-1.8.0-cp312-cp312-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:3462dd9475af2025c31cc61be6652dfa25cbfb56cbbf52f4ccfe029f38decaf8", size = 234909, upload-time = "2025-10-06T05:36:12.598Z" }, + { url = "https://files.pythonhosted.org/packages/31/c5/cd7a1f3b8b34af009fb17d4123c5a778b44ae2804e3ad6b86204255f9ec5/frozenlist-1.8.0-cp312-cp312-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:c4c800524c9cd9bac5166cd6f55285957fcfc907db323e193f2afcd4d9abd69b", size = 250049, upload-time = "2025-10-06T05:36:14.065Z" }, + { url = "https://files.pythonhosted.org/packages/c0/01/2f95d3b416c584a1e7f0e1d6d31998c4a795f7544069ee2e0962a4b60740/frozenlist-1.8.0-cp312-cp312-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:d6a5df73acd3399d893dafc71663ad22534b5aa4f94e8a2fabfe856c3c1b6a52", size = 256485, upload-time = "2025-10-06T05:36:15.39Z" }, + { url = "https://files.pythonhosted.org/packages/ce/03/024bf7720b3abaebcff6d0793d73c154237b85bdf67b7ed55e5e9596dc9a/frozenlist-1.8.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:405e8fe955c2280ce66428b3ca55e12b3c4e9c336fb2103a4937e891c69a4a29", size = 237619, upload-time = "2025-10-06T05:36:16.558Z" }, + { url = "https://files.pythonhosted.org/packages/69/fa/f8abdfe7d76b731f5d8bd217827cf6764d4f1d9763407e42717b4bed50a0/frozenlist-1.8.0-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:908bd3f6439f2fef9e85031b59fd4f1297af54415fb60e4254a95f75b3cab3f3", size = 250320, upload-time = "2025-10-06T05:36:17.821Z" }, + { url = 
"https://files.pythonhosted.org/packages/f5/3c/b051329f718b463b22613e269ad72138cc256c540f78a6de89452803a47d/frozenlist-1.8.0-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:294e487f9ec720bd8ffcebc99d575f7eff3568a08a253d1ee1a0378754b74143", size = 246820, upload-time = "2025-10-06T05:36:19.046Z" }, + { url = "https://files.pythonhosted.org/packages/0f/ae/58282e8f98e444b3f4dd42448ff36fa38bef29e40d40f330b22e7108f565/frozenlist-1.8.0-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:74c51543498289c0c43656701be6b077f4b265868fa7f8a8859c197006efb608", size = 250518, upload-time = "2025-10-06T05:36:20.763Z" }, + { url = "https://files.pythonhosted.org/packages/8f/96/007e5944694d66123183845a106547a15944fbbb7154788cbf7272789536/frozenlist-1.8.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:776f352e8329135506a1d6bf16ac3f87bc25b28e765949282dcc627af36123aa", size = 239096, upload-time = "2025-10-06T05:36:22.129Z" }, + { url = "https://files.pythonhosted.org/packages/66/bb/852b9d6db2fa40be96f29c0d1205c306288f0684df8fd26ca1951d461a56/frozenlist-1.8.0-cp312-cp312-win32.whl", hash = "sha256:433403ae80709741ce34038da08511d4a77062aa924baf411ef73d1146e74faf", size = 39985, upload-time = "2025-10-06T05:36:23.661Z" }, + { url = "https://files.pythonhosted.org/packages/b8/af/38e51a553dd66eb064cdf193841f16f077585d4d28394c2fa6235cb41765/frozenlist-1.8.0-cp312-cp312-win_amd64.whl", hash = "sha256:34187385b08f866104f0c0617404c8eb08165ab1272e884abc89c112e9c00746", size = 44591, upload-time = "2025-10-06T05:36:24.958Z" }, + { url = "https://files.pythonhosted.org/packages/a7/06/1dc65480ab147339fecc70797e9c2f69d9cea9cf38934ce08df070fdb9cb/frozenlist-1.8.0-cp312-cp312-win_arm64.whl", hash = "sha256:fe3c58d2f5db5fbd18c2987cba06d51b0529f52bc3a6cdc33d3f4eab725104bd", size = 40102, upload-time = "2025-10-06T05:36:26.333Z" }, + { url = 
"https://files.pythonhosted.org/packages/2d/40/0832c31a37d60f60ed79e9dfb5a92e1e2af4f40a16a29abcc7992af9edff/frozenlist-1.8.0-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:8d92f1a84bb12d9e56f818b3a746f3efba93c1b63c8387a73dde655e1e42282a", size = 85717, upload-time = "2025-10-06T05:36:27.341Z" }, + { url = "https://files.pythonhosted.org/packages/30/ba/b0b3de23f40bc55a7057bd38434e25c34fa48e17f20ee273bbde5e0650f3/frozenlist-1.8.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:96153e77a591c8adc2ee805756c61f59fef4cf4073a9275ee86fe8cba41241f7", size = 49651, upload-time = "2025-10-06T05:36:28.855Z" }, + { url = "https://files.pythonhosted.org/packages/0c/ab/6e5080ee374f875296c4243c381bbdef97a9ac39c6e3ce1d5f7d42cb78d6/frozenlist-1.8.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:f21f00a91358803399890ab167098c131ec2ddd5f8f5fd5fe9c9f2c6fcd91e40", size = 49417, upload-time = "2025-10-06T05:36:29.877Z" }, + { url = "https://files.pythonhosted.org/packages/d5/4e/e4691508f9477ce67da2015d8c00acd751e6287739123113a9fca6f1604e/frozenlist-1.8.0-cp313-cp313-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:fb30f9626572a76dfe4293c7194a09fb1fe93ba94c7d4f720dfae3b646b45027", size = 234391, upload-time = "2025-10-06T05:36:31.301Z" }, + { url = "https://files.pythonhosted.org/packages/40/76/c202df58e3acdf12969a7895fd6f3bc016c642e6726aa63bd3025e0fc71c/frozenlist-1.8.0-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:eaa352d7047a31d87dafcacbabe89df0aa506abb5b1b85a2fb91bc3faa02d822", size = 233048, upload-time = "2025-10-06T05:36:32.531Z" }, + { url = "https://files.pythonhosted.org/packages/f9/c0/8746afb90f17b73ca5979c7a3958116e105ff796e718575175319b5bb4ce/frozenlist-1.8.0-cp313-cp313-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:03ae967b4e297f58f8c774c7eabcce57fe3c2434817d4385c50661845a058121", size = 226549, upload-time = 
"2025-10-06T05:36:33.706Z" }, + { url = "https://files.pythonhosted.org/packages/7e/eb/4c7eefc718ff72f9b6c4893291abaae5fbc0c82226a32dcd8ef4f7a5dbef/frozenlist-1.8.0-cp313-cp313-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:f6292f1de555ffcc675941d65fffffb0a5bcd992905015f85d0592201793e0e5", size = 239833, upload-time = "2025-10-06T05:36:34.947Z" }, + { url = "https://files.pythonhosted.org/packages/c2/4e/e5c02187cf704224f8b21bee886f3d713ca379535f16893233b9d672ea71/frozenlist-1.8.0-cp313-cp313-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:29548f9b5b5e3460ce7378144c3010363d8035cea44bc0bf02d57f5a685e084e", size = 245363, upload-time = "2025-10-06T05:36:36.534Z" }, + { url = "https://files.pythonhosted.org/packages/1f/96/cb85ec608464472e82ad37a17f844889c36100eed57bea094518bf270692/frozenlist-1.8.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:ec3cc8c5d4084591b4237c0a272cc4f50a5b03396a47d9caaf76f5d7b38a4f11", size = 229314, upload-time = "2025-10-06T05:36:38.582Z" }, + { url = "https://files.pythonhosted.org/packages/5d/6f/4ae69c550e4cee66b57887daeebe006fe985917c01d0fff9caab9883f6d0/frozenlist-1.8.0-cp313-cp313-musllinux_1_2_armv7l.whl", hash = "sha256:517279f58009d0b1f2e7c1b130b377a349405da3f7621ed6bfae50b10adf20c1", size = 243365, upload-time = "2025-10-06T05:36:40.152Z" }, + { url = "https://files.pythonhosted.org/packages/7a/58/afd56de246cf11780a40a2c28dc7cbabbf06337cc8ddb1c780a2d97e88d8/frozenlist-1.8.0-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:db1e72ede2d0d7ccb213f218df6a078a9c09a7de257c2fe8fcef16d5925230b1", size = 237763, upload-time = "2025-10-06T05:36:41.355Z" }, + { url = "https://files.pythonhosted.org/packages/cb/36/cdfaf6ed42e2644740d4a10452d8e97fa1c062e2a8006e4b09f1b5fd7d63/frozenlist-1.8.0-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:b4dec9482a65c54a5044486847b8a66bf10c9cb4926d42927ec4e8fd5db7fed8", size = 240110, upload-time = 
"2025-10-06T05:36:42.716Z" }, + { url = "https://files.pythonhosted.org/packages/03/a8/9ea226fbefad669f11b52e864c55f0bd57d3c8d7eb07e9f2e9a0b39502e1/frozenlist-1.8.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:21900c48ae04d13d416f0e1e0c4d81f7931f73a9dfa0b7a8746fb2fe7dd970ed", size = 233717, upload-time = "2025-10-06T05:36:44.251Z" }, + { url = "https://files.pythonhosted.org/packages/1e/0b/1b5531611e83ba7d13ccc9988967ea1b51186af64c42b7a7af465dcc9568/frozenlist-1.8.0-cp313-cp313-win32.whl", hash = "sha256:8b7b94a067d1c504ee0b16def57ad5738701e4ba10cec90529f13fa03c833496", size = 39628, upload-time = "2025-10-06T05:36:45.423Z" }, + { url = "https://files.pythonhosted.org/packages/d8/cf/174c91dbc9cc49bc7b7aab74d8b734e974d1faa8f191c74af9b7e80848e6/frozenlist-1.8.0-cp313-cp313-win_amd64.whl", hash = "sha256:878be833caa6a3821caf85eb39c5ba92d28e85df26d57afb06b35b2efd937231", size = 43882, upload-time = "2025-10-06T05:36:46.796Z" }, + { url = "https://files.pythonhosted.org/packages/c1/17/502cd212cbfa96eb1388614fe39a3fc9ab87dbbe042b66f97acb57474834/frozenlist-1.8.0-cp313-cp313-win_arm64.whl", hash = "sha256:44389d135b3ff43ba8cc89ff7f51f5a0bb6b63d829c8300f79a2fe4fe61bcc62", size = 39676, upload-time = "2025-10-06T05:36:47.8Z" }, + { url = "https://files.pythonhosted.org/packages/d2/5c/3bbfaa920dfab09e76946a5d2833a7cbdf7b9b4a91c714666ac4855b88b4/frozenlist-1.8.0-cp313-cp313t-macosx_10_13_universal2.whl", hash = "sha256:e25ac20a2ef37e91c1b39938b591457666a0fa835c7783c3a8f33ea42870db94", size = 89235, upload-time = "2025-10-06T05:36:48.78Z" }, + { url = "https://files.pythonhosted.org/packages/d2/d6/f03961ef72166cec1687e84e8925838442b615bd0b8854b54923ce5b7b8a/frozenlist-1.8.0-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:07cdca25a91a4386d2e76ad992916a85038a9b97561bf7a3fd12d5d9ce31870c", size = 50742, upload-time = "2025-10-06T05:36:49.837Z" }, + { url = 
"https://files.pythonhosted.org/packages/1e/bb/a6d12b7ba4c3337667d0e421f7181c82dda448ce4e7ad7ecd249a16fa806/frozenlist-1.8.0-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:4e0c11f2cc6717e0a741f84a527c52616140741cd812a50422f83dc31749fb52", size = 51725, upload-time = "2025-10-06T05:36:50.851Z" }, + { url = "https://files.pythonhosted.org/packages/bc/71/d1fed0ffe2c2ccd70b43714c6cab0f4188f09f8a67a7914a6b46ee30f274/frozenlist-1.8.0-cp313-cp313t-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:b3210649ee28062ea6099cfda39e147fa1bc039583c8ee4481cb7811e2448c51", size = 284533, upload-time = "2025-10-06T05:36:51.898Z" }, + { url = "https://files.pythonhosted.org/packages/c9/1f/fb1685a7b009d89f9bf78a42d94461bc06581f6e718c39344754a5d9bada/frozenlist-1.8.0-cp313-cp313t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:581ef5194c48035a7de2aefc72ac6539823bb71508189e5de01d60c9dcd5fa65", size = 292506, upload-time = "2025-10-06T05:36:53.101Z" }, + { url = "https://files.pythonhosted.org/packages/e6/3b/b991fe1612703f7e0d05c0cf734c1b77aaf7c7d321df4572e8d36e7048c8/frozenlist-1.8.0-cp313-cp313t-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:3ef2d026f16a2b1866e1d86fc4e1291e1ed8a387b2c333809419a2f8b3a77b82", size = 274161, upload-time = "2025-10-06T05:36:54.309Z" }, + { url = "https://files.pythonhosted.org/packages/ca/ec/c5c618767bcdf66e88945ec0157d7f6c4a1322f1473392319b7a2501ded7/frozenlist-1.8.0-cp313-cp313t-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:5500ef82073f599ac84d888e3a8c1f77ac831183244bfd7f11eaa0289fb30714", size = 294676, upload-time = "2025-10-06T05:36:55.566Z" }, + { url = "https://files.pythonhosted.org/packages/7c/ce/3934758637d8f8a88d11f0585d6495ef54b2044ed6ec84492a91fa3b27aa/frozenlist-1.8.0-cp313-cp313t-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = 
"sha256:50066c3997d0091c411a66e710f4e11752251e6d2d73d70d8d5d4c76442a199d", size = 300638, upload-time = "2025-10-06T05:36:56.758Z" }, + { url = "https://files.pythonhosted.org/packages/fc/4f/a7e4d0d467298f42de4b41cbc7ddaf19d3cfeabaf9ff97c20c6c7ee409f9/frozenlist-1.8.0-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:5c1c8e78426e59b3f8005e9b19f6ff46e5845895adbde20ece9218319eca6506", size = 283067, upload-time = "2025-10-06T05:36:57.965Z" }, + { url = "https://files.pythonhosted.org/packages/dc/48/c7b163063d55a83772b268e6d1affb960771b0e203b632cfe09522d67ea5/frozenlist-1.8.0-cp313-cp313t-musllinux_1_2_armv7l.whl", hash = "sha256:eefdba20de0d938cec6a89bd4d70f346a03108a19b9df4248d3cf0d88f1b0f51", size = 292101, upload-time = "2025-10-06T05:36:59.237Z" }, + { url = "https://files.pythonhosted.org/packages/9f/d0/2366d3c4ecdc2fd391e0afa6e11500bfba0ea772764d631bbf82f0136c9d/frozenlist-1.8.0-cp313-cp313t-musllinux_1_2_ppc64le.whl", hash = "sha256:cf253e0e1c3ceb4aaff6df637ce033ff6535fb8c70a764a8f46aafd3d6ab798e", size = 289901, upload-time = "2025-10-06T05:37:00.811Z" }, + { url = "https://files.pythonhosted.org/packages/b8/94/daff920e82c1b70e3618a2ac39fbc01ae3e2ff6124e80739ce5d71c9b920/frozenlist-1.8.0-cp313-cp313t-musllinux_1_2_s390x.whl", hash = "sha256:032efa2674356903cd0261c4317a561a6850f3ac864a63fc1583147fb05a79b0", size = 289395, upload-time = "2025-10-06T05:37:02.115Z" }, + { url = "https://files.pythonhosted.org/packages/e3/20/bba307ab4235a09fdcd3cc5508dbabd17c4634a1af4b96e0f69bfe551ebd/frozenlist-1.8.0-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:6da155091429aeba16851ecb10a9104a108bcd32f6c1642867eadaee401c1c41", size = 283659, upload-time = "2025-10-06T05:37:03.711Z" }, + { url = "https://files.pythonhosted.org/packages/fd/00/04ca1c3a7a124b6de4f8a9a17cc2fcad138b4608e7a3fc5877804b8715d7/frozenlist-1.8.0-cp313-cp313t-win32.whl", hash = "sha256:0f96534f8bfebc1a394209427d0f8a63d343c9779cda6fc25e8e121b5fd8555b", size = 43492, upload-time = 
"2025-10-06T05:37:04.915Z" }, + { url = "https://files.pythonhosted.org/packages/59/5e/c69f733a86a94ab10f68e496dc6b7e8bc078ebb415281d5698313e3af3a1/frozenlist-1.8.0-cp313-cp313t-win_amd64.whl", hash = "sha256:5d63a068f978fc69421fb0e6eb91a9603187527c86b7cd3f534a5b77a592b888", size = 48034, upload-time = "2025-10-06T05:37:06.343Z" }, + { url = "https://files.pythonhosted.org/packages/16/6c/be9d79775d8abe79b05fa6d23da99ad6e7763a1d080fbae7290b286093fd/frozenlist-1.8.0-cp313-cp313t-win_arm64.whl", hash = "sha256:bf0a7e10b077bf5fb9380ad3ae8ce20ef919a6ad93b4552896419ac7e1d8e042", size = 41749, upload-time = "2025-10-06T05:37:07.431Z" }, + { url = "https://files.pythonhosted.org/packages/f1/c8/85da824b7e7b9b6e7f7705b2ecaf9591ba6f79c1177f324c2735e41d36a2/frozenlist-1.8.0-cp314-cp314-macosx_10_13_universal2.whl", hash = "sha256:cee686f1f4cadeb2136007ddedd0aaf928ab95216e7691c63e50a8ec066336d0", size = 86127, upload-time = "2025-10-06T05:37:08.438Z" }, + { url = "https://files.pythonhosted.org/packages/8e/e8/a1185e236ec66c20afd72399522f142c3724c785789255202d27ae992818/frozenlist-1.8.0-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:119fb2a1bd47307e899c2fac7f28e85b9a543864df47aa7ec9d3c1b4545f096f", size = 49698, upload-time = "2025-10-06T05:37:09.48Z" }, + { url = "https://files.pythonhosted.org/packages/a1/93/72b1736d68f03fda5fdf0f2180fb6caaae3894f1b854d006ac61ecc727ee/frozenlist-1.8.0-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:4970ece02dbc8c3a92fcc5228e36a3e933a01a999f7094ff7c23fbd2beeaa67c", size = 49749, upload-time = "2025-10-06T05:37:10.569Z" }, + { url = "https://files.pythonhosted.org/packages/a7/b2/fabede9fafd976b991e9f1b9c8c873ed86f202889b864756f240ce6dd855/frozenlist-1.8.0-cp314-cp314-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:cba69cb73723c3f329622e34bdbf5ce1f80c21c290ff04256cff1cd3c2036ed2", size = 231298, upload-time = "2025-10-06T05:37:11.993Z" }, + { url = 
"https://files.pythonhosted.org/packages/3a/3b/d9b1e0b0eed36e70477ffb8360c49c85c8ca8ef9700a4e6711f39a6e8b45/frozenlist-1.8.0-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:778a11b15673f6f1df23d9586f83c4846c471a8af693a22e066508b77d201ec8", size = 232015, upload-time = "2025-10-06T05:37:13.194Z" }, + { url = "https://files.pythonhosted.org/packages/dc/94/be719d2766c1138148564a3960fc2c06eb688da592bdc25adcf856101be7/frozenlist-1.8.0-cp314-cp314-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:0325024fe97f94c41c08872db482cf8ac4800d80e79222c6b0b7b162d5b13686", size = 225038, upload-time = "2025-10-06T05:37:14.577Z" }, + { url = "https://files.pythonhosted.org/packages/e4/09/6712b6c5465f083f52f50cf74167b92d4ea2f50e46a9eea0523d658454ae/frozenlist-1.8.0-cp314-cp314-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:97260ff46b207a82a7567b581ab4190bd4dfa09f4db8a8b49d1a958f6aa4940e", size = 240130, upload-time = "2025-10-06T05:37:15.781Z" }, + { url = "https://files.pythonhosted.org/packages/f8/d4/cd065cdcf21550b54f3ce6a22e143ac9e4836ca42a0de1022da8498eac89/frozenlist-1.8.0-cp314-cp314-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:54b2077180eb7f83dd52c40b2750d0a9f175e06a42e3213ce047219de902717a", size = 242845, upload-time = "2025-10-06T05:37:17.037Z" }, + { url = "https://files.pythonhosted.org/packages/62/c3/f57a5c8c70cd1ead3d5d5f776f89d33110b1addae0ab010ad774d9a44fb9/frozenlist-1.8.0-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:2f05983daecab868a31e1da44462873306d3cbfd76d1f0b5b69c473d21dbb128", size = 229131, upload-time = "2025-10-06T05:37:18.221Z" }, + { url = "https://files.pythonhosted.org/packages/6c/52/232476fe9cb64f0742f3fde2b7d26c1dac18b6d62071c74d4ded55e0ef94/frozenlist-1.8.0-cp314-cp314-musllinux_1_2_armv7l.whl", hash = 
"sha256:33f48f51a446114bc5d251fb2954ab0164d5be02ad3382abcbfe07e2531d650f", size = 240542, upload-time = "2025-10-06T05:37:19.771Z" }, + { url = "https://files.pythonhosted.org/packages/5f/85/07bf3f5d0fb5414aee5f47d33c6f5c77bfe49aac680bfece33d4fdf6a246/frozenlist-1.8.0-cp314-cp314-musllinux_1_2_ppc64le.whl", hash = "sha256:154e55ec0655291b5dd1b8731c637ecdb50975a2ae70c606d100750a540082f7", size = 237308, upload-time = "2025-10-06T05:37:20.969Z" }, + { url = "https://files.pythonhosted.org/packages/11/99/ae3a33d5befd41ac0ca2cc7fd3aa707c9c324de2e89db0e0f45db9a64c26/frozenlist-1.8.0-cp314-cp314-musllinux_1_2_s390x.whl", hash = "sha256:4314debad13beb564b708b4a496020e5306c7333fa9a3ab90374169a20ffab30", size = 238210, upload-time = "2025-10-06T05:37:22.252Z" }, + { url = "https://files.pythonhosted.org/packages/b2/60/b1d2da22f4970e7a155f0adde9b1435712ece01b3cd45ba63702aea33938/frozenlist-1.8.0-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:073f8bf8becba60aa931eb3bc420b217bb7d5b8f4750e6f8b3be7f3da85d38b7", size = 231972, upload-time = "2025-10-06T05:37:23.5Z" }, + { url = "https://files.pythonhosted.org/packages/3f/ab/945b2f32de889993b9c9133216c068b7fcf257d8595a0ac420ac8677cab0/frozenlist-1.8.0-cp314-cp314-win32.whl", hash = "sha256:bac9c42ba2ac65ddc115d930c78d24ab8d4f465fd3fc473cdedfccadb9429806", size = 40536, upload-time = "2025-10-06T05:37:25.581Z" }, + { url = "https://files.pythonhosted.org/packages/59/ad/9caa9b9c836d9ad6f067157a531ac48b7d36499f5036d4141ce78c230b1b/frozenlist-1.8.0-cp314-cp314-win_amd64.whl", hash = "sha256:3e0761f4d1a44f1d1a47996511752cf3dcec5bbdd9cc2b4fe595caf97754b7a0", size = 44330, upload-time = "2025-10-06T05:37:26.928Z" }, + { url = "https://files.pythonhosted.org/packages/82/13/e6950121764f2676f43534c555249f57030150260aee9dcf7d64efda11dd/frozenlist-1.8.0-cp314-cp314-win_arm64.whl", hash = "sha256:d1eaff1d00c7751b7c6662e9c5ba6eb2c17a2306ba5e2a37f24ddf3cc953402b", size = 40627, upload-time = "2025-10-06T05:37:28.075Z" }, + { url = 
"https://files.pythonhosted.org/packages/c0/c7/43200656ecc4e02d3f8bc248df68256cd9572b3f0017f0a0c4e93440ae23/frozenlist-1.8.0-cp314-cp314t-macosx_10_13_universal2.whl", hash = "sha256:d3bb933317c52d7ea5004a1c442eef86f426886fba134ef8cf4226ea6ee1821d", size = 89238, upload-time = "2025-10-06T05:37:29.373Z" }, + { url = "https://files.pythonhosted.org/packages/d1/29/55c5f0689b9c0fb765055629f472c0de484dcaf0acee2f7707266ae3583c/frozenlist-1.8.0-cp314-cp314t-macosx_10_13_x86_64.whl", hash = "sha256:8009897cdef112072f93a0efdce29cd819e717fd2f649ee3016efd3cd885a7ed", size = 50738, upload-time = "2025-10-06T05:37:30.792Z" }, + { url = "https://files.pythonhosted.org/packages/ba/7d/b7282a445956506fa11da8c2db7d276adcbf2b17d8bb8407a47685263f90/frozenlist-1.8.0-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:2c5dcbbc55383e5883246d11fd179782a9d07a986c40f49abe89ddf865913930", size = 51739, upload-time = "2025-10-06T05:37:32.127Z" }, + { url = "https://files.pythonhosted.org/packages/62/1c/3d8622e60d0b767a5510d1d3cf21065b9db874696a51ea6d7a43180a259c/frozenlist-1.8.0-cp314-cp314t-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:39ecbc32f1390387d2aa4f5a995e465e9e2f79ba3adcac92d68e3e0afae6657c", size = 284186, upload-time = "2025-10-06T05:37:33.21Z" }, + { url = "https://files.pythonhosted.org/packages/2d/14/aa36d5f85a89679a85a1d44cd7a6657e0b1c75f61e7cad987b203d2daca8/frozenlist-1.8.0-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:92db2bf818d5cc8d9c1f1fc56b897662e24ea5adb36ad1f1d82875bd64e03c24", size = 292196, upload-time = "2025-10-06T05:37:36.107Z" }, + { url = "https://files.pythonhosted.org/packages/05/23/6bde59eb55abd407d34f77d39a5126fb7b4f109a3f611d3929f14b700c66/frozenlist-1.8.0-cp314-cp314t-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:2dc43a022e555de94c3b68a4ef0b11c4f747d12c024a520c7101709a2144fb37", size = 273830, upload-time = 
"2025-10-06T05:37:37.663Z" }, + { url = "https://files.pythonhosted.org/packages/d2/3f/22cff331bfad7a8afa616289000ba793347fcd7bc275f3b28ecea2a27909/frozenlist-1.8.0-cp314-cp314t-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:cb89a7f2de3602cfed448095bab3f178399646ab7c61454315089787df07733a", size = 294289, upload-time = "2025-10-06T05:37:39.261Z" }, + { url = "https://files.pythonhosted.org/packages/a4/89/5b057c799de4838b6c69aa82b79705f2027615e01be996d2486a69ca99c4/frozenlist-1.8.0-cp314-cp314t-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:33139dc858c580ea50e7e60a1b0ea003efa1fd42e6ec7fdbad78fff65fad2fd2", size = 300318, upload-time = "2025-10-06T05:37:43.213Z" }, + { url = "https://files.pythonhosted.org/packages/30/de/2c22ab3eb2a8af6d69dc799e48455813bab3690c760de58e1bf43b36da3e/frozenlist-1.8.0-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:168c0969a329b416119507ba30b9ea13688fafffac1b7822802537569a1cb0ef", size = 282814, upload-time = "2025-10-06T05:37:45.337Z" }, + { url = "https://files.pythonhosted.org/packages/59/f7/970141a6a8dbd7f556d94977858cfb36fa9b66e0892c6dd780d2219d8cd8/frozenlist-1.8.0-cp314-cp314t-musllinux_1_2_armv7l.whl", hash = "sha256:28bd570e8e189d7f7b001966435f9dac6718324b5be2990ac496cf1ea9ddb7fe", size = 291762, upload-time = "2025-10-06T05:37:46.657Z" }, + { url = "https://files.pythonhosted.org/packages/c1/15/ca1adae83a719f82df9116d66f5bb28bb95557b3951903d39135620ef157/frozenlist-1.8.0-cp314-cp314t-musllinux_1_2_ppc64le.whl", hash = "sha256:b2a095d45c5d46e5e79ba1e5b9cb787f541a8dee0433836cea4b96a2c439dcd8", size = 289470, upload-time = "2025-10-06T05:37:47.946Z" }, + { url = "https://files.pythonhosted.org/packages/ac/83/dca6dc53bf657d371fbc88ddeb21b79891e747189c5de990b9dfff2ccba1/frozenlist-1.8.0-cp314-cp314t-musllinux_1_2_s390x.whl", hash = "sha256:eab8145831a0d56ec9c4139b6c3e594c7a83c2c8be25d5bcf2d86136a532287a", size = 289042, upload-time = 
"2025-10-06T05:37:49.499Z" }, + { url = "https://files.pythonhosted.org/packages/96/52/abddd34ca99be142f354398700536c5bd315880ed0a213812bc491cff5e4/frozenlist-1.8.0-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:974b28cf63cc99dfb2188d8d222bc6843656188164848c4f679e63dae4b0708e", size = 283148, upload-time = "2025-10-06T05:37:50.745Z" }, + { url = "https://files.pythonhosted.org/packages/af/d3/76bd4ed4317e7119c2b7f57c3f6934aba26d277acc6309f873341640e21f/frozenlist-1.8.0-cp314-cp314t-win32.whl", hash = "sha256:342c97bf697ac5480c0a7ec73cd700ecfa5a8a40ac923bd035484616efecc2df", size = 44676, upload-time = "2025-10-06T05:37:52.222Z" }, + { url = "https://files.pythonhosted.org/packages/89/76/c615883b7b521ead2944bb3480398cbb07e12b7b4e4d073d3752eb721558/frozenlist-1.8.0-cp314-cp314t-win_amd64.whl", hash = "sha256:06be8f67f39c8b1dc671f5d83aaefd3358ae5cdcf8314552c57e7ed3e6475bdd", size = 49451, upload-time = "2025-10-06T05:37:53.425Z" }, + { url = "https://files.pythonhosted.org/packages/e0/a3/5982da14e113d07b325230f95060e2169f5311b1017ea8af2a29b374c289/frozenlist-1.8.0-cp314-cp314t-win_arm64.whl", hash = "sha256:102e6314ca4da683dca92e3b1355490fed5f313b768500084fbe6371fddfdb79", size = 42507, upload-time = "2025-10-06T05:37:54.513Z" }, + { url = "https://files.pythonhosted.org/packages/9a/9a/e35b4a917281c0b8419d4207f4334c8e8c5dbf4f3f5f9ada73958d937dcc/frozenlist-1.8.0-py3-none-any.whl", hash = "sha256:0c18a16eab41e82c295618a77502e17b195883241c563b00f0aa5106fc4eaa0d", size = 13409, upload-time = "2025-10-06T05:38:16.721Z" }, +] + +[[package]] +name = "gitdb" +version = "4.0.12" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "smmap" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/72/94/63b0fc47eb32792c7ba1fe1b694daec9a63620db1e313033d18140c2320a/gitdb-4.0.12.tar.gz", hash = "sha256:5ef71f855d191a3326fcfbc0d5da835f26b13fbcba60c32c21091c349ffdb571", size = 394684, upload-time = "2025-01-02T07:20:46.413Z" } +wheels = [ + { 
url = "https://files.pythonhosted.org/packages/a0/61/5c78b91c3143ed5c14207f463aecfc8f9dbb5092fb2869baf37c273b2705/gitdb-4.0.12-py3-none-any.whl", hash = "sha256:67073e15955400952c6565cc3e707c554a4eea2e428946f7a4c162fab9bd9bcf", size = 62794, upload-time = "2025-01-02T07:20:43.624Z" }, +] + +[[package]] +name = "gitpython" +version = "3.1.46" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "gitdb" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/df/b5/59d16470a1f0dfe8c793f9ef56fd3826093fc52b3bd96d6b9d6c26c7e27b/gitpython-3.1.46.tar.gz", hash = "sha256:400124c7d0ef4ea03f7310ac2fbf7151e09ff97f2a3288d64a440c584a29c37f", size = 215371, upload-time = "2026-01-01T15:37:32.073Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/6a/09/e21df6aef1e1ffc0c816f0522ddc3f6dcded766c3261813131c78a704470/gitpython-3.1.46-py3-none-any.whl", hash = "sha256:79812ed143d9d25b6d176a10bb511de0f9c67b1fa641d82097b0ab90398a2058", size = 208620, upload-time = "2026-01-01T15:37:30.574Z" }, +] + +[[package]] +name = "gittensor" +version = "5.0.0" +source = { editable = "." 
} +dependencies = [ + { name = "bittensor" }, + { name = "bittensor-cli" }, + { name = "bittensor-commit-reveal" }, + { name = "bittensor-wallet" }, + { name = "click" }, + { name = "levenshtein" }, + { name = "psycopg2-binary" }, + { name = "python-dotenv" }, + { name = "pytz" }, + { name = "rich" }, + { name = "substrate-interface" }, + { name = "tree-sitter" }, + { name = "tree-sitter-language-pack" }, + { name = "wandb" }, +] + +[package.optional-dependencies] +debug = [ + { name = "debugpy" }, + { name = "fastapi" }, + { name = "uvicorn" }, +] +dev = [ + { name = "pyright" }, + { name = "pytest" }, + { name = "ruff" }, +] + +[package.metadata] +requires-dist = [ + { name = "bittensor", specifier = "==10.0.1" }, + { name = "bittensor-cli", specifier = "==9.17.0" }, + { name = "bittensor-commit-reveal", specifier = "==0.4.0" }, + { name = "bittensor-wallet", specifier = "==4.0.0" }, + { name = "click" }, + { name = "debugpy", marker = "extra == 'debug'", specifier = "==1.8.11" }, + { name = "fastapi", marker = "extra == 'debug'", specifier = "==0.110.1" }, + { name = "levenshtein", specifier = "==0.27.3" }, + { name = "psycopg2-binary", specifier = "==2.9.10" }, + { name = "pyright", marker = "extra == 'dev'" }, + { name = "pytest", marker = "extra == 'dev'", specifier = "==9.0.0" }, + { name = "python-dotenv", specifier = "==1.2.1" }, + { name = "pytz", specifier = "==2025.2" }, + { name = "rich" }, + { name = "ruff", marker = "extra == 'dev'", specifier = "==0.14.10" }, + { name = "substrate-interface" }, + { name = "tree-sitter", specifier = "==0.24.0" }, + { name = "tree-sitter-language-pack", specifier = "==0.7.2" }, + { name = "uvicorn", marker = "extra == 'debug'", specifier = "==0.32.0" }, + { name = "wandb", specifier = "==0.21.3" }, +] +provides-extras = ["dev", "debug"] + +[[package]] +name = "h11" +version = "0.16.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = 
"https://files.pythonhosted.org/packages/01/ee/02a2c011bdab74c6fb3c75474d40b3052059d95df7e73351460c8588d963/h11-0.16.0.tar.gz", hash = "sha256:4e35b956cf45792e4caa5885e69fba00bdbc6ffafbfa020300e549b208ee5ff1", size = 101250, upload-time = "2025-04-24T03:35:25.427Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/04/4b/29cac41a4d98d144bf5f6d33995617b185d14b22401f75ca86f384e87ff1/h11-0.16.0-py3-none-any.whl", hash = "sha256:63cf8bbe7522de3bf65932fda1d9c2772064ffb3dae62d55932da54b31cb6c86", size = 37515, upload-time = "2025-04-24T03:35:24.344Z" }, +] + +[[package]] +name = "idna" +version = "3.11" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/6f/6d/0703ccc57f3a7233505399edb88de3cbd678da106337b9fcde432b65ed60/idna-3.11.tar.gz", hash = "sha256:795dafcc9c04ed0c1fb032c2aa73654d8e8c5023a7df64a53f39190ada629902", size = 194582, upload-time = "2025-10-12T14:55:20.501Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/0e/61/66938bbb5fc52dbdf84594873d5b51fb1f7c7794e9c0f5bd885f30bc507b/idna-3.11-py3-none-any.whl", hash = "sha256:771a87f49d9defaf64091e6e6fe9c18d4833f140bd19464795bc32d966ca37ea", size = 71008, upload-time = "2025-10-12T14:55:18.883Z" }, +] + +[[package]] +name = "iniconfig" +version = "2.3.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/72/34/14ca021ce8e5dfedc35312d08ba8bf51fdd999c576889fc2c24cb97f4f10/iniconfig-2.3.0.tar.gz", hash = "sha256:c76315c77db068650d49c5b56314774a7804df16fee4402c1f19d6d15d8c4730", size = 20503, upload-time = "2025-10-18T21:55:43.219Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/cb/b1/3846dd7f199d53cb17f49cba7e651e9ce294d8497c8c150530ed11865bb8/iniconfig-2.3.0-py3-none-any.whl", hash = "sha256:f631c04d2c48c52b84d0d0549c99ff3859c98df65b3101406327ecc7d53fbf12", size = 7484, upload-time = "2025-10-18T21:55:41.639Z" }, +] + +[[package]] +name = "jinja2" +version = 
"3.1.6" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "markupsafe" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/df/bf/f7da0350254c0ed7c72f3e33cef02e048281fec7ecec5f032d4aac52226b/jinja2-3.1.6.tar.gz", hash = "sha256:0137fb05990d35f1275a587e9aee6d56da821fc83491a0fb838183be43f66d6d", size = 245115, upload-time = "2025-03-05T20:05:02.478Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/62/a1/3d680cbfd5f4b8f15abc1d571870c5fc3e594bb582bc3b64ea099db13e56/jinja2-3.1.6-py3-none-any.whl", hash = "sha256:85ece4451f492d0c13c5dd7c13a64681a86afae63a5f347908daf103ce6d2f67", size = 134899, upload-time = "2025-03-05T20:05:00.369Z" }, +] + +[[package]] +name = "levenshtein" +version = "0.27.3" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "rapidfuzz" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/82/56/dcf68853b062e3b94bdc3d011cc4198779abc5b9dc134146a062920ce2e2/levenshtein-0.27.3.tar.gz", hash = "sha256:1ac326b2c84215795163d8a5af471188918b8797b4953ec87aaba22c9c1f9fc0", size = 393269, upload-time = "2025-11-01T12:14:31.04Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/7c/8e/3be9d8e0245704e3af5258fb6cb157c3d59902e1351e95edf6ed8a8c0434/levenshtein-0.27.3-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:2de7f095b0ca8e44de9de986ccba661cd0dec3511c751b499e76b60da46805e9", size = 169622, upload-time = "2025-11-01T12:13:10.026Z" }, + { url = "https://files.pythonhosted.org/packages/a6/42/a2b2fda5e8caf6ecd5aac142f946a77574a3961e65da62c12fd7e48e5cb1/levenshtein-0.27.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:d9b8b29e5d5145a3c958664c85151b1bb4b26e4ca764380b947e6a96a321217c", size = 159183, upload-time = "2025-11-01T12:13:11.197Z" }, + { url = "https://files.pythonhosted.org/packages/eb/c4/f083fabbd61c449752df1746533538f4a8629e8811931b52f66e6c4290ad/levenshtein-0.27.3-cp312-cp312-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = 
"sha256:fc975465a51b1c5889eadee1a583b81fba46372b4b22df28973e49e8ddb8f54a", size = 133120, upload-time = "2025-11-01T12:13:12.363Z" }, + { url = "https://files.pythonhosted.org/packages/4e/e5/b6421e04cb0629615b8efd6d4d167dd2b1afb5097b87bb83cd992004dcca/levenshtein-0.27.3-cp312-cp312-manylinux_2_24_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:57573ed885118554770979fdee584071b66103f6d50beddeabb54607a1213d81", size = 114988, upload-time = "2025-11-01T12:13:13.486Z" }, + { url = "https://files.pythonhosted.org/packages/e5/77/39ee0e8d3028e90178e1031530ccc98563f8f2f0d905ec784669dcf0fa90/levenshtein-0.27.3-cp312-cp312-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:23aff800a6dd5d91bb3754a6092085aa7ad46b28e497682c155c74f681cfaa2d", size = 153346, upload-time = "2025-11-01T12:13:14.744Z" }, + { url = "https://files.pythonhosted.org/packages/3c/0d/c0f367bbd260dbd7a4e134fd21f459e0f5eac43deac507952b46a1d8a93a/levenshtein-0.27.3-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:c08a952432b8ad9dccb145f812176db94c52cda732311ddc08d29fd3bf185b0a", size = 1114538, upload-time = "2025-11-01T12:13:15.851Z" }, + { url = "https://files.pythonhosted.org/packages/d8/ef/ae71433f7b4db0bd2af7974785e36cdec899919203fb82e647c5a6109c07/levenshtein-0.27.3-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:3bfcb2d78ab9cc06a1e75da8fcfb7a430fe513d66cfe54c07e50f32805e5e6db", size = 1009734, upload-time = "2025-11-01T12:13:17.212Z" }, + { url = "https://files.pythonhosted.org/packages/27/dc/62c28b812dcb0953fc32ab7adf3d0e814e43c8560bb28d9269a44d874adf/levenshtein-0.27.3-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:ba7235f6dcb31a217247468295e2dd4c6c1d3ac81629dc5d355d93e1a5f4c185", size = 1185581, upload-time = "2025-11-01T12:13:18.661Z" }, + { url = "https://files.pythonhosted.org/packages/56/e8/2e7ab9c565793220edb8e5432f9a846386a157075bdd032a90e9585bce38/levenshtein-0.27.3-cp312-cp312-win32.whl", hash = 
"sha256:ea80d70f1d18c161a209be556b9094968627cbaae620e102459ef9c320a98cbb", size = 84660, upload-time = "2025-11-01T12:13:19.87Z" }, + { url = "https://files.pythonhosted.org/packages/2c/a6/907a1fc8587dc91c40156973e09d106ab064c06eb28dc4700ba0fe54d654/levenshtein-0.27.3-cp312-cp312-win_amd64.whl", hash = "sha256:fbaa1219d9b2d955339a37e684256a861e9274a3fe3a6ee1b8ea8724c3231ed9", size = 94909, upload-time = "2025-11-01T12:13:21.323Z" }, + { url = "https://files.pythonhosted.org/packages/d5/d6/e04f0ddf6a71df3cdd1817b71703490ac874601ed460b2af172d3752c321/levenshtein-0.27.3-cp312-cp312-win_arm64.whl", hash = "sha256:2edbaa84f887ea1d9d8e4440af3fdda44769a7855d581c6248d7ee51518402a8", size = 87358, upload-time = "2025-11-01T12:13:22.393Z" }, + { url = "https://files.pythonhosted.org/packages/3e/f2/162e9ea7490b36bbf05776c8e3a8114c75aa78546ddda8e8f36731db3da6/levenshtein-0.27.3-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:e55aa9f9453fd89d4a9ff1f3c4a650b307d5f61a7eed0568a52fbd2ff2eba107", size = 169230, upload-time = "2025-11-01T12:13:23.735Z" }, + { url = "https://files.pythonhosted.org/packages/01/2d/7316ba7f94e3d60e89bd120526bc71e4812866bb7162767a2a10f73f72c5/levenshtein-0.27.3-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:ae4d484453c48939ecd01c5c213530c68dd5cd6e5090f0091ef69799ec7a8a9f", size = 158643, upload-time = "2025-11-01T12:13:25.549Z" }, + { url = "https://files.pythonhosted.org/packages/5e/87/85433cb1e51c45016f061d96fea3106b6969f700e2cbb56c15de82d0deeb/levenshtein-0.27.3-cp313-cp313-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:d18659832567ee387b266be390da0de356a3aa6cf0e8bc009b6042d8188e131f", size = 132881, upload-time = "2025-11-01T12:13:26.822Z" }, + { url = "https://files.pythonhosted.org/packages/40/1c/3ce66c9a7da169a43dd89146d69df9dec935e6f86c70c6404f48d1291d2c/levenshtein-0.27.3-cp313-cp313-manylinux_2_24_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:027b3d142cc8ea2ab4e60444d7175f65a94dde22a54382b2f7b47cc24936eb53", size 
= 114650, upload-time = "2025-11-01T12:13:28.382Z" }, + { url = "https://files.pythonhosted.org/packages/73/60/7138e98884ca105c76ef192f5b43165d6eac6f32b432853ebe9f09ee50c9/levenshtein-0.27.3-cp313-cp313-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:ffdca6989368cc64f347f0423c528520f12775b812e170a0eb0c10e4c9b0f3ff", size = 153127, upload-time = "2025-11-01T12:13:29.781Z" }, + { url = "https://files.pythonhosted.org/packages/df/8f/664ac8b83026d7d1382866b68babae17e92b7b6ff8dc3c6205c0066b8ce1/levenshtein-0.27.3-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:fa00ab389386032b02a1c9050ec3c6aa824d2bbcc692548fdc44a46b71c058c6", size = 1114602, upload-time = "2025-11-01T12:13:31.651Z" }, + { url = "https://files.pythonhosted.org/packages/2c/c8/8905d96cf2d7ed6af7eb39a8be0925ef335729473c1e9d1f56230ecaffc5/levenshtein-0.27.3-cp313-cp313-musllinux_1_2_armv7l.whl", hash = "sha256:691c9003c6c481b899a5c2f72e8ce05a6d956a9668dc75f2a3ce9f4381a76dc6", size = 1008036, upload-time = "2025-11-01T12:13:33.006Z" }, + { url = "https://files.pythonhosted.org/packages/c7/57/01c37608121380a6357a297625562adad1c1fc8058d4f62279b735108927/levenshtein-0.27.3-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:12f7fc8bf0c24492fe97905348e020b55b9fc6dbaab7cd452566d1a466cb5e15", size = 1185338, upload-time = "2025-11-01T12:13:34.452Z" }, + { url = "https://files.pythonhosted.org/packages/dd/57/bceab41d40b58dee7927a8d1d18ed3bff7c95c5e530fb60093ce741a8c26/levenshtein-0.27.3-cp313-cp313-win32.whl", hash = "sha256:9f4872e4e19ee48eed39f214eea4eca42e5ef303f8a4a488d8312370674dbf3a", size = 84562, upload-time = "2025-11-01T12:13:35.858Z" }, + { url = "https://files.pythonhosted.org/packages/42/1d/74f1ff589bb687d0cad2bbdceef208dc070f56d1e38a3831da8c00bf13bb/levenshtein-0.27.3-cp313-cp313-win_amd64.whl", hash = "sha256:83aa2422e9a9af2c9d3e56a53e3e8de6bae58d1793628cae48c4282577c5c2c6", size = 94658, upload-time = "2025-11-01T12:13:36.963Z" }, + { url = 
"https://files.pythonhosted.org/packages/21/3c/22c86d3c8f254141096fd6089d2e9fdf98b1472c7a5d79d36d3557ec2d83/levenshtein-0.27.3-cp313-cp313-win_arm64.whl", hash = "sha256:d4adaf1edbcf38c3f2e290b52f4dcb5c6deff20308c26ef1127a106bc2d23e9f", size = 86929, upload-time = "2025-11-01T12:13:37.997Z" }, + { url = "https://files.pythonhosted.org/packages/0e/bc/9b7cf1b5fa098b86844d42de22549304699deff309c5c9e28b9a3fc4076a/levenshtein-0.27.3-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:272e24764b8210337b65a1cfd69ce40df5d2de1a3baf1234e7f06d2826ba2e7a", size = 170360, upload-time = "2025-11-01T12:13:39.019Z" }, + { url = "https://files.pythonhosted.org/packages/dc/95/997f2c83bd4712426bf0de8143b5e4403c7ebbafb5d1271983e774de3ae7/levenshtein-0.27.3-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:329a8e748a4e14d56daaa11f07bce3fde53385d05bad6b3f6dd9ee7802cdc915", size = 159098, upload-time = "2025-11-01T12:13:40.17Z" }, + { url = "https://files.pythonhosted.org/packages/fc/96/123c3316ae2f72c73be4fba9756924af015da4c0e5b12804f5753c0ee511/levenshtein-0.27.3-cp313-cp313t-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:a5fea1a9c6b9cc8729e467e2174b4359ff6bac27356bb5f31898e596b4ce133a", size = 136655, upload-time = "2025-11-01T12:13:41.262Z" }, + { url = "https://files.pythonhosted.org/packages/45/72/a3180d437736b1b9eacc3100be655a756deafb91de47c762d40eb45a9d91/levenshtein-0.27.3-cp313-cp313t-manylinux_2_24_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:3a61aa825819b6356555091d8a575d1235bd9c3753a68316a261af4856c3b487", size = 117511, upload-time = "2025-11-01T12:13:42.647Z" }, + { url = "https://files.pythonhosted.org/packages/61/f9/ba7c546a4b99347938e6661104064ab6a3651c601d59f241ffdc37510ecc/levenshtein-0.27.3-cp313-cp313t-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:a51de7a514e8183f0a82f2947d01b014d2391426543b1c076bf5a26328cec4e4", size = 155656, upload-time = "2025-11-01T12:13:44.208Z" }, + { url = 
"https://files.pythonhosted.org/packages/42/cd/5edd6e1e02c3e47c8121761756dd0f85f816b636f25509118b687e6b0f96/levenshtein-0.27.3-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:53cbf726d6e92040c9be7e594d959d496bd62597ea48eba9d96105898acbeafe", size = 1116689, upload-time = "2025-11-01T12:13:45.485Z" }, + { url = "https://files.pythonhosted.org/packages/95/67/25ca0119e0c6ec17226c72638f48ef8887124597ac48ad5da111c0b3a825/levenshtein-0.27.3-cp313-cp313t-musllinux_1_2_armv7l.whl", hash = "sha256:191b358afead8561c4fcfed22f83c13bb6c8da5f5789e277f0c5aa1c45ca612f", size = 1003166, upload-time = "2025-11-01T12:13:47.126Z" }, + { url = "https://files.pythonhosted.org/packages/45/64/ab216f3fb3cef1ee7e222665537f9340d828ef84c99409ba31f2ef2a3947/levenshtein-0.27.3-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:ba1318d0635b834b8f0397014a7c43f007e65fce396a47614780c881bdff828b", size = 1189362, upload-time = "2025-11-01T12:13:48.627Z" }, + { url = "https://files.pythonhosted.org/packages/31/58/b150034858de0899a5a222974b6710618ebc0779a0695df070f7ab559a0b/levenshtein-0.27.3-cp313-cp313t-win32.whl", hash = "sha256:8dd9e1db6c3b35567043e155a686e4827c4aa28a594bd81e3eea84d3a1bd5875", size = 86149, upload-time = "2025-11-01T12:13:50.588Z" }, + { url = "https://files.pythonhosted.org/packages/0a/c4/bbe46a11073641450200e6a604b3b62d311166e8061c492612a40e560e85/levenshtein-0.27.3-cp313-cp313t-win_amd64.whl", hash = "sha256:7813ecdac7a6223264ebfea0c8d69959c43d21a99694ef28018d22c4265c2af6", size = 96685, upload-time = "2025-11-01T12:13:51.641Z" }, + { url = "https://files.pythonhosted.org/packages/23/65/30b362ad9bfc1085741776a08b6ddee3f434e9daac2920daaee2e26271bf/levenshtein-0.27.3-cp313-cp313t-win_arm64.whl", hash = "sha256:8f05a0d23d13a6f802c7af595d0e43f5b9b98b6ed390cec7a35cb5d6693b882b", size = 88538, upload-time = "2025-11-01T12:13:52.757Z" }, + { url = 
"https://files.pythonhosted.org/packages/f3/e1/2f705da403f865a5fa3449b155738dc9c53021698fd6926253a9af03180b/levenshtein-0.27.3-cp314-cp314-macosx_10_15_x86_64.whl", hash = "sha256:a6728bfae9a86002f0223576675fc7e2a6e7735da47185a1d13d1eaaa73dd4be", size = 169457, upload-time = "2025-11-01T12:13:53.778Z" }, + { url = "https://files.pythonhosted.org/packages/76/2c/bb6ef359e007fe7b6b3195b68a94f4dd3ecd1885ee337ee8fbd4df55996f/levenshtein-0.27.3-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:8e5037c4a6f97a238e24aad6f98a1e984348b7931b1b04b6bd02bd4f8238150d", size = 158680, upload-time = "2025-11-01T12:13:55.005Z" }, + { url = "https://files.pythonhosted.org/packages/51/7b/de1999f4cf1cfebc3fbbf03a6d58498952d6560d9798af4b0a566e6b6f30/levenshtein-0.27.3-cp314-cp314-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:c6cf5ecf9026bf24cf66ad019c6583f50058fae3e1b3c20e8812455b55d597f1", size = 133167, upload-time = "2025-11-01T12:13:56.426Z" }, + { url = "https://files.pythonhosted.org/packages/c7/da/aaa7f3a0a8ae8744b284043653652db3d7d93595517f9ed8158c03287692/levenshtein-0.27.3-cp314-cp314-manylinux_2_24_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:9285084bd2fc19adb47dab54ed4a71f57f78fe0d754e4a01e3c75409a25aed24", size = 114530, upload-time = "2025-11-01T12:13:57.883Z" }, + { url = "https://files.pythonhosted.org/packages/29/ce/ed422816fb30ffa3bc11597b30d5deca06b4a1388707a04215da73c65b53/levenshtein-0.27.3-cp314-cp314-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:ce3bbbe92172a08b599d79956182c6b7ab6ec8d4adbe7237417a363b968ad87b", size = 153325, upload-time = "2025-11-01T12:13:59.318Z" }, + { url = "https://files.pythonhosted.org/packages/d9/5a/a225477a0bda154f19f1c07a5e35500d631ae25dfd620b479027d79f0d4c/levenshtein-0.27.3-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:9dac48fab9d166ca90e12fb6cf6c7c8eb9c41aacf7136584411e20f7f136f745", size = 1114956, upload-time = "2025-11-01T12:14:00.543Z" }, + { url = 
"https://files.pythonhosted.org/packages/ca/c4/a1be1040f3cce516a5e2be68453fd0c32ac63b2e9d31f476723fd8002c09/levenshtein-0.27.3-cp314-cp314-musllinux_1_2_armv7l.whl", hash = "sha256:d37a83722dc5326c93d17078e926c4732dc4f3488dc017c6839e34cd16af92b7", size = 1007610, upload-time = "2025-11-01T12:14:02.036Z" }, + { url = "https://files.pythonhosted.org/packages/86/d7/6f50e8a307e0c2befd819b481eb3a4c2eacab3dd8101982423003fac8ea3/levenshtein-0.27.3-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:3466cb8294ce586e49dd467560a153ab8d296015c538223f149f9aefd3d9f955", size = 1185379, upload-time = "2025-11-01T12:14:03.385Z" }, + { url = "https://files.pythonhosted.org/packages/6b/e5/5d8fb1b3ebd5735f53221bf95c923066bcfc132234925820128f7eee5b47/levenshtein-0.27.3-cp314-cp314-win32.whl", hash = "sha256:c848bf2457b268672b7e9e73b44f18f49856420ac50b2564cf115a6e4ef82688", size = 86328, upload-time = "2025-11-01T12:14:04.74Z" }, + { url = "https://files.pythonhosted.org/packages/30/82/8a9ccbdb4e38bd4d516f2804999dccb8cb4bcb4e33f52851735da0c73ea7/levenshtein-0.27.3-cp314-cp314-win_amd64.whl", hash = "sha256:742633f024362a4ed6ef9d7e75d68f74b041ae738985fcf55a0e6d1d4cade438", size = 96640, upload-time = "2025-11-01T12:14:06.24Z" }, + { url = "https://files.pythonhosted.org/packages/14/86/f9d15919f59f5d92c6baa500315e1fa0143a39d811427b83c54f038267ca/levenshtein-0.27.3-cp314-cp314-win_arm64.whl", hash = "sha256:9eed6851224b19e8d588ddb8eb8a4ae3c2dcabf3d1213985f0b94a67e517b1df", size = 89689, upload-time = "2025-11-01T12:14:07.379Z" }, + { url = "https://files.pythonhosted.org/packages/ed/f6/10f44975ae6dc3047b2cd260e3d4c3a5258b8d10690a42904115de24fc51/levenshtein-0.27.3-cp314-cp314t-macosx_10_15_x86_64.whl", hash = "sha256:77de69a345c76227b51a4521cd85442eb3da54c7eb6a06663a20c058fc49e683", size = 170518, upload-time = "2025-11-01T12:14:09.196Z" }, + { url = 
"https://files.pythonhosted.org/packages/08/07/fa294a145a0c99a814a9a807614962c1ee0f5749ca691645980462027d5d/levenshtein-0.27.3-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:eba2756dc1f5b962b0ff80e49abb2153d5e809cc5e7fa5e85be9410ce474795d", size = 159097, upload-time = "2025-11-01T12:14:10.404Z" }, + { url = "https://files.pythonhosted.org/packages/ae/50/24bdf37813fc30f293e53b46022b091144f4737a6a66663d2235b311bb98/levenshtein-0.27.3-cp314-cp314t-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:2c8fcb498287e971d84260f67808ff1a06b3f6212d80fea75cf5155db80606ff", size = 136650, upload-time = "2025-11-01T12:14:11.579Z" }, + { url = "https://files.pythonhosted.org/packages/d0/a9/0399c7a190b277cdea3acc801129d9d30da57c3fa79519e7b8c3f080d86c/levenshtein-0.27.3-cp314-cp314t-manylinux_2_24_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:f067092c67464faab13e00a5c1a80da93baca8955d4d49579861400762e35591", size = 117515, upload-time = "2025-11-01T12:14:12.877Z" }, + { url = "https://files.pythonhosted.org/packages/bf/a4/1c27533e97578b385a4b8079abe8d1ce2e514717c761efbe4bf7bbd0ac2e/levenshtein-0.27.3-cp314-cp314t-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:92415f32c68491203f2855d05eef3277d376182d014cf0859c013c89f277fbbf", size = 155711, upload-time = "2025-11-01T12:14:13.985Z" }, + { url = "https://files.pythonhosted.org/packages/50/35/bbc26638394a72b1e31a685ec251c995ee66a630c7e5c86f98770928b632/levenshtein-0.27.3-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:ef61eeaf1e0a42d7d947978d981fe4b9426b98b3dd8c1582c535f10dee044c3f", size = 1116692, upload-time = "2025-11-01T12:14:15.359Z" }, + { url = "https://files.pythonhosted.org/packages/cd/83/32fcf28b388f8dc6c36b54552b9bae289dab07d43df104893158c834cbcc/levenshtein-0.27.3-cp314-cp314t-musllinux_1_2_armv7l.whl", hash = "sha256:103bb2e9049d1aa0d1216dd09c1c9106ecfe7541bbdc1a0490b9357d42eec8f2", size = 1003167, upload-time = "2025-11-01T12:14:17.469Z" }, + { url = 
"https://files.pythonhosted.org/packages/d1/79/1fbf2877ec4b819f373a32ebe3c48a61ee810693593a6015108b0be97b78/levenshtein-0.27.3-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:6a64ddd1986b2a4c468b09544382287315c53585eb067f6e200c337741e057ee", size = 1189417, upload-time = "2025-11-01T12:14:19.081Z" }, + { url = "https://files.pythonhosted.org/packages/d3/ac/dad4e09f1f7459c64172e48e40ed2baf3aa92d38205bcbd1b4ff00853701/levenshtein-0.27.3-cp314-cp314t-win32.whl", hash = "sha256:957244f27dc284ccb030a8b77b8a00deb7eefdcd70052a4b1d96f375780ae9dc", size = 88144, upload-time = "2025-11-01T12:14:20.667Z" }, + { url = "https://files.pythonhosted.org/packages/c0/61/cd51dc8b8a382e17c559a9812734c3a9afc2dab7d36253516335ee16ae50/levenshtein-0.27.3-cp314-cp314t-win_amd64.whl", hash = "sha256:ccd7eaa6d8048c3ec07c93cfbcdefd4a3ae8c6aca3a370f2023ee69341e5f076", size = 98516, upload-time = "2025-11-01T12:14:21.786Z" }, + { url = "https://files.pythonhosted.org/packages/27/5e/3fb67e882c1fee01ebb7abc1c0a6669e5ff8acd060e93bfe7229e9ce6e4f/levenshtein-0.27.3-cp314-cp314t-win_arm64.whl", hash = "sha256:1d8520b89b7a27bb5aadbcc156715619bcbf556a8ac46ad932470945dca6e1bd", size = 91020, upload-time = "2025-11-01T12:14:22.944Z" }, +] + +[[package]] +name = "markdown-it-py" +version = "4.0.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "mdurl" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/5b/f5/4ec618ed16cc4f8fb3b701563655a69816155e79e24a17b651541804721d/markdown_it_py-4.0.0.tar.gz", hash = "sha256:cb0a2b4aa34f932c007117b194e945bd74e0ec24133ceb5bac59009cda1cb9f3", size = 73070, upload-time = "2025-08-11T12:57:52.854Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/94/54/e7d793b573f298e1c9013b8c4dade17d481164aa517d1d7148619c2cedbf/markdown_it_py-4.0.0-py3-none-any.whl", hash = "sha256:87327c59b172c5011896038353a81343b6754500a08cd7a4973bb48c6d578147", size = 87321, upload-time = "2025-08-11T12:57:51.923Z" }, +] + 
+[[package]] +name = "markupsafe" +version = "3.0.3" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/7e/99/7690b6d4034fffd95959cbe0c02de8deb3098cc577c67bb6a24fe5d7caa7/markupsafe-3.0.3.tar.gz", hash = "sha256:722695808f4b6457b320fdc131280796bdceb04ab50fe1795cd540799ebe1698", size = 80313, upload-time = "2025-09-27T18:37:40.426Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/5a/72/147da192e38635ada20e0a2e1a51cf8823d2119ce8883f7053879c2199b5/markupsafe-3.0.3-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:d53197da72cc091b024dd97249dfc7794d6a56530370992a5e1a08983ad9230e", size = 11615, upload-time = "2025-09-27T18:36:30.854Z" }, + { url = "https://files.pythonhosted.org/packages/9a/81/7e4e08678a1f98521201c3079f77db69fb552acd56067661f8c2f534a718/markupsafe-3.0.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:1872df69a4de6aead3491198eaf13810b565bdbeec3ae2dc8780f14458ec73ce", size = 12020, upload-time = "2025-09-27T18:36:31.971Z" }, + { url = "https://files.pythonhosted.org/packages/1e/2c/799f4742efc39633a1b54a92eec4082e4f815314869865d876824c257c1e/markupsafe-3.0.3-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:3a7e8ae81ae39e62a41ec302f972ba6ae23a5c5396c8e60113e9066ef893da0d", size = 24332, upload-time = "2025-09-27T18:36:32.813Z" }, + { url = "https://files.pythonhosted.org/packages/3c/2e/8d0c2ab90a8c1d9a24f0399058ab8519a3279d1bd4289511d74e909f060e/markupsafe-3.0.3-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:d6dd0be5b5b189d31db7cda48b91d7e0a9795f31430b7f271219ab30f1d3ac9d", size = 22947, upload-time = "2025-09-27T18:36:33.86Z" }, + { url = "https://files.pythonhosted.org/packages/2c/54/887f3092a85238093a0b2154bd629c89444f395618842e8b0c41783898ea/markupsafe-3.0.3-cp312-cp312-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = 
"sha256:94c6f0bb423f739146aec64595853541634bde58b2135f27f61c1ffd1cd4d16a", size = 21962, upload-time = "2025-09-27T18:36:35.099Z" }, + { url = "https://files.pythonhosted.org/packages/c9/2f/336b8c7b6f4a4d95e91119dc8521402461b74a485558d8f238a68312f11c/markupsafe-3.0.3-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:be8813b57049a7dc738189df53d69395eba14fb99345e0a5994914a3864c8a4b", size = 23760, upload-time = "2025-09-27T18:36:36.001Z" }, + { url = "https://files.pythonhosted.org/packages/32/43/67935f2b7e4982ffb50a4d169b724d74b62a3964bc1a9a527f5ac4f1ee2b/markupsafe-3.0.3-cp312-cp312-musllinux_1_2_riscv64.whl", hash = "sha256:83891d0e9fb81a825d9a6d61e3f07550ca70a076484292a70fde82c4b807286f", size = 21529, upload-time = "2025-09-27T18:36:36.906Z" }, + { url = "https://files.pythonhosted.org/packages/89/e0/4486f11e51bbba8b0c041098859e869e304d1c261e59244baa3d295d47b7/markupsafe-3.0.3-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:77f0643abe7495da77fb436f50f8dab76dbc6e5fd25d39589a0f1fe6548bfa2b", size = 23015, upload-time = "2025-09-27T18:36:37.868Z" }, + { url = "https://files.pythonhosted.org/packages/2f/e1/78ee7a023dac597a5825441ebd17170785a9dab23de95d2c7508ade94e0e/markupsafe-3.0.3-cp312-cp312-win32.whl", hash = "sha256:d88b440e37a16e651bda4c7c2b930eb586fd15ca7406cb39e211fcff3bf3017d", size = 14540, upload-time = "2025-09-27T18:36:38.761Z" }, + { url = "https://files.pythonhosted.org/packages/aa/5b/bec5aa9bbbb2c946ca2733ef9c4ca91c91b6a24580193e891b5f7dbe8e1e/markupsafe-3.0.3-cp312-cp312-win_amd64.whl", hash = "sha256:26a5784ded40c9e318cfc2bdb30fe164bdb8665ded9cd64d500a34fb42067b1c", size = 15105, upload-time = "2025-09-27T18:36:39.701Z" }, + { url = "https://files.pythonhosted.org/packages/e5/f1/216fc1bbfd74011693a4fd837e7026152e89c4bcf3e77b6692fba9923123/markupsafe-3.0.3-cp312-cp312-win_arm64.whl", hash = "sha256:35add3b638a5d900e807944a078b51922212fb3dedb01633a8defc4b01a3c85f", size = 13906, upload-time = "2025-09-27T18:36:40.689Z" }, + { url = 
"https://files.pythonhosted.org/packages/38/2f/907b9c7bbba283e68f20259574b13d005c121a0fa4c175f9bed27c4597ff/markupsafe-3.0.3-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:e1cf1972137e83c5d4c136c43ced9ac51d0e124706ee1c8aa8532c1287fa8795", size = 11622, upload-time = "2025-09-27T18:36:41.777Z" }, + { url = "https://files.pythonhosted.org/packages/9c/d9/5f7756922cdd676869eca1c4e3c0cd0df60ed30199ffd775e319089cb3ed/markupsafe-3.0.3-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:116bb52f642a37c115f517494ea5feb03889e04df47eeff5b130b1808ce7c219", size = 12029, upload-time = "2025-09-27T18:36:43.257Z" }, + { url = "https://files.pythonhosted.org/packages/00/07/575a68c754943058c78f30db02ee03a64b3c638586fba6a6dd56830b30a3/markupsafe-3.0.3-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:133a43e73a802c5562be9bbcd03d090aa5a1fe899db609c29e8c8d815c5f6de6", size = 24374, upload-time = "2025-09-27T18:36:44.508Z" }, + { url = "https://files.pythonhosted.org/packages/a9/21/9b05698b46f218fc0e118e1f8168395c65c8a2c750ae2bab54fc4bd4e0e8/markupsafe-3.0.3-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:ccfcd093f13f0f0b7fdd0f198b90053bf7b2f02a3927a30e63f3ccc9df56b676", size = 22980, upload-time = "2025-09-27T18:36:45.385Z" }, + { url = "https://files.pythonhosted.org/packages/7f/71/544260864f893f18b6827315b988c146b559391e6e7e8f7252839b1b846a/markupsafe-3.0.3-cp313-cp313-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:509fa21c6deb7a7a273d629cf5ec029bc209d1a51178615ddf718f5918992ab9", size = 21990, upload-time = "2025-09-27T18:36:46.916Z" }, + { url = "https://files.pythonhosted.org/packages/c2/28/b50fc2f74d1ad761af2f5dcce7492648b983d00a65b8c0e0cb457c82ebbe/markupsafe-3.0.3-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:a4afe79fb3de0b7097d81da19090f4df4f8d3a2b3adaa8764138aac2e44f3af1", size = 23784, upload-time = "2025-09-27T18:36:47.884Z" }, + { url = 
"https://files.pythonhosted.org/packages/ed/76/104b2aa106a208da8b17a2fb72e033a5a9d7073c68f7e508b94916ed47a9/markupsafe-3.0.3-cp313-cp313-musllinux_1_2_riscv64.whl", hash = "sha256:795e7751525cae078558e679d646ae45574b47ed6e7771863fcc079a6171a0fc", size = 21588, upload-time = "2025-09-27T18:36:48.82Z" }, + { url = "https://files.pythonhosted.org/packages/b5/99/16a5eb2d140087ebd97180d95249b00a03aa87e29cc224056274f2e45fd6/markupsafe-3.0.3-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:8485f406a96febb5140bfeca44a73e3ce5116b2501ac54fe953e488fb1d03b12", size = 23041, upload-time = "2025-09-27T18:36:49.797Z" }, + { url = "https://files.pythonhosted.org/packages/19/bc/e7140ed90c5d61d77cea142eed9f9c303f4c4806f60a1044c13e3f1471d0/markupsafe-3.0.3-cp313-cp313-win32.whl", hash = "sha256:bdd37121970bfd8be76c5fb069c7751683bdf373db1ed6c010162b2a130248ed", size = 14543, upload-time = "2025-09-27T18:36:51.584Z" }, + { url = "https://files.pythonhosted.org/packages/05/73/c4abe620b841b6b791f2edc248f556900667a5a1cf023a6646967ae98335/markupsafe-3.0.3-cp313-cp313-win_amd64.whl", hash = "sha256:9a1abfdc021a164803f4d485104931fb8f8c1efd55bc6b748d2f5774e78b62c5", size = 15113, upload-time = "2025-09-27T18:36:52.537Z" }, + { url = "https://files.pythonhosted.org/packages/f0/3a/fa34a0f7cfef23cf9500d68cb7c32dd64ffd58a12b09225fb03dd37d5b80/markupsafe-3.0.3-cp313-cp313-win_arm64.whl", hash = "sha256:7e68f88e5b8799aa49c85cd116c932a1ac15caaa3f5db09087854d218359e485", size = 13911, upload-time = "2025-09-27T18:36:53.513Z" }, + { url = "https://files.pythonhosted.org/packages/e4/d7/e05cd7efe43a88a17a37b3ae96e79a19e846f3f456fe79c57ca61356ef01/markupsafe-3.0.3-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:218551f6df4868a8d527e3062d0fb968682fe92054e89978594c28e642c43a73", size = 11658, upload-time = "2025-09-27T18:36:54.819Z" }, + { url = 
"https://files.pythonhosted.org/packages/99/9e/e412117548182ce2148bdeacdda3bb494260c0b0184360fe0d56389b523b/markupsafe-3.0.3-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:3524b778fe5cfb3452a09d31e7b5adefeea8c5be1d43c4f810ba09f2ceb29d37", size = 12066, upload-time = "2025-09-27T18:36:55.714Z" }, + { url = "https://files.pythonhosted.org/packages/bc/e6/fa0ffcda717ef64a5108eaa7b4f5ed28d56122c9a6d70ab8b72f9f715c80/markupsafe-3.0.3-cp313-cp313t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:4e885a3d1efa2eadc93c894a21770e4bc67899e3543680313b09f139e149ab19", size = 25639, upload-time = "2025-09-27T18:36:56.908Z" }, + { url = "https://files.pythonhosted.org/packages/96/ec/2102e881fe9d25fc16cb4b25d5f5cde50970967ffa5dddafdb771237062d/markupsafe-3.0.3-cp313-cp313t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:8709b08f4a89aa7586de0aadc8da56180242ee0ada3999749b183aa23df95025", size = 23569, upload-time = "2025-09-27T18:36:57.913Z" }, + { url = "https://files.pythonhosted.org/packages/4b/30/6f2fce1f1f205fc9323255b216ca8a235b15860c34b6798f810f05828e32/markupsafe-3.0.3-cp313-cp313t-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:b8512a91625c9b3da6f127803b166b629725e68af71f8184ae7e7d54686a56d6", size = 23284, upload-time = "2025-09-27T18:36:58.833Z" }, + { url = "https://files.pythonhosted.org/packages/58/47/4a0ccea4ab9f5dcb6f79c0236d954acb382202721e704223a8aafa38b5c8/markupsafe-3.0.3-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:9b79b7a16f7fedff2495d684f2b59b0457c3b493778c9eed31111be64d58279f", size = 24801, upload-time = "2025-09-27T18:36:59.739Z" }, + { url = "https://files.pythonhosted.org/packages/6a/70/3780e9b72180b6fecb83a4814d84c3bf4b4ae4bf0b19c27196104149734c/markupsafe-3.0.3-cp313-cp313t-musllinux_1_2_riscv64.whl", hash = "sha256:12c63dfb4a98206f045aa9563db46507995f7ef6d83b2f68eda65c307c6829eb", size = 22769, upload-time = "2025-09-27T18:37:00.719Z" }, + { 
url = "https://files.pythonhosted.org/packages/98/c5/c03c7f4125180fc215220c035beac6b9cb684bc7a067c84fc69414d315f5/markupsafe-3.0.3-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:8f71bc33915be5186016f675cd83a1e08523649b0e33efdb898db577ef5bb009", size = 23642, upload-time = "2025-09-27T18:37:01.673Z" }, + { url = "https://files.pythonhosted.org/packages/80/d6/2d1b89f6ca4bff1036499b1e29a1d02d282259f3681540e16563f27ebc23/markupsafe-3.0.3-cp313-cp313t-win32.whl", hash = "sha256:69c0b73548bc525c8cb9a251cddf1931d1db4d2258e9599c28c07ef3580ef354", size = 14612, upload-time = "2025-09-27T18:37:02.639Z" }, + { url = "https://files.pythonhosted.org/packages/2b/98/e48a4bfba0a0ffcf9925fe2d69240bfaa19c6f7507b8cd09c70684a53c1e/markupsafe-3.0.3-cp313-cp313t-win_amd64.whl", hash = "sha256:1b4b79e8ebf6b55351f0d91fe80f893b4743f104bff22e90697db1590e47a218", size = 15200, upload-time = "2025-09-27T18:37:03.582Z" }, + { url = "https://files.pythonhosted.org/packages/0e/72/e3cc540f351f316e9ed0f092757459afbc595824ca724cbc5a5d4263713f/markupsafe-3.0.3-cp313-cp313t-win_arm64.whl", hash = "sha256:ad2cf8aa28b8c020ab2fc8287b0f823d0a7d8630784c31e9ee5edea20f406287", size = 13973, upload-time = "2025-09-27T18:37:04.929Z" }, + { url = "https://files.pythonhosted.org/packages/33/8a/8e42d4838cd89b7dde187011e97fe6c3af66d8c044997d2183fbd6d31352/markupsafe-3.0.3-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:eaa9599de571d72e2daf60164784109f19978b327a3910d3e9de8c97b5b70cfe", size = 11619, upload-time = "2025-09-27T18:37:06.342Z" }, + { url = "https://files.pythonhosted.org/packages/b5/64/7660f8a4a8e53c924d0fa05dc3a55c9cee10bbd82b11c5afb27d44b096ce/markupsafe-3.0.3-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:c47a551199eb8eb2121d4f0f15ae0f923d31350ab9280078d1e5f12b249e0026", size = 12029, upload-time = "2025-09-27T18:37:07.213Z" }, + { url = 
"https://files.pythonhosted.org/packages/da/ef/e648bfd021127bef5fa12e1720ffed0c6cbb8310c8d9bea7266337ff06de/markupsafe-3.0.3-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:f34c41761022dd093b4b6896d4810782ffbabe30f2d443ff5f083e0cbbb8c737", size = 24408, upload-time = "2025-09-27T18:37:09.572Z" }, + { url = "https://files.pythonhosted.org/packages/41/3c/a36c2450754618e62008bf7435ccb0f88053e07592e6028a34776213d877/markupsafe-3.0.3-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:457a69a9577064c05a97c41f4e65148652db078a3a509039e64d3467b9e7ef97", size = 23005, upload-time = "2025-09-27T18:37:10.58Z" }, + { url = "https://files.pythonhosted.org/packages/bc/20/b7fdf89a8456b099837cd1dc21974632a02a999ec9bf7ca3e490aacd98e7/markupsafe-3.0.3-cp314-cp314-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:e8afc3f2ccfa24215f8cb28dcf43f0113ac3c37c2f0f0806d8c70e4228c5cf4d", size = 22048, upload-time = "2025-09-27T18:37:11.547Z" }, + { url = "https://files.pythonhosted.org/packages/9a/a7/591f592afdc734f47db08a75793a55d7fbcc6902a723ae4cfbab61010cc5/markupsafe-3.0.3-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:ec15a59cf5af7be74194f7ab02d0f59a62bdcf1a537677ce67a2537c9b87fcda", size = 23821, upload-time = "2025-09-27T18:37:12.48Z" }, + { url = "https://files.pythonhosted.org/packages/7d/33/45b24e4f44195b26521bc6f1a82197118f74df348556594bd2262bda1038/markupsafe-3.0.3-cp314-cp314-musllinux_1_2_riscv64.whl", hash = "sha256:0eb9ff8191e8498cca014656ae6b8d61f39da5f95b488805da4bb029cccbfbaf", size = 21606, upload-time = "2025-09-27T18:37:13.485Z" }, + { url = "https://files.pythonhosted.org/packages/ff/0e/53dfaca23a69fbfbbf17a4b64072090e70717344c52eaaaa9c5ddff1e5f0/markupsafe-3.0.3-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:2713baf880df847f2bece4230d4d094280f4e67b1e813eec43b4c0e144a34ffe", size = 23043, upload-time = "2025-09-27T18:37:14.408Z" }, + { url = 
"https://files.pythonhosted.org/packages/46/11/f333a06fc16236d5238bfe74daccbca41459dcd8d1fa952e8fbd5dccfb70/markupsafe-3.0.3-cp314-cp314-win32.whl", hash = "sha256:729586769a26dbceff69f7a7dbbf59ab6572b99d94576a5592625d5b411576b9", size = 14747, upload-time = "2025-09-27T18:37:15.36Z" }, + { url = "https://files.pythonhosted.org/packages/28/52/182836104b33b444e400b14f797212f720cbc9ed6ba34c800639d154e821/markupsafe-3.0.3-cp314-cp314-win_amd64.whl", hash = "sha256:bdc919ead48f234740ad807933cdf545180bfbe9342c2bb451556db2ed958581", size = 15341, upload-time = "2025-09-27T18:37:16.496Z" }, + { url = "https://files.pythonhosted.org/packages/6f/18/acf23e91bd94fd7b3031558b1f013adfa21a8e407a3fdb32745538730382/markupsafe-3.0.3-cp314-cp314-win_arm64.whl", hash = "sha256:5a7d5dc5140555cf21a6fefbdbf8723f06fcd2f63ef108f2854de715e4422cb4", size = 14073, upload-time = "2025-09-27T18:37:17.476Z" }, + { url = "https://files.pythonhosted.org/packages/3c/f0/57689aa4076e1b43b15fdfa646b04653969d50cf30c32a102762be2485da/markupsafe-3.0.3-cp314-cp314t-macosx_10_13_x86_64.whl", hash = "sha256:1353ef0c1b138e1907ae78e2f6c63ff67501122006b0f9abad68fda5f4ffc6ab", size = 11661, upload-time = "2025-09-27T18:37:18.453Z" }, + { url = "https://files.pythonhosted.org/packages/89/c3/2e67a7ca217c6912985ec766c6393b636fb0c2344443ff9d91404dc4c79f/markupsafe-3.0.3-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:1085e7fbddd3be5f89cc898938f42c0b3c711fdcb37d75221de2666af647c175", size = 12069, upload-time = "2025-09-27T18:37:19.332Z" }, + { url = "https://files.pythonhosted.org/packages/f0/00/be561dce4e6ca66b15276e184ce4b8aec61fe83662cce2f7d72bd3249d28/markupsafe-3.0.3-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:1b52b4fb9df4eb9ae465f8d0c228a00624de2334f216f178a995ccdcf82c4634", size = 25670, upload-time = "2025-09-27T18:37:20.245Z" }, + { url = 
"https://files.pythonhosted.org/packages/50/09/c419f6f5a92e5fadde27efd190eca90f05e1261b10dbd8cbcb39cd8ea1dc/markupsafe-3.0.3-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:fed51ac40f757d41b7c48425901843666a6677e3e8eb0abcff09e4ba6e664f50", size = 23598, upload-time = "2025-09-27T18:37:21.177Z" }, + { url = "https://files.pythonhosted.org/packages/22/44/a0681611106e0b2921b3033fc19bc53323e0b50bc70cffdd19f7d679bb66/markupsafe-3.0.3-cp314-cp314t-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:f190daf01f13c72eac4efd5c430a8de82489d9cff23c364c3ea822545032993e", size = 23261, upload-time = "2025-09-27T18:37:22.167Z" }, + { url = "https://files.pythonhosted.org/packages/5f/57/1b0b3f100259dc9fffe780cfb60d4be71375510e435efec3d116b6436d43/markupsafe-3.0.3-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:e56b7d45a839a697b5eb268c82a71bd8c7f6c94d6fd50c3d577fa39a9f1409f5", size = 24835, upload-time = "2025-09-27T18:37:23.296Z" }, + { url = "https://files.pythonhosted.org/packages/26/6a/4bf6d0c97c4920f1597cc14dd720705eca0bf7c787aebc6bb4d1bead5388/markupsafe-3.0.3-cp314-cp314t-musllinux_1_2_riscv64.whl", hash = "sha256:f3e98bb3798ead92273dc0e5fd0f31ade220f59a266ffd8a4f6065e0a3ce0523", size = 22733, upload-time = "2025-09-27T18:37:24.237Z" }, + { url = "https://files.pythonhosted.org/packages/14/c7/ca723101509b518797fedc2fdf79ba57f886b4aca8a7d31857ba3ee8281f/markupsafe-3.0.3-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:5678211cb9333a6468fb8d8be0305520aa073f50d17f089b5b4b477ea6e67fdc", size = 23672, upload-time = "2025-09-27T18:37:25.271Z" }, + { url = "https://files.pythonhosted.org/packages/fb/df/5bd7a48c256faecd1d36edc13133e51397e41b73bb77e1a69deab746ebac/markupsafe-3.0.3-cp314-cp314t-win32.whl", hash = "sha256:915c04ba3851909ce68ccc2b8e2cd691618c4dc4c4232fb7982bca3f41fd8c3d", size = 14819, upload-time = "2025-09-27T18:37:26.285Z" }, + { url = 
"https://files.pythonhosted.org/packages/1a/8a/0402ba61a2f16038b48b39bccca271134be00c5c9f0f623208399333c448/markupsafe-3.0.3-cp314-cp314t-win_amd64.whl", hash = "sha256:4faffd047e07c38848ce017e8725090413cd80cbc23d86e55c587bf979e579c9", size = 15426, upload-time = "2025-09-27T18:37:27.316Z" }, + { url = "https://files.pythonhosted.org/packages/70/bc/6f1c2f612465f5fa89b95bead1f44dcb607670fd42891d8fdcd5d039f4f4/markupsafe-3.0.3-cp314-cp314t-win_arm64.whl", hash = "sha256:32001d6a8fc98c8cb5c947787c5d08b0a50663d139f1305bac5885d98d9b40fa", size = 14146, upload-time = "2025-09-27T18:37:28.327Z" }, +] + +[[package]] +name = "mdurl" +version = "0.1.2" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/d6/54/cfe61301667036ec958cb99bd3efefba235e65cdeb9c84d24a8293ba1d90/mdurl-0.1.2.tar.gz", hash = "sha256:bb413d29f5eea38f31dd4754dd7377d4465116fb207585f97bf925588687c1ba", size = 8729, upload-time = "2022-08-14T12:40:10.846Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/b3/38/89ba8ad64ae25be8de66a6d463314cf1eb366222074cfda9ee839c56a4b4/mdurl-0.1.2-py3-none-any.whl", hash = "sha256:84008a41e51615a49fc9966191ff91509e3c40b939176e643fd50a5c2196b8f8", size = 9979, upload-time = "2022-08-14T12:40:09.779Z" }, +] + +[[package]] +name = "more-itertools" +version = "11.0.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/24/24/e0acc4bf54cba50c1d432c70a72a3df96db4a321b2c4c68432a60759044f/more_itertools-11.0.1.tar.gz", hash = "sha256:fefaf25b7ab08f0b45fa9f1892cae93b9fc0089ef034d39213bce15f1cc9e199", size = 144739, upload-time = "2026-04-02T16:17:45.061Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/d8/f4/5e52c7319b8087acef603ed6e50dc325c02eaa999355414830468611f13c/more_itertools-11.0.1-py3-none-any.whl", hash = "sha256:eaf287826069452a8f61026c597eae2428b2d1ba2859083abbf240b46842ce6d", size = 72182, upload-time = 
"2026-04-02T16:17:43.724Z" }, +] + +[[package]] +name = "msgpack" +version = "1.1.2" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/4d/f2/bfb55a6236ed8725a96b0aa3acbd0ec17588e6a2c3b62a93eb513ed8783f/msgpack-1.1.2.tar.gz", hash = "sha256:3b60763c1373dd60f398488069bcdc703cd08a711477b5d480eecc9f9626f47e", size = 173581, upload-time = "2025-10-08T09:15:56.596Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/ad/bd/8b0d01c756203fbab65d265859749860682ccd2a59594609aeec3a144efa/msgpack-1.1.2-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:70a0dff9d1f8da25179ffcf880e10cf1aad55fdb63cd59c9a49a1b82290062aa", size = 81939, upload-time = "2025-10-08T09:15:01.472Z" }, + { url = "https://files.pythonhosted.org/packages/34/68/ba4f155f793a74c1483d4bdef136e1023f7bcba557f0db4ef3db3c665cf1/msgpack-1.1.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:446abdd8b94b55c800ac34b102dffd2f6aa0ce643c55dfc017ad89347db3dbdb", size = 85064, upload-time = "2025-10-08T09:15:03.764Z" }, + { url = "https://files.pythonhosted.org/packages/f2/60/a064b0345fc36c4c3d2c743c82d9100c40388d77f0b48b2f04d6041dbec1/msgpack-1.1.2-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:c63eea553c69ab05b6747901b97d620bb2a690633c77f23feb0c6a947a8a7b8f", size = 417131, upload-time = "2025-10-08T09:15:05.136Z" }, + { url = "https://files.pythonhosted.org/packages/65/92/a5100f7185a800a5d29f8d14041f61475b9de465ffcc0f3b9fba606e4505/msgpack-1.1.2-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:372839311ccf6bdaf39b00b61288e0557916c3729529b301c52c2d88842add42", size = 427556, upload-time = "2025-10-08T09:15:06.837Z" }, + { url = "https://files.pythonhosted.org/packages/f5/87/ffe21d1bf7d9991354ad93949286f643b2bb6ddbeab66373922b44c3b8cc/msgpack-1.1.2-cp312-cp312-musllinux_1_2_aarch64.whl", hash = 
"sha256:2929af52106ca73fcb28576218476ffbb531a036c2adbcf54a3664de124303e9", size = 404920, upload-time = "2025-10-08T09:15:08.179Z" }, + { url = "https://files.pythonhosted.org/packages/ff/41/8543ed2b8604f7c0d89ce066f42007faac1eaa7d79a81555f206a5cdb889/msgpack-1.1.2-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:be52a8fc79e45b0364210eef5234a7cf8d330836d0a64dfbb878efa903d84620", size = 415013, upload-time = "2025-10-08T09:15:09.83Z" }, + { url = "https://files.pythonhosted.org/packages/41/0d/2ddfaa8b7e1cee6c490d46cb0a39742b19e2481600a7a0e96537e9c22f43/msgpack-1.1.2-cp312-cp312-win32.whl", hash = "sha256:1fff3d825d7859ac888b0fbda39a42d59193543920eda9d9bea44d958a878029", size = 65096, upload-time = "2025-10-08T09:15:11.11Z" }, + { url = "https://files.pythonhosted.org/packages/8c/ec/d431eb7941fb55a31dd6ca3404d41fbb52d99172df2e7707754488390910/msgpack-1.1.2-cp312-cp312-win_amd64.whl", hash = "sha256:1de460f0403172cff81169a30b9a92b260cb809c4cb7e2fc79ae8d0510c78b6b", size = 72708, upload-time = "2025-10-08T09:15:12.554Z" }, + { url = "https://files.pythonhosted.org/packages/c5/31/5b1a1f70eb0e87d1678e9624908f86317787b536060641d6798e3cf70ace/msgpack-1.1.2-cp312-cp312-win_arm64.whl", hash = "sha256:be5980f3ee0e6bd44f3a9e9dea01054f175b50c3e6cdb692bc9424c0bbb8bf69", size = 64119, upload-time = "2025-10-08T09:15:13.589Z" }, + { url = "https://files.pythonhosted.org/packages/6b/31/b46518ecc604d7edf3a4f94cb3bf021fc62aa301f0cb849936968164ef23/msgpack-1.1.2-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:4efd7b5979ccb539c221a4c4e16aac1a533efc97f3b759bb5a5ac9f6d10383bf", size = 81212, upload-time = "2025-10-08T09:15:14.552Z" }, + { url = "https://files.pythonhosted.org/packages/92/dc/c385f38f2c2433333345a82926c6bfa5ecfff3ef787201614317b58dd8be/msgpack-1.1.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:42eefe2c3e2af97ed470eec850facbe1b5ad1d6eacdbadc42ec98e7dcf68b4b7", size = 84315, upload-time = "2025-10-08T09:15:15.543Z" }, + { url = 
"https://files.pythonhosted.org/packages/d3/68/93180dce57f684a61a88a45ed13047558ded2be46f03acb8dec6d7c513af/msgpack-1.1.2-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:1fdf7d83102bf09e7ce3357de96c59b627395352a4024f6e2458501f158bf999", size = 412721, upload-time = "2025-10-08T09:15:16.567Z" }, + { url = "https://files.pythonhosted.org/packages/5d/ba/459f18c16f2b3fc1a1ca871f72f07d70c07bf768ad0a507a698b8052ac58/msgpack-1.1.2-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:fac4be746328f90caa3cd4bc67e6fe36ca2bf61d5c6eb6d895b6527e3f05071e", size = 424657, upload-time = "2025-10-08T09:15:17.825Z" }, + { url = "https://files.pythonhosted.org/packages/38/f8/4398c46863b093252fe67368b44edc6c13b17f4e6b0e4929dbf0bdb13f23/msgpack-1.1.2-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:fffee09044073e69f2bad787071aeec727183e7580443dfeb8556cbf1978d162", size = 402668, upload-time = "2025-10-08T09:15:19.003Z" }, + { url = "https://files.pythonhosted.org/packages/28/ce/698c1eff75626e4124b4d78e21cca0b4cc90043afb80a507626ea354ab52/msgpack-1.1.2-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:5928604de9b032bc17f5099496417f113c45bc6bc21b5c6920caf34b3c428794", size = 419040, upload-time = "2025-10-08T09:15:20.183Z" }, + { url = "https://files.pythonhosted.org/packages/67/32/f3cd1667028424fa7001d82e10ee35386eea1408b93d399b09fb0aa7875f/msgpack-1.1.2-cp313-cp313-win32.whl", hash = "sha256:a7787d353595c7c7e145e2331abf8b7ff1e6673a6b974ded96e6d4ec09f00c8c", size = 65037, upload-time = "2025-10-08T09:15:21.416Z" }, + { url = "https://files.pythonhosted.org/packages/74/07/1ed8277f8653c40ebc65985180b007879f6a836c525b3885dcc6448ae6cb/msgpack-1.1.2-cp313-cp313-win_amd64.whl", hash = "sha256:a465f0dceb8e13a487e54c07d04ae3ba131c7c5b95e2612596eafde1dccf64a9", size = 72631, upload-time = "2025-10-08T09:15:22.431Z" }, + { url = 
"https://files.pythonhosted.org/packages/e5/db/0314e4e2db56ebcf450f277904ffd84a7988b9e5da8d0d61ab2d057df2b6/msgpack-1.1.2-cp313-cp313-win_arm64.whl", hash = "sha256:e69b39f8c0aa5ec24b57737ebee40be647035158f14ed4b40e6f150077e21a84", size = 64118, upload-time = "2025-10-08T09:15:23.402Z" }, + { url = "https://files.pythonhosted.org/packages/22/71/201105712d0a2ff07b7873ed3c220292fb2ea5120603c00c4b634bcdafb3/msgpack-1.1.2-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:e23ce8d5f7aa6ea6d2a2b326b4ba46c985dbb204523759984430db7114f8aa00", size = 81127, upload-time = "2025-10-08T09:15:24.408Z" }, + { url = "https://files.pythonhosted.org/packages/1b/9f/38ff9e57a2eade7bf9dfee5eae17f39fc0e998658050279cbb14d97d36d9/msgpack-1.1.2-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:6c15b7d74c939ebe620dd8e559384be806204d73b4f9356320632d783d1f7939", size = 84981, upload-time = "2025-10-08T09:15:25.812Z" }, + { url = "https://files.pythonhosted.org/packages/8e/a9/3536e385167b88c2cc8f4424c49e28d49a6fc35206d4a8060f136e71f94c/msgpack-1.1.2-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:99e2cb7b9031568a2a5c73aa077180f93dd2e95b4f8d3b8e14a73ae94a9e667e", size = 411885, upload-time = "2025-10-08T09:15:27.22Z" }, + { url = "https://files.pythonhosted.org/packages/2f/40/dc34d1a8d5f1e51fc64640b62b191684da52ca469da9cd74e84936ffa4a6/msgpack-1.1.2-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:180759d89a057eab503cf62eeec0aa61c4ea1200dee709f3a8e9397dbb3b6931", size = 419658, upload-time = "2025-10-08T09:15:28.4Z" }, + { url = "https://files.pythonhosted.org/packages/3b/ef/2b92e286366500a09a67e03496ee8b8ba00562797a52f3c117aa2b29514b/msgpack-1.1.2-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:04fb995247a6e83830b62f0b07bf36540c213f6eac8e851166d8d86d83cbd014", size = 403290, upload-time = "2025-10-08T09:15:29.764Z" }, + { url = 
"https://files.pythonhosted.org/packages/78/90/e0ea7990abea5764e4655b8177aa7c63cdfa89945b6e7641055800f6c16b/msgpack-1.1.2-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:8e22ab046fa7ede9e36eeb4cfad44d46450f37bb05d5ec482b02868f451c95e2", size = 415234, upload-time = "2025-10-08T09:15:31.022Z" }, + { url = "https://files.pythonhosted.org/packages/72/4e/9390aed5db983a2310818cd7d3ec0aecad45e1f7007e0cda79c79507bb0d/msgpack-1.1.2-cp314-cp314-win32.whl", hash = "sha256:80a0ff7d4abf5fecb995fcf235d4064b9a9a8a40a3ab80999e6ac1e30b702717", size = 66391, upload-time = "2025-10-08T09:15:32.265Z" }, + { url = "https://files.pythonhosted.org/packages/6e/f1/abd09c2ae91228c5f3998dbd7f41353def9eac64253de3c8105efa2082f7/msgpack-1.1.2-cp314-cp314-win_amd64.whl", hash = "sha256:9ade919fac6a3e7260b7f64cea89df6bec59104987cbea34d34a2fa15d74310b", size = 73787, upload-time = "2025-10-08T09:15:33.219Z" }, + { url = "https://files.pythonhosted.org/packages/6a/b0/9d9f667ab48b16ad4115c1935d94023b82b3198064cb84a123e97f7466c1/msgpack-1.1.2-cp314-cp314-win_arm64.whl", hash = "sha256:59415c6076b1e30e563eb732e23b994a61c159cec44deaf584e5cc1dd662f2af", size = 66453, upload-time = "2025-10-08T09:15:34.225Z" }, + { url = "https://files.pythonhosted.org/packages/16/67/93f80545eb1792b61a217fa7f06d5e5cb9e0055bed867f43e2b8e012e137/msgpack-1.1.2-cp314-cp314t-macosx_10_13_x86_64.whl", hash = "sha256:897c478140877e5307760b0ea66e0932738879e7aa68144d9b78ea4c8302a84a", size = 85264, upload-time = "2025-10-08T09:15:35.61Z" }, + { url = "https://files.pythonhosted.org/packages/87/1c/33c8a24959cf193966ef11a6f6a2995a65eb066bd681fd085afd519a57ce/msgpack-1.1.2-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:a668204fa43e6d02f89dbe79a30b0d67238d9ec4c5bd8a940fc3a004a47b721b", size = 89076, upload-time = "2025-10-08T09:15:36.619Z" }, + { url = 
"https://files.pythonhosted.org/packages/fc/6b/62e85ff7193663fbea5c0254ef32f0c77134b4059f8da89b958beb7696f3/msgpack-1.1.2-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:5559d03930d3aa0f3aacb4c42c776af1a2ace2611871c84a75afe436695e6245", size = 435242, upload-time = "2025-10-08T09:15:37.647Z" }, + { url = "https://files.pythonhosted.org/packages/c1/47/5c74ecb4cc277cf09f64e913947871682ffa82b3b93c8dad68083112f412/msgpack-1.1.2-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:70c5a7a9fea7f036b716191c29047374c10721c389c21e9ffafad04df8c52c90", size = 432509, upload-time = "2025-10-08T09:15:38.794Z" }, + { url = "https://files.pythonhosted.org/packages/24/a4/e98ccdb56dc4e98c929a3f150de1799831c0a800583cde9fa022fa90602d/msgpack-1.1.2-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:f2cb069d8b981abc72b41aea1c580ce92d57c673ec61af4c500153a626cb9e20", size = 415957, upload-time = "2025-10-08T09:15:40.238Z" }, + { url = "https://files.pythonhosted.org/packages/da/28/6951f7fb67bc0a4e184a6b38ab71a92d9ba58080b27a77d3e2fb0be5998f/msgpack-1.1.2-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:d62ce1f483f355f61adb5433ebfd8868c5f078d1a52d042b0a998682b4fa8c27", size = 422910, upload-time = "2025-10-08T09:15:41.505Z" }, + { url = "https://files.pythonhosted.org/packages/f0/03/42106dcded51f0a0b5284d3ce30a671e7bd3f7318d122b2ead66ad289fed/msgpack-1.1.2-cp314-cp314t-win32.whl", hash = "sha256:1d1418482b1ee984625d88aa9585db570180c286d942da463533b238b98b812b", size = 75197, upload-time = "2025-10-08T09:15:42.954Z" }, + { url = "https://files.pythonhosted.org/packages/15/86/d0071e94987f8db59d4eeb386ddc64d0bb9b10820a8d82bcd3e53eeb2da6/msgpack-1.1.2-cp314-cp314t-win_amd64.whl", hash = "sha256:5a46bf7e831d09470ad92dff02b8b1ac92175ca36b087f904a0519857c6be3ff", size = 85772, upload-time = "2025-10-08T09:15:43.954Z" }, + { url = 
"https://files.pythonhosted.org/packages/81/f2/08ace4142eb281c12701fc3b93a10795e4d4dc7f753911d836675050f886/msgpack-1.1.2-cp314-cp314t-win_arm64.whl", hash = "sha256:d99ef64f349d5ec3293688e91486c5fdb925ed03807f64d98d205d2713c60b46", size = 70868, upload-time = "2025-10-08T09:15:44.959Z" }, +] + +[[package]] +name = "msgpack-numpy-opentensor" +version = "0.5.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "msgpack" }, + { name = "numpy" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/b2/69/2a6af13c3be6934a9ba149120a78bf63cf1455ddb1d11ec2cc5e5d6f8186/msgpack-numpy-opentensor-0.5.0.tar.gz", hash = "sha256:213232c20e2efd528ec8a9882b605e8ad87cfc35b57dfcfefe05d33aaaabe574", size = 9661, upload-time = "2023-10-02T19:01:38.831Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/cd/22/590508afb85d5c27ebcb2837410413f4613eebdda6e4e02997fe08ba78e4/msgpack_numpy_opentensor-0.5.0-py2.py3-none-any.whl", hash = "sha256:8a61c597a976425a87094d8e89846aa9528eb1f037e97ff1428fe3cd61a238e7", size = 7209, upload-time = "2023-10-02T19:01:37.417Z" }, +] + +[[package]] +name = "multidict" +version = "6.7.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/1a/c2/c2d94cbe6ac1753f3fc980da97b3d930efe1da3af3c9f5125354436c073d/multidict-6.7.1.tar.gz", hash = "sha256:ec6652a1bee61c53a3e5776b6049172c53b6aaba34f18c9ad04f82712bac623d", size = 102010, upload-time = "2026-01-26T02:46:45.979Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/8d/9c/f20e0e2cf80e4b2e4b1c365bf5fe104ee633c751a724246262db8f1a0b13/multidict-6.7.1-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:a90f75c956e32891a4eda3639ce6dd86e87105271f43d43442a3aedf3cddf172", size = 76893, upload-time = "2026-01-26T02:43:52.754Z" }, + { url = 
"https://files.pythonhosted.org/packages/fe/cf/18ef143a81610136d3da8193da9d80bfe1cb548a1e2d1c775f26b23d024a/multidict-6.7.1-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:3fccb473e87eaa1382689053e4a4618e7ba7b9b9b8d6adf2027ee474597128cd", size = 45456, upload-time = "2026-01-26T02:43:53.893Z" }, + { url = "https://files.pythonhosted.org/packages/a9/65/1caac9d4cd32e8433908683446eebc953e82d22b03d10d41a5f0fefe991b/multidict-6.7.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:b0fa96985700739c4c7853a43c0b3e169360d6855780021bfc6d0f1ce7c123e7", size = 43872, upload-time = "2026-01-26T02:43:55.041Z" }, + { url = "https://files.pythonhosted.org/packages/cf/3b/d6bd75dc4f3ff7c73766e04e705b00ed6dbbaccf670d9e05a12b006f5a21/multidict-6.7.1-cp312-cp312-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:cb2a55f408c3043e42b40cc8eecd575afa27b7e0b956dfb190de0f8499a57a53", size = 251018, upload-time = "2026-01-26T02:43:56.198Z" }, + { url = "https://files.pythonhosted.org/packages/fd/80/c959c5933adedb9ac15152e4067c702a808ea183a8b64cf8f31af8ad3155/multidict-6.7.1-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:eb0ce7b2a32d09892b3dd6cc44877a0d02a33241fafca5f25c8b6b62374f8b75", size = 258883, upload-time = "2026-01-26T02:43:57.499Z" }, + { url = "https://files.pythonhosted.org/packages/86/85/7ed40adafea3d4f1c8b916e3b5cc3a8e07dfcdcb9cd72800f4ed3ca1b387/multidict-6.7.1-cp312-cp312-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:c3a32d23520ee37bf327d1e1a656fec76a2edd5c038bf43eddfa0572ec49c60b", size = 242413, upload-time = "2026-01-26T02:43:58.755Z" }, + { url = "https://files.pythonhosted.org/packages/d2/57/b8565ff533e48595503c785f8361ff9a4fde4d67de25c207cd0ba3befd03/multidict-6.7.1-cp312-cp312-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:9c90fed18bffc0189ba814749fdcc102b536e83a9f738a9003e569acd540a733", size = 268404, 
upload-time = "2026-01-26T02:44:00.216Z" }, + { url = "https://files.pythonhosted.org/packages/e0/50/9810c5c29350f7258180dfdcb2e52783a0632862eb334c4896ac717cebcb/multidict-6.7.1-cp312-cp312-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:da62917e6076f512daccfbbde27f46fed1c98fee202f0559adec8ee0de67f71a", size = 269456, upload-time = "2026-01-26T02:44:02.202Z" }, + { url = "https://files.pythonhosted.org/packages/f3/8d/5e5be3ced1d12966fefb5c4ea3b2a5b480afcea36406559442c6e31d4a48/multidict-6.7.1-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:bfde23ef6ed9db7eaee6c37dcec08524cb43903c60b285b172b6c094711b3961", size = 256322, upload-time = "2026-01-26T02:44:03.56Z" }, + { url = "https://files.pythonhosted.org/packages/31/6e/d8a26d81ac166a5592782d208dd90dfdc0a7a218adaa52b45a672b46c122/multidict-6.7.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:3758692429e4e32f1ba0df23219cd0b4fc0a52f476726fff9337d1a57676a582", size = 253955, upload-time = "2026-01-26T02:44:04.845Z" }, + { url = "https://files.pythonhosted.org/packages/59/4c/7c672c8aad41534ba619bcd4ade7a0dc87ed6b8b5c06149b85d3dd03f0cd/multidict-6.7.1-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:398c1478926eca669f2fd6a5856b6de9c0acf23a2cb59a14c0ba5844fa38077e", size = 251254, upload-time = "2026-01-26T02:44:06.133Z" }, + { url = "https://files.pythonhosted.org/packages/7b/bd/84c24de512cbafbdbc39439f74e967f19570ce7924e3007174a29c348916/multidict-6.7.1-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:c102791b1c4f3ab36ce4101154549105a53dc828f016356b3e3bcae2e3a039d3", size = 252059, upload-time = "2026-01-26T02:44:07.518Z" }, + { url = "https://files.pythonhosted.org/packages/fa/ba/f5449385510825b73d01c2d4087bf6d2fccc20a2d42ac34df93191d3dd03/multidict-6.7.1-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:a088b62bd733e2ad12c50dad01b7d0166c30287c166e137433d3b410add807a6", size = 263588, upload-time = 
"2026-01-26T02:44:09.382Z" }, + { url = "https://files.pythonhosted.org/packages/d7/11/afc7c677f68f75c84a69fe37184f0f82fce13ce4b92f49f3db280b7e92b3/multidict-6.7.1-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:3d51ff4785d58d3f6c91bdbffcb5e1f7ddfda557727043aa20d20ec4f65e324a", size = 259642, upload-time = "2026-01-26T02:44:10.73Z" }, + { url = "https://files.pythonhosted.org/packages/2b/17/ebb9644da78c4ab36403739e0e6e0e30ebb135b9caf3440825001a0bddcb/multidict-6.7.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:fc5907494fccf3e7d3f94f95c91d6336b092b5fc83811720fae5e2765890dfba", size = 251377, upload-time = "2026-01-26T02:44:12.042Z" }, + { url = "https://files.pythonhosted.org/packages/ca/a4/840f5b97339e27846c46307f2530a2805d9d537d8b8bd416af031cad7fa0/multidict-6.7.1-cp312-cp312-win32.whl", hash = "sha256:28ca5ce2fd9716631133d0e9a9b9a745ad7f60bac2bccafb56aa380fc0b6c511", size = 41887, upload-time = "2026-01-26T02:44:14.245Z" }, + { url = "https://files.pythonhosted.org/packages/80/31/0b2517913687895f5904325c2069d6a3b78f66cc641a86a2baf75a05dcbb/multidict-6.7.1-cp312-cp312-win_amd64.whl", hash = "sha256:fcee94dfbd638784645b066074b338bc9cc155d4b4bffa4adce1615c5a426c19", size = 46053, upload-time = "2026-01-26T02:44:15.371Z" }, + { url = "https://files.pythonhosted.org/packages/0c/5b/aba28e4ee4006ae4c7df8d327d31025d760ffa992ea23812a601d226e682/multidict-6.7.1-cp312-cp312-win_arm64.whl", hash = "sha256:ba0a9fb644d0c1a2194cf7ffb043bd852cea63a57f66fbd33959f7dae18517bf", size = 43307, upload-time = "2026-01-26T02:44:16.852Z" }, + { url = "https://files.pythonhosted.org/packages/f2/22/929c141d6c0dba87d3e1d38fbdf1ba8baba86b7776469f2bc2d3227a1e67/multidict-6.7.1-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:2b41f5fed0ed563624f1c17630cb9941cf2309d4df00e494b551b5f3e3d67a23", size = 76174, upload-time = "2026-01-26T02:44:18.509Z" }, + { url = 
"https://files.pythonhosted.org/packages/c7/75/bc704ae15fee974f8fccd871305e254754167dce5f9e42d88a2def741a1d/multidict-6.7.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:84e61e3af5463c19b67ced91f6c634effb89ef8bfc5ca0267f954451ed4bb6a2", size = 45116, upload-time = "2026-01-26T02:44:19.745Z" }, + { url = "https://files.pythonhosted.org/packages/79/76/55cd7186f498ed080a18440c9013011eb548f77ae1b297206d030eb1180a/multidict-6.7.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:935434b9853c7c112eee7ac891bc4cb86455aa631269ae35442cb316790c1445", size = 43524, upload-time = "2026-01-26T02:44:21.571Z" }, + { url = "https://files.pythonhosted.org/packages/e9/3c/414842ef8d5a1628d68edee29ba0e5bcf235dbfb3ccd3ea303a7fe8c72ff/multidict-6.7.1-cp313-cp313-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:432feb25a1cb67fe82a9680b4d65fb542e4635cb3166cd9c01560651ad60f177", size = 249368, upload-time = "2026-01-26T02:44:22.803Z" }, + { url = "https://files.pythonhosted.org/packages/f6/32/befed7f74c458b4a525e60519fe8d87eef72bb1e99924fa2b0f9d97a221e/multidict-6.7.1-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:e82d14e3c948952a1a85503817e038cba5905a3352de76b9a465075d072fba23", size = 256952, upload-time = "2026-01-26T02:44:24.306Z" }, + { url = "https://files.pythonhosted.org/packages/03/d6/c878a44ba877f366630c860fdf74bfb203c33778f12b6ac274936853c451/multidict-6.7.1-cp313-cp313-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:4cfb48c6ea66c83bcaaf7e4dfa7ec1b6bbcf751b7db85a328902796dfde4c060", size = 240317, upload-time = "2026-01-26T02:44:25.772Z" }, + { url = "https://files.pythonhosted.org/packages/68/49/57421b4d7ad2e9e60e25922b08ceb37e077b90444bde6ead629095327a6f/multidict-6.7.1-cp313-cp313-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:1d540e51b7e8e170174555edecddbd5538105443754539193e3e1061864d444d", size = 267132, 
upload-time = "2026-01-26T02:44:27.648Z" }, + { url = "https://files.pythonhosted.org/packages/b7/fe/ec0edd52ddbcea2a2e89e174f0206444a61440b40f39704e64dc807a70bd/multidict-6.7.1-cp313-cp313-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:273d23f4b40f3dce4d6c8a821c741a86dec62cded82e1175ba3d99be128147ed", size = 268140, upload-time = "2026-01-26T02:44:29.588Z" }, + { url = "https://files.pythonhosted.org/packages/b0/73/6e1b01cbeb458807aa0831742232dbdd1fa92bfa33f52a3f176b4ff3dc11/multidict-6.7.1-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:9d624335fd4fa1c08a53f8b4be7676ebde19cd092b3895c421045ca87895b429", size = 254277, upload-time = "2026-01-26T02:44:30.902Z" }, + { url = "https://files.pythonhosted.org/packages/6a/b2/5fb8c124d7561a4974c342bc8c778b471ebbeb3cc17df696f034a7e9afe7/multidict-6.7.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:12fad252f8b267cc75b66e8fc51b3079604e8d43a75428ffe193cd9e2195dfd6", size = 252291, upload-time = "2026-01-26T02:44:32.31Z" }, + { url = "https://files.pythonhosted.org/packages/5a/96/51d4e4e06bcce92577fcd488e22600bd38e4fd59c20cb49434d054903bd2/multidict-6.7.1-cp313-cp313-musllinux_1_2_armv7l.whl", hash = "sha256:03ede2a6ffbe8ef936b92cb4529f27f42be7f56afcdab5ab739cd5f27fb1cbf9", size = 250156, upload-time = "2026-01-26T02:44:33.734Z" }, + { url = "https://files.pythonhosted.org/packages/db/6b/420e173eec5fba721a50e2a9f89eda89d9c98fded1124f8d5c675f7a0c0f/multidict-6.7.1-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:90efbcf47dbe33dcf643a1e400d67d59abeac5db07dc3f27d6bdeae497a2198c", size = 249742, upload-time = "2026-01-26T02:44:35.222Z" }, + { url = "https://files.pythonhosted.org/packages/44/a3/ec5b5bd98f306bc2aa297b8c6f11a46714a56b1e6ef5ebda50a4f5d7c5fb/multidict-6.7.1-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:5c4b9bfc148f5a91be9244d6264c53035c8a0dcd2f51f1c3c6e30e30ebaa1c84", size = 262221, upload-time = 
"2026-01-26T02:44:36.604Z" }, + { url = "https://files.pythonhosted.org/packages/cd/f7/e8c0d0da0cd1e28d10e624604e1a36bcc3353aaebdfdc3a43c72bc683a12/multidict-6.7.1-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:401c5a650f3add2472d1d288c26deebc540f99e2fb83e9525007a74cd2116f1d", size = 258664, upload-time = "2026-01-26T02:44:38.008Z" }, + { url = "https://files.pythonhosted.org/packages/52/da/151a44e8016dd33feed44f730bd856a66257c1ee7aed4f44b649fb7edeb3/multidict-6.7.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:97891f3b1b3ffbded884e2916cacf3c6fc87b66bb0dde46f7357404750559f33", size = 249490, upload-time = "2026-01-26T02:44:39.386Z" }, + { url = "https://files.pythonhosted.org/packages/87/af/a3b86bf9630b732897f6fc3f4c4714b90aa4361983ccbdcd6c0339b21b0c/multidict-6.7.1-cp313-cp313-win32.whl", hash = "sha256:e1c5988359516095535c4301af38d8a8838534158f649c05dd1050222321bcb3", size = 41695, upload-time = "2026-01-26T02:44:41.318Z" }, + { url = "https://files.pythonhosted.org/packages/b2/35/e994121b0e90e46134673422dd564623f93304614f5d11886b1b3e06f503/multidict-6.7.1-cp313-cp313-win_amd64.whl", hash = "sha256:960c83bf01a95b12b08fd54324a4eb1d5b52c88932b5cba5d6e712bb3ed12eb5", size = 45884, upload-time = "2026-01-26T02:44:42.488Z" }, + { url = "https://files.pythonhosted.org/packages/ca/61/42d3e5dbf661242a69c97ea363f2d7b46c567da8eadef8890022be6e2ab0/multidict-6.7.1-cp313-cp313-win_arm64.whl", hash = "sha256:563fe25c678aaba333d5399408f5ec3c383ca5b663e7f774dd179a520b8144df", size = 43122, upload-time = "2026-01-26T02:44:43.664Z" }, + { url = "https://files.pythonhosted.org/packages/6d/b3/e6b21c6c4f314bb956016b0b3ef2162590a529b84cb831c257519e7fde44/multidict-6.7.1-cp313-cp313t-macosx_10_13_universal2.whl", hash = "sha256:c76c4bec1538375dad9d452d246ca5368ad6e1c9039dadcf007ae59c70619ea1", size = 83175, upload-time = "2026-01-26T02:44:44.894Z" }, + { url = 
"https://files.pythonhosted.org/packages/fb/76/23ecd2abfe0957b234f6c960f4ade497f55f2c16aeb684d4ecdbf1c95791/multidict-6.7.1-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:57b46b24b5d5ebcc978da4ec23a819a9402b4228b8a90d9c656422b4bdd8a963", size = 48460, upload-time = "2026-01-26T02:44:46.106Z" }, + { url = "https://files.pythonhosted.org/packages/c4/57/a0ed92b23f3a042c36bc4227b72b97eca803f5f1801c1ab77c8a212d455e/multidict-6.7.1-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:e954b24433c768ce78ab7929e84ccf3422e46deb45a4dc9f93438f8217fa2d34", size = 46930, upload-time = "2026-01-26T02:44:47.278Z" }, + { url = "https://files.pythonhosted.org/packages/b5/66/02ec7ace29162e447f6382c495dc95826bf931d3818799bbef11e8f7df1a/multidict-6.7.1-cp313-cp313t-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:3bd231490fa7217cc832528e1cd8752a96f0125ddd2b5749390f7c3ec8721b65", size = 242582, upload-time = "2026-01-26T02:44:48.604Z" }, + { url = "https://files.pythonhosted.org/packages/58/18/64f5a795e7677670e872673aca234162514696274597b3708b2c0d276cce/multidict-6.7.1-cp313-cp313t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:253282d70d67885a15c8a7716f3a73edf2d635793ceda8173b9ecc21f2fb8292", size = 250031, upload-time = "2026-01-26T02:44:50.544Z" }, + { url = "https://files.pythonhosted.org/packages/c8/ed/e192291dbbe51a8290c5686f482084d31bcd9d09af24f63358c3d42fd284/multidict-6.7.1-cp313-cp313t-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:0b4c48648d7649c9335cf1927a8b87fa692de3dcb15faa676c6a6f1f1aabda43", size = 228596, upload-time = "2026-01-26T02:44:51.951Z" }, + { url = "https://files.pythonhosted.org/packages/1e/7e/3562a15a60cf747397e7f2180b0a11dc0c38d9175a650e75fa1b4d325e15/multidict-6.7.1-cp313-cp313t-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:98bc624954ec4d2c7cb074b8eefc2b5d0ce7d482e410df446414355d158fe4ca", size = 
257492, upload-time = "2026-01-26T02:44:53.902Z" }, + { url = "https://files.pythonhosted.org/packages/24/02/7d0f9eae92b5249bb50ac1595b295f10e263dd0078ebb55115c31e0eaccd/multidict-6.7.1-cp313-cp313t-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:1b99af4d9eec0b49927b4402bcbb58dea89d3e0db8806a4086117019939ad3dd", size = 255899, upload-time = "2026-01-26T02:44:55.316Z" }, + { url = "https://files.pythonhosted.org/packages/00/e3/9b60ed9e23e64c73a5cde95269ef1330678e9c6e34dd4eb6b431b85b5a10/multidict-6.7.1-cp313-cp313t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:6aac4f16b472d5b7dc6f66a0d49dd57b0e0902090be16594dc9ebfd3d17c47e7", size = 247970, upload-time = "2026-01-26T02:44:56.783Z" }, + { url = "https://files.pythonhosted.org/packages/3e/06/538e58a63ed5cfb0bd4517e346b91da32fde409d839720f664e9a4ae4f9d/multidict-6.7.1-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:21f830fe223215dffd51f538e78c172ed7c7f60c9b96a2bf05c4848ad49921c3", size = 245060, upload-time = "2026-01-26T02:44:58.195Z" }, + { url = "https://files.pythonhosted.org/packages/b2/2f/d743a3045a97c895d401e9bd29aaa09b94f5cbdf1bd561609e5a6c431c70/multidict-6.7.1-cp313-cp313t-musllinux_1_2_armv7l.whl", hash = "sha256:f5dd81c45b05518b9aa4da4aa74e1c93d715efa234fd3e8a179df611cc85e5f4", size = 235888, upload-time = "2026-01-26T02:44:59.57Z" }, + { url = "https://files.pythonhosted.org/packages/38/83/5a325cac191ab28b63c52f14f1131f3b0a55ba3b9aa65a6d0bf2a9b921a0/multidict-6.7.1-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:eb304767bca2bb92fb9c5bd33cedc95baee5bb5f6c88e63706533a1c06ad08c8", size = 243554, upload-time = "2026-01-26T02:45:01.054Z" }, + { url = "https://files.pythonhosted.org/packages/20/1f/9d2327086bd15da2725ef6aae624208e2ef828ed99892b17f60c344e57ed/multidict-6.7.1-cp313-cp313t-musllinux_1_2_ppc64le.whl", hash = "sha256:c9035dde0f916702850ef66460bc4239d89d08df4d02023a5926e7446724212c", size = 252341, upload-time = 
"2026-01-26T02:45:02.484Z" }, + { url = "https://files.pythonhosted.org/packages/e8/2c/2a1aa0280cf579d0f6eed8ee5211c4f1730bd7e06c636ba2ee6aafda302e/multidict-6.7.1-cp313-cp313t-musllinux_1_2_s390x.whl", hash = "sha256:af959b9beeb66c822380f222f0e0a1889331597e81f1ded7f374f3ecb0fd6c52", size = 246391, upload-time = "2026-01-26T02:45:03.862Z" }, + { url = "https://files.pythonhosted.org/packages/e5/03/7ca022ffc36c5a3f6e03b179a5ceb829be9da5783e6fe395f347c0794680/multidict-6.7.1-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:41f2952231456154ee479651491e94118229844dd7226541788be783be2b5108", size = 243422, upload-time = "2026-01-26T02:45:05.296Z" }, + { url = "https://files.pythonhosted.org/packages/dc/1d/b31650eab6c5778aceed46ba735bd97f7c7d2f54b319fa916c0f96e7805b/multidict-6.7.1-cp313-cp313t-win32.whl", hash = "sha256:df9f19c28adcb40b6aae30bbaa1478c389efd50c28d541d76760199fc1037c32", size = 47770, upload-time = "2026-01-26T02:45:06.754Z" }, + { url = "https://files.pythonhosted.org/packages/ac/5b/2d2d1d522e51285bd61b1e20df8f47ae1a9d80839db0b24ea783b3832832/multidict-6.7.1-cp313-cp313t-win_amd64.whl", hash = "sha256:d54ecf9f301853f2c5e802da559604b3e95bb7a3b01a9c295c6ee591b9882de8", size = 53109, upload-time = "2026-01-26T02:45:08.044Z" }, + { url = "https://files.pythonhosted.org/packages/3d/a3/cc409ba012c83ca024a308516703cf339bdc4b696195644a7215a5164a24/multidict-6.7.1-cp313-cp313t-win_arm64.whl", hash = "sha256:5a37ca18e360377cfda1d62f5f382ff41f2b8c4ccb329ed974cc2e1643440118", size = 45573, upload-time = "2026-01-26T02:45:09.349Z" }, + { url = "https://files.pythonhosted.org/packages/91/cc/db74228a8be41884a567e88a62fd589a913708fcf180d029898c17a9a371/multidict-6.7.1-cp314-cp314-macosx_10_15_universal2.whl", hash = "sha256:8f333ec9c5eb1b7105e3b84b53141e66ca05a19a605368c55450b6ba208cb9ee", size = 75190, upload-time = "2026-01-26T02:45:10.651Z" }, + { url = 
"https://files.pythonhosted.org/packages/d5/22/492f2246bb5b534abd44804292e81eeaf835388901f0c574bac4eeec73c5/multidict-6.7.1-cp314-cp314-macosx_10_15_x86_64.whl", hash = "sha256:a407f13c188f804c759fc6a9f88286a565c242a76b27626594c133b82883b5c2", size = 44486, upload-time = "2026-01-26T02:45:11.938Z" }, + { url = "https://files.pythonhosted.org/packages/f1/4f/733c48f270565d78b4544f2baddc2fb2a245e5a8640254b12c36ac7ac68e/multidict-6.7.1-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:0e161ddf326db5577c3a4cc2d8648f81456e8a20d40415541587a71620d7a7d1", size = 43219, upload-time = "2026-01-26T02:45:14.346Z" }, + { url = "https://files.pythonhosted.org/packages/24/bb/2c0c2287963f4259c85e8bcbba9182ced8d7fca65c780c38e99e61629d11/multidict-6.7.1-cp314-cp314-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:1e3a8bb24342a8201d178c3b4984c26ba81a577c80d4d525727427460a50c22d", size = 245132, upload-time = "2026-01-26T02:45:15.712Z" }, + { url = "https://files.pythonhosted.org/packages/a7/f9/44d4b3064c65079d2467888794dea218d1601898ac50222ab8a9a8094460/multidict-6.7.1-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:97231140a50f5d447d3164f994b86a0bed7cd016e2682f8650d6a9158e14fd31", size = 252420, upload-time = "2026-01-26T02:45:17.293Z" }, + { url = "https://files.pythonhosted.org/packages/8b/13/78f7275e73fa17b24c9a51b0bd9d73ba64bb32d0ed51b02a746eb876abe7/multidict-6.7.1-cp314-cp314-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:6b10359683bd8806a200fd2909e7c8ca3a7b24ec1d8132e483d58e791d881048", size = 233510, upload-time = "2026-01-26T02:45:19.356Z" }, + { url = "https://files.pythonhosted.org/packages/4b/25/8167187f62ae3cbd52da7893f58cb036b47ea3fb67138787c76800158982/multidict-6.7.1-cp314-cp314-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:283ddac99f7ac25a4acadbf004cb5ae34480bbeb063520f70ce397b281859362", size = 264094, 
upload-time = "2026-01-26T02:45:20.834Z" }, + { url = "https://files.pythonhosted.org/packages/a1/e7/69a3a83b7b030cf283fb06ce074a05a02322359783424d7edf0f15fe5022/multidict-6.7.1-cp314-cp314-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:538cec1e18c067d0e6103aa9a74f9e832904c957adc260e61cd9d8cf0c3b3d37", size = 260786, upload-time = "2026-01-26T02:45:22.818Z" }, + { url = "https://files.pythonhosted.org/packages/fe/3b/8ec5074bcfc450fe84273713b4b0a0dd47c0249358f5d82eb8104ffe2520/multidict-6.7.1-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:7eee46ccb30ff48a1e35bb818cc90846c6be2b68240e42a78599166722cea709", size = 248483, upload-time = "2026-01-26T02:45:24.368Z" }, + { url = "https://files.pythonhosted.org/packages/48/5a/d5a99e3acbca0e29c5d9cba8f92ceb15dce78bab963b308ae692981e3a5d/multidict-6.7.1-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:fa263a02f4f2dd2d11a7b1bb4362aa7cb1049f84a9235d31adf63f30143469a0", size = 248403, upload-time = "2026-01-26T02:45:25.982Z" }, + { url = "https://files.pythonhosted.org/packages/35/48/e58cd31f6c7d5102f2a4bf89f96b9cf7e00b6c6f3d04ecc44417c00a5a3c/multidict-6.7.1-cp314-cp314-musllinux_1_2_armv7l.whl", hash = "sha256:2e1425e2f99ec5bd36c15a01b690a1a2456209c5deed58f95469ffb46039ccbb", size = 240315, upload-time = "2026-01-26T02:45:27.487Z" }, + { url = "https://files.pythonhosted.org/packages/94/33/1cd210229559cb90b6786c30676bb0c58249ff42f942765f88793b41fdce/multidict-6.7.1-cp314-cp314-musllinux_1_2_i686.whl", hash = "sha256:497394b3239fc6f0e13a78a3e1b61296e72bf1c5f94b4c4eb80b265c37a131cd", size = 245528, upload-time = "2026-01-26T02:45:28.991Z" }, + { url = "https://files.pythonhosted.org/packages/64/f2/6e1107d226278c876c783056b7db43d800bb64c6131cec9c8dfb6903698e/multidict-6.7.1-cp314-cp314-musllinux_1_2_ppc64le.whl", hash = "sha256:233b398c29d3f1b9676b4b6f75c518a06fcb2ea0b925119fb2c1bc35c05e1601", size = 258784, upload-time = 
"2026-01-26T02:45:30.503Z" }, + { url = "https://files.pythonhosted.org/packages/4d/c1/11f664f14d525e4a1b5327a82d4de61a1db604ab34c6603bb3c2cc63ad34/multidict-6.7.1-cp314-cp314-musllinux_1_2_s390x.whl", hash = "sha256:93b1818e4a6e0930454f0f2af7dfce69307ca03cdcfb3739bf4d91241967b6c1", size = 251980, upload-time = "2026-01-26T02:45:32.603Z" }, + { url = "https://files.pythonhosted.org/packages/e1/9f/75a9ac888121d0c5bbd4ecf4eead45668b1766f6baabfb3b7f66a410e231/multidict-6.7.1-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:f33dc2a3abe9249ea5d8360f969ec7f4142e7ac45ee7014d8f8d5acddf178b7b", size = 243602, upload-time = "2026-01-26T02:45:34.043Z" }, + { url = "https://files.pythonhosted.org/packages/9a/e7/50bf7b004cc8525d80dbbbedfdc7aed3e4c323810890be4413e589074032/multidict-6.7.1-cp314-cp314-win32.whl", hash = "sha256:3ab8b9d8b75aef9df299595d5388b14530839f6422333357af1339443cff777d", size = 40930, upload-time = "2026-01-26T02:45:36.278Z" }, + { url = "https://files.pythonhosted.org/packages/e0/bf/52f25716bbe93745595800f36fb17b73711f14da59ed0bb2eba141bc9f0f/multidict-6.7.1-cp314-cp314-win_amd64.whl", hash = "sha256:5e01429a929600e7dab7b166062d9bb54a5eed752384c7384c968c2afab8f50f", size = 45074, upload-time = "2026-01-26T02:45:37.546Z" }, + { url = "https://files.pythonhosted.org/packages/97/ab/22803b03285fa3a525f48217963da3a65ae40f6a1b6f6cf2768879e208f9/multidict-6.7.1-cp314-cp314-win_arm64.whl", hash = "sha256:4885cb0e817aef5d00a2e8451d4665c1808378dc27c2705f1bf4ef8505c0d2e5", size = 42471, upload-time = "2026-01-26T02:45:38.889Z" }, + { url = "https://files.pythonhosted.org/packages/e0/6d/f9293baa6146ba9507e360ea0292b6422b016907c393e2f63fc40ab7b7b5/multidict-6.7.1-cp314-cp314t-macosx_10_15_universal2.whl", hash = "sha256:0458c978acd8e6ea53c81eefaddbbee9c6c5e591f41b3f5e8e194780fe026581", size = 82401, upload-time = "2026-01-26T02:45:40.254Z" }, + { url = 
"https://files.pythonhosted.org/packages/7a/68/53b5494738d83558d87c3c71a486504d8373421c3e0dbb6d0db48ad42ee0/multidict-6.7.1-cp314-cp314t-macosx_10_15_x86_64.whl", hash = "sha256:c0abd12629b0af3cf590982c0b413b1e7395cd4ec026f30986818ab95bfaa94a", size = 48143, upload-time = "2026-01-26T02:45:41.635Z" }, + { url = "https://files.pythonhosted.org/packages/37/e8/5284c53310dcdc99ce5d66563f6e5773531a9b9fe9ec7a615e9bc306b05f/multidict-6.7.1-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:14525a5f61d7d0c94b368a42cff4c9a4e7ba2d52e2672a7b23d84dc86fb02b0c", size = 46507, upload-time = "2026-01-26T02:45:42.99Z" }, + { url = "https://files.pythonhosted.org/packages/e4/fc/6800d0e5b3875568b4083ecf5f310dcf91d86d52573160834fb4bfcf5e4f/multidict-6.7.1-cp314-cp314t-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:17307b22c217b4cf05033dabefe68255a534d637c6c9b0cc8382718f87be4262", size = 239358, upload-time = "2026-01-26T02:45:44.376Z" }, + { url = "https://files.pythonhosted.org/packages/41/75/4ad0973179361cdf3a113905e6e088173198349131be2b390f9fa4da5fc6/multidict-6.7.1-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:7a7e590ff876a3eaf1c02a4dfe0724b6e69a9e9de6d8f556816f29c496046e59", size = 246884, upload-time = "2026-01-26T02:45:47.167Z" }, + { url = "https://files.pythonhosted.org/packages/c3/9c/095bb28b5da139bd41fb9a5d5caff412584f377914bd8787c2aa98717130/multidict-6.7.1-cp314-cp314t-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:5fa6a95dfee63893d80a34758cd0e0c118a30b8dcb46372bf75106c591b77889", size = 225878, upload-time = "2026-01-26T02:45:48.698Z" }, + { url = "https://files.pythonhosted.org/packages/07/d0/c0a72000243756e8f5a277b6b514fa005f2c73d481b7d9e47cd4568aa2e4/multidict-6.7.1-cp314-cp314t-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:a0543217a6a017692aa6ae5cc39adb75e587af0f3a82288b1492eb73dd6cc2a4", size = 
253542, upload-time = "2026-01-26T02:45:50.164Z" }, + { url = "https://files.pythonhosted.org/packages/c0/6b/f69da15289e384ecf2a68837ec8b5ad8c33e973aa18b266f50fe55f24b8c/multidict-6.7.1-cp314-cp314t-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:f99fe611c312b3c1c0ace793f92464d8cd263cc3b26b5721950d977b006b6c4d", size = 252403, upload-time = "2026-01-26T02:45:51.779Z" }, + { url = "https://files.pythonhosted.org/packages/a2/76/b9669547afa5a1a25cd93eaca91c0da1c095b06b6d2d8ec25b713588d3a1/multidict-6.7.1-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:9004d8386d133b7e6135679424c91b0b854d2d164af6ea3f289f8f2761064609", size = 244889, upload-time = "2026-01-26T02:45:53.27Z" }, + { url = "https://files.pythonhosted.org/packages/7e/a9/a50d2669e506dad33cfc45b5d574a205587b7b8a5f426f2fbb2e90882588/multidict-6.7.1-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:e628ef0e6859ffd8273c69412a2465c4be4a9517d07261b33334b5ec6f3c7489", size = 241982, upload-time = "2026-01-26T02:45:54.919Z" }, + { url = "https://files.pythonhosted.org/packages/c5/bb/1609558ad8b456b4827d3c5a5b775c93b87878fd3117ed3db3423dfbce1b/multidict-6.7.1-cp314-cp314t-musllinux_1_2_armv7l.whl", hash = "sha256:841189848ba629c3552035a6a7f5bf3b02eb304e9fea7492ca220a8eda6b0e5c", size = 232415, upload-time = "2026-01-26T02:45:56.981Z" }, + { url = "https://files.pythonhosted.org/packages/d8/59/6f61039d2aa9261871e03ab9dc058a550d240f25859b05b67fd70f80d4b3/multidict-6.7.1-cp314-cp314t-musllinux_1_2_i686.whl", hash = "sha256:ce1bbd7d780bb5a0da032e095c951f7014d6b0a205f8318308140f1a6aba159e", size = 240337, upload-time = "2026-01-26T02:45:58.698Z" }, + { url = "https://files.pythonhosted.org/packages/a1/29/fdc6a43c203890dc2ae9249971ecd0c41deaedfe00d25cb6564b2edd99eb/multidict-6.7.1-cp314-cp314t-musllinux_1_2_ppc64le.whl", hash = "sha256:b26684587228afed0d50cf804cc71062cc9c1cdf55051c4c6345d372947b268c", size = 248788, upload-time = 
"2026-01-26T02:46:00.862Z" }, + { url = "https://files.pythonhosted.org/packages/a9/14/a153a06101323e4cf086ecee3faadba52ff71633d471f9685c42e3736163/multidict-6.7.1-cp314-cp314t-musllinux_1_2_s390x.whl", hash = "sha256:9f9af11306994335398293f9958071019e3ab95e9a707dc1383a35613f6abcb9", size = 242842, upload-time = "2026-01-26T02:46:02.824Z" }, + { url = "https://files.pythonhosted.org/packages/41/5f/604ae839e64a4a6efc80db94465348d3b328ee955e37acb24badbcd24d83/multidict-6.7.1-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:b4938326284c4f1224178a560987b6cf8b4d38458b113d9b8c1db1a836e640a2", size = 240237, upload-time = "2026-01-26T02:46:05.898Z" }, + { url = "https://files.pythonhosted.org/packages/5f/60/c3a5187bf66f6fb546ff4ab8fb5a077cbdd832d7b1908d4365c7f74a1917/multidict-6.7.1-cp314-cp314t-win32.whl", hash = "sha256:98655c737850c064a65e006a3df7c997cd3b220be4ec8fe26215760b9697d4d7", size = 48008, upload-time = "2026-01-26T02:46:07.468Z" }, + { url = "https://files.pythonhosted.org/packages/0c/f7/addf1087b860ac60e6f382240f64fb99f8bfb532bb06f7c542b83c29ca61/multidict-6.7.1-cp314-cp314t-win_amd64.whl", hash = "sha256:497bde6223c212ba11d462853cfa4f0ae6ef97465033e7dc9940cdb3ab5b48e5", size = 53542, upload-time = "2026-01-26T02:46:08.809Z" }, + { url = "https://files.pythonhosted.org/packages/4c/81/4629d0aa32302ef7b2ec65c75a728cc5ff4fa410c50096174c1632e70b3e/multidict-6.7.1-cp314-cp314t-win_arm64.whl", hash = "sha256:2bbd113e0d4af5db41d5ebfe9ccaff89de2120578164f86a5d17d5a576d1e5b2", size = 44719, upload-time = "2026-01-26T02:46:11.146Z" }, + { url = "https://files.pythonhosted.org/packages/81/08/7036c080d7117f28a4af526d794aab6a84463126db031b007717c1a6676e/multidict-6.7.1-py3-none-any.whl", hash = "sha256:55d97cc6dae627efa6a6e548885712d4864b81110ac76fa4e534c03819fa4a56", size = 12319, upload-time = "2026-01-26T02:46:44.004Z" }, +] + +[[package]] +name = "munch" +version = "4.0.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = 
"https://files.pythonhosted.org/packages/e7/2b/45098135b5f9f13221820d90f9e0516e11a2a0f55012c13b081d202b782a/munch-4.0.0.tar.gz", hash = "sha256:542cb151461263216a4e37c3fd9afc425feeaf38aaa3025cd2a981fadb422235", size = 19089, upload-time = "2023-07-01T09:49:35.98Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/56/b3/7c69b37f03260a061883bec0e7b05be7117c1b1c85f5212c72c8c2bc3c8c/munch-4.0.0-py2.py3-none-any.whl", hash = "sha256:71033c45db9fb677a0b7eb517a4ce70ae09258490e419b0e7f00d1e386ecb1b4", size = 9950, upload-time = "2023-07-01T09:49:34.472Z" }, +] + +[[package]] +name = "narwhals" +version = "2.18.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/59/96/45218c2fdec4c9f22178f905086e85ef1a6d63862dcc3cd68eb60f1867f5/narwhals-2.18.1.tar.gz", hash = "sha256:652a1fcc9d432bbf114846688884c215f17eb118aa640b7419295d2f910d2a8b", size = 620578, upload-time = "2026-03-24T15:11:25.456Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/3f/c3/06490e98393dcb4d6ce2bf331a39335375c300afaef526897881fbeae6ab/narwhals-2.18.1-py3-none-any.whl", hash = "sha256:a0a8bb80205323851338888ba3a12b4f65d352362c8a94be591244faf36504ad", size = 444952, upload-time = "2026-03-24T15:11:23.801Z" }, +] + +[[package]] +name = "netaddr" +version = "1.3.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/54/90/188b2a69654f27b221fba92fda7217778208532c962509e959a9cee5229d/netaddr-1.3.0.tar.gz", hash = "sha256:5c3c3d9895b551b763779ba7db7a03487dc1f8e3b385af819af341ae9ef6e48a", size = 2260504, upload-time = "2024-05-28T21:30:37.743Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/12/cc/f4fe2c7ce68b92cbf5b2d379ca366e1edae38cccaad00f69f529b460c3ef/netaddr-1.3.0-py3-none-any.whl", hash = "sha256:c2c6a8ebe5554ce33b7d5b3a306b71bbb373e000bbbf2350dd5213cc56e3dbbe", size = 2262023, upload-time = "2024-05-28T21:30:34.191Z" }, +] + +[[package]] +name 
= "nodeenv" +version = "1.10.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/24/bf/d1bda4f6168e0b2e9e5958945e01910052158313224ada5ce1fb2e1113b8/nodeenv-1.10.0.tar.gz", hash = "sha256:996c191ad80897d076bdfba80a41994c2b47c68e224c542b48feba42ba00f8bb", size = 55611, upload-time = "2025-12-20T14:08:54.006Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/88/b2/d0896bdcdc8d28a7fc5717c305f1a861c26e18c05047949fb371034d98bd/nodeenv-1.10.0-py2.py3-none-any.whl", hash = "sha256:5bb13e3eed2923615535339b3c620e76779af4cb4c6a90deccc9e36b274d3827", size = 23438, upload-time = "2025-12-20T14:08:52.782Z" }, +] + +[[package]] +name = "numpy" +version = "2.4.4" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/d7/9f/b8cef5bffa569759033adda9481211426f12f53299629b410340795c2514/numpy-2.4.4.tar.gz", hash = "sha256:2d390634c5182175533585cc89f3608a4682ccb173cc9bb940b2881c8d6f8fa0", size = 20731587, upload-time = "2026-03-29T13:22:01.298Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/28/05/32396bec30fb2263770ee910142f49c1476d08e8ad41abf8403806b520ce/numpy-2.4.4-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:15716cfef24d3a9762e3acdf87e27f58dc823d1348f765bbea6bef8c639bfa1b", size = 16689272, upload-time = "2026-03-29T13:18:49.223Z" }, + { url = "https://files.pythonhosted.org/packages/c5/f3/a983d28637bfcd763a9c7aafdb6d5c0ebf3d487d1e1459ffdb57e2f01117/numpy-2.4.4-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:23cbfd4c17357c81021f21540da84ee282b9c8fba38a03b7b9d09ba6b951421e", size = 14699573, upload-time = "2026-03-29T13:18:52.629Z" }, + { url = "https://files.pythonhosted.org/packages/9b/fd/e5ecca1e78c05106d98028114f5c00d3eddb41207686b2b7de3e477b0e22/numpy-2.4.4-cp312-cp312-macosx_14_0_arm64.whl", hash = "sha256:8b3b60bb7cba2c8c81837661c488637eee696f59a877788a396d33150c35d842", size = 5204782, upload-time = 
"2026-03-29T13:18:55.579Z" }, + { url = "https://files.pythonhosted.org/packages/de/2f/702a4594413c1a8632092beae8aba00f1d67947389369b3777aed783fdca/numpy-2.4.4-cp312-cp312-macosx_14_0_x86_64.whl", hash = "sha256:e4a010c27ff6f210ff4c6ef34394cd61470d01014439b192ec22552ee867f2a8", size = 6552038, upload-time = "2026-03-29T13:18:57.769Z" }, + { url = "https://files.pythonhosted.org/packages/7f/37/eed308a8f56cba4d1fdf467a4fc67ef4ff4bf1c888f5fc980481890104b1/numpy-2.4.4-cp312-cp312-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:f9e75681b59ddaa5e659898085ae0eaea229d054f2ac0c7e563a62205a700121", size = 15670666, upload-time = "2026-03-29T13:19:00.341Z" }, + { url = "https://files.pythonhosted.org/packages/0a/0d/0e3ecece05b7a7e87ab9fb587855548da437a061326fff64a223b6dcb78a/numpy-2.4.4-cp312-cp312-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:81f4a14bee47aec54f883e0cad2d73986640c1590eb9bfaaba7ad17394481e6e", size = 16645480, upload-time = "2026-03-29T13:19:03.63Z" }, + { url = "https://files.pythonhosted.org/packages/34/49/f2312c154b82a286758ee2f1743336d50651f8b5195db18cdb63675ff649/numpy-2.4.4-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:62d6b0f03b694173f9fcb1fb317f7222fd0b0b103e784c6549f5e53a27718c44", size = 17020036, upload-time = "2026-03-29T13:19:07.428Z" }, + { url = "https://files.pythonhosted.org/packages/7b/e9/736d17bd77f1b0ec4f9901aaec129c00d59f5d84d5e79bba540ef12c2330/numpy-2.4.4-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:fbc356aae7adf9e6336d336b9c8111d390a05df88f1805573ebb0807bd06fd1d", size = 18368643, upload-time = "2026-03-29T13:19:10.775Z" }, + { url = "https://files.pythonhosted.org/packages/63/f6/d417977c5f519b17c8a5c3bc9e8304b0908b0e21136fe43bf628a1343914/numpy-2.4.4-cp312-cp312-win32.whl", hash = "sha256:0d35aea54ad1d420c812bfa0385c71cd7cc5bcf7c65fed95fc2cd02fe8c79827", size = 5961117, upload-time = "2026-03-29T13:19:13.464Z" }, + { url = 
"https://files.pythonhosted.org/packages/2d/5b/e1deebf88ff431b01b7406ca3583ab2bbb90972bbe1c568732e49c844f7e/numpy-2.4.4-cp312-cp312-win_amd64.whl", hash = "sha256:b5f0362dc928a6ecd9db58868fca5e48485205e3855957bdedea308f8672ea4a", size = 12320584, upload-time = "2026-03-29T13:19:16.155Z" }, + { url = "https://files.pythonhosted.org/packages/58/89/e4e856ac82a68c3ed64486a544977d0e7bdd18b8da75b78a577ca31c4395/numpy-2.4.4-cp312-cp312-win_arm64.whl", hash = "sha256:846300f379b5b12cc769334464656bc882e0735d27d9726568bc932fdc49d5ec", size = 10221450, upload-time = "2026-03-29T13:19:18.994Z" }, + { url = "https://files.pythonhosted.org/packages/14/1d/d0a583ce4fefcc3308806a749a536c201ed6b5ad6e1322e227ee4848979d/numpy-2.4.4-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:08f2e31ed5e6f04b118e49821397f12767934cfdd12a1ce86a058f91e004ee50", size = 16684933, upload-time = "2026-03-29T13:19:22.47Z" }, + { url = "https://files.pythonhosted.org/packages/c1/62/2b7a48fbb745d344742c0277f01286dead15f3f68e4f359fbfcf7b48f70f/numpy-2.4.4-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:e823b8b6edc81e747526f70f71a9c0a07ac4e7ad13020aa736bb7c9d67196115", size = 14694532, upload-time = "2026-03-29T13:19:25.581Z" }, + { url = "https://files.pythonhosted.org/packages/e5/87/499737bfba066b4a3bebff24a8f1c5b2dee410b209bc6668c9be692580f0/numpy-2.4.4-cp313-cp313-macosx_14_0_arm64.whl", hash = "sha256:4a19d9dba1a76618dd86b164d608566f393f8ec6ac7c44f0cc879011c45e65af", size = 5199661, upload-time = "2026-03-29T13:19:28.31Z" }, + { url = "https://files.pythonhosted.org/packages/cd/da/464d551604320d1491bc345efed99b4b7034143a85787aab78d5691d5a0e/numpy-2.4.4-cp313-cp313-macosx_14_0_x86_64.whl", hash = "sha256:d2a8490669bfe99a233298348acc2d824d496dee0e66e31b66a6022c2ad74a5c", size = 6547539, upload-time = "2026-03-29T13:19:30.97Z" }, + { url = 
"https://files.pythonhosted.org/packages/7d/90/8d23e3b0dafd024bf31bdec225b3bb5c2dbfa6912f8a53b8659f21216cbf/numpy-2.4.4-cp313-cp313-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:45dbed2ab436a9e826e302fcdcbe9133f9b0006e5af7168afb8963a6520da103", size = 15668806, upload-time = "2026-03-29T13:19:33.887Z" }, + { url = "https://files.pythonhosted.org/packages/d1/73/a9d864e42a01896bb5974475438f16086be9ba1f0d19d0bb7a07427c4a8b/numpy-2.4.4-cp313-cp313-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:c901b15172510173f5cb310eae652908340f8dede90fff9e3bf6c0d8dfd92f83", size = 16632682, upload-time = "2026-03-29T13:19:37.336Z" }, + { url = "https://files.pythonhosted.org/packages/34/fb/14570d65c3bde4e202a031210475ae9cde9b7686a2e7dc97ee67d2833b35/numpy-2.4.4-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:99d838547ace2c4aace6c4f76e879ddfe02bb58a80c1549928477862b7a6d6ed", size = 17019810, upload-time = "2026-03-29T13:19:40.963Z" }, + { url = "https://files.pythonhosted.org/packages/8a/77/2ba9d87081fd41f6d640c83f26fb7351e536b7ce6dd9061b6af5904e8e46/numpy-2.4.4-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:0aec54fd785890ecca25a6003fd9a5aed47ad607bbac5cd64f836ad8666f4959", size = 18357394, upload-time = "2026-03-29T13:19:44.859Z" }, + { url = "https://files.pythonhosted.org/packages/a2/23/52666c9a41708b0853fa3b1a12c90da38c507a3074883823126d4e9d5b30/numpy-2.4.4-cp313-cp313-win32.whl", hash = "sha256:07077278157d02f65c43b1b26a3886bce886f95d20aabd11f87932750dfb14ed", size = 5959556, upload-time = "2026-03-29T13:19:47.661Z" }, + { url = "https://files.pythonhosted.org/packages/57/fb/48649b4971cde70d817cf97a2a2fdc0b4d8308569f1dd2f2611959d2e0cf/numpy-2.4.4-cp313-cp313-win_amd64.whl", hash = "sha256:5c70f1cc1c4efbe316a572e2d8b9b9cc44e89b95f79ca3331553fbb63716e2bf", size = 12317311, upload-time = "2026-03-29T13:19:50.67Z" }, + { url = 
"https://files.pythonhosted.org/packages/ba/d8/11490cddd564eb4de97b4579ef6bfe6a736cc07e94c1598590ae25415e01/numpy-2.4.4-cp313-cp313-win_arm64.whl", hash = "sha256:ef4059d6e5152fa1a39f888e344c73fdc926e1b2dd58c771d67b0acfbf2aa67d", size = 10222060, upload-time = "2026-03-29T13:19:54.229Z" }, + { url = "https://files.pythonhosted.org/packages/99/5d/dab4339177a905aad3e2221c915b35202f1ec30d750dd2e5e9d9a72b804b/numpy-2.4.4-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:4bbc7f303d125971f60ec0aaad5e12c62d0d2c925f0ab1273debd0e4ba37aba5", size = 14822302, upload-time = "2026-03-29T13:19:57.585Z" }, + { url = "https://files.pythonhosted.org/packages/eb/e4/0564a65e7d3d97562ed6f9b0fd0fb0a6f559ee444092f105938b50043876/numpy-2.4.4-cp313-cp313t-macosx_14_0_arm64.whl", hash = "sha256:4d6d57903571f86180eb98f8f0c839fa9ebbfb031356d87f1361be91e433f5b7", size = 5327407, upload-time = "2026-03-29T13:20:00.601Z" }, + { url = "https://files.pythonhosted.org/packages/29/8d/35a3a6ce5ad371afa58b4700f1c820f8f279948cca32524e0a695b0ded83/numpy-2.4.4-cp313-cp313t-macosx_14_0_x86_64.whl", hash = "sha256:4636de7fd195197b7535f231b5de9e4b36d2c440b6e566d2e4e4746e6af0ca93", size = 6647631, upload-time = "2026-03-29T13:20:02.855Z" }, + { url = "https://files.pythonhosted.org/packages/f4/da/477731acbd5a58a946c736edfdabb2ac5b34c3d08d1ba1a7b437fa0884df/numpy-2.4.4-cp313-cp313t-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:ad2e2ef14e0b04e544ea2fa0a36463f847f113d314aa02e5b402fdf910ef309e", size = 15727691, upload-time = "2026-03-29T13:20:06.004Z" }, + { url = "https://files.pythonhosted.org/packages/e6/db/338535d9b152beabeb511579598418ba0212ce77cf9718edd70262cc4370/numpy-2.4.4-cp313-cp313t-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:5a285b3b96f951841799528cd1f4f01cd70e7e0204b4abebac9463eecfcf2a40", size = 16681241, upload-time = "2026-03-29T13:20:09.417Z" }, + { url = 
"https://files.pythonhosted.org/packages/e2/a9/ad248e8f58beb7a0219b413c9c7d8151c5d285f7f946c3e26695bdbbe2df/numpy-2.4.4-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:f8474c4241bc18b750be2abea9d7a9ec84f46ef861dbacf86a4f6e043401f79e", size = 17085767, upload-time = "2026-03-29T13:20:13.126Z" }, + { url = "https://files.pythonhosted.org/packages/b5/1a/3b88ccd3694681356f70da841630e4725a7264d6a885c8d442a697e1146b/numpy-2.4.4-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:4e874c976154687c1f71715b034739b45c7711bec81db01914770373d125e392", size = 18403169, upload-time = "2026-03-29T13:20:17.096Z" }, + { url = "https://files.pythonhosted.org/packages/c2/c9/fcfd5d0639222c6eac7f304829b04892ef51c96a75d479214d77e3ce6e33/numpy-2.4.4-cp313-cp313t-win32.whl", hash = "sha256:9c585a1790d5436a5374bac930dad6ed244c046ed91b2b2a3634eb2971d21008", size = 6083477, upload-time = "2026-03-29T13:20:20.195Z" }, + { url = "https://files.pythonhosted.org/packages/d5/e3/3938a61d1c538aaec8ed6fd6323f57b0c2d2d2219512434c5c878db76553/numpy-2.4.4-cp313-cp313t-win_amd64.whl", hash = "sha256:93e15038125dc1e5345d9b5b68aa7f996ec33b98118d18c6ca0d0b7d6198b7e8", size = 12457487, upload-time = "2026-03-29T13:20:22.946Z" }, + { url = "https://files.pythonhosted.org/packages/97/6a/7e345032cc60501721ef94e0e30b60f6b0bd601f9174ebd36389a2b86d40/numpy-2.4.4-cp313-cp313t-win_arm64.whl", hash = "sha256:0dfd3f9d3adbe2920b68b5cd3d51444e13a10792ec7154cd0a2f6e74d4ab3233", size = 10292002, upload-time = "2026-03-29T13:20:25.909Z" }, + { url = "https://files.pythonhosted.org/packages/6e/06/c54062f85f673dd5c04cbe2f14c3acb8c8b95e3384869bb8cc9bff8cb9df/numpy-2.4.4-cp314-cp314-macosx_10_15_x86_64.whl", hash = "sha256:f169b9a863d34f5d11b8698ead99febeaa17a13ca044961aa8e2662a6c7766a0", size = 16684353, upload-time = "2026-03-29T13:20:29.504Z" }, + { url = "https://files.pythonhosted.org/packages/4c/39/8a320264a84404c74cc7e79715de85d6130fa07a0898f67fb5cd5bd79908/numpy-2.4.4-cp314-cp314-macosx_11_0_arm64.whl", hash 
= "sha256:2483e4584a1cb3092da4470b38866634bafb223cbcd551ee047633fd2584599a", size = 14704914, upload-time = "2026-03-29T13:20:33.547Z" }, + { url = "https://files.pythonhosted.org/packages/91/fb/287076b2614e1d1044235f50f03748f31fa287e3dbe6abeb35cdfa351eca/numpy-2.4.4-cp314-cp314-macosx_14_0_arm64.whl", hash = "sha256:2d19e6e2095506d1736b7d80595e0f252d76b89f5e715c35e06e937679ea7d7a", size = 5210005, upload-time = "2026-03-29T13:20:36.45Z" }, + { url = "https://files.pythonhosted.org/packages/63/eb/fcc338595309910de6ecabfcef2419a9ce24399680bfb149421fa2df1280/numpy-2.4.4-cp314-cp314-macosx_14_0_x86_64.whl", hash = "sha256:6a246d5914aa1c820c9443ddcee9c02bec3e203b0c080349533fae17727dfd1b", size = 6544974, upload-time = "2026-03-29T13:20:39.014Z" }, + { url = "https://files.pythonhosted.org/packages/44/5d/e7e9044032a716cdfaa3fba27a8e874bf1c5f1912a1ddd4ed071bf8a14a6/numpy-2.4.4-cp314-cp314-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:989824e9faf85f96ec9c7761cd8d29c531ad857bfa1daa930cba85baaecf1a9a", size = 15684591, upload-time = "2026-03-29T13:20:42.146Z" }, + { url = "https://files.pythonhosted.org/packages/98/7c/21252050676612625449b4807d6b695b9ce8a7c9e1c197ee6216c8a65c7c/numpy-2.4.4-cp314-cp314-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:27a8d92cd10f1382a67d7cf4db7ce18341b66438bdd9f691d7b0e48d104c2a9d", size = 16637700, upload-time = "2026-03-29T13:20:46.204Z" }, + { url = "https://files.pythonhosted.org/packages/b1/29/56d2bbef9465db24ef25393383d761a1af4f446a1df9b8cded4fe3a5a5d7/numpy-2.4.4-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:e44319a2953c738205bf3354537979eaa3998ed673395b964c1176083dd46252", size = 17035781, upload-time = "2026-03-29T13:20:50.242Z" }, + { url = "https://files.pythonhosted.org/packages/e3/2b/a35a6d7589d21f44cea7d0a98de5ddcbb3d421b2622a5c96b1edf18707c3/numpy-2.4.4-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:e892aff75639bbef0d2a2cfd55535510df26ff92f63c92cd84ef8d4ba5a5557f", size = 
18362959, upload-time = "2026-03-29T13:20:54.019Z" }, + { url = "https://files.pythonhosted.org/packages/64/c9/d52ec581f2390e0f5f85cbfd80fb83d965fc15e9f0e1aec2195faa142cde/numpy-2.4.4-cp314-cp314-win32.whl", hash = "sha256:1378871da56ca8943c2ba674530924bb8ca40cd228358a3b5f302ad60cf875fc", size = 6008768, upload-time = "2026-03-29T13:20:56.912Z" }, + { url = "https://files.pythonhosted.org/packages/fa/22/4cc31a62a6c7b74a8730e31a4274c5dc80e005751e277a2ce38e675e4923/numpy-2.4.4-cp314-cp314-win_amd64.whl", hash = "sha256:715d1c092715954784bc79e1174fc2a90093dc4dc84ea15eb14dad8abdcdeb74", size = 12449181, upload-time = "2026-03-29T13:20:59.548Z" }, + { url = "https://files.pythonhosted.org/packages/70/2e/14cda6f4d8e396c612d1bf97f22958e92148801d7e4f110cabebdc0eef4b/numpy-2.4.4-cp314-cp314-win_arm64.whl", hash = "sha256:2c194dd721e54ecad9ad387c1d35e63dce5c4450c6dc7dd5611283dda239aabb", size = 10496035, upload-time = "2026-03-29T13:21:02.524Z" }, + { url = "https://files.pythonhosted.org/packages/b1/e8/8fed8c8d848d7ecea092dc3469643f9d10bc3a134a815a3b033da1d2039b/numpy-2.4.4-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:2aa0613a5177c264ff5921051a5719d20095ea586ca88cc802c5c218d1c67d3e", size = 14824958, upload-time = "2026-03-29T13:21:05.671Z" }, + { url = "https://files.pythonhosted.org/packages/05/1a/d8007a5138c179c2bf33ef44503e83d70434d2642877ee8fbb230e7c0548/numpy-2.4.4-cp314-cp314t-macosx_14_0_arm64.whl", hash = "sha256:42c16925aa5a02362f986765f9ebabf20de75cdefdca827d14315c568dcab113", size = 5330020, upload-time = "2026-03-29T13:21:08.635Z" }, + { url = "https://files.pythonhosted.org/packages/99/64/ffb99ac6ae93faf117bcbd5c7ba48a7f45364a33e8e458545d3633615dda/numpy-2.4.4-cp314-cp314t-macosx_14_0_x86_64.whl", hash = "sha256:874f200b2a981c647340f841730fc3a2b54c9d940566a3c4149099591e2c4c3d", size = 6650758, upload-time = "2026-03-29T13:21:10.949Z" }, + { url = 
"https://files.pythonhosted.org/packages/6e/6e/795cc078b78a384052e73b2f6281ff7a700e9bf53bcce2ee579d4f6dd879/numpy-2.4.4-cp314-cp314t-manylinux_2_27_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:c9b39d38a9bd2ae1becd7eac1303d031c5c110ad31f2b319c6e7d98b135c934d", size = 15729948, upload-time = "2026-03-29T13:21:14.047Z" }, + { url = "https://files.pythonhosted.org/packages/5f/86/2acbda8cc2af5f3d7bfc791192863b9e3e19674da7b5e533fded124d1299/numpy-2.4.4-cp314-cp314t-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:b268594bccac7d7cf5844c7732e3f20c50921d94e36d7ec9b79e9857694b1b2f", size = 16679325, upload-time = "2026-03-29T13:21:17.561Z" }, + { url = "https://files.pythonhosted.org/packages/bc/59/cafd83018f4aa55e0ac6fa92aa066c0a1877b77a615ceff1711c260ffae8/numpy-2.4.4-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:ac6b31e35612a26483e20750126d30d0941f949426974cace8e6b5c58a3657b0", size = 17084883, upload-time = "2026-03-29T13:21:21.106Z" }, + { url = "https://files.pythonhosted.org/packages/f0/85/a42548db84e65ece46ab2caea3d3f78b416a47af387fcbb47ec28e660dc2/numpy-2.4.4-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:8e3ed142f2728df44263aaf5fb1f5b0b99f4070c553a0d7f033be65338329150", size = 18403474, upload-time = "2026-03-29T13:21:24.828Z" }, + { url = "https://files.pythonhosted.org/packages/ed/ad/483d9e262f4b831000062e5d8a45e342166ec8aaa1195264982bca267e62/numpy-2.4.4-cp314-cp314t-win32.whl", hash = "sha256:dddbbd259598d7240b18c9d87c56a9d2fb3b02fe266f49a7c101532e78c1d871", size = 6155500, upload-time = "2026-03-29T13:21:28.205Z" }, + { url = "https://files.pythonhosted.org/packages/c7/03/2fc4e14c7bd4ff2964b74ba90ecb8552540b6315f201df70f137faa5c589/numpy-2.4.4-cp314-cp314t-win_amd64.whl", hash = "sha256:a7164afb23be6e37ad90b2f10426149fd75aee07ca55653d2aa41e66c4ef697e", size = 12637755, upload-time = "2026-03-29T13:21:31.107Z" }, + { url = 
"https://files.pythonhosted.org/packages/58/78/548fb8e07b1a341746bfbecb32f2c268470f45fa028aacdbd10d9bc73aab/numpy-2.4.4-cp314-cp314t-win_arm64.whl", hash = "sha256:ba203255017337d39f89bdd58417f03c4426f12beed0440cfd933cb15f8669c7", size = 10566643, upload-time = "2026-03-29T13:21:34.339Z" }, +] + +[[package]] +name = "packaging" +version = "26.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/65/ee/299d360cdc32edc7d2cf530f3accf79c4fca01e96ffc950d8a52213bd8e4/packaging-26.0.tar.gz", hash = "sha256:00243ae351a257117b6a241061796684b084ed1c516a08c48a3f7e147a9d80b4", size = 143416, upload-time = "2026-01-21T20:50:39.064Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/b7/b9/c538f279a4e237a006a2c98387d081e9eb060d203d8ed34467cc0f0b9b53/packaging-26.0-py3-none-any.whl", hash = "sha256:b36f1fef9334a5588b4166f8bcd26a14e521f2b55e6b9de3aaa80d3ff7a37529", size = 74366, upload-time = "2026-01-21T20:50:37.788Z" }, +] + +[[package]] +name = "platformdirs" +version = "4.9.4" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/19/56/8d4c30c8a1d07013911a8fdbd8f89440ef9f08d07a1b50ab8ca8be5a20f9/platformdirs-4.9.4.tar.gz", hash = "sha256:1ec356301b7dc906d83f371c8f487070e99d3ccf9e501686456394622a01a934", size = 28737, upload-time = "2026-03-05T18:34:13.271Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/63/d7/97f7e3a6abb67d8080dd406fd4df842c2be0efaf712d1c899c32a075027c/platformdirs-4.9.4-py3-none-any.whl", hash = "sha256:68a9a4619a666ea6439f2ff250c12a853cd1cbd5158d258bd824a7df6be2f868", size = 21216, upload-time = "2026-03-05T18:34:12.172Z" }, +] + +[[package]] +name = "plotille" +version = "6.0.5" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/46/51/a6093145179139e7dffc01aa499b0169c91154a743197a3fd16252a9e90d/plotille-6.0.5.tar.gz", hash = 
"sha256:26d2cef5d4feb8632c9710442ad49fc57f9d5b20881c21ac7954c76208b5600b", size = 58167, upload-time = "2026-02-17T14:30:31.323Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/08/b7/a098cacb7f81f617157cb9c9c7c5ac66b6ac886075722d443e8e25b5a4b8/plotille-6.0.5-py3-none-any.whl", hash = "sha256:c35be42f8aa080853904d123894194a5ea7e095bdeb76d7b61fd41c89cdeaeee", size = 62532, upload-time = "2026-02-17T14:30:30.235Z" }, +] + +[[package]] +name = "plotly" +version = "6.6.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "narwhals" }, + { name = "packaging" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/24/fb/41efe84970cfddefd4ccf025e2cbfafe780004555f583e93dba3dac2cdef/plotly-6.6.0.tar.gz", hash = "sha256:b897f15f3b02028d69f755f236be890ba950d0a42d7dfc619b44e2d8cea8748c", size = 7027956, upload-time = "2026-03-02T21:10:25.321Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/52/d2/c6e44dba74f17c6216ce1b56044a9b93a929f1c2d5bdaff892512b260f5e/plotly-6.6.0-py3-none-any.whl", hash = "sha256:8d6daf0f87412e0c0bfe72e809d615217ab57cc715899a1e5145135a7800d1d0", size = 9910315, upload-time = "2026-03-02T21:10:18.131Z" }, +] + +[[package]] +name = "pluggy" +version = "1.6.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/f9/e2/3e91f31a7d2b083fe6ef3fa267035b518369d9511ffab804f839851d2779/pluggy-1.6.0.tar.gz", hash = "sha256:7dcc130b76258d33b90f61b658791dede3486c3e6bfb003ee5c9bfb396dd22f3", size = 69412, upload-time = "2025-05-15T12:30:07.975Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/54/20/4d324d65cc6d9205fabedc306948156824eb9f0ee1633355a8f7ec5c66bf/pluggy-1.6.0-py3-none-any.whl", hash = "sha256:e920276dd6813095e9377c0bc5566d94c932c33b27a3e3945d8389c374dd4746", size = 20538, upload-time = "2025-05-15T12:30:06.134Z" }, +] + +[[package]] +name = "propcache" +version = "0.4.1" +source = { registry = 
"https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/9e/da/e9fc233cf63743258bff22b3dfa7ea5baef7b5bc324af47a0ad89b8ffc6f/propcache-0.4.1.tar.gz", hash = "sha256:f48107a8c637e80362555f37ecf49abe20370e557cc4ab374f04ec4423c97c3d", size = 46442, upload-time = "2025-10-08T19:49:02.291Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/a2/0f/f17b1b2b221d5ca28b4b876e8bb046ac40466513960646bda8e1853cdfa2/propcache-0.4.1-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:e153e9cd40cc8945138822807139367f256f89c6810c2634a4f6902b52d3b4e2", size = 80061, upload-time = "2025-10-08T19:46:46.075Z" }, + { url = "https://files.pythonhosted.org/packages/76/47/8ccf75935f51448ba9a16a71b783eb7ef6b9ee60f5d14c7f8a8a79fbeed7/propcache-0.4.1-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:cd547953428f7abb73c5ad82cbb32109566204260d98e41e5dfdc682eb7f8403", size = 46037, upload-time = "2025-10-08T19:46:47.23Z" }, + { url = "https://files.pythonhosted.org/packages/0a/b6/5c9a0e42df4d00bfb4a3cbbe5cf9f54260300c88a0e9af1f47ca5ce17ac0/propcache-0.4.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:f048da1b4f243fc44f205dfd320933a951b8d89e0afd4c7cacc762a8b9165207", size = 47324, upload-time = "2025-10-08T19:46:48.384Z" }, + { url = "https://files.pythonhosted.org/packages/9e/d3/6c7ee328b39a81ee877c962469f1e795f9db87f925251efeb0545e0020d0/propcache-0.4.1-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:ec17c65562a827bba85e3872ead335f95405ea1674860d96483a02f5c698fa72", size = 225505, upload-time = "2025-10-08T19:46:50.055Z" }, + { url = "https://files.pythonhosted.org/packages/01/5d/1c53f4563490b1d06a684742cc6076ef944bc6457df6051b7d1a877c057b/propcache-0.4.1-cp312-cp312-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:405aac25c6394ef275dee4c709be43745d36674b223ba4eb7144bf4d691b7367", size = 230242, upload-time = "2025-10-08T19:46:51.815Z" }, + { url = 
"https://files.pythonhosted.org/packages/20/e1/ce4620633b0e2422207c3cb774a0ee61cac13abc6217763a7b9e2e3f4a12/propcache-0.4.1-cp312-cp312-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:0013cb6f8dde4b2a2f66903b8ba740bdfe378c943c4377a200551ceb27f379e4", size = 238474, upload-time = "2025-10-08T19:46:53.208Z" }, + { url = "https://files.pythonhosted.org/packages/46/4b/3aae6835b8e5f44ea6a68348ad90f78134047b503765087be2f9912140ea/propcache-0.4.1-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:15932ab57837c3368b024473a525e25d316d8353016e7cc0e5ba9eb343fbb1cf", size = 221575, upload-time = "2025-10-08T19:46:54.511Z" }, + { url = "https://files.pythonhosted.org/packages/6e/a5/8a5e8678bcc9d3a1a15b9a29165640d64762d424a16af543f00629c87338/propcache-0.4.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:031dce78b9dc099f4c29785d9cf5577a3faf9ebf74ecbd3c856a7b92768c3df3", size = 216736, upload-time = "2025-10-08T19:46:56.212Z" }, + { url = "https://files.pythonhosted.org/packages/f1/63/b7b215eddeac83ca1c6b934f89d09a625aa9ee4ba158338854c87210cc36/propcache-0.4.1-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:ab08df6c9a035bee56e31af99be621526bd237bea9f32def431c656b29e41778", size = 213019, upload-time = "2025-10-08T19:46:57.595Z" }, + { url = "https://files.pythonhosted.org/packages/57/74/f580099a58c8af587cac7ba19ee7cb418506342fbbe2d4a4401661cca886/propcache-0.4.1-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:4d7af63f9f93fe593afbf104c21b3b15868efb2c21d07d8732c0c4287e66b6a6", size = 220376, upload-time = "2025-10-08T19:46:59.067Z" }, + { url = "https://files.pythonhosted.org/packages/c4/ee/542f1313aff7eaf19c2bb758c5d0560d2683dac001a1c96d0774af799843/propcache-0.4.1-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:cfc27c945f422e8b5071b6e93169679e4eb5bf73bbcbf1ba3ae3a83d2f78ebd9", size = 226988, upload-time = "2025-10-08T19:47:00.544Z" }, + { url = 
"https://files.pythonhosted.org/packages/8f/18/9c6b015dd9c6930f6ce2229e1f02fb35298b847f2087ea2b436a5bfa7287/propcache-0.4.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:35c3277624a080cc6ec6f847cbbbb5b49affa3598c4535a0a4682a697aaa5c75", size = 215615, upload-time = "2025-10-08T19:47:01.968Z" }, + { url = "https://files.pythonhosted.org/packages/80/9e/e7b85720b98c45a45e1fca6a177024934dc9bc5f4d5dd04207f216fc33ed/propcache-0.4.1-cp312-cp312-win32.whl", hash = "sha256:671538c2262dadb5ba6395e26c1731e1d52534bfe9ae56d0b5573ce539266aa8", size = 38066, upload-time = "2025-10-08T19:47:03.503Z" }, + { url = "https://files.pythonhosted.org/packages/54/09/d19cff2a5aaac632ec8fc03737b223597b1e347416934c1b3a7df079784c/propcache-0.4.1-cp312-cp312-win_amd64.whl", hash = "sha256:cb2d222e72399fcf5890d1d5cc1060857b9b236adff2792ff48ca2dfd46c81db", size = 41655, upload-time = "2025-10-08T19:47:04.973Z" }, + { url = "https://files.pythonhosted.org/packages/68/ab/6b5c191bb5de08036a8c697b265d4ca76148efb10fa162f14af14fb5f076/propcache-0.4.1-cp312-cp312-win_arm64.whl", hash = "sha256:204483131fb222bdaaeeea9f9e6c6ed0cac32731f75dfc1d4a567fc1926477c1", size = 37789, upload-time = "2025-10-08T19:47:06.077Z" }, + { url = "https://files.pythonhosted.org/packages/bf/df/6d9c1b6ac12b003837dde8a10231a7344512186e87b36e855bef32241942/propcache-0.4.1-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:43eedf29202c08550aac1d14e0ee619b0430aaef78f85864c1a892294fbc28cf", size = 77750, upload-time = "2025-10-08T19:47:07.648Z" }, + { url = "https://files.pythonhosted.org/packages/8b/e8/677a0025e8a2acf07d3418a2e7ba529c9c33caf09d3c1f25513023c1db56/propcache-0.4.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:d62cdfcfd89ccb8de04e0eda998535c406bf5e060ffd56be6c586cbcc05b3311", size = 44780, upload-time = "2025-10-08T19:47:08.851Z" }, + { url = 
"https://files.pythonhosted.org/packages/89/a4/92380f7ca60f99ebae761936bc48a72a639e8a47b29050615eef757cb2a7/propcache-0.4.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:cae65ad55793da34db5f54e4029b89d3b9b9490d8abe1b4c7ab5d4b8ec7ebf74", size = 46308, upload-time = "2025-10-08T19:47:09.982Z" }, + { url = "https://files.pythonhosted.org/packages/2d/48/c5ac64dee5262044348d1d78a5f85dd1a57464a60d30daee946699963eb3/propcache-0.4.1-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:333ddb9031d2704a301ee3e506dc46b1fe5f294ec198ed6435ad5b6a085facfe", size = 208182, upload-time = "2025-10-08T19:47:11.319Z" }, + { url = "https://files.pythonhosted.org/packages/c6/0c/cd762dd011a9287389a6a3eb43aa30207bde253610cca06824aeabfe9653/propcache-0.4.1-cp313-cp313-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:fd0858c20f078a32cf55f7e81473d96dcf3b93fd2ccdb3d40fdf54b8573df3af", size = 211215, upload-time = "2025-10-08T19:47:13.146Z" }, + { url = "https://files.pythonhosted.org/packages/30/3e/49861e90233ba36890ae0ca4c660e95df565b2cd15d4a68556ab5865974e/propcache-0.4.1-cp313-cp313-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:678ae89ebc632c5c204c794f8dab2837c5f159aeb59e6ed0539500400577298c", size = 218112, upload-time = "2025-10-08T19:47:14.913Z" }, + { url = "https://files.pythonhosted.org/packages/f1/8b/544bc867e24e1bd48f3118cecd3b05c694e160a168478fa28770f22fd094/propcache-0.4.1-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:d472aeb4fbf9865e0c6d622d7f4d54a4e101a89715d8904282bb5f9a2f476c3f", size = 204442, upload-time = "2025-10-08T19:47:16.277Z" }, + { url = "https://files.pythonhosted.org/packages/50/a6/4282772fd016a76d3e5c0df58380a5ea64900afd836cec2c2f662d1b9bb3/propcache-0.4.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:4d3df5fa7e36b3225954fba85589da77a0fe6a53e3976de39caf04a0db4c36f1", size = 
199398, upload-time = "2025-10-08T19:47:17.962Z" }, + { url = "https://files.pythonhosted.org/packages/3e/ec/d8a7cd406ee1ddb705db2139f8a10a8a427100347bd698e7014351c7af09/propcache-0.4.1-cp313-cp313-musllinux_1_2_armv7l.whl", hash = "sha256:ee17f18d2498f2673e432faaa71698032b0127ebf23ae5974eeaf806c279df24", size = 196920, upload-time = "2025-10-08T19:47:19.355Z" }, + { url = "https://files.pythonhosted.org/packages/f6/6c/f38ab64af3764f431e359f8baf9e0a21013e24329e8b85d2da32e8ed07ca/propcache-0.4.1-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:580e97762b950f993ae618e167e7be9256b8353c2dcd8b99ec100eb50f5286aa", size = 203748, upload-time = "2025-10-08T19:47:21.338Z" }, + { url = "https://files.pythonhosted.org/packages/d6/e3/fa846bd70f6534d647886621388f0a265254d30e3ce47e5c8e6e27dbf153/propcache-0.4.1-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:501d20b891688eb8e7aa903021f0b72d5a55db40ffaab27edefd1027caaafa61", size = 205877, upload-time = "2025-10-08T19:47:23.059Z" }, + { url = "https://files.pythonhosted.org/packages/e2/39/8163fc6f3133fea7b5f2827e8eba2029a0277ab2c5beee6c1db7b10fc23d/propcache-0.4.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:9a0bd56e5b100aef69bd8562b74b46254e7c8812918d3baa700c8a8009b0af66", size = 199437, upload-time = "2025-10-08T19:47:24.445Z" }, + { url = "https://files.pythonhosted.org/packages/93/89/caa9089970ca49c7c01662bd0eeedfe85494e863e8043565aeb6472ce8fe/propcache-0.4.1-cp313-cp313-win32.whl", hash = "sha256:bcc9aaa5d80322bc2fb24bb7accb4a30f81e90ab8d6ba187aec0744bc302ad81", size = 37586, upload-time = "2025-10-08T19:47:25.736Z" }, + { url = "https://files.pythonhosted.org/packages/f5/ab/f76ec3c3627c883215b5c8080debb4394ef5a7a29be811f786415fc1e6fd/propcache-0.4.1-cp313-cp313-win_amd64.whl", hash = "sha256:381914df18634f5494334d201e98245c0596067504b9372d8cf93f4bb23e025e", size = 40790, upload-time = "2025-10-08T19:47:26.847Z" }, + { url = 
"https://files.pythonhosted.org/packages/59/1b/e71ae98235f8e2ba5004d8cb19765a74877abf189bc53fc0c80d799e56c3/propcache-0.4.1-cp313-cp313-win_arm64.whl", hash = "sha256:8873eb4460fd55333ea49b7d189749ecf6e55bf85080f11b1c4530ed3034cba1", size = 37158, upload-time = "2025-10-08T19:47:27.961Z" }, + { url = "https://files.pythonhosted.org/packages/83/ce/a31bbdfc24ee0dcbba458c8175ed26089cf109a55bbe7b7640ed2470cfe9/propcache-0.4.1-cp313-cp313t-macosx_10_13_universal2.whl", hash = "sha256:92d1935ee1f8d7442da9c0c4fa7ac20d07e94064184811b685f5c4fada64553b", size = 81451, upload-time = "2025-10-08T19:47:29.445Z" }, + { url = "https://files.pythonhosted.org/packages/25/9c/442a45a470a68456e710d96cacd3573ef26a1d0a60067e6a7d5e655621ed/propcache-0.4.1-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:473c61b39e1460d386479b9b2f337da492042447c9b685f28be4f74d3529e566", size = 46374, upload-time = "2025-10-08T19:47:30.579Z" }, + { url = "https://files.pythonhosted.org/packages/f4/bf/b1d5e21dbc3b2e889ea4327044fb16312a736d97640fb8b6aa3f9c7b3b65/propcache-0.4.1-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:c0ef0aaafc66fbd87842a3fe3902fd889825646bc21149eafe47be6072725835", size = 48396, upload-time = "2025-10-08T19:47:31.79Z" }, + { url = "https://files.pythonhosted.org/packages/f4/04/5b4c54a103d480e978d3c8a76073502b18db0c4bc17ab91b3cb5092ad949/propcache-0.4.1-cp313-cp313t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:f95393b4d66bfae908c3ca8d169d5f79cd65636ae15b5e7a4f6e67af675adb0e", size = 275950, upload-time = "2025-10-08T19:47:33.481Z" }, + { url = "https://files.pythonhosted.org/packages/b4/c1/86f846827fb969c4b78b0af79bba1d1ea2156492e1b83dea8b8a6ae27395/propcache-0.4.1-cp313-cp313t-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:c07fda85708bc48578467e85099645167a955ba093be0a2dcba962195676e859", size = 273856, upload-time = "2025-10-08T19:47:34.906Z" }, + { url = 
"https://files.pythonhosted.org/packages/36/1d/fc272a63c8d3bbad6878c336c7a7dea15e8f2d23a544bda43205dfa83ada/propcache-0.4.1-cp313-cp313t-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:af223b406d6d000830c6f65f1e6431783fc3f713ba3e6cc8c024d5ee96170a4b", size = 280420, upload-time = "2025-10-08T19:47:36.338Z" }, + { url = "https://files.pythonhosted.org/packages/07/0c/01f2219d39f7e53d52e5173bcb09c976609ba30209912a0680adfb8c593a/propcache-0.4.1-cp313-cp313t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:a78372c932c90ee474559c5ddfffd718238e8673c340dc21fe45c5b8b54559a0", size = 263254, upload-time = "2025-10-08T19:47:37.692Z" }, + { url = "https://files.pythonhosted.org/packages/2d/18/cd28081658ce597898f0c4d174d4d0f3c5b6d4dc27ffafeef835c95eb359/propcache-0.4.1-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:564d9f0d4d9509e1a870c920a89b2fec951b44bf5ba7d537a9e7c1ccec2c18af", size = 261205, upload-time = "2025-10-08T19:47:39.659Z" }, + { url = "https://files.pythonhosted.org/packages/7a/71/1f9e22eb8b8316701c2a19fa1f388c8a3185082607da8e406a803c9b954e/propcache-0.4.1-cp313-cp313t-musllinux_1_2_armv7l.whl", hash = "sha256:17612831fda0138059cc5546f4d12a2aacfb9e47068c06af35c400ba58ba7393", size = 247873, upload-time = "2025-10-08T19:47:41.084Z" }, + { url = "https://files.pythonhosted.org/packages/4a/65/3d4b61f36af2b4eddba9def857959f1016a51066b4f1ce348e0cf7881f58/propcache-0.4.1-cp313-cp313t-musllinux_1_2_ppc64le.whl", hash = "sha256:41a89040cb10bd345b3c1a873b2bf36413d48da1def52f268a055f7398514874", size = 262739, upload-time = "2025-10-08T19:47:42.51Z" }, + { url = "https://files.pythonhosted.org/packages/2a/42/26746ab087faa77c1c68079b228810436ccd9a5ce9ac85e2b7307195fd06/propcache-0.4.1-cp313-cp313t-musllinux_1_2_s390x.whl", hash = "sha256:e35b88984e7fa64aacecea39236cee32dd9bd8c55f57ba8a75cf2399553f9bd7", size = 263514, upload-time = "2025-10-08T19:47:43.927Z" }, + { url = 
"https://files.pythonhosted.org/packages/94/13/630690fe201f5502d2403dd3cfd451ed8858fe3c738ee88d095ad2ff407b/propcache-0.4.1-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:6f8b465489f927b0df505cbe26ffbeed4d6d8a2bbc61ce90eb074ff129ef0ab1", size = 257781, upload-time = "2025-10-08T19:47:45.448Z" }, + { url = "https://files.pythonhosted.org/packages/92/f7/1d4ec5841505f423469efbfc381d64b7b467438cd5a4bbcbb063f3b73d27/propcache-0.4.1-cp313-cp313t-win32.whl", hash = "sha256:2ad890caa1d928c7c2965b48f3a3815c853180831d0e5503d35cf00c472f4717", size = 41396, upload-time = "2025-10-08T19:47:47.202Z" }, + { url = "https://files.pythonhosted.org/packages/48/f0/615c30622316496d2cbbc29f5985f7777d3ada70f23370608c1d3e081c1f/propcache-0.4.1-cp313-cp313t-win_amd64.whl", hash = "sha256:f7ee0e597f495cf415bcbd3da3caa3bd7e816b74d0d52b8145954c5e6fd3ff37", size = 44897, upload-time = "2025-10-08T19:47:48.336Z" }, + { url = "https://files.pythonhosted.org/packages/fd/ca/6002e46eccbe0e33dcd4069ef32f7f1c9e243736e07adca37ae8c4830ec3/propcache-0.4.1-cp313-cp313t-win_arm64.whl", hash = "sha256:929d7cbe1f01bb7baffb33dc14eb5691c95831450a26354cd210a8155170c93a", size = 39789, upload-time = "2025-10-08T19:47:49.876Z" }, + { url = "https://files.pythonhosted.org/packages/8e/5c/bca52d654a896f831b8256683457ceddd490ec18d9ec50e97dfd8fc726a8/propcache-0.4.1-cp314-cp314-macosx_10_13_universal2.whl", hash = "sha256:3f7124c9d820ba5548d431afb4632301acf965db49e666aa21c305cbe8c6de12", size = 78152, upload-time = "2025-10-08T19:47:51.051Z" }, + { url = "https://files.pythonhosted.org/packages/65/9b/03b04e7d82a5f54fb16113d839f5ea1ede58a61e90edf515f6577c66fa8f/propcache-0.4.1-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:c0d4b719b7da33599dfe3b22d3db1ef789210a0597bc650b7cee9c77c2be8c5c", size = 44869, upload-time = "2025-10-08T19:47:52.594Z" }, + { url = 
"https://files.pythonhosted.org/packages/b2/fa/89a8ef0468d5833a23fff277b143d0573897cf75bd56670a6d28126c7d68/propcache-0.4.1-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:9f302f4783709a78240ebc311b793f123328716a60911d667e0c036bc5dcbded", size = 46596, upload-time = "2025-10-08T19:47:54.073Z" }, + { url = "https://files.pythonhosted.org/packages/86/bd/47816020d337f4a746edc42fe8d53669965138f39ee117414c7d7a340cfe/propcache-0.4.1-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:c80ee5802e3fb9ea37938e7eecc307fb984837091d5fd262bb37238b1ae97641", size = 206981, upload-time = "2025-10-08T19:47:55.715Z" }, + { url = "https://files.pythonhosted.org/packages/df/f6/c5fa1357cc9748510ee55f37173eb31bfde6d94e98ccd9e6f033f2fc06e1/propcache-0.4.1-cp314-cp314-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:ed5a841e8bb29a55fb8159ed526b26adc5bdd7e8bd7bf793ce647cb08656cdf4", size = 211490, upload-time = "2025-10-08T19:47:57.499Z" }, + { url = "https://files.pythonhosted.org/packages/80/1e/e5889652a7c4a3846683401a48f0f2e5083ce0ec1a8a5221d8058fbd1adf/propcache-0.4.1-cp314-cp314-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:55c72fd6ea2da4c318e74ffdf93c4fe4e926051133657459131a95c846d16d44", size = 215371, upload-time = "2025-10-08T19:47:59.317Z" }, + { url = "https://files.pythonhosted.org/packages/b2/f2/889ad4b2408f72fe1a4f6a19491177b30ea7bf1a0fd5f17050ca08cfc882/propcache-0.4.1-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:8326e144341460402713f91df60ade3c999d601e7eb5ff8f6f7862d54de0610d", size = 201424, upload-time = "2025-10-08T19:48:00.67Z" }, + { url = "https://files.pythonhosted.org/packages/27/73/033d63069b57b0812c8bd19f311faebeceb6ba31b8f32b73432d12a0b826/propcache-0.4.1-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:060b16ae65bc098da7f6d25bf359f1f31f688384858204fe5d652979e0015e5b", size = 
197566, upload-time = "2025-10-08T19:48:02.604Z" }, + { url = "https://files.pythonhosted.org/packages/dc/89/ce24f3dc182630b4e07aa6d15f0ff4b14ed4b9955fae95a0b54c58d66c05/propcache-0.4.1-cp314-cp314-musllinux_1_2_armv7l.whl", hash = "sha256:89eb3fa9524f7bec9de6e83cf3faed9d79bffa560672c118a96a171a6f55831e", size = 193130, upload-time = "2025-10-08T19:48:04.499Z" }, + { url = "https://files.pythonhosted.org/packages/a9/24/ef0d5fd1a811fb5c609278d0209c9f10c35f20581fcc16f818da959fc5b4/propcache-0.4.1-cp314-cp314-musllinux_1_2_ppc64le.whl", hash = "sha256:dee69d7015dc235f526fe80a9c90d65eb0039103fe565776250881731f06349f", size = 202625, upload-time = "2025-10-08T19:48:06.213Z" }, + { url = "https://files.pythonhosted.org/packages/f5/02/98ec20ff5546f68d673df2f7a69e8c0d076b5abd05ca882dc7ee3a83653d/propcache-0.4.1-cp314-cp314-musllinux_1_2_s390x.whl", hash = "sha256:5558992a00dfd54ccbc64a32726a3357ec93825a418a401f5cc67df0ac5d9e49", size = 204209, upload-time = "2025-10-08T19:48:08.432Z" }, + { url = "https://files.pythonhosted.org/packages/a0/87/492694f76759b15f0467a2a93ab68d32859672b646aa8a04ce4864e7932d/propcache-0.4.1-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:c9b822a577f560fbd9554812526831712c1436d2c046cedee4c3796d3543b144", size = 197797, upload-time = "2025-10-08T19:48:09.968Z" }, + { url = "https://files.pythonhosted.org/packages/ee/36/66367de3575db1d2d3f3d177432bd14ee577a39d3f5d1b3d5df8afe3b6e2/propcache-0.4.1-cp314-cp314-win32.whl", hash = "sha256:ab4c29b49d560fe48b696cdcb127dd36e0bc2472548f3bf56cc5cb3da2b2984f", size = 38140, upload-time = "2025-10-08T19:48:11.232Z" }, + { url = "https://files.pythonhosted.org/packages/0c/2a/a758b47de253636e1b8aef181c0b4f4f204bf0dd964914fb2af90a95b49b/propcache-0.4.1-cp314-cp314-win_amd64.whl", hash = "sha256:5a103c3eb905fcea0ab98be99c3a9a5ab2de60228aa5aceedc614c0281cf6153", size = 41257, upload-time = "2025-10-08T19:48:12.707Z" }, + { url = 
"https://files.pythonhosted.org/packages/34/5e/63bd5896c3fec12edcbd6f12508d4890d23c265df28c74b175e1ef9f4f3b/propcache-0.4.1-cp314-cp314-win_arm64.whl", hash = "sha256:74c1fb26515153e482e00177a1ad654721bf9207da8a494a0c05e797ad27b992", size = 38097, upload-time = "2025-10-08T19:48:13.923Z" }, + { url = "https://files.pythonhosted.org/packages/99/85/9ff785d787ccf9bbb3f3106f79884a130951436f58392000231b4c737c80/propcache-0.4.1-cp314-cp314t-macosx_10_13_universal2.whl", hash = "sha256:824e908bce90fb2743bd6b59db36eb4f45cd350a39637c9f73b1c1ea66f5b75f", size = 81455, upload-time = "2025-10-08T19:48:15.16Z" }, + { url = "https://files.pythonhosted.org/packages/90/85/2431c10c8e7ddb1445c1f7c4b54d886e8ad20e3c6307e7218f05922cad67/propcache-0.4.1-cp314-cp314t-macosx_10_13_x86_64.whl", hash = "sha256:c2b5e7db5328427c57c8e8831abda175421b709672f6cfc3d630c3b7e2146393", size = 46372, upload-time = "2025-10-08T19:48:16.424Z" }, + { url = "https://files.pythonhosted.org/packages/01/20/b0972d902472da9bcb683fa595099911f4d2e86e5683bcc45de60dd05dc3/propcache-0.4.1-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:6f6ff873ed40292cd4969ef5310179afd5db59fdf055897e282485043fc80ad0", size = 48411, upload-time = "2025-10-08T19:48:17.577Z" }, + { url = "https://files.pythonhosted.org/packages/e2/e3/7dc89f4f21e8f99bad3d5ddb3a3389afcf9da4ac69e3deb2dcdc96e74169/propcache-0.4.1-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:49a2dc67c154db2c1463013594c458881a069fcf98940e61a0569016a583020a", size = 275712, upload-time = "2025-10-08T19:48:18.901Z" }, + { url = "https://files.pythonhosted.org/packages/20/67/89800c8352489b21a8047c773067644e3897f02ecbbd610f4d46b7f08612/propcache-0.4.1-cp314-cp314t-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:005f08e6a0529984491e37d8dbc3dd86f84bd78a8ceb5fa9a021f4c48d4984be", size = 273557, upload-time = "2025-10-08T19:48:20.762Z" }, + { url = 
"https://files.pythonhosted.org/packages/e2/a1/b52b055c766a54ce6d9c16d9aca0cad8059acd9637cdf8aa0222f4a026ef/propcache-0.4.1-cp314-cp314t-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:5c3310452e0d31390da9035c348633b43d7e7feb2e37be252be6da45abd1abcc", size = 280015, upload-time = "2025-10-08T19:48:22.592Z" }, + { url = "https://files.pythonhosted.org/packages/48/c8/33cee30bd890672c63743049f3c9e4be087e6780906bfc3ec58528be59c1/propcache-0.4.1-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:4c3c70630930447f9ef1caac7728c8ad1c56bc5015338b20fed0d08ea2480b3a", size = 262880, upload-time = "2025-10-08T19:48:23.947Z" }, + { url = "https://files.pythonhosted.org/packages/0c/b1/8f08a143b204b418285c88b83d00edbd61afbc2c6415ffafc8905da7038b/propcache-0.4.1-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:8e57061305815dfc910a3634dcf584f08168a8836e6999983569f51a8544cd89", size = 260938, upload-time = "2025-10-08T19:48:25.656Z" }, + { url = "https://files.pythonhosted.org/packages/cf/12/96e4664c82ca2f31e1c8dff86afb867348979eb78d3cb8546a680287a1e9/propcache-0.4.1-cp314-cp314t-musllinux_1_2_armv7l.whl", hash = "sha256:521a463429ef54143092c11a77e04056dd00636f72e8c45b70aaa3140d639726", size = 247641, upload-time = "2025-10-08T19:48:27.207Z" }, + { url = "https://files.pythonhosted.org/packages/18/ed/e7a9cfca28133386ba52278136d42209d3125db08d0a6395f0cba0c0285c/propcache-0.4.1-cp314-cp314t-musllinux_1_2_ppc64le.whl", hash = "sha256:120c964da3fdc75e3731aa392527136d4ad35868cc556fd09bb6d09172d9a367", size = 262510, upload-time = "2025-10-08T19:48:28.65Z" }, + { url = "https://files.pythonhosted.org/packages/f5/76/16d8bf65e8845dd62b4e2b57444ab81f07f40caa5652b8969b87ddcf2ef6/propcache-0.4.1-cp314-cp314t-musllinux_1_2_s390x.whl", hash = "sha256:d8f353eb14ee3441ee844ade4277d560cdd68288838673273b978e3d6d2c8f36", size = 263161, upload-time = "2025-10-08T19:48:30.133Z" }, + { url = 
"https://files.pythonhosted.org/packages/e7/70/c99e9edb5d91d5ad8a49fa3c1e8285ba64f1476782fed10ab251ff413ba1/propcache-0.4.1-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:ab2943be7c652f09638800905ee1bab2c544e537edb57d527997a24c13dc1455", size = 257393, upload-time = "2025-10-08T19:48:31.567Z" }, + { url = "https://files.pythonhosted.org/packages/08/02/87b25304249a35c0915d236575bc3574a323f60b47939a2262b77632a3ee/propcache-0.4.1-cp314-cp314t-win32.whl", hash = "sha256:05674a162469f31358c30bcaa8883cb7829fa3110bf9c0991fe27d7896c42d85", size = 42546, upload-time = "2025-10-08T19:48:32.872Z" }, + { url = "https://files.pythonhosted.org/packages/cb/ef/3c6ecf8b317aa982f309835e8f96987466123c6e596646d4e6a1dfcd080f/propcache-0.4.1-cp314-cp314t-win_amd64.whl", hash = "sha256:990f6b3e2a27d683cb7602ed6c86f15ee6b43b1194736f9baaeb93d0016633b1", size = 46259, upload-time = "2025-10-08T19:48:34.226Z" }, + { url = "https://files.pythonhosted.org/packages/c4/2d/346e946d4951f37eca1e4f55be0f0174c52cd70720f84029b02f296f4a38/propcache-0.4.1-cp314-cp314t-win_arm64.whl", hash = "sha256:ecef2343af4cc68e05131e45024ba34f6095821988a9d0a02aa7c73fcc448aa9", size = 40428, upload-time = "2025-10-08T19:48:35.441Z" }, + { url = "https://files.pythonhosted.org/packages/5b/5a/bc7b4a4ef808fa59a816c17b20c4bef6884daebbdf627ff2a161da67da19/propcache-0.4.1-py3-none-any.whl", hash = "sha256:af2a6052aeb6cf17d3e46ee169099044fd8224cbaf75c76a2ef596e8163e2237", size = 13305, upload-time = "2025-10-08T19:49:00.792Z" }, +] + +[[package]] +name = "protobuf" +version = "6.33.6" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/66/70/e908e9c5e52ef7c3a6c7902c9dfbb34c7e29c25d2f81ade3856445fd5c94/protobuf-6.33.6.tar.gz", hash = "sha256:a6768d25248312c297558af96a9f9c929e8c4cee0659cb07e780731095f38135", size = 444531, upload-time = "2026-03-18T19:05:00.988Z" } +wheels = [ + { url = 
"https://files.pythonhosted.org/packages/fc/9f/2f509339e89cfa6f6a4c4ff50438db9ca488dec341f7e454adad60150b00/protobuf-6.33.6-cp310-abi3-win32.whl", hash = "sha256:7d29d9b65f8afef196f8334e80d6bc1d5d4adedb449971fefd3723824e6e77d3", size = 425739, upload-time = "2026-03-18T19:04:48.373Z" }, + { url = "https://files.pythonhosted.org/packages/76/5d/683efcd4798e0030c1bab27374fd13a89f7c2515fb1f3123efdfaa5eab57/protobuf-6.33.6-cp310-abi3-win_amd64.whl", hash = "sha256:0cd27b587afca21b7cfa59a74dcbd48a50f0a6400cfb59391340ad729d91d326", size = 437089, upload-time = "2026-03-18T19:04:50.381Z" }, + { url = "https://files.pythonhosted.org/packages/5c/01/a3c3ed5cd186f39e7880f8303cc51385a198a81469d53d0fdecf1f64d929/protobuf-6.33.6-cp39-abi3-macosx_10_9_universal2.whl", hash = "sha256:9720e6961b251bde64edfdab7d500725a2af5280f3f4c87e57c0208376aa8c3a", size = 427737, upload-time = "2026-03-18T19:04:51.866Z" }, + { url = "https://files.pythonhosted.org/packages/ee/90/b3c01fdec7d2f627b3a6884243ba328c1217ed2d978def5c12dc50d328a3/protobuf-6.33.6-cp39-abi3-manylinux2014_aarch64.whl", hash = "sha256:e2afbae9b8e1825e3529f88d514754e094278bb95eadc0e199751cdd9a2e82a2", size = 324610, upload-time = "2026-03-18T19:04:53.096Z" }, + { url = "https://files.pythonhosted.org/packages/9b/ca/25afc144934014700c52e05103c2421997482d561f3101ff352e1292fb81/protobuf-6.33.6-cp39-abi3-manylinux2014_s390x.whl", hash = "sha256:c96c37eec15086b79762ed265d59ab204dabc53056e3443e702d2681f4b39ce3", size = 339381, upload-time = "2026-03-18T19:04:54.616Z" }, + { url = "https://files.pythonhosted.org/packages/16/92/d1e32e3e0d894fe00b15ce28ad4944ab692713f2e7f0a99787405e43533a/protobuf-6.33.6-cp39-abi3-manylinux2014_x86_64.whl", hash = "sha256:e9db7e292e0ab79dd108d7f1a94fe31601ce1ee3f7b79e0692043423020b0593", size = 323436, upload-time = "2026-03-18T19:04:55.768Z" }, + { url = "https://files.pythonhosted.org/packages/c4/72/02445137af02769918a93807b2b7890047c32bfb9f90371cbc12688819eb/protobuf-6.33.6-py3-none-any.whl", hash = 
"sha256:77179e006c476e69bf8e8ce866640091ec42e1beb80b213c3900006ecfba6901", size = 170656, upload-time = "2026-03-18T19:04:59.826Z" }, +] + +[[package]] +name = "psycopg2-binary" +version = "2.9.10" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/cb/0e/bdc8274dc0585090b4e3432267d7be4dfbfd8971c0fa59167c711105a6bf/psycopg2-binary-2.9.10.tar.gz", hash = "sha256:4b3df0e6990aa98acda57d983942eff13d824135fe2250e6522edaa782a06de2", size = 385764, upload-time = "2024-10-16T11:24:58.126Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/49/7d/465cc9795cf76f6d329efdafca74693714556ea3891813701ac1fee87545/psycopg2_binary-2.9.10-cp312-cp312-macosx_12_0_x86_64.whl", hash = "sha256:880845dfe1f85d9d5f7c412efea7a08946a46894537e4e5d091732eb1d34d9a0", size = 3044771, upload-time = "2024-10-16T11:20:35.234Z" }, + { url = "https://files.pythonhosted.org/packages/8b/31/6d225b7b641a1a2148e3ed65e1aa74fc86ba3fee850545e27be9e1de893d/psycopg2_binary-2.9.10-cp312-cp312-macosx_14_0_arm64.whl", hash = "sha256:9440fa522a79356aaa482aa4ba500b65f28e5d0e63b801abf6aa152a29bd842a", size = 3275336, upload-time = "2024-10-16T11:20:38.742Z" }, + { url = "https://files.pythonhosted.org/packages/30/b7/a68c2b4bff1cbb1728e3ec864b2d92327c77ad52edcd27922535a8366f68/psycopg2_binary-2.9.10-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e3923c1d9870c49a2d44f795df0c889a22380d36ef92440ff618ec315757e539", size = 2851637, upload-time = "2024-10-16T11:20:42.145Z" }, + { url = "https://files.pythonhosted.org/packages/0b/b1/cfedc0e0e6f9ad61f8657fd173b2f831ce261c02a08c0b09c652b127d813/psycopg2_binary-2.9.10-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7b2c956c028ea5de47ff3a8d6b3cc3330ab45cf0b7c3da35a2d6ff8420896526", size = 3082097, upload-time = "2024-10-16T11:20:46.185Z" }, + { url = 
"https://files.pythonhosted.org/packages/18/ed/0a8e4153c9b769f59c02fb5e7914f20f0b2483a19dae7bf2db54b743d0d0/psycopg2_binary-2.9.10-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f758ed67cab30b9a8d2833609513ce4d3bd027641673d4ebc9c067e4d208eec1", size = 3264776, upload-time = "2024-10-16T11:20:50.879Z" }, + { url = "https://files.pythonhosted.org/packages/10/db/d09da68c6a0cdab41566b74e0a6068a425f077169bed0946559b7348ebe9/psycopg2_binary-2.9.10-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8cd9b4f2cfab88ed4a9106192de509464b75a906462fb846b936eabe45c2063e", size = 3020968, upload-time = "2024-10-16T11:20:56.819Z" }, + { url = "https://files.pythonhosted.org/packages/94/28/4d6f8c255f0dfffb410db2b3f9ac5218d959a66c715c34cac31081e19b95/psycopg2_binary-2.9.10-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:6dc08420625b5a20b53551c50deae6e231e6371194fa0651dbe0fb206452ae1f", size = 2872334, upload-time = "2024-10-16T11:21:02.411Z" }, + { url = "https://files.pythonhosted.org/packages/05/f7/20d7bf796593c4fea95e12119d6cc384ff1f6141a24fbb7df5a668d29d29/psycopg2_binary-2.9.10-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:d7cd730dfa7c36dbe8724426bf5612798734bff2d3c3857f36f2733f5bfc7c00", size = 2822722, upload-time = "2024-10-16T11:21:09.01Z" }, + { url = "https://files.pythonhosted.org/packages/4d/e4/0c407ae919ef626dbdb32835a03b6737013c3cc7240169843965cada2bdf/psycopg2_binary-2.9.10-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:155e69561d54d02b3c3209545fb08938e27889ff5a10c19de8d23eb5a41be8a5", size = 2920132, upload-time = "2024-10-16T11:21:16.339Z" }, + { url = "https://files.pythonhosted.org/packages/2d/70/aa69c9f69cf09a01da224909ff6ce8b68faeef476f00f7ec377e8f03be70/psycopg2_binary-2.9.10-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:c3cc28a6fd5a4a26224007712e79b81dbaee2ffb90ff406256158ec4d7b52b47", size = 2959312, upload-time = "2024-10-16T11:21:25.584Z" }, + { url = 
"https://files.pythonhosted.org/packages/d3/bd/213e59854fafe87ba47814bf413ace0dcee33a89c8c8c814faca6bc7cf3c/psycopg2_binary-2.9.10-cp312-cp312-win32.whl", hash = "sha256:ec8a77f521a17506a24a5f626cb2aee7850f9b69a0afe704586f63a464f3cd64", size = 1025191, upload-time = "2024-10-16T11:21:29.912Z" }, + { url = "https://files.pythonhosted.org/packages/92/29/06261ea000e2dc1e22907dbbc483a1093665509ea586b29b8986a0e56733/psycopg2_binary-2.9.10-cp312-cp312-win_amd64.whl", hash = "sha256:18c5ee682b9c6dd3696dad6e54cc7ff3a1a9020df6a5c0f861ef8bfd338c3ca0", size = 1164031, upload-time = "2024-10-16T11:21:34.211Z" }, + { url = "https://files.pythonhosted.org/packages/3e/30/d41d3ba765609c0763505d565c4d12d8f3c79793f0d0f044ff5a28bf395b/psycopg2_binary-2.9.10-cp313-cp313-macosx_12_0_x86_64.whl", hash = "sha256:26540d4a9a4e2b096f1ff9cce51253d0504dca5a85872c7f7be23be5a53eb18d", size = 3044699, upload-time = "2024-10-16T11:21:42.841Z" }, + { url = "https://files.pythonhosted.org/packages/35/44/257ddadec7ef04536ba71af6bc6a75ec05c5343004a7ec93006bee66c0bc/psycopg2_binary-2.9.10-cp313-cp313-macosx_14_0_arm64.whl", hash = "sha256:e217ce4d37667df0bc1c397fdcd8de5e81018ef305aed9415c3b093faaeb10fb", size = 3275245, upload-time = "2024-10-16T11:21:51.989Z" }, + { url = "https://files.pythonhosted.org/packages/1b/11/48ea1cd11de67f9efd7262085588790a95d9dfcd9b8a687d46caf7305c1a/psycopg2_binary-2.9.10-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:245159e7ab20a71d989da00f280ca57da7641fa2cdcf71749c193cea540a74f7", size = 2851631, upload-time = "2024-10-16T11:21:57.584Z" }, + { url = "https://files.pythonhosted.org/packages/62/e0/62ce5ee650e6c86719d621a761fe4bc846ab9eff8c1f12b1ed5741bf1c9b/psycopg2_binary-2.9.10-cp313-cp313-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3c4ded1a24b20021ebe677b7b08ad10bf09aac197d6943bfe6fec70ac4e4690d", size = 3082140, upload-time = "2024-10-16T11:22:02.005Z" }, + { url = 
"https://files.pythonhosted.org/packages/27/ce/63f946c098611f7be234c0dd7cb1ad68b0b5744d34f68062bb3c5aa510c8/psycopg2_binary-2.9.10-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3abb691ff9e57d4a93355f60d4f4c1dd2d68326c968e7db17ea96df3c023ef73", size = 3264762, upload-time = "2024-10-16T11:22:06.412Z" }, + { url = "https://files.pythonhosted.org/packages/43/25/c603cd81402e69edf7daa59b1602bd41eb9859e2824b8c0855d748366ac9/psycopg2_binary-2.9.10-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8608c078134f0b3cbd9f89b34bd60a943b23fd33cc5f065e8d5f840061bd0673", size = 3020967, upload-time = "2024-10-16T11:22:11.583Z" }, + { url = "https://files.pythonhosted.org/packages/5f/d6/8708d8c6fca531057fa170cdde8df870e8b6a9b136e82b361c65e42b841e/psycopg2_binary-2.9.10-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:230eeae2d71594103cd5b93fd29d1ace6420d0b86f4778739cb1a5a32f607d1f", size = 2872326, upload-time = "2024-10-16T11:22:16.406Z" }, + { url = "https://files.pythonhosted.org/packages/ce/ac/5b1ea50fc08a9df82de7e1771537557f07c2632231bbab652c7e22597908/psycopg2_binary-2.9.10-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:bb89f0a835bcfc1d42ccd5f41f04870c1b936d8507c6df12b7737febc40f0909", size = 2822712, upload-time = "2024-10-16T11:22:21.366Z" }, + { url = "https://files.pythonhosted.org/packages/c4/fc/504d4503b2abc4570fac3ca56eb8fed5e437bf9c9ef13f36b6621db8ef00/psycopg2_binary-2.9.10-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:f0c2d907a1e102526dd2986df638343388b94c33860ff3bbe1384130828714b1", size = 2920155, upload-time = "2024-10-16T11:22:25.684Z" }, + { url = "https://files.pythonhosted.org/packages/b2/d1/323581e9273ad2c0dbd1902f3fb50c441da86e894b6e25a73c3fda32c57e/psycopg2_binary-2.9.10-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:f8157bed2f51db683f31306aa497311b560f2265998122abe1dce6428bd86567", size = 2959356, upload-time = "2024-10-16T11:22:30.562Z" }, + { url = 
"https://files.pythonhosted.org/packages/08/50/d13ea0a054189ae1bc21af1d85b6f8bb9bbc5572991055d70ad9006fe2d6/psycopg2_binary-2.9.10-cp313-cp313-win_amd64.whl", hash = "sha256:27422aa5f11fbcd9b18da48373eb67081243662f9b46e6fd07c3eb46e4535142", size = 2569224, upload-time = "2025-01-04T20:09:19.234Z" }, +] + +[[package]] +name = "py" +version = "1.11.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/98/ff/fec109ceb715d2a6b4c4a85a61af3b40c723a961e8828319fbcb15b868dc/py-1.11.0.tar.gz", hash = "sha256:51c75c4126074b472f746a24399ad32f6053d1b34b68d2fa41e558e6f4a98719", size = 207796, upload-time = "2021-11-04T17:17:01.377Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/f6/f0/10642828a8dfb741e5f3fbaac830550a518a775c7fff6f04a007259b0548/py-1.11.0-py2.py3-none-any.whl", hash = "sha256:607c53218732647dff4acdfcd50cb62615cedf612e72d1724fb1a0cc6405b378", size = 98708, upload-time = "2021-11-04T17:17:00.152Z" }, +] + +[[package]] +name = "py-bip39-bindings" +version = "0.3.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/5a/1a/f41ae3dacae40a80d00111c2bb775b3e89b7175a576b7b48a2378f0e47b5/py_bip39_bindings-0.3.0.tar.gz", hash = "sha256:ccf1f277358c635c28469d6cfd33f40c8cde902dcfdc23f395ac2932d19564bf", size = 14362, upload-time = "2025-09-03T11:35:27.926Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/39/9b/ffacd8e438727cf5d2a91853a7b1a6b3ae39dbc17c2096ef4aaab02e85ad/py_bip39_bindings-0.3.0-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:8ef28358fda3c69776b2fd505d4ba2e24980ebcdc5d3abf04d5aecf86e6d155b", size = 374346, upload-time = "2025-09-03T11:34:27.263Z" }, + { url = "https://files.pythonhosted.org/packages/72/5c/7f7a456ad5377a9cca53c2720167c1ef8e7b5c6855849cbeed196c1dd84c/py_bip39_bindings-0.3.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:9d9f5b5e95b8abf012ff85f4369aef707a129fc8011d551ae606e2ace6c49fe9", 
size = 363624, upload-time = "2025-09-03T11:34:22.877Z" }, + { url = "https://files.pythonhosted.org/packages/e6/9c/4e823ece7129cdda9bf4b6e255289ae4291062d48b10e42e035bc9acdda9/py_bip39_bindings-0.3.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4febd3d90cf6950b194c9a6f6df565cb86ff0e10c89156e6025092b7c272e5ff", size = 421650, upload-time = "2025-09-03T11:33:18.472Z" }, + { url = "https://files.pythonhosted.org/packages/4e/38/57445cdeadc4c7fffb9cca73ec2a994c3c5b6e720a6ec2eb1b2ce9857794/py_bip39_bindings-0.3.0-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:3916b60b83f6c4a79176c485df3b61f437b6c674e39cecb6e8ec5d9e3a28861e", size = 422342, upload-time = "2025-09-03T11:33:34.914Z" }, + { url = "https://files.pythonhosted.org/packages/e2/fe/1d5cd9e2f386ada2a0fc97c47b3696191b756bfcfb224843d9655d79b71b/py_bip39_bindings-0.3.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:90f19cbfba4834439e49099ca55ef0340fab4fa9abaf6cde9751fd0a05a60e10", size = 585131, upload-time = "2025-09-03T11:33:47.971Z" }, + { url = "https://files.pythonhosted.org/packages/5a/37/c6c520b3f3037b292a11dc071c73345252bb3f449065b241088f3f5632ff/py_bip39_bindings-0.3.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9af016b24b290937a6b233388fb53ce416b8b4e0b3181c62197d30f1550d4cc6", size = 432384, upload-time = "2025-09-03T11:34:12.415Z" }, + { url = "https://files.pythonhosted.org/packages/ac/5a/53921b79d91e07e66ee4e2a46df78141f0d597be44cdcb5efdf0677c9d38/py_bip39_bindings-0.3.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:c0397a8c6744e906e13b7b4129fa676ff943569fea968cc310f2c9248b9f88e6", size = 438808, upload-time = "2025-09-03T11:34:00.966Z" }, + { url = "https://files.pythonhosted.org/packages/dc/7a/9b730edae87a5714fe06cca7aa8d6f328b588e305d1040299cf7c2b97bec/py_bip39_bindings-0.3.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = 
"sha256:3b71b9b9cad54678c8ea4ff855db1e31adf027eab5a1cd2ec0d255178d1d60ea", size = 601932, upload-time = "2025-09-03T11:34:34.311Z" }, + { url = "https://files.pythonhosted.org/packages/2f/ec/e676d7305b0b12e218ff8ad51e4115781a36081db7d51f381d391cf7c5fb/py_bip39_bindings-0.3.0-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:2b5f3f6dda9a68040c1fb74b9ca721788e3cec3d6e4d1e7ff91659bbc72c2bec", size = 685349, upload-time = "2025-09-03T11:34:47.721Z" }, + { url = "https://files.pythonhosted.org/packages/9c/3b/cf983f9a777de6128244f533495d7787d1f311ab49eb67c71314e18e3695/py_bip39_bindings-0.3.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:086c8e73208f423c3edd051a79334710b5253fb7be5e6a8d0b53da8538c0cc48", size = 614816, upload-time = "2025-09-03T11:35:04.031Z" }, + { url = "https://files.pythonhosted.org/packages/90/a9/81a97ae0850ec2b0b0d4df6757d1dcd478e3b205148d87ca561b95a0d102/py_bip39_bindings-0.3.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:b16eb31bbedc6148a739a6fc8a79053306dcb7585972405f3728802cd844bfa7", size = 602022, upload-time = "2025-09-03T11:35:17.435Z" }, + { url = "https://files.pythonhosted.org/packages/27/ee/d0819e4ae1b2725a215e46182122a8286740d196e1c12cce8fc14f5d4bcf/py_bip39_bindings-0.3.0-cp312-cp312-win32.whl", hash = "sha256:34ac0484b24004d8e29243d345ce98285869f48be8d24a307fd188f76a284e27", size = 262833, upload-time = "2025-09-03T11:35:38.784Z" }, + { url = "https://files.pythonhosted.org/packages/c2/32/1473a512d90162e8d134f76dd4035deb2db0b8b227fad5fbae88a76da329/py_bip39_bindings-0.3.0-cp312-cp312-win_amd64.whl", hash = "sha256:a1506de7cc77827b6ec1eac874f5714be6afdc13b3b3132db71dfde14ee7a4f0", size = 270105, upload-time = "2025-09-03T11:35:31.735Z" }, + { url = "https://files.pythonhosted.org/packages/14/ea/9b3a8f7caf9ecfba1e63b0f3ce4d44930d36415a3edee9cf9f8f17965747/py_bip39_bindings-0.3.0-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:8430937ba275ce4d559260362c7c3d0df86ffc222c772faf531df1393fbd1923", size = 374325, 
upload-time = "2025-09-03T11:34:29.007Z" }, + { url = "https://files.pythonhosted.org/packages/eb/8b/2a4830fe07338fe72d03783894efbefe1431312f250c664a170944eed10c/py_bip39_bindings-0.3.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:5cdf11c5b9f6956688af5f1d16e576007619b722b3bbb1eebbb422b4f6ae0d51", size = 363636, upload-time = "2025-09-03T11:34:24.338Z" }, + { url = "https://files.pythonhosted.org/packages/a0/34/5c2e231231cd1c785a367cda37ec350f158c57c67c9b1f5dbc27eb15657b/py_bip39_bindings-0.3.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d340865af414850716a9538d9d6fb269e2d52d7a13352d2b4fba29f1c7077f1f", size = 421878, upload-time = "2025-09-03T11:33:20.258Z" }, + { url = "https://files.pythonhosted.org/packages/f7/fb/1af261ff139b17101bfabc88c06aacf6f8f9af7504703532b5c91f0047fa/py_bip39_bindings-0.3.0-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:83a1ed311303f0032be84b1c75b1e4fa012278b0f20b95861ced052db178cb92", size = 422226, upload-time = "2025-09-03T11:33:36.27Z" }, + { url = "https://files.pythonhosted.org/packages/76/4a/d570415a4b1e0765b3e496a8297718c78f150b4c0d668d5990ef02bc0138/py_bip39_bindings-0.3.0-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:897cfc650de45dc7fa005dbec0e6572a16a2d9114e094d33dbe8018e77282d35", size = 587316, upload-time = "2025-09-03T11:33:49.4Z" }, + { url = "https://files.pythonhosted.org/packages/94/26/fc0cbe3c416271fe76a2547302e670b47c02a196080d7b4f588bdbc98e06/py_bip39_bindings-0.3.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5fd3cb63109a67419295a7adaf6122559f5991a8ad805fb3f0790c1ceb6869f8", size = 432574, upload-time = "2025-09-03T11:34:13.789Z" }, + { url = "https://files.pythonhosted.org/packages/62/ec/dc84f0f7397d750b0f9fa4ef868a829641d91720236a88cf6d158427f33b/py_bip39_bindings-0.3.0-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = 
"sha256:e223923c650a283957bcf6fd6c23c07731ad39c96b08b562ed347f098410204b", size = 438815, upload-time = "2025-09-03T11:34:02.38Z" }, + { url = "https://files.pythonhosted.org/packages/bb/45/9251764e2bcc744c3d4df291dd41a56c6c7df772fe70273bf40ec15af696/py_bip39_bindings-0.3.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:e26947fb62edf9cc4644869ed5cca3feecb444fee0d1bfc99f7ff63b73f03d9a", size = 602196, upload-time = "2025-09-03T11:34:35.765Z" }, + { url = "https://files.pythonhosted.org/packages/f0/18/d761398146bfad044eb3a67d6bc698b80ba9f746759fdc6218082cae3e60/py_bip39_bindings-0.3.0-cp313-cp313-musllinux_1_2_armv7l.whl", hash = "sha256:444e129940844f1ec06f90aa7394be95fa3a57be7466f7981fe68a6d686542c0", size = 685511, upload-time = "2025-09-03T11:34:49.263Z" }, + { url = "https://files.pythonhosted.org/packages/f6/31/243dc2abe1379911ba58bf7b5414b1f8fd764a6e76ae38fbbd80ac124e5f/py_bip39_bindings-0.3.0-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:fc4d95dea547ba9eb98d58bc431abdca8e06ed75022bfb72474455753644d95b", size = 614886, upload-time = "2025-09-03T11:35:05.622Z" }, + { url = "https://files.pythonhosted.org/packages/ef/c5/19ab78d1fb7f8993db22f358c401ee7e25e998896cefb38dc58112b42f4a/py_bip39_bindings-0.3.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:70eeee161323a72fde16e26b46b8b04d95e6fc91298548cd88646beb00c95b6f", size = 602228, upload-time = "2025-09-03T11:35:19.284Z" }, + { url = "https://files.pythonhosted.org/packages/85/26/529b72341a9bb9e333d2eb3cad8b0c2b6f56a8dfc5f341c9fe8a509d4473/py_bip39_bindings-0.3.0-cp313-cp313-win32.whl", hash = "sha256:b5a868636452ed8e1ccb5f3a6e069cad1ab9e873545c0234fa40bfdf4c8955f4", size = 262847, upload-time = "2025-09-03T11:35:40.202Z" }, + { url = "https://files.pythonhosted.org/packages/e9/44/d0ec7c241fa37b9dd6fb921585523f200f40e7f6d31aa20d6f9e4c20aa3d/py_bip39_bindings-0.3.0-cp313-cp313-win_amd64.whl", hash = "sha256:fbb4e8d3abee5b58cc719e4072bf31920bef173a54ba7775f39e7cc855ce2e69", size = 270150, 
upload-time = "2025-09-03T11:35:33.127Z" }, + { url = "https://files.pythonhosted.org/packages/69/49/2c21dd6ee9027552c68c420c23f7b20f456bd33c7d880935b35184c2369b/py_bip39_bindings-0.3.0-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e408026db2dd14c4bdcf8538ac014672c32cdbdf006e9f2e530450b2869ef256", size = 421567, upload-time = "2025-09-03T11:33:21.732Z" }, + { url = "https://files.pythonhosted.org/packages/8f/49/7e8da35209de056030990bcf7853b3491e505130ff9d2d0503f5ab4aebb7/py_bip39_bindings-0.3.0-cp313-cp313t-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:e7b72ad60d0094259deaa560ff01d16b8de84d2b6bc42ca50797b6e120c0a891", size = 421765, upload-time = "2025-09-03T11:33:37.588Z" }, + { url = "https://files.pythonhosted.org/packages/fa/22/6afb61921e999d51c86d1da865e2869405ecadbf26ada346ffc6f1f9337e/py_bip39_bindings-0.3.0-cp313-cp313t-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:33a1c0a6db50a13a9d9dc481e806d5ced79e24c927abd518938dda111226579f", size = 584089, upload-time = "2025-09-03T11:33:51.417Z" }, + { url = "https://files.pythonhosted.org/packages/4a/38/f1cafcb900070a177b7c0c644d4554246797d2809874fd471a379d19a979/py_bip39_bindings-0.3.0-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:c82b3c9a8eca817a12e9367a0a6b3904a05a78aee36cbf364f0e3e94e48645e9", size = 601803, upload-time = "2025-09-03T11:34:37.223Z" }, + { url = "https://files.pythonhosted.org/packages/8d/4f/291253e851e813e274d796d7d356d730f0c1dacbe4e206f401efd7e754fa/py_bip39_bindings-0.3.0-cp313-cp313t-musllinux_1_2_armv7l.whl", hash = "sha256:2417721986f9f5d0cf2898aa5f8963d596a79a6c18406fca60eb7b93424d3c2e", size = 684753, upload-time = "2025-09-03T11:34:50.752Z" }, + { url = "https://files.pythonhosted.org/packages/31/b9/20bd4b3aaf20653319e22eba3e40d6549a6a0979e5e627536ad7f625189a/py_bip39_bindings-0.3.0-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:43588c856e9ad36b78fa47803e5e0637d44c5cbc0b0c79d045c58d972c30ba05", size = 
614248, upload-time = "2025-09-03T11:35:07.012Z" }, + { url = "https://files.pythonhosted.org/packages/bc/c1/3baa4d6ffd1d366737a06719ecfddca7b3c6f07239289639dec1478dbc28/py_bip39_bindings-0.3.0-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:6bd1cdf3d333b37e39b97485293d4b2eb8cc90a24c64bf530f04a7891cd9d6eb", size = 601779, upload-time = "2025-09-03T11:35:20.773Z" }, + { url = "https://files.pythonhosted.org/packages/a9/e0/3c8d94ed27c8775799bd30116535d3a165f2a09ad627f0f4ea78e3901f2f/py_bip39_bindings-0.3.0-cp314-cp314-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a5b4a6011d8e807c22699647d5caf1af231b027448c182b126e127adc6e84aab", size = 432218, upload-time = "2025-09-03T11:34:15.39Z" }, + { url = "https://files.pythonhosted.org/packages/a1/b6/75101dfaa0cd0f68bf1ebd5e0232b5504a6e69b672b27758063badfbf7d8/py_bip39_bindings-0.3.0-cp314-cp314-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:2a28d7d9c3d8aa46f63b40e59cd8d2fe8f49a1756dd690dfaad6c5a6ba44c5e8", size = 438955, upload-time = "2025-09-03T11:34:03.71Z" }, +] + +[[package]] +name = "py-ed25519-zebra-bindings" +version = "1.3.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/c1/0f/18ba2ae4eb2bc84fd05ebd3689d6d33609198707a602cf332e854944735f/py_ed25519_zebra_bindings-1.3.0.tar.gz", hash = "sha256:c0c4478ab57d65671b310524978122a81b9921847dba07f005d0d2d218f610b5", size = 11809, upload-time = "2025-09-03T11:31:49.185Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/dd/8e/2ad270ce1e4d7e3c5a1ddce2933370f4e60510d4f64cfc243d27ee250caa/py_ed25519_zebra_bindings-1.3.0-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:dd548a85aae8354e781db7ce2310cd74e11478c95caf30dcb0e61481de9b1ccd", size = 297769, upload-time = "2025-09-03T11:31:03.794Z" }, + { url = 
"https://files.pythonhosted.org/packages/e7/b8/539cf42a0815d8c6f2d8aaaf8e0c7b8dff7c5e05bb74c440cbf3a2b320dc/py_ed25519_zebra_bindings-1.3.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:160d09d42c3a71b27b31c6cfe33c495816603407a8dca65848e3daf5264ce650", size = 269824, upload-time = "2025-09-03T11:31:00.836Z" }, + { url = "https://files.pythonhosted.org/packages/31/db/5f9dc670fbd10ac7e8ef6d87bbe003ef0adc6816eb4b7227f45b9666bc53/py_ed25519_zebra_bindings-1.3.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6955a1689da524991b8940c9fad4c71351508faa9c338d3a59c1979881cb6d13", size = 300831, upload-time = "2025-09-03T11:30:08.864Z" }, + { url = "https://files.pythonhosted.org/packages/1a/24/eca79f4d48896fdd56818751a652c930c81af7a27faa4186f3938d9ede23/py_ed25519_zebra_bindings-1.3.0-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:44524a78ac106a32cfb468df5bf13b74379df022088751802db5d591db4fbaff", size = 328349, upload-time = "2025-09-03T11:30:21.984Z" }, + { url = "https://files.pythonhosted.org/packages/9f/6c/dcec9acbcec854ffd8e273c20fea07ca5427c823690b7b7ecefac2a41993/py_ed25519_zebra_bindings-1.3.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:bc58becb6f3336e73249916bb0ebb5ca01304f7229066681fa1a54b437c73650", size = 443681, upload-time = "2025-09-03T11:30:32.284Z" }, + { url = "https://files.pythonhosted.org/packages/43/9e/505352973fd7eb2411ec15f2e5795cc16bfa6b970426a155493e6de37f5f/py_ed25519_zebra_bindings-1.3.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:75bb597651de7ae71e22d49284ea153cb4c5f9c085e3745634713d57348b9c4f", size = 322842, upload-time = "2025-09-03T11:30:52.732Z" }, + { url = "https://files.pythonhosted.org/packages/de/f7/3f623955953b4557cfc20fbca3337fe0bb3480ae1c0122b2b70000bcbdbc/py_ed25519_zebra_bindings-1.3.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = 
"sha256:8e3ca8fca02404f859d2a2d93e2d54e6a2ca49f39caf29a5497995d2566926d4", size = 340445, upload-time = "2025-09-03T11:30:43.179Z" }, + { url = "https://files.pythonhosted.org/packages/3c/dd/95be4b702eab24844a006472ea0d3f6d4e79e7bac181d1f3f1b5478f28be/py_ed25519_zebra_bindings-1.3.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:506675a6097e771da2177859218ce8cf6a0b96239f38593cbfb38e376530a8e6", size = 479582, upload-time = "2025-09-03T11:31:08.636Z" }, + { url = "https://files.pythonhosted.org/packages/2b/e5/2a6af8ed67538212c4d7de49905ff1d1baa1f109255e8e4006b722fb9c89/py_ed25519_zebra_bindings-1.3.0-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:9225efde253397aaf9ac8509036314d1390621cf7fc4e43616e741d48af91c56", size = 592553, upload-time = "2025-09-03T11:31:19.283Z" }, + { url = "https://files.pythonhosted.org/packages/3f/3b/c93b3ff723747206478deaa7a8c7448fec7faefd011385f1d717a94181e7/py_ed25519_zebra_bindings-1.3.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:8483690dfebd89d9e2a9b4407c902d42187444bd4c6c2e5a161f39da11c0b25a", size = 517855, upload-time = "2025-09-03T11:31:29.603Z" }, + { url = "https://files.pythonhosted.org/packages/cb/1a/0752ea61481348038e8d66fd95453aae99c606d5c872c9d06dde7b6912d7/py_ed25519_zebra_bindings-1.3.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:872ee9da1f93d252c13d246cb3d79b50abd834f45b73b3ce9733091a1b31e470", size = 493050, upload-time = "2025-09-03T11:31:40.442Z" }, + { url = "https://files.pythonhosted.org/packages/32/c7/e7f1193b3edd58e8662c6b8dbeecc3c5a77c1df25fa9f45daee62f5af9c0/py_ed25519_zebra_bindings-1.3.0-cp312-cp312-win32.whl", hash = "sha256:6b28b572db32188af60878b11a20cace42a86f0b8630fa09739d871e67d317b2", size = 183140, upload-time = "2025-09-03T11:31:56.576Z" }, + { url = "https://files.pythonhosted.org/packages/6b/3a/44f1aa1e24693abed85478f5fd84a60a8164162ea9d8eac1d2dab48bc180/py_ed25519_zebra_bindings-1.3.0-cp312-cp312-win_amd64.whl", hash = 
"sha256:015c2ce6b934a6b36d50a21c6213636c18edf9a4c14b847f70089ba1897e119e", size = 188353, upload-time = "2025-09-03T11:31:51.707Z" }, + { url = "https://files.pythonhosted.org/packages/47/50/a548d8ac5d9707305b21417f91e6baaa2de95ba37db871921a1184926c3e/py_ed25519_zebra_bindings-1.3.0-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:c5ef0249358960af020b36bdecc4b415c2f575f9ec355772c580e1081af1b861", size = 297826, upload-time = "2025-09-03T11:31:05.153Z" }, + { url = "https://files.pythonhosted.org/packages/f1/0f/16be20ac1ff561de62ecaae9a8975b29f0ddee00681793ce2c3dcfa134ba/py_ed25519_zebra_bindings-1.3.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:9ddd511d4a03b1858a00cbc805d3d65fbbf58ef83dcc3534d09b68daccffb862", size = 270230, upload-time = "2025-09-03T11:31:01.833Z" }, + { url = "https://files.pythonhosted.org/packages/b0/23/e57e16ebfecb8c075e9e5cfd5334b9834e86b16580738e7db5afd8d954e7/py_ed25519_zebra_bindings-1.3.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c3f463ff25d5de31bdb183d47c675def29983028495e9b183559cdf5740e75e1", size = 301083, upload-time = "2025-09-03T11:30:10.095Z" }, + { url = "https://files.pythonhosted.org/packages/be/ea/7564194b434f46a97f4d23d4c86ea8bd26ff8a994be59f6c27926bff1f16/py_ed25519_zebra_bindings-1.3.0-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:2f93374f340d3c024724e391e560eb3a61a1c94ea2a9dbc78ca8a9af73fe7263", size = 328423, upload-time = "2025-09-03T11:30:23.251Z" }, + { url = "https://files.pythonhosted.org/packages/fd/40/5556c7dfba4f19902284a91d38085ff0a4eb3a7b0d8d61f409e96e15fe9a/py_ed25519_zebra_bindings-1.3.0-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:0cd77c8c994f0f2f34433fdd58245a1534467d263ee03dd61f7509d3624aa689", size = 446423, upload-time = "2025-09-03T11:30:33.684Z" }, + { url = 
"https://files.pythonhosted.org/packages/bf/56/52d1054fb40dcfe27ed23ecfe029a4d750f003cfb393d61015d91fc5b24c/py_ed25519_zebra_bindings-1.3.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:487b22228a5b45be10c6f315980f5fdb8b5582f3ee5e8ecfa8f1a2436f9f3bb2", size = 323010, upload-time = "2025-09-03T11:30:53.782Z" }, + { url = "https://files.pythonhosted.org/packages/bf/86/65cd60e4de5548d4dfe4b153fa3b1c5af9a1e8bf46d57a2a41c9342e9e55/py_ed25519_zebra_bindings-1.3.0-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:6cec27b88c9de77c06e95c677bac49fafeb63d3fa2b8696248760da697de1037", size = 340628, upload-time = "2025-09-03T11:30:44.234Z" }, + { url = "https://files.pythonhosted.org/packages/0d/07/472d923bc8476d820b368868f00554dba99ac6743732bac97182f05b4abe/py_ed25519_zebra_bindings-1.3.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:5df054b02db0676c1d0bccb2d4df7614360cbea7d94028e9f43f813e3e4279e4", size = 479756, upload-time = "2025-09-03T11:31:09.689Z" }, + { url = "https://files.pythonhosted.org/packages/a7/3a/32dcf49eeb1d57eb7759a5e899cf7f48661dcaa8f5be165ef607499bab58/py_ed25519_zebra_bindings-1.3.0-cp313-cp313-musllinux_1_2_armv7l.whl", hash = "sha256:78346e532d045b1748bc558105c4ab33f3f119cba60eb66d589ef5bce468921f", size = 592661, upload-time = "2025-09-03T11:31:20.349Z" }, + { url = "https://files.pythonhosted.org/packages/f5/05/8b5f37944001c89efa81c53c86be10d1b242ce19bc66ccc5dbb9794e0fcb/py_ed25519_zebra_bindings-1.3.0-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:873b6ba1fa5c43297af4007f9bb5c625ab70b84eec5519f5298264ae63254b83", size = 517907, upload-time = "2025-09-03T11:31:30.84Z" }, + { url = "https://files.pythonhosted.org/packages/9b/c9/b5d0e16257964c624a69110e82c24bc60d5b384b515eb6c204b66b2b9725/py_ed25519_zebra_bindings-1.3.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:aab56c445636a98bbed31210d0d3240760a00e45cbdfe1780ca8c7a07fa9618e", size = 493261, upload-time = 
"2025-09-03T11:31:41.456Z" }, + { url = "https://files.pythonhosted.org/packages/8f/35/4c8327f83f22b5dc2fbcef19bfdb7dbb6ef8b831efeb2c369ea74696a670/py_ed25519_zebra_bindings-1.3.0-cp313-cp313-win32.whl", hash = "sha256:18b6f28fb5e812d99065b9b229d115f5f6235c3697debb82a37d8ff19138445b", size = 183147, upload-time = "2025-09-03T11:31:57.579Z" }, + { url = "https://files.pythonhosted.org/packages/67/65/942c9434d8b4d346b94a21c741a78860b5b2e961478880f8c124c97379ca/py_ed25519_zebra_bindings-1.3.0-cp313-cp313-win_amd64.whl", hash = "sha256:57c8ee8f1e1b53bd9016afecf7e63d4820697093d875d5fc373595ff0c033c42", size = 188462, upload-time = "2025-09-03T11:31:52.632Z" }, + { url = "https://files.pythonhosted.org/packages/53/c1/a50f709cc33c82bf3b2e779b9cc5f29688477e955ea366aa0f210758e644/py_ed25519_zebra_bindings-1.3.0-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:18116e7a8b41f2197d1be9dc3ce1065237bad93e5e041dafa2e63e9f3378579e", size = 300076, upload-time = "2025-09-03T11:30:13.123Z" }, + { url = "https://files.pythonhosted.org/packages/23/e2/ad92a003ab66b97f5776d783577e00e7b2a778a660a86aeb8a0983b46d13/py_ed25519_zebra_bindings-1.3.0-cp313-cp313t-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:088221ad2e2587bb27c85745a3ff08ef2a248d1cb345aaf14c9a51e709eb22a4", size = 327760, upload-time = "2025-09-03T11:30:24.566Z" }, + { url = "https://files.pythonhosted.org/packages/45/98/49302629d6c11854a40b7f454c0b7b50b17551cd4752ed2c846cfa8854d4/py_ed25519_zebra_bindings-1.3.0-cp313-cp313t-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:672e463efbed28ab8ba23573dc0659b282449949270586441e6d732aa0e4d0ce", size = 441905, upload-time = "2025-09-03T11:30:35.086Z" }, + { url = "https://files.pythonhosted.org/packages/76/44/4359a0113d8863b4287f2cc264ce786cd8ad8b863bda1008fe421a7d49c5/py_ed25519_zebra_bindings-1.3.0-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:b3ccb411f02662997f883b20dc42acaaf6c7504cd4bba39e0f495c90d52a61a3", 
size = 479057, upload-time = "2025-09-03T11:31:11.109Z" }, + { url = "https://files.pythonhosted.org/packages/e2/7e/c60624b44eaa77e5ea93e805f96d35d21a5e3a65dd61bd4ed1743e44e221/py_ed25519_zebra_bindings-1.3.0-cp313-cp313t-musllinux_1_2_armv7l.whl", hash = "sha256:29c762f34698391d67951f0046647e2ceda8e82f09ea102534d4e826eec4dd69", size = 591882, upload-time = "2025-09-03T11:31:21.454Z" }, + { url = "https://files.pythonhosted.org/packages/e8/74/799b8627e37deb739dc18aae7bd4c372cfef30046344de31058cbc10e8a8/py_ed25519_zebra_bindings-1.3.0-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:3b79e7c0c693ec207259e4e88b352f10e9fa4d26f11c7f684cee9e8694855214", size = 517332, upload-time = "2025-09-03T11:31:31.885Z" }, + { url = "https://files.pythonhosted.org/packages/fb/a9/ec915f981e4b09d1af16e757e7f17260a780b17ad7de2d95c28700e0614f/py_ed25519_zebra_bindings-1.3.0-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:cf07f4fc1bbfe3733e496573a3c6e9f3348986a708328e973ae129f3222eeaad", size = 492651, upload-time = "2025-09-03T11:31:42.48Z" }, + { url = "https://files.pythonhosted.org/packages/7e/45/0b392a0d2193a2e8b3cbb7e88f96154ab393c26f7eb0a0353f41a9d8918a/py_ed25519_zebra_bindings-1.3.0-cp314-cp314-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:94a4985eeea06c5d6acb37c6986ad975d146b6708a8b8348c4d6ad618f81b3aa", size = 322835, upload-time = "2025-09-03T11:30:54.943Z" }, + { url = "https://files.pythonhosted.org/packages/84/59/57cd15cf06c4ad664c31d349ba18d1af37406d9c3a6c760f2fb38a3cb3ee/py_ed25519_zebra_bindings-1.3.0-cp314-cp314-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:322b8cc17c42daa4cc0fbba86267ce59c087b4434b414753221e10e3d4d58ed7", size = 340558, upload-time = "2025-09-03T11:30:45.196Z" }, +] + +[[package]] +name = "py-sr25519-bindings" +version = "0.2.3" +source = { registry = "https://pypi.org/simple" } +sdist = { url = 
"https://files.pythonhosted.org/packages/3e/9a/0b23158cf35321bd14af2ea5868b45cc7380af30bbde730d2b152d35fc20/py_sr25519_bindings-0.2.3.tar.gz", hash = "sha256:5a519bc23b4e8993851e62dd625594329e23bfea479137ba037446a35ec839c4", size = 18001, upload-time = "2025-11-20T10:18:35.519Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/c1/0f/dabed8deaf4a9fc3f31f73fdd0f3548ddfc8a73dacf055aaf5986cd6a5b2/py_sr25519_bindings-0.2.3-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:83538423c2955e132830a9de6e5196d757fe88ca46ca082b66d29c8fba07ff65", size = 338900, upload-time = "2025-11-20T10:17:25.132Z" }, + { url = "https://files.pythonhosted.org/packages/5e/98/aee67dd40dcf09c0b167ee7d2f7e02fd60995feef455c1195ff5fdcedb37/py_sr25519_bindings-0.2.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:4443adf871e224493c4ee4c06be205a10ea649a781132af883f6638fd7acc9d7", size = 312148, upload-time = "2025-11-20T10:17:19.341Z" }, + { url = "https://files.pythonhosted.org/packages/74/9f/66047b9ce7af41663e997e084e53f8c93c3f4644a2a895a6f9259a25ead1/py_sr25519_bindings-0.2.3-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a3929c291408e67a1a11566f251b9f7d06c3fb3ae240caec44b9181de09e3fc9", size = 345098, upload-time = "2025-11-20T10:16:06.741Z" }, + { url = "https://files.pythonhosted.org/packages/bf/ee/bd12d535cb0ae36fd47698bc853ff0ad1221bd423784142649deb3d01440/py_sr25519_bindings-0.2.3-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:619977b94225f559e68e4dd18611f14ed61a2c14d34335bb8ad136e84dd9ce7f", size = 373311, upload-time = "2025-11-20T10:16:23.795Z" }, + { url = "https://files.pythonhosted.org/packages/76/99/63aca8766ce656c1bb84309c105df6f8be7afbe763b2141b352765a26dbf/py_sr25519_bindings-0.2.3-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e2a2e6eb837263479bccd59a4563b537212dd99e57d4b921c3b0b7717bf9f2e1", size = 480545, upload-time = "2025-11-20T10:16:40.245Z" }, + { url = 
"https://files.pythonhosted.org/packages/cd/92/05d8b0b21aa03b806fbc38960f482228af39bc54d17117665c6e0e07c0a9/py_sr25519_bindings-0.2.3-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9da4c9c7f9a0a0e8e3d9ed6eedc885561288edd72267ebc7b0fd11262e8c8b28", size = 372984, upload-time = "2025-11-20T10:17:09.278Z" }, + { url = "https://files.pythonhosted.org/packages/74/67/b8f42c2b6e222dc4d272082e65dcf70159e98620606f69e6813261b6ea44/py_sr25519_bindings-0.2.3-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:b92fc5e18c0c5e7c75fa6a49b48914b8e2968e42a0825449241a46ca00546d6c", size = 390521, upload-time = "2025-11-20T10:16:56.284Z" }, + { url = "https://files.pythonhosted.org/packages/bf/86/216779bf7b88ff6ab788fa0f17d8ed6e1f9b6f15e3ab71d978fcf3feaff0/py_sr25519_bindings-0.2.3-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:f1b818203e84a2a6f059ed2763d5506b3772127c02ffd428163b033f91c1ad92", size = 526879, upload-time = "2025-11-20T10:17:31.414Z" }, + { url = "https://files.pythonhosted.org/packages/47/80/73f54865d5831bc159020ca47873f35e0f9f990a9031a6dd44494a676a73/py_sr25519_bindings-0.2.3-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:feacb4aa1adc9b15caf516fb14f2f3d95de7451b67f757da576a7184f34d397a", size = 641311, upload-time = "2025-11-20T10:17:47.957Z" }, + { url = "https://files.pythonhosted.org/packages/c0/6c/90ca04381158f266719a8667001566b5c1d56797eb61a52376629655132f/py_sr25519_bindings-0.2.3-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:7afaa64cc36539df44779f3ff108cfef93c5462e9e28ac832f8329e4c4c045bd", size = 567535, upload-time = "2025-11-20T10:18:04.003Z" }, + { url = "https://files.pythonhosted.org/packages/16/e8/72bc7f19483602f6b6e27c76e1a61ebc378bd2f6100a0658af641ecd1072/py_sr25519_bindings-0.2.3-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:2369d7cb3d5ed41e1d797d1c8e9819b0c31377f18ec6fe685bde632586107da2", size = 539863, upload-time = "2025-11-20T10:18:22.463Z" }, + { url = 
"https://files.pythonhosted.org/packages/07/22/4e4b9e12a3013f433ce65185ea6303f8eb00a8d7812e6f13c8d9ac616ebb/py_sr25519_bindings-0.2.3-cp312-cp312-win_amd64.whl", hash = "sha256:75ad9a3f11b54e2b8fb2db794e3d75a9baedddc9db583985ade536a1103a2d8d", size = 226020, upload-time = "2025-11-20T10:18:39.701Z" }, + { url = "https://files.pythonhosted.org/packages/f7/de/ddedb9db8f1f5fbfbba4f3611de78a1315a5f9d1fff3bb8dbd28b5a28976/py_sr25519_bindings-0.2.3-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:eb5120632e30998aa7b267313017c3498dc47d50b835f724d1d5e01b1fc46083", size = 339052, upload-time = "2025-11-20T10:17:26.896Z" }, + { url = "https://files.pythonhosted.org/packages/0e/ff/9e086bbe621f72523cbd5e9076d5c970a27c511573f80a006cfad2697958/py_sr25519_bindings-0.2.3-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:d95f5c8023c1e61fef2606d10c02ba98f0b44c7214aef50f4f291eaad15870b3", size = 312306, upload-time = "2025-11-20T10:17:20.506Z" }, + { url = "https://files.pythonhosted.org/packages/fa/49/faa6a803818475d9acfb13fd66ee6fcb1326ea97a9c73b819e4a6f9bd2bf/py_sr25519_bindings-0.2.3-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bebed545064e2c16d7977e1c604826b8f09cc414f651708236636571d23ca52f", size = 345013, upload-time = "2025-11-20T10:16:08.404Z" }, + { url = "https://files.pythonhosted.org/packages/bd/74/5f860db8796496f3f4aceadd1d0737b93aad6c7cc45ac4d806666fe05572/py_sr25519_bindings-0.2.3-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:d589ec813c53f91e9fccc1f30b7e24ea32bbb83c33c0e097fdffd995905626f6", size = 373570, upload-time = "2025-11-20T10:16:25.073Z" }, + { url = "https://files.pythonhosted.org/packages/69/76/879102744c8cf2f3698c0127c942a20bd65799551105396c40331239f57a/py_sr25519_bindings-0.2.3-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:dc6726fe1edc18ea16803df7c5c54e87f2765003daab50a5649d5874bb7f1255", size = 481194, upload-time = "2025-11-20T10:16:41.551Z" }, + { url = 
"https://files.pythonhosted.org/packages/57/c7/ed8bf493d2afe7eb5c4af723ccddeea515f7bc297d80a6e81def52dc39fa/py_sr25519_bindings-0.2.3-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:35dd09f9d289681f7aa45ee63488cea1de7c479ac499715a7044d132bbb1cc8f", size = 373054, upload-time = "2025-11-20T10:17:10.699Z" }, + { url = "https://files.pythonhosted.org/packages/74/de/46834c6618201a8e265b0f8bdb60aad9b090f5cd4942a2002b5b3bf76e93/py_sr25519_bindings-0.2.3-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:0777dd86e03aa4db29e1238b01b7d82abd08f1d8b5f2aee42549baf22c98059a", size = 390705, upload-time = "2025-11-20T10:16:57.805Z" }, + { url = "https://files.pythonhosted.org/packages/29/77/0b9cefde37dd7309cfaadcb8d1a056d34e706e7773ccbc8ca77b349dc704/py_sr25519_bindings-0.2.3-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:f52886adbd427e2e8874a2708963ee5ec33d2a7e0062d1fe27d3c0b9fb4415f0", size = 526551, upload-time = "2025-11-20T10:17:32.763Z" }, + { url = "https://files.pythonhosted.org/packages/17/bf/51c6512d33f36cbd99691e516fda0c5ae770fa498967a06f574dcc5cd3d4/py_sr25519_bindings-0.2.3-cp313-cp313-musllinux_1_2_armv7l.whl", hash = "sha256:80ec7c84f2376762e657de9fcc4acc9a15711524456fe87d3af9e3bbfcb9725d", size = 641624, upload-time = "2025-11-20T10:17:49.45Z" }, + { url = "https://files.pythonhosted.org/packages/23/7f/1455ec98a404c87eaba1c2551ea5553c0b5ea0ea5726c728edce46e50adc/py_sr25519_bindings-0.2.3-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:ac0193bf76cf71ec234b5f4d287640b0b1e0cc63cfb9d457b4579263cbec80aa", size = 567678, upload-time = "2025-11-20T10:18:05.349Z" }, + { url = "https://files.pythonhosted.org/packages/56/1f/b2770f5051d46b26a0ed1084f977ad927bce9e48423d904f5cf666efe64f/py_sr25519_bindings-0.2.3-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:2bb74afc9309ec78bf4bbeb820e5612112283c8f3e70969d018b48ac6fa5002d", size = 540028, upload-time = "2025-11-20T10:18:23.805Z" }, + { url = 
"https://files.pythonhosted.org/packages/7a/00/4c3f5434a45e13f85c1cd5bf0b2cff1bbd1228b9c7558cccbcd312ef6a0f/py_sr25519_bindings-0.2.3-cp313-cp313-win_amd64.whl", hash = "sha256:3291c826a16aa57963dc5a0b5c96355ddef1977b5808cae19cceb90dcf0ecc4c", size = 226252, upload-time = "2025-11-20T10:18:41.256Z" }, + { url = "https://files.pythonhosted.org/packages/a1/7f/a453154c8d6e62ebaac1568cb0b99513d940752413ea8479800bbe1d32d1/py_sr25519_bindings-0.2.3-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:977c0a52afe659a3bc7db1da2cc387ed8ee57131efb131370ed0614d0e105a55", size = 344415, upload-time = "2025-11-20T10:16:10.092Z" }, + { url = "https://files.pythonhosted.org/packages/78/b0/80aa7efdf25165ab2f475a28306aa159f1d5699905cd7a8e74195e7b438a/py_sr25519_bindings-0.2.3-cp313-cp313t-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:c10a879ab1c9bb5eee1c6744e8da1ea2600cf9ff358235b2b92ac7dee151988f", size = 372744, upload-time = "2025-11-20T10:16:26.664Z" }, + { url = "https://files.pythonhosted.org/packages/db/f2/55209f53c5525b595fd80c6913a89f995a2719f5598c54a514cbb16ebe8d/py_sr25519_bindings-0.2.3-cp313-cp313t-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:781a5c150086cc13721e01f69355f6079c546eb4f197ef4ebbe394739db52352", size = 482028, upload-time = "2025-11-20T10:16:42.9Z" }, + { url = "https://files.pythonhosted.org/packages/12/9a/b4baf722e44889944a83dc51869aa56cad0e70a33f6fc18ac5c82b7f66e5/py_sr25519_bindings-0.2.3-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:8f1a7e7791875fa5486c25b531058bcbc4be351245ba28e194c040283ee5d664", size = 526257, upload-time = "2025-11-20T10:17:34.466Z" }, + { url = "https://files.pythonhosted.org/packages/c2/46/2b9e64adcc9ce74342bd959f18226bc998bad8da666f16db61cb2650547e/py_sr25519_bindings-0.2.3-cp313-cp313t-musllinux_1_2_armv7l.whl", hash = "sha256:c89378818e77119d2bff2662768c2e62396ef45fc6af6e64dbfbc228704f8cc9", size = 640643, upload-time = "2025-11-20T10:17:50.83Z" }, + { 
url = "https://files.pythonhosted.org/packages/4c/1b/70de71c233af04eee109c42e3bc9561098033287e6dde2102c24d18907bd/py_sr25519_bindings-0.2.3-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:362349002b47d37f9ffdb9b5f33f7dad4831ab864fe29fb869d41b314801ed3b", size = 567198, upload-time = "2025-11-20T10:18:06.749Z" }, + { url = "https://files.pythonhosted.org/packages/af/62/ddb99d42f9a9f2f7083fa506d0e07215ab0fee0fdc9b9ed572b4823f187b/py_sr25519_bindings-0.2.3-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:f81c25229f9b9719462c3ab209baba3cf74ea599944d82da35f74f104fbee569", size = 539811, upload-time = "2025-11-20T10:18:25.168Z" }, + { url = "https://files.pythonhosted.org/packages/2f/08/45842518b1e163debf4d45c55c601ec582af52182d91a1b7f8cf6b2c426f/py_sr25519_bindings-0.2.3-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:8d9c8c3a12fe4e7c71c6e05683775769eda1f09dfa35eab3e33426512a752b4e", size = 312256, upload-time = "2025-11-20T10:17:21.742Z" }, + { url = "https://files.pythonhosted.org/packages/4f/67/37249e51d290290392946929133608a9947dab959d59878eb00e3ed8b8e9/py_sr25519_bindings-0.2.3-cp314-cp314-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8b532167ea64709dad07a1a4e51dddb580343d30d34c4e6bcf271995eb40818b", size = 344401, upload-time = "2025-11-20T10:16:11.445Z" }, + { url = "https://files.pythonhosted.org/packages/10/7e/9e79faa2ed7d675214a928c2c107ad219ac06f592652115ca3d2844f3480/py_sr25519_bindings-0.2.3-cp314-cp314-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:2d9ccc6b95cd413959b506b8cca2f4847f88e0996ea54933fd4dbf11c28d11cb", size = 373496, upload-time = "2025-11-20T10:16:28.411Z" }, + { url = "https://files.pythonhosted.org/packages/d1/15/bbe8d8979a4804fa8e554e0c3342ecc568f95b8935b6ff03ac8a1cad6220/py_sr25519_bindings-0.2.3-cp314-cp314-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:0581a5502d9e40181b06ab4f40d3865030f4727cf88d7d93d57be74303a0cc63", size = 481813, upload-time = 
"2025-11-20T10:16:44.273Z" }, + { url = "https://files.pythonhosted.org/packages/bf/73/96d8c953ec9c953a3f918f699f4c8e2964560839b795435f971ff70057b2/py_sr25519_bindings-0.2.3-cp314-cp314-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d2256f5e349300450ae09e1d18cf865c5445c5ddf735ff7cc645870bdcb4ccfa", size = 372755, upload-time = "2025-11-20T10:17:11.976Z" }, + { url = "https://files.pythonhosted.org/packages/3f/d3/21909eb6aa42dde1a5f2f502ff52d6394507bed9fd0472940c9fdefe8143/py_sr25519_bindings-0.2.3-cp314-cp314-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:8a4ad45a83631d98c61ddc1b1be261ad5cc2f6c16977f9ed9e2844ac976fd03d", size = 390414, upload-time = "2025-11-20T10:16:59.393Z" }, + { url = "https://files.pythonhosted.org/packages/19/4e/564ca61524bc0fa97e76c97a9254ccee4a9797eca7d587fdd6449a5eebfc/py_sr25519_bindings-0.2.3-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:ad286619faa5641ea45dce6e5a23036e4751b95def75390e3058f0449df6e6df", size = 525975, upload-time = "2025-11-20T10:17:35.991Z" }, + { url = "https://files.pythonhosted.org/packages/c5/cb/0ac16aaf9a2f1e5c957a5fdb82eea6bb5c9292bf2621e80e521dae4b6a57/py_sr25519_bindings-0.2.3-cp314-cp314-musllinux_1_2_armv7l.whl", hash = "sha256:d3895827d11bb0e9758f191c503be33d91ee8fe5ec5098cc17666c3b3fe49b67", size = 641543, upload-time = "2025-11-20T10:17:52.25Z" }, + { url = "https://files.pythonhosted.org/packages/f3/a8/6dcbc47cbcfd8e6a8ac387acf863f34076af1dafdfd8a787dd646613d4ff/py_sr25519_bindings-0.2.3-cp314-cp314-musllinux_1_2_i686.whl", hash = "sha256:a3622f8dcc0a15e7b785ae63a62774bc4faffb464c13ea09c38979e9564a6b70", size = 567405, upload-time = "2025-11-20T10:18:08.216Z" }, + { url = "https://files.pythonhosted.org/packages/cf/1a/f8ad41c69387aa2bb8f574577fe2659a196590a74b09cfbde52b4a1f6a96/py_sr25519_bindings-0.2.3-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:a8192a830726f52975c89961df8a1c5efdbb1789a021f7807792adcb2c77613f", size = 539670, upload-time = 
"2025-11-20T10:18:26.812Z" }, + { url = "https://files.pythonhosted.org/packages/9d/93/9bb54afde0f932cb23b82d7543cfe5c1555e1d0e04ff13e635d077feb0cc/py_sr25519_bindings-0.2.3-cp314-cp314-win32.whl", hash = "sha256:4d1e73ead4c6e73ce0ddff27423aca60f07cc153ebf7315c0309bc90519f43a7", size = 215371, upload-time = "2025-11-20T10:18:45.291Z" }, + { url = "https://files.pythonhosted.org/packages/4e/2f/d80e00bb4834dea2939401356a63fdec871671a7da9a0787a178fb3ade9f/py_sr25519_bindings-0.2.3-cp314-cp314-win_amd64.whl", hash = "sha256:d4b7e54365e21b5c1c674dea5ba2e74b406bae58d706fbcd5b1498284cdaa66d", size = 226170, upload-time = "2025-11-20T10:18:42.567Z" }, + { url = "https://files.pythonhosted.org/packages/e3/d8/ce7d2d445c9eca1ea8151ffe52d99acbdb3e46999802d499582f3fc1c736/py_sr25519_bindings-0.2.3-cp314-cp314t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bc9ef7d00043e7edfaee1632b21d05590a33c90ccd7d1ed6a6202980dfc3c266", size = 344132, upload-time = "2025-11-20T10:16:12.764Z" }, + { url = "https://files.pythonhosted.org/packages/77/5b/9c10d36df1e92f068f15d850881e3a82f5755ebaabd49eae91cd5f4db657/py_sr25519_bindings-0.2.3-cp314-cp314t-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:2b1cbc12693e3368ab4d3bbc2f213c63df10468d7c18ff0a0712694456473fc0", size = 373119, upload-time = "2025-11-20T10:16:29.758Z" }, + { url = "https://files.pythonhosted.org/packages/44/03/84080a7406de89feb28b7d517630239e9d77cf52c746b42669a30aad3453/py_sr25519_bindings-0.2.3-cp314-cp314t-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:40810aaa1c02fc80fd362d3b1f2484c8a3a1c93f5c83f2fe3a9ed0f48b921131", size = 481543, upload-time = "2025-11-20T10:16:45.611Z" }, + { url = "https://files.pythonhosted.org/packages/98/ad/2e339150eb21fe4b95d62ae9570b7048cca9d2897ce49c7417296b7c288b/py_sr25519_bindings-0.2.3-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:2f6e408274a17280769d9f745b38f59994b12c15b327641d77d3fed84d4d634e", size = 525671, upload-time = 
"2025-11-20T10:17:37.355Z" }, + { url = "https://files.pythonhosted.org/packages/a2/2a/8fce92d6cc8ef118f9b046671e5a520be19f3fb37510f84dc015411623a0/py_sr25519_bindings-0.2.3-cp314-cp314t-musllinux_1_2_armv7l.whl", hash = "sha256:3c84cf9e5e170282b82c3b7dd5edbc89e9ebab1541e9ae83478e955efe53bd3e", size = 641105, upload-time = "2025-11-20T10:17:53.808Z" }, + { url = "https://files.pythonhosted.org/packages/b5/07/288cbc0f389c8d609ca4658349d7d1b1066b7d2d92708678557c8f6e6cbf/py_sr25519_bindings-0.2.3-cp314-cp314t-musllinux_1_2_i686.whl", hash = "sha256:217dc51050f98aba7df7e8caee74a4539f599ce1a7ac635d208818a8764477e5", size = 567259, upload-time = "2025-11-20T10:18:09.836Z" }, + { url = "https://files.pythonhosted.org/packages/46/c6/b6e883e6f233adb6ee37c0022229b225219ef6c17273ce459cac161e8fc9/py_sr25519_bindings-0.2.3-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:508215a2992aad684d6d6f9405a0dc4a2b952ca15f64b1b7baaea77b527493db", size = 539448, upload-time = "2025-11-20T10:18:28.225Z" }, +] + +[[package]] +name = "pycparser" +version = "3.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/1b/7d/92392ff7815c21062bea51aa7b87d45576f649f16458d78b7cf94b9ab2e6/pycparser-3.0.tar.gz", hash = "sha256:600f49d217304a5902ac3c37e1281c9fe94e4d0489de643a9504c5cdfdfc6b29", size = 103492, upload-time = "2026-01-21T14:26:51.89Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/0c/c3/44f3fbbfa403ea2a7c779186dc20772604442dde72947e7d01069cbe98e3/pycparser-3.0-py3-none-any.whl", hash = "sha256:b727414169a36b7d524c1c3e31839a521725078d7b2ff038656844266160a992", size = 48172, upload-time = "2026-01-21T14:26:50.693Z" }, +] + +[[package]] +name = "pycryptodome" +version = "3.23.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/8e/a6/8452177684d5e906854776276ddd34eca30d1b1e15aa1ee9cefc289a33f5/pycryptodome-3.23.0.tar.gz", hash = 
"sha256:447700a657182d60338bab09fdb27518f8856aecd80ae4c6bdddb67ff5da44ef", size = 4921276, upload-time = "2025-05-17T17:21:45.242Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/04/5d/bdb09489b63cd34a976cc9e2a8d938114f7a53a74d3dd4f125ffa49dce82/pycryptodome-3.23.0-cp313-cp313t-macosx_10_13_universal2.whl", hash = "sha256:0011f7f00cdb74879142011f95133274741778abba114ceca229adbf8e62c3e4", size = 2495152, upload-time = "2025-05-17T17:20:20.833Z" }, + { url = "https://files.pythonhosted.org/packages/a7/ce/7840250ed4cc0039c433cd41715536f926d6e86ce84e904068eb3244b6a6/pycryptodome-3.23.0-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:90460fc9e088ce095f9ee8356722d4f10f86e5be06e2354230a9880b9c549aae", size = 1639348, upload-time = "2025-05-17T17:20:23.171Z" }, + { url = "https://files.pythonhosted.org/packages/ee/f0/991da24c55c1f688d6a3b5a11940567353f74590734ee4a64294834ae472/pycryptodome-3.23.0-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4764e64b269fc83b00f682c47443c2e6e85b18273712b98aa43bcb77f8570477", size = 2184033, upload-time = "2025-05-17T17:20:25.424Z" }, + { url = "https://files.pythonhosted.org/packages/54/16/0e11882deddf00f68b68dd4e8e442ddc30641f31afeb2bc25588124ac8de/pycryptodome-3.23.0-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:eb8f24adb74984aa0e5d07a2368ad95276cf38051fe2dc6605cbcf482e04f2a7", size = 2270142, upload-time = "2025-05-17T17:20:27.808Z" }, + { url = "https://files.pythonhosted.org/packages/d5/fc/4347fea23a3f95ffb931f383ff28b3f7b1fe868739182cb76718c0da86a1/pycryptodome-3.23.0-cp313-cp313t-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d97618c9c6684a97ef7637ba43bdf6663a2e2e77efe0f863cce97a76af396446", size = 2309384, upload-time = "2025-05-17T17:20:30.765Z" }, + { url = 
"https://files.pythonhosted.org/packages/6e/d9/c5261780b69ce66d8cfab25d2797bd6e82ba0241804694cd48be41add5eb/pycryptodome-3.23.0-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:9a53a4fe5cb075075d515797d6ce2f56772ea7e6a1e5e4b96cf78a14bac3d265", size = 2183237, upload-time = "2025-05-17T17:20:33.736Z" }, + { url = "https://files.pythonhosted.org/packages/5a/6f/3af2ffedd5cfa08c631f89452c6648c4d779e7772dfc388c77c920ca6bbf/pycryptodome-3.23.0-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:763d1d74f56f031788e5d307029caef067febf890cd1f8bf61183ae142f1a77b", size = 2343898, upload-time = "2025-05-17T17:20:36.086Z" }, + { url = "https://files.pythonhosted.org/packages/9a/dc/9060d807039ee5de6e2f260f72f3d70ac213993a804f5e67e0a73a56dd2f/pycryptodome-3.23.0-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:954af0e2bd7cea83ce72243b14e4fb518b18f0c1649b576d114973e2073b273d", size = 2269197, upload-time = "2025-05-17T17:20:38.414Z" }, + { url = "https://files.pythonhosted.org/packages/f9/34/e6c8ca177cb29dcc4967fef73f5de445912f93bd0343c9c33c8e5bf8cde8/pycryptodome-3.23.0-cp313-cp313t-win32.whl", hash = "sha256:257bb3572c63ad8ba40b89f6fc9d63a2a628e9f9708d31ee26560925ebe0210a", size = 1768600, upload-time = "2025-05-17T17:20:40.688Z" }, + { url = "https://files.pythonhosted.org/packages/e4/1d/89756b8d7ff623ad0160f4539da571d1f594d21ee6d68be130a6eccb39a4/pycryptodome-3.23.0-cp313-cp313t-win_amd64.whl", hash = "sha256:6501790c5b62a29fcb227bd6b62012181d886a767ce9ed03b303d1f22eb5c625", size = 1799740, upload-time = "2025-05-17T17:20:42.413Z" }, + { url = "https://files.pythonhosted.org/packages/5d/61/35a64f0feaea9fd07f0d91209e7be91726eb48c0f1bfc6720647194071e4/pycryptodome-3.23.0-cp313-cp313t-win_arm64.whl", hash = "sha256:9a77627a330ab23ca43b48b130e202582e91cc69619947840ea4d2d1be21eb39", size = 1703685, upload-time = "2025-05-17T17:20:44.388Z" }, + { url = 
"https://files.pythonhosted.org/packages/db/6c/a1f71542c969912bb0e106f64f60a56cc1f0fabecf9396f45accbe63fa68/pycryptodome-3.23.0-cp37-abi3-macosx_10_9_universal2.whl", hash = "sha256:187058ab80b3281b1de11c2e6842a357a1f71b42cb1e15bce373f3d238135c27", size = 2495627, upload-time = "2025-05-17T17:20:47.139Z" }, + { url = "https://files.pythonhosted.org/packages/6e/4e/a066527e079fc5002390c8acdd3aca431e6ea0a50ffd7201551175b47323/pycryptodome-3.23.0-cp37-abi3-macosx_10_9_x86_64.whl", hash = "sha256:cfb5cd445280c5b0a4e6187a7ce8de5a07b5f3f897f235caa11f1f435f182843", size = 1640362, upload-time = "2025-05-17T17:20:50.392Z" }, + { url = "https://files.pythonhosted.org/packages/50/52/adaf4c8c100a8c49d2bd058e5b551f73dfd8cb89eb4911e25a0c469b6b4e/pycryptodome-3.23.0-cp37-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:67bd81fcbe34f43ad9422ee8fd4843c8e7198dd88dd3d40e6de42ee65fbe1490", size = 2182625, upload-time = "2025-05-17T17:20:52.866Z" }, + { url = "https://files.pythonhosted.org/packages/5f/e9/a09476d436d0ff1402ac3867d933c61805ec2326c6ea557aeeac3825604e/pycryptodome-3.23.0-cp37-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c8987bd3307a39bc03df5c8e0e3d8be0c4c3518b7f044b0f4c15d1aa78f52575", size = 2268954, upload-time = "2025-05-17T17:20:55.027Z" }, + { url = "https://files.pythonhosted.org/packages/f9/c5/ffe6474e0c551d54cab931918127c46d70cab8f114e0c2b5a3c071c2f484/pycryptodome-3.23.0-cp37-abi3-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:aa0698f65e5b570426fc31b8162ed4603b0c2841cbb9088e2b01641e3065915b", size = 2308534, upload-time = "2025-05-17T17:20:57.279Z" }, + { url = "https://files.pythonhosted.org/packages/18/28/e199677fc15ecf43010f2463fde4c1a53015d1fe95fb03bca2890836603a/pycryptodome-3.23.0-cp37-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:53ecbafc2b55353edcebd64bf5da94a2a2cdf5090a6915bcca6eca6cc452585a", size = 2181853, upload-time = "2025-05-17T17:20:59.322Z" }, + { url 
= "https://files.pythonhosted.org/packages/ce/ea/4fdb09f2165ce1365c9eaefef36625583371ee514db58dc9b65d3a255c4c/pycryptodome-3.23.0-cp37-abi3-musllinux_1_2_i686.whl", hash = "sha256:156df9667ad9f2ad26255926524e1c136d6664b741547deb0a86a9acf5ea631f", size = 2342465, upload-time = "2025-05-17T17:21:03.83Z" }, + { url = "https://files.pythonhosted.org/packages/22/82/6edc3fc42fe9284aead511394bac167693fb2b0e0395b28b8bedaa07ef04/pycryptodome-3.23.0-cp37-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:dea827b4d55ee390dc89b2afe5927d4308a8b538ae91d9c6f7a5090f397af1aa", size = 2267414, upload-time = "2025-05-17T17:21:06.72Z" }, + { url = "https://files.pythonhosted.org/packages/59/fe/aae679b64363eb78326c7fdc9d06ec3de18bac68be4b612fc1fe8902693c/pycryptodome-3.23.0-cp37-abi3-win32.whl", hash = "sha256:507dbead45474b62b2bbe318eb1c4c8ee641077532067fec9c1aa82c31f84886", size = 1768484, upload-time = "2025-05-17T17:21:08.535Z" }, + { url = "https://files.pythonhosted.org/packages/54/2f/e97a1b8294db0daaa87012c24a7bb714147c7ade7656973fd6c736b484ff/pycryptodome-3.23.0-cp37-abi3-win_amd64.whl", hash = "sha256:c75b52aacc6c0c260f204cbdd834f76edc9fb0d8e0da9fbf8352ef58202564e2", size = 1799636, upload-time = "2025-05-17T17:21:10.393Z" }, + { url = "https://files.pythonhosted.org/packages/18/3d/f9441a0d798bf2b1e645adc3265e55706aead1255ccdad3856dbdcffec14/pycryptodome-3.23.0-cp37-abi3-win_arm64.whl", hash = "sha256:11eeeb6917903876f134b56ba11abe95c0b0fd5e3330def218083c7d98bbcb3c", size = 1703675, upload-time = "2025-05-17T17:21:13.146Z" }, +] + +[[package]] +name = "pydantic" +version = "2.12.5" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "annotated-types" }, + { name = "pydantic-core" }, + { name = "typing-extensions" }, + { name = "typing-inspection" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/69/44/36f1a6e523abc58ae5f928898e4aca2e0ea509b5aa6f6f392a5d882be928/pydantic-2.12.5.tar.gz", hash = 
"sha256:4d351024c75c0f085a9febbb665ce8c0c6ec5d30e903bdb6394b7ede26aebb49", size = 821591, upload-time = "2025-11-26T15:11:46.471Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/5a/87/b70ad306ebb6f9b585f114d0ac2137d792b48be34d732d60e597c2f8465a/pydantic-2.12.5-py3-none-any.whl", hash = "sha256:e561593fccf61e8a20fc46dfc2dfe075b8be7d0188df33f221ad1f0139180f9d", size = 463580, upload-time = "2025-11-26T15:11:44.605Z" }, +] + +[[package]] +name = "pydantic-core" +version = "2.41.5" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "typing-extensions" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/71/70/23b021c950c2addd24ec408e9ab05d59b035b39d97cdc1130e1bce647bb6/pydantic_core-2.41.5.tar.gz", hash = "sha256:08daa51ea16ad373ffd5e7606252cc32f07bc72b28284b6bc9c6df804816476e", size = 460952, upload-time = "2025-11-04T13:43:49.098Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/5f/5d/5f6c63eebb5afee93bcaae4ce9a898f3373ca23df3ccaef086d0233a35a7/pydantic_core-2.41.5-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:f41a7489d32336dbf2199c8c0a215390a751c5b014c2c1c5366e817202e9cdf7", size = 2110990, upload-time = "2025-11-04T13:39:58.079Z" }, + { url = "https://files.pythonhosted.org/packages/aa/32/9c2e8ccb57c01111e0fd091f236c7b371c1bccea0fa85247ac55b1e2b6b6/pydantic_core-2.41.5-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:070259a8818988b9a84a449a2a7337c7f430a22acc0859c6b110aa7212a6d9c0", size = 1896003, upload-time = "2025-11-04T13:39:59.956Z" }, + { url = "https://files.pythonhosted.org/packages/68/b8/a01b53cb0e59139fbc9e4fda3e9724ede8de279097179be4ff31f1abb65a/pydantic_core-2.41.5-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e96cea19e34778f8d59fe40775a7a574d95816eb150850a85a7a4c8f4b94ac69", size = 1919200, upload-time = "2025-11-04T13:40:02.241Z" }, + { url = 
"https://files.pythonhosted.org/packages/38/de/8c36b5198a29bdaade07b5985e80a233a5ac27137846f3bc2d3b40a47360/pydantic_core-2.41.5-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:ed2e99c456e3fadd05c991f8f437ef902e00eedf34320ba2b0842bd1c3ca3a75", size = 2052578, upload-time = "2025-11-04T13:40:04.401Z" }, + { url = "https://files.pythonhosted.org/packages/00/b5/0e8e4b5b081eac6cb3dbb7e60a65907549a1ce035a724368c330112adfdd/pydantic_core-2.41.5-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:65840751b72fbfd82c3c640cff9284545342a4f1eb1586ad0636955b261b0b05", size = 2208504, upload-time = "2025-11-04T13:40:06.072Z" }, + { url = "https://files.pythonhosted.org/packages/77/56/87a61aad59c7c5b9dc8caad5a41a5545cba3810c3e828708b3d7404f6cef/pydantic_core-2.41.5-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e536c98a7626a98feb2d3eaf75944ef6f3dbee447e1f841eae16f2f0a72d8ddc", size = 2335816, upload-time = "2025-11-04T13:40:07.835Z" }, + { url = "https://files.pythonhosted.org/packages/0d/76/941cc9f73529988688a665a5c0ecff1112b3d95ab48f81db5f7606f522d3/pydantic_core-2.41.5-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:eceb81a8d74f9267ef4081e246ffd6d129da5d87e37a77c9bde550cb04870c1c", size = 2075366, upload-time = "2025-11-04T13:40:09.804Z" }, + { url = "https://files.pythonhosted.org/packages/d3/43/ebef01f69baa07a482844faaa0a591bad1ef129253ffd0cdaa9d8a7f72d3/pydantic_core-2.41.5-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:d38548150c39b74aeeb0ce8ee1d8e82696f4a4e16ddc6de7b1d8823f7de4b9b5", size = 2171698, upload-time = "2025-11-04T13:40:12.004Z" }, + { url = "https://files.pythonhosted.org/packages/b1/87/41f3202e4193e3bacfc2c065fab7706ebe81af46a83d3e27605029c1f5a6/pydantic_core-2.41.5-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:c23e27686783f60290e36827f9c626e63154b82b116d7fe9adba1fda36da706c", size = 2132603, upload-time = 
"2025-11-04T13:40:13.868Z" }, + { url = "https://files.pythonhosted.org/packages/49/7d/4c00df99cb12070b6bccdef4a195255e6020a550d572768d92cc54dba91a/pydantic_core-2.41.5-cp312-cp312-musllinux_1_1_armv7l.whl", hash = "sha256:482c982f814460eabe1d3bb0adfdc583387bd4691ef00b90575ca0d2b6fe2294", size = 2329591, upload-time = "2025-11-04T13:40:15.672Z" }, + { url = "https://files.pythonhosted.org/packages/cc/6a/ebf4b1d65d458f3cda6a7335d141305dfa19bdc61140a884d165a8a1bbc7/pydantic_core-2.41.5-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:bfea2a5f0b4d8d43adf9d7b8bf019fb46fdd10a2e5cde477fbcb9d1fa08c68e1", size = 2319068, upload-time = "2025-11-04T13:40:17.532Z" }, + { url = "https://files.pythonhosted.org/packages/49/3b/774f2b5cd4192d5ab75870ce4381fd89cf218af999515baf07e7206753f0/pydantic_core-2.41.5-cp312-cp312-win32.whl", hash = "sha256:b74557b16e390ec12dca509bce9264c3bbd128f8a2c376eaa68003d7f327276d", size = 1985908, upload-time = "2025-11-04T13:40:19.309Z" }, + { url = "https://files.pythonhosted.org/packages/86/45/00173a033c801cacf67c190fef088789394feaf88a98a7035b0e40d53dc9/pydantic_core-2.41.5-cp312-cp312-win_amd64.whl", hash = "sha256:1962293292865bca8e54702b08a4f26da73adc83dd1fcf26fbc875b35d81c815", size = 2020145, upload-time = "2025-11-04T13:40:21.548Z" }, + { url = "https://files.pythonhosted.org/packages/f9/22/91fbc821fa6d261b376a3f73809f907cec5ca6025642c463d3488aad22fb/pydantic_core-2.41.5-cp312-cp312-win_arm64.whl", hash = "sha256:1746d4a3d9a794cacae06a5eaaccb4b8643a131d45fbc9af23e353dc0a5ba5c3", size = 1976179, upload-time = "2025-11-04T13:40:23.393Z" }, + { url = "https://files.pythonhosted.org/packages/87/06/8806241ff1f70d9939f9af039c6c35f2360cf16e93c2ca76f184e76b1564/pydantic_core-2.41.5-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:941103c9be18ac8daf7b7adca8228f8ed6bb7a1849020f643b3a14d15b1924d9", size = 2120403, upload-time = "2025-11-04T13:40:25.248Z" }, + { url = 
"https://files.pythonhosted.org/packages/94/02/abfa0e0bda67faa65fef1c84971c7e45928e108fe24333c81f3bfe35d5f5/pydantic_core-2.41.5-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:112e305c3314f40c93998e567879e887a3160bb8689ef3d2c04b6cc62c33ac34", size = 1896206, upload-time = "2025-11-04T13:40:27.099Z" }, + { url = "https://files.pythonhosted.org/packages/15/df/a4c740c0943e93e6500f9eb23f4ca7ec9bf71b19e608ae5b579678c8d02f/pydantic_core-2.41.5-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0cbaad15cb0c90aa221d43c00e77bb33c93e8d36e0bf74760cd00e732d10a6a0", size = 1919307, upload-time = "2025-11-04T13:40:29.806Z" }, + { url = "https://files.pythonhosted.org/packages/9a/e3/6324802931ae1d123528988e0e86587c2072ac2e5394b4bc2bc34b61ff6e/pydantic_core-2.41.5-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:03ca43e12fab6023fc79d28ca6b39b05f794ad08ec2feccc59a339b02f2b3d33", size = 2063258, upload-time = "2025-11-04T13:40:33.544Z" }, + { url = "https://files.pythonhosted.org/packages/c9/d4/2230d7151d4957dd79c3044ea26346c148c98fbf0ee6ebd41056f2d62ab5/pydantic_core-2.41.5-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:dc799088c08fa04e43144b164feb0c13f9a0bc40503f8df3e9fde58a3c0c101e", size = 2214917, upload-time = "2025-11-04T13:40:35.479Z" }, + { url = "https://files.pythonhosted.org/packages/e6/9f/eaac5df17a3672fef0081b6c1bb0b82b33ee89aa5cec0d7b05f52fd4a1fa/pydantic_core-2.41.5-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:97aeba56665b4c3235a0e52b2c2f5ae9cd071b8a8310ad27bddb3f7fb30e9aa2", size = 2332186, upload-time = "2025-11-04T13:40:37.436Z" }, + { url = "https://files.pythonhosted.org/packages/cf/4e/35a80cae583a37cf15604b44240e45c05e04e86f9cfd766623149297e971/pydantic_core-2.41.5-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:406bf18d345822d6c21366031003612b9c77b3e29ffdb0f612367352aab7d586", size = 2073164, upload-time = 
"2025-11-04T13:40:40.289Z" }, + { url = "https://files.pythonhosted.org/packages/bf/e3/f6e262673c6140dd3305d144d032f7bd5f7497d3871c1428521f19f9efa2/pydantic_core-2.41.5-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:b93590ae81f7010dbe380cdeab6f515902ebcbefe0b9327cc4804d74e93ae69d", size = 2179146, upload-time = "2025-11-04T13:40:42.809Z" }, + { url = "https://files.pythonhosted.org/packages/75/c7/20bd7fc05f0c6ea2056a4565c6f36f8968c0924f19b7d97bbfea55780e73/pydantic_core-2.41.5-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:01a3d0ab748ee531f4ea6c3e48ad9dac84ddba4b0d82291f87248f2f9de8d740", size = 2137788, upload-time = "2025-11-04T13:40:44.752Z" }, + { url = "https://files.pythonhosted.org/packages/3a/8d/34318ef985c45196e004bc46c6eab2eda437e744c124ef0dbe1ff2c9d06b/pydantic_core-2.41.5-cp313-cp313-musllinux_1_1_armv7l.whl", hash = "sha256:6561e94ba9dacc9c61bce40e2d6bdc3bfaa0259d3ff36ace3b1e6901936d2e3e", size = 2340133, upload-time = "2025-11-04T13:40:46.66Z" }, + { url = "https://files.pythonhosted.org/packages/9c/59/013626bf8c78a5a5d9350d12e7697d3d4de951a75565496abd40ccd46bee/pydantic_core-2.41.5-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:915c3d10f81bec3a74fbd4faebe8391013ba61e5a1a8d48c4455b923bdda7858", size = 2324852, upload-time = "2025-11-04T13:40:48.575Z" }, + { url = "https://files.pythonhosted.org/packages/1a/d9/c248c103856f807ef70c18a4f986693a46a8ffe1602e5d361485da502d20/pydantic_core-2.41.5-cp313-cp313-win32.whl", hash = "sha256:650ae77860b45cfa6e2cdafc42618ceafab3a2d9a3811fcfbd3bbf8ac3c40d36", size = 1994679, upload-time = "2025-11-04T13:40:50.619Z" }, + { url = "https://files.pythonhosted.org/packages/9e/8b/341991b158ddab181cff136acd2552c9f35bd30380422a639c0671e99a91/pydantic_core-2.41.5-cp313-cp313-win_amd64.whl", hash = "sha256:79ec52ec461e99e13791ec6508c722742ad745571f234ea6255bed38c6480f11", size = 2019766, upload-time = "2025-11-04T13:40:52.631Z" }, + { url = 
"https://files.pythonhosted.org/packages/73/7d/f2f9db34af103bea3e09735bb40b021788a5e834c81eedb541991badf8f5/pydantic_core-2.41.5-cp313-cp313-win_arm64.whl", hash = "sha256:3f84d5c1b4ab906093bdc1ff10484838aca54ef08de4afa9de0f5f14d69639cd", size = 1981005, upload-time = "2025-11-04T13:40:54.734Z" }, + { url = "https://files.pythonhosted.org/packages/ea/28/46b7c5c9635ae96ea0fbb779e271a38129df2550f763937659ee6c5dbc65/pydantic_core-2.41.5-cp314-cp314-macosx_10_12_x86_64.whl", hash = "sha256:3f37a19d7ebcdd20b96485056ba9e8b304e27d9904d233d7b1015db320e51f0a", size = 2119622, upload-time = "2025-11-04T13:40:56.68Z" }, + { url = "https://files.pythonhosted.org/packages/74/1a/145646e5687e8d9a1e8d09acb278c8535ebe9e972e1f162ed338a622f193/pydantic_core-2.41.5-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:1d1d9764366c73f996edd17abb6d9d7649a7eb690006ab6adbda117717099b14", size = 1891725, upload-time = "2025-11-04T13:40:58.807Z" }, + { url = "https://files.pythonhosted.org/packages/23/04/e89c29e267b8060b40dca97bfc64a19b2a3cf99018167ea1677d96368273/pydantic_core-2.41.5-cp314-cp314-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:25e1c2af0fce638d5f1988b686f3b3ea8cd7de5f244ca147c777769e798a9cd1", size = 1915040, upload-time = "2025-11-04T13:41:00.853Z" }, + { url = "https://files.pythonhosted.org/packages/84/a3/15a82ac7bd97992a82257f777b3583d3e84bdb06ba6858f745daa2ec8a85/pydantic_core-2.41.5-cp314-cp314-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:506d766a8727beef16b7adaeb8ee6217c64fc813646b424d0804d67c16eddb66", size = 2063691, upload-time = "2025-11-04T13:41:03.504Z" }, + { url = "https://files.pythonhosted.org/packages/74/9b/0046701313c6ef08c0c1cf0e028c67c770a4e1275ca73131563c5f2a310a/pydantic_core-2.41.5-cp314-cp314-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4819fa52133c9aa3c387b3328f25c1facc356491e6135b459f1de698ff64d869", size = 2213897, upload-time = "2025-11-04T13:41:05.804Z" }, + { url = 
"https://files.pythonhosted.org/packages/8a/cd/6bac76ecd1b27e75a95ca3a9a559c643b3afcd2dd62086d4b7a32a18b169/pydantic_core-2.41.5-cp314-cp314-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2b761d210c9ea91feda40d25b4efe82a1707da2ef62901466a42492c028553a2", size = 2333302, upload-time = "2025-11-04T13:41:07.809Z" }, + { url = "https://files.pythonhosted.org/packages/4c/d2/ef2074dc020dd6e109611a8be4449b98cd25e1b9b8a303c2f0fca2f2bcf7/pydantic_core-2.41.5-cp314-cp314-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:22f0fb8c1c583a3b6f24df2470833b40207e907b90c928cc8d3594b76f874375", size = 2064877, upload-time = "2025-11-04T13:41:09.827Z" }, + { url = "https://files.pythonhosted.org/packages/18/66/e9db17a9a763d72f03de903883c057b2592c09509ccfe468187f2a2eef29/pydantic_core-2.41.5-cp314-cp314-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:2782c870e99878c634505236d81e5443092fba820f0373997ff75f90f68cd553", size = 2180680, upload-time = "2025-11-04T13:41:12.379Z" }, + { url = "https://files.pythonhosted.org/packages/d3/9e/3ce66cebb929f3ced22be85d4c2399b8e85b622db77dad36b73c5387f8f8/pydantic_core-2.41.5-cp314-cp314-musllinux_1_1_aarch64.whl", hash = "sha256:0177272f88ab8312479336e1d777f6b124537d47f2123f89cb37e0accea97f90", size = 2138960, upload-time = "2025-11-04T13:41:14.627Z" }, + { url = "https://files.pythonhosted.org/packages/a6/62/205a998f4327d2079326b01abee48e502ea739d174f0a89295c481a2272e/pydantic_core-2.41.5-cp314-cp314-musllinux_1_1_armv7l.whl", hash = "sha256:63510af5e38f8955b8ee5687740d6ebf7c2a0886d15a6d65c32814613681bc07", size = 2339102, upload-time = "2025-11-04T13:41:16.868Z" }, + { url = "https://files.pythonhosted.org/packages/3c/0d/f05e79471e889d74d3d88f5bd20d0ed189ad94c2423d81ff8d0000aab4ff/pydantic_core-2.41.5-cp314-cp314-musllinux_1_1_x86_64.whl", hash = "sha256:e56ba91f47764cc14f1daacd723e3e82d1a89d783f0f5afe9c364b8bb491ccdb", size = 2326039, upload-time = "2025-11-04T13:41:18.934Z" }, + { url = 
"https://files.pythonhosted.org/packages/ec/e1/e08a6208bb100da7e0c4b288eed624a703f4d129bde2da475721a80cab32/pydantic_core-2.41.5-cp314-cp314-win32.whl", hash = "sha256:aec5cf2fd867b4ff45b9959f8b20ea3993fc93e63c7363fe6851424c8a7e7c23", size = 1995126, upload-time = "2025-11-04T13:41:21.418Z" }, + { url = "https://files.pythonhosted.org/packages/48/5d/56ba7b24e9557f99c9237e29f5c09913c81eeb2f3217e40e922353668092/pydantic_core-2.41.5-cp314-cp314-win_amd64.whl", hash = "sha256:8e7c86f27c585ef37c35e56a96363ab8de4e549a95512445b85c96d3e2f7c1bf", size = 2015489, upload-time = "2025-11-04T13:41:24.076Z" }, + { url = "https://files.pythonhosted.org/packages/4e/bb/f7a190991ec9e3e0ba22e4993d8755bbc4a32925c0b5b42775c03e8148f9/pydantic_core-2.41.5-cp314-cp314-win_arm64.whl", hash = "sha256:e672ba74fbc2dc8eea59fb6d4aed6845e6905fc2a8afe93175d94a83ba2a01a0", size = 1977288, upload-time = "2025-11-04T13:41:26.33Z" }, + { url = "https://files.pythonhosted.org/packages/92/ed/77542d0c51538e32e15afe7899d79efce4b81eee631d99850edc2f5e9349/pydantic_core-2.41.5-cp314-cp314t-macosx_10_12_x86_64.whl", hash = "sha256:8566def80554c3faa0e65ac30ab0932b9e3a5cd7f8323764303d468e5c37595a", size = 2120255, upload-time = "2025-11-04T13:41:28.569Z" }, + { url = "https://files.pythonhosted.org/packages/bb/3d/6913dde84d5be21e284439676168b28d8bbba5600d838b9dca99de0fad71/pydantic_core-2.41.5-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:b80aa5095cd3109962a298ce14110ae16b8c1aece8b72f9dafe81cf597ad80b3", size = 1863760, upload-time = "2025-11-04T13:41:31.055Z" }, + { url = "https://files.pythonhosted.org/packages/5a/f0/e5e6b99d4191da102f2b0eb9687aaa7f5bea5d9964071a84effc3e40f997/pydantic_core-2.41.5-cp314-cp314t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3006c3dd9ba34b0c094c544c6006cc79e87d8612999f1a5d43b769b89181f23c", size = 1878092, upload-time = "2025-11-04T13:41:33.21Z" }, + { url = 
"https://files.pythonhosted.org/packages/71/48/36fb760642d568925953bcc8116455513d6e34c4beaa37544118c36aba6d/pydantic_core-2.41.5-cp314-cp314t-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:72f6c8b11857a856bcfa48c86f5368439f74453563f951e473514579d44aa612", size = 2053385, upload-time = "2025-11-04T13:41:35.508Z" }, + { url = "https://files.pythonhosted.org/packages/20/25/92dc684dd8eb75a234bc1c764b4210cf2646479d54b47bf46061657292a8/pydantic_core-2.41.5-cp314-cp314t-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5cb1b2f9742240e4bb26b652a5aeb840aa4b417c7748b6f8387927bc6e45e40d", size = 2218832, upload-time = "2025-11-04T13:41:37.732Z" }, + { url = "https://files.pythonhosted.org/packages/e2/09/f53e0b05023d3e30357d82eb35835d0f6340ca344720a4599cd663dca599/pydantic_core-2.41.5-cp314-cp314t-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:bd3d54f38609ff308209bd43acea66061494157703364ae40c951f83ba99a1a9", size = 2327585, upload-time = "2025-11-04T13:41:40Z" }, + { url = "https://files.pythonhosted.org/packages/aa/4e/2ae1aa85d6af35a39b236b1b1641de73f5a6ac4d5a7509f77b814885760c/pydantic_core-2.41.5-cp314-cp314t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2ff4321e56e879ee8d2a879501c8e469414d948f4aba74a2d4593184eb326660", size = 2041078, upload-time = "2025-11-04T13:41:42.323Z" }, + { url = "https://files.pythonhosted.org/packages/cd/13/2e215f17f0ef326fc72afe94776edb77525142c693767fc347ed6288728d/pydantic_core-2.41.5-cp314-cp314t-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:d0d2568a8c11bf8225044aa94409e21da0cb09dcdafe9ecd10250b2baad531a9", size = 2173914, upload-time = "2025-11-04T13:41:45.221Z" }, + { url = "https://files.pythonhosted.org/packages/02/7a/f999a6dcbcd0e5660bc348a3991c8915ce6599f4f2c6ac22f01d7a10816c/pydantic_core-2.41.5-cp314-cp314t-musllinux_1_1_aarch64.whl", hash = "sha256:a39455728aabd58ceabb03c90e12f71fd30fa69615760a075b9fec596456ccc3", size = 2129560, upload-time = 
"2025-11-04T13:41:47.474Z" }, + { url = "https://files.pythonhosted.org/packages/3a/b1/6c990ac65e3b4c079a4fb9f5b05f5b013afa0f4ed6780a3dd236d2cbdc64/pydantic_core-2.41.5-cp314-cp314t-musllinux_1_1_armv7l.whl", hash = "sha256:239edca560d05757817c13dc17c50766136d21f7cd0fac50295499ae24f90fdf", size = 2329244, upload-time = "2025-11-04T13:41:49.992Z" }, + { url = "https://files.pythonhosted.org/packages/d9/02/3c562f3a51afd4d88fff8dffb1771b30cfdfd79befd9883ee094f5b6c0d8/pydantic_core-2.41.5-cp314-cp314t-musllinux_1_1_x86_64.whl", hash = "sha256:2a5e06546e19f24c6a96a129142a75cee553cc018ffee48a460059b1185f4470", size = 2331955, upload-time = "2025-11-04T13:41:54.079Z" }, + { url = "https://files.pythonhosted.org/packages/5c/96/5fb7d8c3c17bc8c62fdb031c47d77a1af698f1d7a406b0f79aaa1338f9ad/pydantic_core-2.41.5-cp314-cp314t-win32.whl", hash = "sha256:b4ececa40ac28afa90871c2cc2b9ffd2ff0bf749380fbdf57d165fd23da353aa", size = 1988906, upload-time = "2025-11-04T13:41:56.606Z" }, + { url = "https://files.pythonhosted.org/packages/22/ed/182129d83032702912c2e2d8bbe33c036f342cc735737064668585dac28f/pydantic_core-2.41.5-cp314-cp314t-win_amd64.whl", hash = "sha256:80aa89cad80b32a912a65332f64a4450ed00966111b6615ca6816153d3585a8c", size = 1981607, upload-time = "2025-11-04T13:41:58.889Z" }, + { url = "https://files.pythonhosted.org/packages/9f/ed/068e41660b832bb0b1aa5b58011dea2a3fe0ba7861ff38c4d4904c1c1a99/pydantic_core-2.41.5-cp314-cp314t-win_arm64.whl", hash = "sha256:35b44f37a3199f771c3eaa53051bc8a70cd7b54f333531c59e29fd4db5d15008", size = 1974769, upload-time = "2025-11-04T13:42:01.186Z" }, + { url = "https://files.pythonhosted.org/packages/09/32/59b0c7e63e277fa7911c2fc70ccfb45ce4b98991e7ef37110663437005af/pydantic_core-2.41.5-graalpy312-graalpy250_312_native-macosx_10_12_x86_64.whl", hash = "sha256:7da7087d756b19037bc2c06edc6c170eeef3c3bafcb8f532ff17d64dc427adfd", size = 2110495, upload-time = "2025-11-04T13:42:49.689Z" }, + { url = 
"https://files.pythonhosted.org/packages/aa/81/05e400037eaf55ad400bcd318c05bb345b57e708887f07ddb2d20e3f0e98/pydantic_core-2.41.5-graalpy312-graalpy250_312_native-macosx_11_0_arm64.whl", hash = "sha256:aabf5777b5c8ca26f7824cb4a120a740c9588ed58df9b2d196ce92fba42ff8dc", size = 1915388, upload-time = "2025-11-04T13:42:52.215Z" }, + { url = "https://files.pythonhosted.org/packages/6e/0d/e3549b2399f71d56476b77dbf3cf8937cec5cd70536bdc0e374a421d0599/pydantic_core-2.41.5-graalpy312-graalpy250_312_native-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c007fe8a43d43b3969e8469004e9845944f1a80e6acd47c150856bb87f230c56", size = 1942879, upload-time = "2025-11-04T13:42:56.483Z" }, + { url = "https://files.pythonhosted.org/packages/f7/07/34573da085946b6a313d7c42f82f16e8920bfd730665de2d11c0c37a74b5/pydantic_core-2.41.5-graalpy312-graalpy250_312_native-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:76d0819de158cd855d1cbb8fcafdf6f5cf1eb8e470abe056d5d161106e38062b", size = 2139017, upload-time = "2025-11-04T13:42:59.471Z" }, +] + +[[package]] +name = "pygments" +version = "2.20.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/c3/b2/bc9c9196916376152d655522fdcebac55e66de6603a76a02bca1b6414f6c/pygments-2.20.0.tar.gz", hash = "sha256:6757cd03768053ff99f3039c1a36d6c0aa0b263438fcab17520b30a303a82b5f", size = 4955991, upload-time = "2026-03-29T13:29:33.898Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/f4/7e/a72dd26f3b0f4f2bf1dd8923c85f7ceb43172af56d63c7383eb62b332364/pygments-2.20.0-py3-none-any.whl", hash = "sha256:81a9e26dd42fd28a23a2d169d86d7ac03b46e2f8b59ed4698fb4785f946d0176", size = 1231151, upload-time = "2026-03-29T13:29:30.038Z" }, +] + +[[package]] +name = "pynacl" +version = "1.6.2" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "cffi", marker = "platform_python_implementation != 'PyPy'" }, +] +sdist = { url = 
"https://files.pythonhosted.org/packages/d9/9a/4019b524b03a13438637b11538c82781a5eda427394380381af8f04f467a/pynacl-1.6.2.tar.gz", hash = "sha256:018494d6d696ae03c7e656e5e74cdfd8ea1326962cc401bcf018f1ed8436811c", size = 3511692, upload-time = "2026-01-01T17:48:10.851Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/4b/79/0e3c34dc3c4671f67d251c07aa8eb100916f250ee470df230b0ab89551b4/pynacl-1.6.2-cp314-cp314t-macosx_10_10_universal2.whl", hash = "sha256:622d7b07cc5c02c666795792931b50c91f3ce3c2649762efb1ef0d5684c81594", size = 390064, upload-time = "2026-01-01T17:31:57.264Z" }, + { url = "https://files.pythonhosted.org/packages/eb/1c/23a26e931736e13b16483795c8a6b2f641bf6a3d5238c22b070a5112722c/pynacl-1.6.2-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:d071c6a9a4c94d79eb665db4ce5cedc537faf74f2355e4d502591d850d3913c0", size = 809370, upload-time = "2026-01-01T17:31:59.198Z" }, + { url = "https://files.pythonhosted.org/packages/87/74/8d4b718f8a22aea9e8dcc8b95deb76d4aae380e2f5b570cc70b5fd0a852d/pynacl-1.6.2-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:fe9847ca47d287af41e82be1dd5e23023d3c31a951da134121ab02e42ac218c9", size = 1408304, upload-time = "2026-01-01T17:32:01.162Z" }, + { url = "https://files.pythonhosted.org/packages/fd/73/be4fdd3a6a87fe8a4553380c2b47fbd1f7f58292eb820902f5c8ac7de7b0/pynacl-1.6.2-cp314-cp314t-manylinux_2_26_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:04316d1fc625d860b6c162fff704eb8426b1a8bcd3abacea11142cbd99a6b574", size = 844871, upload-time = "2026-01-01T17:32:02.824Z" }, + { url = "https://files.pythonhosted.org/packages/55/ad/6efc57ab75ee4422e96b5f2697d51bbcf6cdcc091e66310df91fbdc144a8/pynacl-1.6.2-cp314-cp314t-manylinux_2_26_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:44081faff368d6c5553ccf55322ef2819abb40e25afaec7e740f159f74813634", size = 1446356, upload-time = "2026-01-01T17:32:04.452Z" }, + { url = 
"https://files.pythonhosted.org/packages/78/b7/928ee9c4779caa0a915844311ab9fb5f99585621c5d6e4574538a17dca07/pynacl-1.6.2-cp314-cp314t-manylinux_2_34_aarch64.whl", hash = "sha256:a9f9932d8d2811ce1a8ffa79dcbdf3970e7355b5c8eb0c1a881a57e7f7d96e88", size = 826814, upload-time = "2026-01-01T17:32:06.078Z" }, + { url = "https://files.pythonhosted.org/packages/f7/a9/1bdba746a2be20f8809fee75c10e3159d75864ef69c6b0dd168fc60e485d/pynacl-1.6.2-cp314-cp314t-manylinux_2_34_x86_64.whl", hash = "sha256:bc4a36b28dd72fb4845e5d8f9760610588a96d5a51f01d84d8c6ff9849968c14", size = 1411742, upload-time = "2026-01-01T17:32:07.651Z" }, + { url = "https://files.pythonhosted.org/packages/f3/2f/5e7ea8d85f9f3ea5b6b87db1d8388daa3587eed181bdeb0306816fdbbe79/pynacl-1.6.2-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:3bffb6d0f6becacb6526f8f42adfb5efb26337056ee0831fb9a7044d1a964444", size = 801714, upload-time = "2026-01-01T17:32:09.558Z" }, + { url = "https://files.pythonhosted.org/packages/06/ea/43fe2f7eab5f200e40fb10d305bf6f87ea31b3bbc83443eac37cd34a9e1e/pynacl-1.6.2-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:2fef529ef3ee487ad8113d287a593fa26f48ee3620d92ecc6f1d09ea38e0709b", size = 1372257, upload-time = "2026-01-01T17:32:11.026Z" }, + { url = "https://files.pythonhosted.org/packages/4d/54/c9ea116412788629b1347e415f72195c25eb2f3809b2d3e7b25f5c79f13a/pynacl-1.6.2-cp314-cp314t-win32.whl", hash = "sha256:a84bf1c20339d06dc0c85d9aea9637a24f718f375d861b2668b2f9f96fa51145", size = 231319, upload-time = "2026-01-01T17:32:12.46Z" }, + { url = "https://files.pythonhosted.org/packages/ce/04/64e9d76646abac2dccf904fccba352a86e7d172647557f35b9fe2a5ee4a1/pynacl-1.6.2-cp314-cp314t-win_amd64.whl", hash = "sha256:320ef68a41c87547c91a8b58903c9caa641ab01e8512ce291085b5fe2fcb7590", size = 244044, upload-time = "2026-01-01T17:32:13.781Z" }, + { url = 
"https://files.pythonhosted.org/packages/33/33/7873dc161c6a06f43cda13dec67b6fe152cb2f982581151956fa5e5cdb47/pynacl-1.6.2-cp314-cp314t-win_arm64.whl", hash = "sha256:d29bfe37e20e015a7d8b23cfc8bd6aa7909c92a1b8f41ee416bbb3e79ef182b2", size = 188740, upload-time = "2026-01-01T17:32:15.083Z" }, + { url = "https://files.pythonhosted.org/packages/be/7b/4845bbf88e94586ec47a432da4e9107e3fc3ce37eb412b1398630a37f7dd/pynacl-1.6.2-cp38-abi3-macosx_10_10_universal2.whl", hash = "sha256:c949ea47e4206af7c8f604b8278093b674f7c79ed0d4719cc836902bf4517465", size = 388458, upload-time = "2026-01-01T17:32:16.829Z" }, + { url = "https://files.pythonhosted.org/packages/1e/b4/e927e0653ba63b02a4ca5b4d852a8d1d678afbf69b3dbf9c4d0785ac905c/pynacl-1.6.2-cp38-abi3-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:8845c0631c0be43abdd865511c41eab235e0be69c81dc66a50911594198679b0", size = 800020, upload-time = "2026-01-01T17:32:18.34Z" }, + { url = "https://files.pythonhosted.org/packages/7f/81/d60984052df5c97b1d24365bc1e30024379b42c4edcd79d2436b1b9806f2/pynacl-1.6.2-cp38-abi3-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:22de65bb9010a725b0dac248f353bb072969c94fa8d6b1f34b87d7953cf7bbe4", size = 1399174, upload-time = "2026-01-01T17:32:20.239Z" }, + { url = "https://files.pythonhosted.org/packages/68/f7/322f2f9915c4ef27d140101dd0ed26b479f7e6f5f183590fd32dfc48c4d3/pynacl-1.6.2-cp38-abi3-manylinux_2_26_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:46065496ab748469cdd999246d17e301b2c24ae2fdf739132e580a0e94c94a87", size = 835085, upload-time = "2026-01-01T17:32:22.24Z" }, + { url = "https://files.pythonhosted.org/packages/3e/d0/f301f83ac8dbe53442c5a43f6a39016f94f754d7a9815a875b65e218a307/pynacl-1.6.2-cp38-abi3-manylinux_2_26_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:8a66d6fb6ae7661c58995f9c6435bda2b1e68b54b598a6a10247bfcdadac996c", size = 1437614, upload-time = "2026-01-01T17:32:23.766Z" }, + { url = 
"https://files.pythonhosted.org/packages/c4/58/fc6e649762b029315325ace1a8c6be66125e42f67416d3dbd47b69563d61/pynacl-1.6.2-cp38-abi3-manylinux_2_34_aarch64.whl", hash = "sha256:26bfcd00dcf2cf160f122186af731ae30ab120c18e8375684ec2670dccd28130", size = 818251, upload-time = "2026-01-01T17:32:25.69Z" }, + { url = "https://files.pythonhosted.org/packages/c9/a8/b917096b1accc9acd878819a49d3d84875731a41eb665f6ebc826b1af99e/pynacl-1.6.2-cp38-abi3-manylinux_2_34_x86_64.whl", hash = "sha256:c8a231e36ec2cab018c4ad4358c386e36eede0319a0c41fed24f840b1dac59f6", size = 1402859, upload-time = "2026-01-01T17:32:27.215Z" }, + { url = "https://files.pythonhosted.org/packages/85/42/fe60b5f4473e12c72f977548e4028156f4d340b884c635ec6b063fe7e9a5/pynacl-1.6.2-cp38-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:68be3a09455743ff9505491220b64440ced8973fe930f270c8e07ccfa25b1f9e", size = 791926, upload-time = "2026-01-01T17:32:29.314Z" }, + { url = "https://files.pythonhosted.org/packages/fa/f9/e40e318c604259301cc091a2a63f237d9e7b424c4851cafaea4ea7c4834e/pynacl-1.6.2-cp38-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:8b097553b380236d51ed11356c953bf8ce36a29a3e596e934ecabe76c985a577", size = 1363101, upload-time = "2026-01-01T17:32:31.263Z" }, + { url = "https://files.pythonhosted.org/packages/48/47/e761c254f410c023a469284a9bc210933e18588ca87706ae93002c05114c/pynacl-1.6.2-cp38-abi3-win32.whl", hash = "sha256:5811c72b473b2f38f7e2a3dc4f8642e3a3e9b5e7317266e4ced1fba85cae41aa", size = 227421, upload-time = "2026-01-01T17:32:33.076Z" }, + { url = "https://files.pythonhosted.org/packages/41/ad/334600e8cacc7d86587fe5f565480fde569dfb487389c8e1be56ac21d8ac/pynacl-1.6.2-cp38-abi3-win_amd64.whl", hash = "sha256:62985f233210dee6548c223301b6c25440852e13d59a8b81490203c3227c5ba0", size = 239754, upload-time = "2026-01-01T17:32:34.557Z" }, + { url = "https://files.pythonhosted.org/packages/29/7d/5945b5af29534641820d3bd7b00962abbbdfee84ec7e19f0d5b3175f9a31/pynacl-1.6.2-cp38-abi3-win_arm64.whl", hash = 
"sha256:834a43af110f743a754448463e8fd61259cd4ab5bbedcf70f9dabad1d28a394c", size = 184801, upload-time = "2026-01-01T17:32:36.309Z" }, +] + +[[package]] +name = "pyright" +version = "1.1.408" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "nodeenv" }, + { name = "typing-extensions" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/74/b2/5db700e52554b8f025faa9c3c624c59f1f6c8841ba81ab97641b54322f16/pyright-1.1.408.tar.gz", hash = "sha256:f28f2321f96852fa50b5829ea492f6adb0e6954568d1caa3f3af3a5f555eb684", size = 4400578, upload-time = "2026-01-08T08:07:38.795Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/0c/82/a2c93e32800940d9573fb28c346772a14778b84ba7524e691b324620ab89/pyright-1.1.408-py3-none-any.whl", hash = "sha256:090b32865f4fdb1e0e6cd82bf5618480d48eecd2eb2e70f960982a3d9a4c17c1", size = 6399144, upload-time = "2026-01-08T08:07:37.082Z" }, +] + +[[package]] +name = "pytest" +version = "9.0.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "colorama", marker = "sys_platform == 'win32'" }, + { name = "iniconfig" }, + { name = "packaging" }, + { name = "pluggy" }, + { name = "pygments" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/da/1d/eb34f286b164c5e431a810a38697409cca1112cee04b287bb56ac486730b/pytest-9.0.0.tar.gz", hash = "sha256:8f44522eafe4137b0f35c9ce3072931a788a21ee40a2ed279e817d3cc16ed21e", size = 1562764, upload-time = "2025-11-08T17:25:33.34Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/72/99/cafef234114a3b6d9f3aaed0723b437c40c57bdb7b3e4c3a575bc4890052/pytest-9.0.0-py3-none-any.whl", hash = "sha256:e5ccdf10b0bac554970ee88fc1a4ad0ee5d221f8ef22321f9b7e4584e19d7f96", size = 373364, upload-time = "2025-11-08T17:25:31.811Z" }, +] + +[[package]] +name = "python-dotenv" +version = "1.2.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = 
"https://files.pythonhosted.org/packages/f0/26/19cadc79a718c5edbec86fd4919a6b6d3f681039a2f6d66d14be94e75fb9/python_dotenv-1.2.1.tar.gz", hash = "sha256:42667e897e16ab0d66954af0e60a9caa94f0fd4ecf3aaf6d2d260eec1aa36ad6", size = 44221, upload-time = "2025-10-26T15:12:10.434Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/14/1b/a298b06749107c305e1fe0f814c6c74aea7b2f1e10989cb30f544a1b3253/python_dotenv-1.2.1-py3-none-any.whl", hash = "sha256:b81ee9561e9ca4004139c6cbba3a238c32b03e4894671e181b671e8cb8425d61", size = 21230, upload-time = "2025-10-26T15:12:09.109Z" }, +] + +[[package]] +name = "python-statemachine" +version = "2.6.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/92/82/7340e8b3ae588bcbe698f38d3d5578fe99a99ea6a9dad083b9e7316ddb03/python_statemachine-2.6.0.tar.gz", hash = "sha256:adda2e7327ed7ecc96069c49e830fcc8b11a5f9d899ab16742317167b3d7997d", size = 552809, upload-time = "2026-02-13T21:37:35.745Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/6e/b5/c6702da31050b49f4694617a1a1b47b2509ae65ab7eafe8093f3d1c8b0ad/python_statemachine-2.6.0-py3-none-any.whl", hash = "sha256:1b1bfae954e0a980ef3e8617948efa12b5e9a0fef7bb0284ed6e212efede8db4", size = 53167, upload-time = "2026-02-13T21:37:34.116Z" }, +] + +[[package]] +name = "pytz" +version = "2025.2" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/f8/bf/abbd3cdfb8fbc7fb3d4d38d320f2441b1e7cbe29be4f23797b4a2b5d8aac/pytz-2025.2.tar.gz", hash = "sha256:360b9e3dbb49a209c21ad61809c7fb453643e048b38924c765813546746e81c3", size = 320884, upload-time = "2025-03-25T02:25:00.538Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/81/c4/34e93fe5f5429d7570ec1fa436f1986fb1f00c3e0f43a589fe2bbcd22c3f/pytz-2025.2-py2.py3-none-any.whl", hash = "sha256:5ddf76296dd8c44c26eb8f4b6f35488f3ccbf6fbbd7adee0b7262d43f0ec2f00", size = 509225, upload-time = 
"2025-03-25T02:24:58.468Z" }, +] + +[[package]] +name = "pyyaml" +version = "6.0.3" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/05/8e/961c0007c59b8dd7729d542c61a4d537767a59645b82a0b521206e1e25c2/pyyaml-6.0.3.tar.gz", hash = "sha256:d76623373421df22fb4cf8817020cbb7ef15c725b9d5e45f17e189bfc384190f", size = 130960, upload-time = "2025-09-25T21:33:16.546Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/d1/33/422b98d2195232ca1826284a76852ad5a86fe23e31b009c9886b2d0fb8b2/pyyaml-6.0.3-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:7f047e29dcae44602496db43be01ad42fc6f1cc0d8cd6c83d342306c32270196", size = 182063, upload-time = "2025-09-25T21:32:11.445Z" }, + { url = "https://files.pythonhosted.org/packages/89/a0/6cf41a19a1f2f3feab0e9c0b74134aa2ce6849093d5517a0c550fe37a648/pyyaml-6.0.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:fc09d0aa354569bc501d4e787133afc08552722d3ab34836a80547331bb5d4a0", size = 173973, upload-time = "2025-09-25T21:32:12.492Z" }, + { url = "https://files.pythonhosted.org/packages/ed/23/7a778b6bd0b9a8039df8b1b1d80e2e2ad78aa04171592c8a5c43a56a6af4/pyyaml-6.0.3-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:9149cad251584d5fb4981be1ecde53a1ca46c891a79788c0df828d2f166bda28", size = 775116, upload-time = "2025-09-25T21:32:13.652Z" }, + { url = "https://files.pythonhosted.org/packages/65/30/d7353c338e12baef4ecc1b09e877c1970bd3382789c159b4f89d6a70dc09/pyyaml-6.0.3-cp312-cp312-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:5fdec68f91a0c6739b380c83b951e2c72ac0197ace422360e6d5a959d8d97b2c", size = 844011, upload-time = "2025-09-25T21:32:15.21Z" }, + { url = "https://files.pythonhosted.org/packages/8b/9d/b3589d3877982d4f2329302ef98a8026e7f4443c765c46cfecc8858c6b4b/pyyaml-6.0.3-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = 
"sha256:ba1cc08a7ccde2d2ec775841541641e4548226580ab850948cbfda66a1befcdc", size = 807870, upload-time = "2025-09-25T21:32:16.431Z" }, + { url = "https://files.pythonhosted.org/packages/05/c0/b3be26a015601b822b97d9149ff8cb5ead58c66f981e04fedf4e762f4bd4/pyyaml-6.0.3-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:8dc52c23056b9ddd46818a57b78404882310fb473d63f17b07d5c40421e47f8e", size = 761089, upload-time = "2025-09-25T21:32:17.56Z" }, + { url = "https://files.pythonhosted.org/packages/be/8e/98435a21d1d4b46590d5459a22d88128103f8da4c2d4cb8f14f2a96504e1/pyyaml-6.0.3-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:41715c910c881bc081f1e8872880d3c650acf13dfa8214bad49ed4cede7c34ea", size = 790181, upload-time = "2025-09-25T21:32:18.834Z" }, + { url = "https://files.pythonhosted.org/packages/74/93/7baea19427dcfbe1e5a372d81473250b379f04b1bd3c4c5ff825e2327202/pyyaml-6.0.3-cp312-cp312-win32.whl", hash = "sha256:96b533f0e99f6579b3d4d4995707cf36df9100d67e0c8303a0c55b27b5f99bc5", size = 137658, upload-time = "2025-09-25T21:32:20.209Z" }, + { url = "https://files.pythonhosted.org/packages/86/bf/899e81e4cce32febab4fb42bb97dcdf66bc135272882d1987881a4b519e9/pyyaml-6.0.3-cp312-cp312-win_amd64.whl", hash = "sha256:5fcd34e47f6e0b794d17de1b4ff496c00986e1c83f7ab2fb8fcfe9616ff7477b", size = 154003, upload-time = "2025-09-25T21:32:21.167Z" }, + { url = "https://files.pythonhosted.org/packages/1a/08/67bd04656199bbb51dbed1439b7f27601dfb576fb864099c7ef0c3e55531/pyyaml-6.0.3-cp312-cp312-win_arm64.whl", hash = "sha256:64386e5e707d03a7e172c0701abfb7e10f0fb753ee1d773128192742712a98fd", size = 140344, upload-time = "2025-09-25T21:32:22.617Z" }, + { url = "https://files.pythonhosted.org/packages/d1/11/0fd08f8192109f7169db964b5707a2f1e8b745d4e239b784a5a1dd80d1db/pyyaml-6.0.3-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:8da9669d359f02c0b91ccc01cac4a67f16afec0dac22c2ad09f46bee0697eba8", size = 181669, upload-time = "2025-09-25T21:32:23.673Z" }, + { url = 
"https://files.pythonhosted.org/packages/b1/16/95309993f1d3748cd644e02e38b75d50cbc0d9561d21f390a76242ce073f/pyyaml-6.0.3-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:2283a07e2c21a2aa78d9c4442724ec1eb15f5e42a723b99cb3d822d48f5f7ad1", size = 173252, upload-time = "2025-09-25T21:32:25.149Z" }, + { url = "https://files.pythonhosted.org/packages/50/31/b20f376d3f810b9b2371e72ef5adb33879b25edb7a6d072cb7ca0c486398/pyyaml-6.0.3-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:ee2922902c45ae8ccada2c5b501ab86c36525b883eff4255313a253a3160861c", size = 767081, upload-time = "2025-09-25T21:32:26.575Z" }, + { url = "https://files.pythonhosted.org/packages/49/1e/a55ca81e949270d5d4432fbbd19dfea5321eda7c41a849d443dc92fd1ff7/pyyaml-6.0.3-cp313-cp313-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:a33284e20b78bd4a18c8c2282d549d10bc8408a2a7ff57653c0cf0b9be0afce5", size = 841159, upload-time = "2025-09-25T21:32:27.727Z" }, + { url = "https://files.pythonhosted.org/packages/74/27/e5b8f34d02d9995b80abcef563ea1f8b56d20134d8f4e5e81733b1feceb2/pyyaml-6.0.3-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:0f29edc409a6392443abf94b9cf89ce99889a1dd5376d94316ae5145dfedd5d6", size = 801626, upload-time = "2025-09-25T21:32:28.878Z" }, + { url = "https://files.pythonhosted.org/packages/f9/11/ba845c23988798f40e52ba45f34849aa8a1f2d4af4b798588010792ebad6/pyyaml-6.0.3-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:f7057c9a337546edc7973c0d3ba84ddcdf0daa14533c2065749c9075001090e6", size = 753613, upload-time = "2025-09-25T21:32:30.178Z" }, + { url = "https://files.pythonhosted.org/packages/3d/e0/7966e1a7bfc0a45bf0a7fb6b98ea03fc9b8d84fa7f2229e9659680b69ee3/pyyaml-6.0.3-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:eda16858a3cab07b80edaf74336ece1f986ba330fdb8ee0d6c0d68fe82bc96be", size = 794115, upload-time = "2025-09-25T21:32:31.353Z" }, + { url = 
"https://files.pythonhosted.org/packages/de/94/980b50a6531b3019e45ddeada0626d45fa85cbe22300844a7983285bed3b/pyyaml-6.0.3-cp313-cp313-win32.whl", hash = "sha256:d0eae10f8159e8fdad514efdc92d74fd8d682c933a6dd088030f3834bc8e6b26", size = 137427, upload-time = "2025-09-25T21:32:32.58Z" }, + { url = "https://files.pythonhosted.org/packages/97/c9/39d5b874e8b28845e4ec2202b5da735d0199dbe5b8fb85f91398814a9a46/pyyaml-6.0.3-cp313-cp313-win_amd64.whl", hash = "sha256:79005a0d97d5ddabfeeea4cf676af11e647e41d81c9a7722a193022accdb6b7c", size = 154090, upload-time = "2025-09-25T21:32:33.659Z" }, + { url = "https://files.pythonhosted.org/packages/73/e8/2bdf3ca2090f68bb3d75b44da7bbc71843b19c9f2b9cb9b0f4ab7a5a4329/pyyaml-6.0.3-cp313-cp313-win_arm64.whl", hash = "sha256:5498cd1645aa724a7c71c8f378eb29ebe23da2fc0d7a08071d89469bf1d2defb", size = 140246, upload-time = "2025-09-25T21:32:34.663Z" }, + { url = "https://files.pythonhosted.org/packages/9d/8c/f4bd7f6465179953d3ac9bc44ac1a8a3e6122cf8ada906b4f96c60172d43/pyyaml-6.0.3-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:8d1fab6bb153a416f9aeb4b8763bc0f22a5586065f86f7664fc23339fc1c1fac", size = 181814, upload-time = "2025-09-25T21:32:35.712Z" }, + { url = "https://files.pythonhosted.org/packages/bd/9c/4d95bb87eb2063d20db7b60faa3840c1b18025517ae857371c4dd55a6b3a/pyyaml-6.0.3-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:34d5fcd24b8445fadc33f9cf348c1047101756fd760b4dacb5c3e99755703310", size = 173809, upload-time = "2025-09-25T21:32:36.789Z" }, + { url = "https://files.pythonhosted.org/packages/92/b5/47e807c2623074914e29dabd16cbbdd4bf5e9b2db9f8090fa64411fc5382/pyyaml-6.0.3-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:501a031947e3a9025ed4405a168e6ef5ae3126c59f90ce0cd6f2bfc477be31b7", size = 766454, upload-time = "2025-09-25T21:32:37.966Z" }, + { url = 
"https://files.pythonhosted.org/packages/02/9e/e5e9b168be58564121efb3de6859c452fccde0ab093d8438905899a3a483/pyyaml-6.0.3-cp314-cp314-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:b3bc83488de33889877a0f2543ade9f70c67d66d9ebb4ac959502e12de895788", size = 836355, upload-time = "2025-09-25T21:32:39.178Z" }, + { url = "https://files.pythonhosted.org/packages/88/f9/16491d7ed2a919954993e48aa941b200f38040928474c9e85ea9e64222c3/pyyaml-6.0.3-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:c458b6d084f9b935061bc36216e8a69a7e293a2f1e68bf956dcd9e6cbcd143f5", size = 794175, upload-time = "2025-09-25T21:32:40.865Z" }, + { url = "https://files.pythonhosted.org/packages/dd/3f/5989debef34dc6397317802b527dbbafb2b4760878a53d4166579111411e/pyyaml-6.0.3-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:7c6610def4f163542a622a73fb39f534f8c101d690126992300bf3207eab9764", size = 755228, upload-time = "2025-09-25T21:32:42.084Z" }, + { url = "https://files.pythonhosted.org/packages/d7/ce/af88a49043cd2e265be63d083fc75b27b6ed062f5f9fd6cdc223ad62f03e/pyyaml-6.0.3-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:5190d403f121660ce8d1d2c1bb2ef1bd05b5f68533fc5c2ea899bd15f4399b35", size = 789194, upload-time = "2025-09-25T21:32:43.362Z" }, + { url = "https://files.pythonhosted.org/packages/23/20/bb6982b26a40bb43951265ba29d4c246ef0ff59c9fdcdf0ed04e0687de4d/pyyaml-6.0.3-cp314-cp314-win_amd64.whl", hash = "sha256:4a2e8cebe2ff6ab7d1050ecd59c25d4c8bd7e6f400f5f82b96557ac0abafd0ac", size = 156429, upload-time = "2025-09-25T21:32:57.844Z" }, + { url = "https://files.pythonhosted.org/packages/f4/f4/a4541072bb9422c8a883ab55255f918fa378ecf083f5b85e87fc2b4eda1b/pyyaml-6.0.3-cp314-cp314-win_arm64.whl", hash = "sha256:93dda82c9c22deb0a405ea4dc5f2d0cda384168e466364dec6255b293923b2f3", size = 143912, upload-time = "2025-09-25T21:32:59.247Z" }, + { url = 
"https://files.pythonhosted.org/packages/7c/f9/07dd09ae774e4616edf6cda684ee78f97777bdd15847253637a6f052a62f/pyyaml-6.0.3-cp314-cp314t-macosx_10_13_x86_64.whl", hash = "sha256:02893d100e99e03eda1c8fd5c441d8c60103fd175728e23e431db1b589cf5ab3", size = 189108, upload-time = "2025-09-25T21:32:44.377Z" }, + { url = "https://files.pythonhosted.org/packages/4e/78/8d08c9fb7ce09ad8c38ad533c1191cf27f7ae1effe5bb9400a46d9437fcf/pyyaml-6.0.3-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:c1ff362665ae507275af2853520967820d9124984e0f7466736aea23d8611fba", size = 183641, upload-time = "2025-09-25T21:32:45.407Z" }, + { url = "https://files.pythonhosted.org/packages/7b/5b/3babb19104a46945cf816d047db2788bcaf8c94527a805610b0289a01c6b/pyyaml-6.0.3-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:6adc77889b628398debc7b65c073bcb99c4a0237b248cacaf3fe8a557563ef6c", size = 831901, upload-time = "2025-09-25T21:32:48.83Z" }, + { url = "https://files.pythonhosted.org/packages/8b/cc/dff0684d8dc44da4d22a13f35f073d558c268780ce3c6ba1b87055bb0b87/pyyaml-6.0.3-cp314-cp314t-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:a80cb027f6b349846a3bf6d73b5e95e782175e52f22108cfa17876aaeff93702", size = 861132, upload-time = "2025-09-25T21:32:50.149Z" }, + { url = "https://files.pythonhosted.org/packages/b1/5e/f77dc6b9036943e285ba76b49e118d9ea929885becb0a29ba8a7c75e29fe/pyyaml-6.0.3-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:00c4bdeba853cc34e7dd471f16b4114f4162dc03e6b7afcc2128711f0eca823c", size = 839261, upload-time = "2025-09-25T21:32:51.808Z" }, + { url = "https://files.pythonhosted.org/packages/ce/88/a9db1376aa2a228197c58b37302f284b5617f56a5d959fd1763fb1675ce6/pyyaml-6.0.3-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:66e1674c3ef6f541c35191caae2d429b967b99e02040f5ba928632d9a7f0f065", size = 805272, upload-time = "2025-09-25T21:32:52.941Z" }, + { url = 
"https://files.pythonhosted.org/packages/da/92/1446574745d74df0c92e6aa4a7b0b3130706a4142b2d1a5869f2eaa423c6/pyyaml-6.0.3-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:16249ee61e95f858e83976573de0f5b2893b3677ba71c9dd36b9cf8be9ac6d65", size = 829923, upload-time = "2025-09-25T21:32:54.537Z" }, + { url = "https://files.pythonhosted.org/packages/f0/7a/1c7270340330e575b92f397352af856a8c06f230aa3e76f86b39d01b416a/pyyaml-6.0.3-cp314-cp314t-win_amd64.whl", hash = "sha256:4ad1906908f2f5ae4e5a8ddfce73c320c2a1429ec52eafd27138b7f1cbe341c9", size = 174062, upload-time = "2025-09-25T21:32:55.767Z" }, + { url = "https://files.pythonhosted.org/packages/f1/12/de94a39c2ef588c7e6455cfbe7343d3b2dc9d6b6b2f40c4c6565744c873d/pyyaml-6.0.3-cp314-cp314t-win_arm64.whl", hash = "sha256:ebc55a14a21cb14062aa4162f906cd962b28e2e9ea38f9b4391244cd8de4ae0b", size = 149341, upload-time = "2025-09-25T21:32:56.828Z" }, +] + +[[package]] +name = "rapidfuzz" +version = "3.14.3" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/d3/28/9d808fe62375b9aab5ba92fa9b29371297b067c2790b2d7cda648b1e2f8d/rapidfuzz-3.14.3.tar.gz", hash = "sha256:2491937177868bc4b1e469087601d53f925e8d270ccc21e07404b4b5814b7b5f", size = 57863900, upload-time = "2025-11-01T11:54:52.321Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/fa/8e/3c215e860b458cfbedb3ed73bc72e98eb7e0ed72f6b48099604a7a3260c2/rapidfuzz-3.14.3-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:685c93ea961d135893b5984a5a9851637d23767feabe414ec974f43babbd8226", size = 1945306, upload-time = "2025-11-01T11:53:06.452Z" }, + { url = "https://files.pythonhosted.org/packages/36/d9/31b33512015c899f4a6e6af64df8dfe8acddf4c8b40a4b3e0e6e1bcd00e5/rapidfuzz-3.14.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:fa7c8f26f009f8c673fbfb443792f0cf8cf50c4e18121ff1e285b5e08a94fbdb", size = 1390788, upload-time = "2025-11-01T11:53:08.721Z" }, + { url = 
"https://files.pythonhosted.org/packages/a9/67/2ee6f8de6e2081ccd560a571d9c9063184fe467f484a17fa90311a7f4a2e/rapidfuzz-3.14.3-cp312-cp312-manylinux_2_26_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:57f878330c8d361b2ce76cebb8e3e1dc827293b6abf404e67d53260d27b5d941", size = 1374580, upload-time = "2025-11-01T11:53:10.164Z" }, + { url = "https://files.pythonhosted.org/packages/30/83/80d22997acd928eda7deadc19ccd15883904622396d6571e935993e0453a/rapidfuzz-3.14.3-cp312-cp312-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:6c5f545f454871e6af05753a0172849c82feaf0f521c5ca62ba09e1b382d6382", size = 3154947, upload-time = "2025-11-01T11:53:12.093Z" }, + { url = "https://files.pythonhosted.org/packages/5b/cf/9f49831085a16384695f9fb096b99662f589e30b89b4a589a1ebc1a19d34/rapidfuzz-3.14.3-cp312-cp312-manylinux_2_31_armv7l.whl", hash = "sha256:07aa0b5d8863e3151e05026a28e0d924accf0a7a3b605da978f0359bb804df43", size = 1223872, upload-time = "2025-11-01T11:53:13.664Z" }, + { url = "https://files.pythonhosted.org/packages/c8/0f/41ee8034e744b871c2e071ef0d360686f5ccfe5659f4fd96c3ec406b3c8b/rapidfuzz-3.14.3-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:73b07566bc7e010e7b5bd490fb04bb312e820970180df6b5655e9e6224c137db", size = 2392512, upload-time = "2025-11-01T11:53:15.109Z" }, + { url = "https://files.pythonhosted.org/packages/da/86/280038b6b0c2ccec54fb957c732ad6b41cc1fd03b288d76545b9cf98343f/rapidfuzz-3.14.3-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:6de00eb84c71476af7d3110cf25d8fe7c792d7f5fa86764ef0b4ca97e78ca3ed", size = 2521398, upload-time = "2025-11-01T11:53:17.146Z" }, + { url = "https://files.pythonhosted.org/packages/fa/7b/05c26f939607dca0006505e3216248ae2de631e39ef94dd63dbbf0860021/rapidfuzz-3.14.3-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:d7843a1abf0091773a530636fdd2a49a41bcae22f9910b86b4f903e76ddc82dc", size = 4259416, upload-time = "2025-11-01T11:53:19.34Z" }, + { url = 
"https://files.pythonhosted.org/packages/40/eb/9e3af4103d91788f81111af1b54a28de347cdbed8eaa6c91d5e98a889aab/rapidfuzz-3.14.3-cp312-cp312-win32.whl", hash = "sha256:dea97ac3ca18cd3ba8f3d04b5c1fe4aa60e58e8d9b7793d3bd595fdb04128d7a", size = 1709527, upload-time = "2025-11-01T11:53:20.949Z" }, + { url = "https://files.pythonhosted.org/packages/b8/63/d06ecce90e2cf1747e29aeab9f823d21e5877a4c51b79720b2d3be7848f8/rapidfuzz-3.14.3-cp312-cp312-win_amd64.whl", hash = "sha256:b5100fd6bcee4d27f28f4e0a1c6b5127bc8ba7c2a9959cad9eab0bf4a7ab3329", size = 1538989, upload-time = "2025-11-01T11:53:22.428Z" }, + { url = "https://files.pythonhosted.org/packages/fc/6d/beee32dcda64af8128aab3ace2ccb33d797ed58c434c6419eea015fec779/rapidfuzz-3.14.3-cp312-cp312-win_arm64.whl", hash = "sha256:4e49c9e992bc5fc873bd0fff7ef16a4405130ec42f2ce3d2b735ba5d3d4eb70f", size = 811161, upload-time = "2025-11-01T11:53:23.811Z" }, + { url = "https://files.pythonhosted.org/packages/e4/4f/0d94d09646853bd26978cb3a7541b6233c5760687777fa97da8de0d9a6ac/rapidfuzz-3.14.3-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:dbcb726064b12f356bf10fffdb6db4b6dce5390b23627c08652b3f6e49aa56ae", size = 1939646, upload-time = "2025-11-01T11:53:25.292Z" }, + { url = "https://files.pythonhosted.org/packages/b6/eb/f96aefc00f3bbdbab9c0657363ea8437a207d7545ac1c3789673e05d80bd/rapidfuzz-3.14.3-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:1704fc70d214294e554a2421b473779bcdeef715881c5e927dc0f11e1692a0ff", size = 1385512, upload-time = "2025-11-01T11:53:27.594Z" }, + { url = "https://files.pythonhosted.org/packages/26/34/71c4f7749c12ee223dba90017a5947e8f03731a7cc9f489b662a8e9e643d/rapidfuzz-3.14.3-cp313-cp313-manylinux_2_26_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:cc65e72790ddfd310c2c8912b45106e3800fefe160b0c2ef4d6b6fec4e826457", size = 1373571, upload-time = "2025-11-01T11:53:29.096Z" }, + { url = 
"https://files.pythonhosted.org/packages/32/00/ec8597a64f2be301ce1ee3290d067f49f6a7afb226b67d5f15b56d772ba5/rapidfuzz-3.14.3-cp313-cp313-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:43e38c1305cffae8472572a0584d4ffc2f130865586a81038ca3965301f7c97c", size = 3156759, upload-time = "2025-11-01T11:53:30.777Z" }, + { url = "https://files.pythonhosted.org/packages/61/d5/b41eeb4930501cc899d5a9a7b5c9a33d85a670200d7e81658626dcc0ecc0/rapidfuzz-3.14.3-cp313-cp313-manylinux_2_31_armv7l.whl", hash = "sha256:e195a77d06c03c98b3fc06b8a28576ba824392ce40de8c708f96ce04849a052e", size = 1222067, upload-time = "2025-11-01T11:53:32.334Z" }, + { url = "https://files.pythonhosted.org/packages/2a/7d/6d9abb4ffd1027c6ed837b425834f3bed8344472eb3a503ab55b3407c721/rapidfuzz-3.14.3-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:1b7ef2f4b8583a744338a18f12c69693c194fb6777c0e9ada98cd4d9e8f09d10", size = 2394775, upload-time = "2025-11-01T11:53:34.24Z" }, + { url = "https://files.pythonhosted.org/packages/15/ce/4f3ab4c401c5a55364da1ffff8cc879fc97b4e5f4fa96033827da491a973/rapidfuzz-3.14.3-cp313-cp313-musllinux_1_2_armv7l.whl", hash = "sha256:a2135b138bcdcb4c3742d417f215ac2d8c2b87bde15b0feede231ae95f09ec41", size = 2526123, upload-time = "2025-11-01T11:53:35.779Z" }, + { url = "https://files.pythonhosted.org/packages/c1/4b/54f804975376a328f57293bd817c12c9036171d15cf7292032e3f5820b2d/rapidfuzz-3.14.3-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:33a325ed0e8e1aa20c3e75f8ab057a7b248fdea7843c2a19ade0008906c14af0", size = 4262874, upload-time = "2025-11-01T11:53:37.866Z" }, + { url = "https://files.pythonhosted.org/packages/e9/b6/958db27d8a29a50ee6edd45d33debd3ce732e7209183a72f57544cd5fe22/rapidfuzz-3.14.3-cp313-cp313-win32.whl", hash = "sha256:8383b6d0d92f6cd008f3c9216535be215a064b2cc890398a678b56e6d280cb63", size = 1707972, upload-time = "2025-11-01T11:53:39.442Z" }, + { url = 
"https://files.pythonhosted.org/packages/07/75/fde1f334b0cec15b5946d9f84d73250fbfcc73c236b4bc1b25129d90876b/rapidfuzz-3.14.3-cp313-cp313-win_amd64.whl", hash = "sha256:e6b5e3036976f0fde888687d91be86d81f9ac5f7b02e218913c38285b756be6c", size = 1537011, upload-time = "2025-11-01T11:53:40.92Z" }, + { url = "https://files.pythonhosted.org/packages/2e/d7/d83fe001ce599dc7ead57ba1debf923dc961b6bdce522b741e6b8c82f55c/rapidfuzz-3.14.3-cp313-cp313-win_arm64.whl", hash = "sha256:7ba009977601d8b0828bfac9a110b195b3e4e79b350dcfa48c11269a9f1918a0", size = 810744, upload-time = "2025-11-01T11:53:42.723Z" }, + { url = "https://files.pythonhosted.org/packages/92/13/a486369e63ff3c1a58444d16b15c5feb943edd0e6c28a1d7d67cb8946b8f/rapidfuzz-3.14.3-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:a0a28add871425c2fe94358c6300bbeb0bc2ed828ca003420ac6825408f5a424", size = 1967702, upload-time = "2025-11-01T11:53:44.554Z" }, + { url = "https://files.pythonhosted.org/packages/f1/82/efad25e260b7810f01d6b69122685e355bed78c94a12784bac4e0beb2afb/rapidfuzz-3.14.3-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:010e12e2411a4854b0434f920e72b717c43f8ec48d57e7affe5c42ecfa05dd0e", size = 1410702, upload-time = "2025-11-01T11:53:46.066Z" }, + { url = "https://files.pythonhosted.org/packages/ba/1a/34c977b860cde91082eae4a97ae503f43e0d84d4af301d857679b66f9869/rapidfuzz-3.14.3-cp313-cp313t-manylinux_2_26_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:5cfc3d57abd83c734d1714ec39c88a34dd69c85474918ebc21296f1e61eb5ca8", size = 1382337, upload-time = "2025-11-01T11:53:47.62Z" }, + { url = "https://files.pythonhosted.org/packages/88/74/f50ea0e24a5880a9159e8fd256b84d8f4634c2f6b4f98028bdd31891d907/rapidfuzz-3.14.3-cp313-cp313t-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:89acb8cbb52904f763e5ac238083b9fc193bed8d1f03c80568b20e4cef43a519", size = 3165563, upload-time = "2025-11-01T11:53:49.216Z" }, + { url = 
"https://files.pythonhosted.org/packages/e8/7a/e744359404d7737049c26099423fc54bcbf303de5d870d07d2fb1410f567/rapidfuzz-3.14.3-cp313-cp313t-manylinux_2_31_armv7l.whl", hash = "sha256:7d9af908c2f371bfb9c985bd134e295038e3031e666e4b2ade1e7cb7f5af2f1a", size = 1214727, upload-time = "2025-11-01T11:53:50.883Z" }, + { url = "https://files.pythonhosted.org/packages/d3/2e/87adfe14ce75768ec6c2b8acd0e05e85e84be4be5e3d283cdae360afc4fe/rapidfuzz-3.14.3-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:1f1925619627f8798f8c3a391d81071336942e5fe8467bc3c567f982e7ce2897", size = 2403349, upload-time = "2025-11-01T11:53:52.322Z" }, + { url = "https://files.pythonhosted.org/packages/70/17/6c0b2b2bff9c8b12e12624c07aa22e922b0c72a490f180fa9183d1ef2c75/rapidfuzz-3.14.3-cp313-cp313t-musllinux_1_2_armv7l.whl", hash = "sha256:152555187360978119e98ce3e8263d70dd0c40c7541193fc302e9b7125cf8f58", size = 2507596, upload-time = "2025-11-01T11:53:53.835Z" }, + { url = "https://files.pythonhosted.org/packages/c3/d1/87852a7cbe4da7b962174c749a47433881a63a817d04f3e385ea9babcd9e/rapidfuzz-3.14.3-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:52619d25a09546b8db078981ca88939d72caa6b8701edd8b22e16482a38e799f", size = 4273595, upload-time = "2025-11-01T11:53:55.961Z" }, + { url = "https://files.pythonhosted.org/packages/c1/ab/1d0354b7d1771a28fa7fe089bc23acec2bdd3756efa2419f463e3ed80e16/rapidfuzz-3.14.3-cp313-cp313t-win32.whl", hash = "sha256:489ce98a895c98cad284f0a47960c3e264c724cb4cfd47a1430fa091c0c25204", size = 1757773, upload-time = "2025-11-01T11:53:57.628Z" }, + { url = "https://files.pythonhosted.org/packages/0b/0c/71ef356adc29e2bdf74cd284317b34a16b80258fa0e7e242dd92cc1e6d10/rapidfuzz-3.14.3-cp313-cp313t-win_amd64.whl", hash = "sha256:656e52b054d5b5c2524169240e50cfa080b04b1c613c5f90a2465e84888d6f15", size = 1576797, upload-time = "2025-11-01T11:53:59.455Z" }, + { url = 
"https://files.pythonhosted.org/packages/fe/d2/0e64fc27bb08d4304aa3d11154eb5480bcf5d62d60140a7ee984dc07468a/rapidfuzz-3.14.3-cp313-cp313t-win_arm64.whl", hash = "sha256:c7e40c0a0af02ad6e57e89f62bef8604f55a04ecae90b0ceeda591bbf5923317", size = 829940, upload-time = "2025-11-01T11:54:01.1Z" }, + { url = "https://files.pythonhosted.org/packages/32/6f/1b88aaeade83abc5418788f9e6b01efefcd1a69d65ded37d89cd1662be41/rapidfuzz-3.14.3-cp314-cp314-macosx_10_15_x86_64.whl", hash = "sha256:442125473b247227d3f2de807a11da6c08ccf536572d1be943f8e262bae7e4ea", size = 1942086, upload-time = "2025-11-01T11:54:02.592Z" }, + { url = "https://files.pythonhosted.org/packages/a0/2c/b23861347436cb10f46c2bd425489ec462790faaa360a54a7ede5f78de88/rapidfuzz-3.14.3-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:1ec0c8c0c3d4f97ced46b2e191e883f8c82dbbf6d5ebc1842366d7eff13cd5a6", size = 1386993, upload-time = "2025-11-01T11:54:04.12Z" }, + { url = "https://files.pythonhosted.org/packages/83/86/5d72e2c060aa1fbdc1f7362d938f6b237dff91f5b9fc5dd7cc297e112250/rapidfuzz-3.14.3-cp314-cp314-manylinux_2_26_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:2dc37bc20272f388b8c3a4eba4febc6e77e50a8f450c472def4751e7678f55e4", size = 1379126, upload-time = "2025-11-01T11:54:05.777Z" }, + { url = "https://files.pythonhosted.org/packages/c9/bc/ef2cee3e4d8b3fc22705ff519f0d487eecc756abdc7c25d53686689d6cf2/rapidfuzz-3.14.3-cp314-cp314-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:dee362e7e79bae940a5e2b3f6d09c6554db6a4e301cc68343886c08be99844f1", size = 3159304, upload-time = "2025-11-01T11:54:07.351Z" }, + { url = "https://files.pythonhosted.org/packages/a0/36/dc5f2f62bbc7bc90be1f75eeaf49ed9502094bb19290dfb4747317b17f12/rapidfuzz-3.14.3-cp314-cp314-manylinux_2_31_armv7l.whl", hash = "sha256:4b39921df948388a863f0e267edf2c36302983459b021ab928d4b801cbe6a421", size = 1218207, upload-time = "2025-11-01T11:54:09.641Z" }, + { url = 
"https://files.pythonhosted.org/packages/df/7e/8f4be75c1bc62f47edf2bbbe2370ee482fae655ebcc4718ac3827ead3904/rapidfuzz-3.14.3-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:beda6aa9bc44d1d81242e7b291b446be352d3451f8217fcb068fc2933927d53b", size = 2401245, upload-time = "2025-11-01T11:54:11.543Z" }, + { url = "https://files.pythonhosted.org/packages/05/38/f7c92759e1bb188dd05b80d11c630ba59b8d7856657baf454ff56059c2ab/rapidfuzz-3.14.3-cp314-cp314-musllinux_1_2_armv7l.whl", hash = "sha256:6a014ba09657abfcfeed64b7d09407acb29af436d7fc075b23a298a7e4a6b41c", size = 2518308, upload-time = "2025-11-01T11:54:13.134Z" }, + { url = "https://files.pythonhosted.org/packages/c7/ac/85820f70fed5ecb5f1d9a55f1e1e2090ef62985ef41db289b5ac5ec56e28/rapidfuzz-3.14.3-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:32eeafa3abce138bb725550c0e228fc7eaeec7059aa8093d9cbbec2b58c2371a", size = 4265011, upload-time = "2025-11-01T11:54:15.087Z" }, + { url = "https://files.pythonhosted.org/packages/46/a9/616930721ea9835c918af7cde22bff17f9db3639b0c1a7f96684be7f5630/rapidfuzz-3.14.3-cp314-cp314-win32.whl", hash = "sha256:adb44d996fc610c7da8c5048775b21db60dd63b1548f078e95858c05c86876a3", size = 1742245, upload-time = "2025-11-01T11:54:17.19Z" }, + { url = "https://files.pythonhosted.org/packages/06/8a/f2fa5e9635b1ccafda4accf0e38246003f69982d7c81f2faa150014525a4/rapidfuzz-3.14.3-cp314-cp314-win_amd64.whl", hash = "sha256:f3d15d8527e2b293e38ce6e437631af0708df29eafd7c9fc48210854c94472f9", size = 1584856, upload-time = "2025-11-01T11:54:18.764Z" }, + { url = "https://files.pythonhosted.org/packages/ef/97/09e20663917678a6d60d8e0e29796db175b1165e2079830430342d5298be/rapidfuzz-3.14.3-cp314-cp314-win_arm64.whl", hash = "sha256:576e4b9012a67e0bf54fccb69a7b6c94d4e86a9540a62f1a5144977359133583", size = 833490, upload-time = "2025-11-01T11:54:20.753Z" }, + { url = 
"https://files.pythonhosted.org/packages/03/1b/6b6084576ba87bf21877c77218a0c97ba98cb285b0c02eaaee3acd7c4513/rapidfuzz-3.14.3-cp314-cp314t-macosx_10_15_x86_64.whl", hash = "sha256:cec3c0da88562727dd5a5a364bd9efeb535400ff0bfb1443156dd139a1dd7b50", size = 1968658, upload-time = "2025-11-01T11:54:22.25Z" }, + { url = "https://files.pythonhosted.org/packages/38/c0/fb02a0db80d95704b0a6469cc394e8c38501abf7e1c0b2afe3261d1510c2/rapidfuzz-3.14.3-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:d1fa009f8b1100e4880868137e7bf0501422898f7674f2adcd85d5a67f041296", size = 1410742, upload-time = "2025-11-01T11:54:23.863Z" }, + { url = "https://files.pythonhosted.org/packages/a4/72/3fbf12819fc6afc8ec75a45204013b40979d068971e535a7f3512b05e765/rapidfuzz-3.14.3-cp314-cp314t-manylinux_2_26_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:1b86daa7419b5e8b180690efd1fdbac43ff19230803282521c5b5a9c83977655", size = 1382810, upload-time = "2025-11-01T11:54:25.571Z" }, + { url = "https://files.pythonhosted.org/packages/0f/18/0f1991d59bb7eee28922a00f79d83eafa8c7bfb4e8edebf4af2a160e7196/rapidfuzz-3.14.3-cp314-cp314t-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:c7bd1816db05d6c5ffb3a4df0a2b7b56fb8c81ef584d08e37058afa217da91b1", size = 3166349, upload-time = "2025-11-01T11:54:27.195Z" }, + { url = "https://files.pythonhosted.org/packages/0d/f0/baa958b1989c8f88c78bbb329e969440cf330b5a01a982669986495bb980/rapidfuzz-3.14.3-cp314-cp314t-manylinux_2_31_armv7l.whl", hash = "sha256:33da4bbaf44e9755b0ce192597f3bde7372fe2e381ab305f41b707a95ac57aa7", size = 1214994, upload-time = "2025-11-01T11:54:28.821Z" }, + { url = "https://files.pythonhosted.org/packages/e4/a0/cd12ec71f9b2519a3954febc5740291cceabc64c87bc6433afcb36259f3b/rapidfuzz-3.14.3-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:3fecce764cf5a991ee2195a844196da840aba72029b2612f95ac68a8b74946bf", size = 2403919, upload-time = "2025-11-01T11:54:30.393Z" }, + { url = 
"https://files.pythonhosted.org/packages/0b/ce/019bd2176c1644098eced4f0595cb4b3ef52e4941ac9a5854f209d0a6e16/rapidfuzz-3.14.3-cp314-cp314t-musllinux_1_2_armv7l.whl", hash = "sha256:ecd7453e02cf072258c3a6b8e930230d789d5d46cc849503729f9ce475d0e785", size = 2508346, upload-time = "2025-11-01T11:54:32.048Z" }, + { url = "https://files.pythonhosted.org/packages/23/f8/be16c68e2c9e6c4f23e8f4adbb7bccc9483200087ed28ff76c5312da9b14/rapidfuzz-3.14.3-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:ea188aa00e9bcae8c8411f006a5f2f06c4607a02f24eab0d8dc58566aa911f35", size = 4274105, upload-time = "2025-11-01T11:54:33.701Z" }, + { url = "https://files.pythonhosted.org/packages/a1/d1/5ab148e03f7e6ec8cd220ccf7af74d3aaa4de26dd96df58936beb7cba820/rapidfuzz-3.14.3-cp314-cp314t-win32.whl", hash = "sha256:7ccbf68100c170e9a0581accbe9291850936711548c6688ce3bfb897b8c589ad", size = 1793465, upload-time = "2025-11-01T11:54:35.331Z" }, + { url = "https://files.pythonhosted.org/packages/cd/97/433b2d98e97abd9fff1c470a109b311669f44cdec8d0d5aa250aceaed1fb/rapidfuzz-3.14.3-cp314-cp314t-win_amd64.whl", hash = "sha256:9ec02e62ae765a318d6de38df609c57fc6dacc65c0ed1fd489036834fd8a620c", size = 1623491, upload-time = "2025-11-01T11:54:38.085Z" }, + { url = "https://files.pythonhosted.org/packages/e2/f6/e2176eb94f94892441bce3ddc514c179facb65db245e7ce3356965595b19/rapidfuzz-3.14.3-cp314-cp314t-win_arm64.whl", hash = "sha256:e805e52322ae29aa945baf7168b6c898120fbc16d2b8f940b658a5e9e3999253", size = 851487, upload-time = "2025-11-01T11:54:40.176Z" }, +] + +[[package]] +name = "requests" +version = "2.33.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "certifi" }, + { name = "charset-normalizer" }, + { name = "idna" }, + { name = "urllib3" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/5f/a4/98b9c7c6428a668bf7e42ebb7c79d576a1c3c1e3ae2d47e674b468388871/requests-2.33.1.tar.gz", hash = 
"sha256:18817f8c57c6263968bc123d237e3b8b08ac046f5456bd1e307ee8f4250d3517", size = 134120, upload-time = "2026-03-30T16:09:15.531Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/d7/8e/7540e8a2036f79a125c1d2ebadf69ed7901608859186c856fa0388ef4197/requests-2.33.1-py3-none-any.whl", hash = "sha256:4e6d1ef462f3626a1f0a0a9c42dd93c63bad33f9f1c1937509b8c5c8718ab56a", size = 64947, upload-time = "2026-03-30T16:09:13.83Z" }, +] + +[[package]] +name = "retry" +version = "0.9.2" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "decorator" }, + { name = "py" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/9d/72/75d0b85443fbc8d9f38d08d2b1b67cc184ce35280e4a3813cda2f445f3a4/retry-0.9.2.tar.gz", hash = "sha256:f8bfa8b99b69c4506d6f5bd3b0aabf77f98cdb17f3c9fc3f5ca820033336fba4", size = 6448, upload-time = "2016-05-11T13:58:51.541Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/4b/0d/53aea75710af4528a25ed6837d71d117602b01946b307a3912cb3cfcbcba/retry-0.9.2-py2.py3-none-any.whl", hash = "sha256:ccddf89761fa2c726ab29391837d4327f819ea14d244c232a1d24c67a2f98606", size = 7986, upload-time = "2016-05-11T13:58:39.925Z" }, +] + +[[package]] +name = "rich" +version = "14.3.3" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "markdown-it-py" }, + { name = "pygments" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/b3/c6/f3b320c27991c46f43ee9d856302c70dc2d0fb2dba4842ff739d5f46b393/rich-14.3.3.tar.gz", hash = "sha256:b8daa0b9e4eef54dd8cf7c86c03713f53241884e814f4e2f5fb342fe520f639b", size = 230582, upload-time = "2026-02-19T17:23:12.474Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/14/25/b208c5683343959b670dc001595f2f3737e051da617f66c31f7c4fa93abc/rich-14.3.3-py3-none-any.whl", hash = "sha256:793431c1f8619afa7d3b52b2cdec859562b950ea0d4b6b505397612db8d5362d", size = 310458, upload-time = "2026-02-19T17:23:13.732Z" }, +] + +[[package]] +name = "ruff" 
+version = "0.14.10" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/57/08/52232a877978dd8f9cf2aeddce3e611b40a63287dfca29b6b8da791f5e8d/ruff-0.14.10.tar.gz", hash = "sha256:9a2e830f075d1a42cd28420d7809ace390832a490ed0966fe373ba288e77aaf4", size = 5859763, upload-time = "2025-12-18T19:28:57.98Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/60/01/933704d69f3f05ee16ef11406b78881733c186fe14b6a46b05cfcaf6d3b2/ruff-0.14.10-py3-none-linux_armv6l.whl", hash = "sha256:7a3ce585f2ade3e1f29ec1b92df13e3da262178df8c8bdf876f48fa0e8316c49", size = 13527080, upload-time = "2025-12-18T19:29:25.642Z" }, + { url = "https://files.pythonhosted.org/packages/df/58/a0349197a7dfa603ffb7f5b0470391efa79ddc327c1e29c4851e85b09cc5/ruff-0.14.10-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:674f9be9372907f7257c51f1d4fc902cb7cf014b9980152b802794317941f08f", size = 13797320, upload-time = "2025-12-18T19:29:02.571Z" }, + { url = "https://files.pythonhosted.org/packages/7b/82/36be59f00a6082e38c23536df4e71cdbc6af8d7c707eade97fcad5c98235/ruff-0.14.10-py3-none-macosx_11_0_arm64.whl", hash = "sha256:d85713d522348837ef9df8efca33ccb8bd6fcfc86a2cde3ccb4bc9d28a18003d", size = 12918434, upload-time = "2025-12-18T19:28:51.202Z" }, + { url = "https://files.pythonhosted.org/packages/a6/00/45c62a7f7e34da92a25804f813ebe05c88aa9e0c25e5cb5a7d23dd7450e3/ruff-0.14.10-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6987ebe0501ae4f4308d7d24e2d0fe3d7a98430f5adfd0f1fead050a740a3a77", size = 13371961, upload-time = "2025-12-18T19:29:04.991Z" }, + { url = "https://files.pythonhosted.org/packages/40/31/a5906d60f0405f7e57045a70f2d57084a93ca7425f22e1d66904769d1628/ruff-0.14.10-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:16a01dfb7b9e4eee556fbfd5392806b1b8550c9b4a9f6acd3dbe6812b193c70a", size = 13275629, upload-time = "2025-12-18T19:29:21.381Z" }, + { url = 
"https://files.pythonhosted.org/packages/3e/60/61c0087df21894cf9d928dc04bcd4fb10e8b2e8dca7b1a276ba2155b2002/ruff-0.14.10-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7165d31a925b7a294465fa81be8c12a0e9b60fb02bf177e79067c867e71f8b1f", size = 14029234, upload-time = "2025-12-18T19:29:00.132Z" }, + { url = "https://files.pythonhosted.org/packages/44/84/77d911bee3b92348b6e5dab5a0c898d87084ea03ac5dc708f46d88407def/ruff-0.14.10-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:c561695675b972effb0c0a45db233f2c816ff3da8dcfbe7dfc7eed625f218935", size = 15449890, upload-time = "2025-12-18T19:28:53.573Z" }, + { url = "https://files.pythonhosted.org/packages/e9/36/480206eaefa24a7ec321582dda580443a8f0671fdbf6b1c80e9c3e93a16a/ruff-0.14.10-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4bb98fcbbc61725968893682fd4df8966a34611239c9fd07a1f6a07e7103d08e", size = 15123172, upload-time = "2025-12-18T19:29:23.453Z" }, + { url = "https://files.pythonhosted.org/packages/5c/38/68e414156015ba80cef5473d57919d27dfb62ec804b96180bafdeaf0e090/ruff-0.14.10-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f24b47993a9d8cb858429e97bdf8544c78029f09b520af615c1d261bf827001d", size = 14460260, upload-time = "2025-12-18T19:29:27.808Z" }, + { url = "https://files.pythonhosted.org/packages/b3/19/9e050c0dca8aba824d67cc0db69fb459c28d8cd3f6855b1405b3f29cc91d/ruff-0.14.10-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:59aabd2e2c4fd614d2862e7939c34a532c04f1084476d6833dddef4afab87e9f", size = 14229978, upload-time = "2025-12-18T19:29:11.32Z" }, + { url = "https://files.pythonhosted.org/packages/51/eb/e8dd1dd6e05b9e695aa9dd420f4577debdd0f87a5ff2fedda33c09e9be8c/ruff-0.14.10-py3-none-manylinux_2_31_riscv64.whl", hash = "sha256:213db2b2e44be8625002dbea33bb9c60c66ea2c07c084a00d55732689d697a7f", size = 14338036, upload-time = "2025-12-18T19:29:09.184Z" }, + { url = 
"https://files.pythonhosted.org/packages/6a/12/f3e3a505db7c19303b70af370d137795fcfec136d670d5de5391e295c134/ruff-0.14.10-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:b914c40ab64865a17a9a5b67911d14df72346a634527240039eb3bd650e5979d", size = 13264051, upload-time = "2025-12-18T19:29:13.431Z" }, + { url = "https://files.pythonhosted.org/packages/08/64/8c3a47eaccfef8ac20e0484e68e0772013eb85802f8a9f7603ca751eb166/ruff-0.14.10-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:1484983559f026788e3a5c07c81ef7d1e97c1c78ed03041a18f75df104c45405", size = 13283998, upload-time = "2025-12-18T19:29:06.994Z" }, + { url = "https://files.pythonhosted.org/packages/12/84/534a5506f4074e5cc0529e5cd96cfc01bb480e460c7edf5af70d2bcae55e/ruff-0.14.10-py3-none-musllinux_1_2_i686.whl", hash = "sha256:c70427132db492d25f982fffc8d6c7535cc2fd2c83fc8888f05caaa248521e60", size = 13601891, upload-time = "2025-12-18T19:28:55.811Z" }, + { url = "https://files.pythonhosted.org/packages/0d/1e/14c916087d8598917dbad9b2921d340f7884824ad6e9c55de948a93b106d/ruff-0.14.10-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:5bcf45b681e9f1ee6445d317ce1fa9d6cba9a6049542d1c3d5b5958986be8830", size = 14336660, upload-time = "2025-12-18T19:29:16.531Z" }, + { url = "https://files.pythonhosted.org/packages/f2/1c/d7b67ab43f30013b47c12b42d1acd354c195351a3f7a1d67f59e54227ede/ruff-0.14.10-py3-none-win32.whl", hash = "sha256:104c49fc7ab73f3f3a758039adea978869a918f31b73280db175b43a2d9b51d6", size = 13196187, upload-time = "2025-12-18T19:29:19.006Z" }, + { url = "https://files.pythonhosted.org/packages/fb/9c/896c862e13886fae2af961bef3e6312db9ebc6adc2b156fe95e615dee8c1/ruff-0.14.10-py3-none-win_amd64.whl", hash = "sha256:466297bd73638c6bdf06485683e812db1c00c7ac96d4ddd0294a338c62fdc154", size = 14661283, upload-time = "2025-12-18T19:29:30.16Z" }, + { url = "https://files.pythonhosted.org/packages/74/31/b0e29d572670dca3674eeee78e418f20bdf97fa8aa9ea71380885e175ca0/ruff-0.14.10-py3-none-win_arm64.whl", hash = 
"sha256:e51d046cf6dda98a4633b8a8a771451107413b0f07183b2bef03f075599e44e6", size = 13729839, upload-time = "2025-12-18T19:28:48.636Z" }, +] + +[[package]] +name = "scalecodec" +version = "1.2.12" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "base58" }, + { name = "more-itertools" }, + { name = "requests" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/b8/3c/4c3e3fa0efd75eb1e00b9bd6ccce8e0018e0789bff35d76cc9ce554354d0/scalecodec-1.2.12.tar.gz", hash = "sha256:aa54cc901970289fe64ae01edf076f25f60f8d7e4682979b827cab73dde74393", size = 150568, upload-time = "2025-10-16T14:01:55.231Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/9f/e0/a080f62ccb71ace2330081badae678d2f5349078388459a60af927814695/scalecodec-1.2.12-py3-none-any.whl", hash = "sha256:b9de1a2d3d98b9e4285804478d8f5f13b3787ebc4d05625eb0054add7feebe45", size = 99164, upload-time = "2025-10-16T14:01:53.517Z" }, +] + +[[package]] +name = "sentry-sdk" +version = "2.57.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "certifi" }, + { name = "urllib3" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/4f/87/46c0406d8b5ddd026f73adaf5ab75ce144219c41a4830b52df4b9ab55f7f/sentry_sdk-2.57.0.tar.gz", hash = "sha256:4be8d1e71c32fb27f79c577a337ac8912137bba4bcbc64a4ec1da4d6d8dc5199", size = 435288, upload-time = "2026-03-31T09:39:29.264Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/c9/64/982e07b93219cb52e1cca5d272cb579e2f3eb001956c9e7a9a6d106c9473/sentry_sdk-2.57.0-py2.py3-none-any.whl", hash = "sha256:812c8bf5ff3d2f0e89c82f5ce80ab3a6423e102729c4706af7413fd1eb480585", size = 456489, upload-time = "2026-03-31T09:39:27.524Z" }, +] + +[[package]] +name = "setuptools" +version = "70.3.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/65/d8/10a70e86f6c28ae59f101a9de6d77bf70f147180fbf40c3af0f64080adc3/setuptools-70.3.0.tar.gz", hash = 
"sha256:f171bab1dfbc86b132997f26a119f6056a57950d058587841a0082e8830f9dc5", size = 2333112, upload-time = "2024-07-09T16:08:06.251Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/ef/15/88e46eb9387e905704b69849618e699dc2f54407d8953cc4ec4b8b46528d/setuptools-70.3.0-py3-none-any.whl", hash = "sha256:fe384da74336c398e0d956d1cae0669bc02eed936cdb1d49b57de1990dc11ffc", size = 931070, upload-time = "2024-07-09T16:07:58.829Z" }, +] + +[[package]] +name = "shellingham" +version = "1.5.4" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/58/15/8b3609fd3830ef7b27b655beb4b4e9c62313a4e8da8c676e142cc210d58e/shellingham-1.5.4.tar.gz", hash = "sha256:8dbca0739d487e5bd35ab3ca4b36e11c4078f3a234bfce294b0a0291363404de", size = 10310, upload-time = "2023-10-24T04:13:40.426Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/e0/f9/0595336914c5619e5f28a1fb793285925a8cd4b432c9da0a987836c7f822/shellingham-1.5.4-py2.py3-none-any.whl", hash = "sha256:7ecfff8f2fd72616f7481040475a65b2bf8af90a56c89140852d1120324e8686", size = 9755, upload-time = "2023-10-24T04:13:38.866Z" }, +] + +[[package]] +name = "six" +version = "1.17.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/94/e7/b2c673351809dca68a0e064b6af791aa332cf192da575fd474ed7d6f16a2/six-1.17.0.tar.gz", hash = "sha256:ff70335d468e7eb6ec65b95b99d3a2836546063f63acc5171de367e834932a81", size = 34031, upload-time = "2024-12-04T17:35:28.174Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/b7/ce/149a00dd41f10bc29e5921b496af8b574d8413afcd5e30dfa0ed46c2cc5e/six-1.17.0-py2.py3-none-any.whl", hash = "sha256:4721f391ed90541fddacab5acf947aa0d3dc7d27b2e1e8eda2be8970586c3274", size = 11050, upload-time = "2024-12-04T17:35:26.475Z" }, +] + +[[package]] +name = "smmap" +version = "5.0.3" +source = { registry = "https://pypi.org/simple" } +sdist = { url = 
"https://files.pythonhosted.org/packages/1f/ea/49c993d6dfdd7338c9b1000a0f36817ed7ec84577ae2e52f890d1a4ff909/smmap-5.0.3.tar.gz", hash = "sha256:4d9debb8b99007ae47165abc08670bd74cb74b5227dda7f643eccc4e9eb5642c", size = 22506, upload-time = "2026-03-09T03:43:26.1Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/c1/d4/59e74daffcb57a07668852eeeb6035af9f32cbfd7a1d2511f17d2fe6a738/smmap-5.0.3-py3-none-any.whl", hash = "sha256:c106e05d5a61449cf6ba9a1e650227ecfb141590d2a98412103ff35d89fc7b2f", size = 24390, upload-time = "2026-03-09T03:43:24.361Z" }, +] + +[[package]] +name = "smoldot-light" +version = "0.1.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/ef/fa/eb7715935762581fe3082ee215b38f82a5ace0a6e6bd414bde6a8a46c19a/smoldot_light-0.1.0.tar.gz", hash = "sha256:b2e562e458d6e5ca5c1612115fb3729497480cfdd98ee7b98a39dc206b22ebde", size = 3277375, upload-time = "2026-01-14T15:49:17.499Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/08/8b/2a28aa942650e04c9f12fa2a04a17406bd7f44abc95089aea8fb20812e23/smoldot_light-0.1.0-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:19b3fbd6476aeb65a49f30875a66a4fbebd691f634e0b717aa02c8603e0f4d3a", size = 8247134, upload-time = "2026-01-14T15:48:17.608Z" }, + { url = "https://files.pythonhosted.org/packages/94/b4/7e116d93b8c74a979aee4237fe88ca412f3d8265c4ba5d247d05a92d67be/smoldot_light-0.1.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:15c28869cda7321e1d67c546fa918fb6805a5eaa530ea3998e173860bec3ac67", size = 4347935, upload-time = "2026-01-14T15:48:03.511Z" }, + { url = "https://files.pythonhosted.org/packages/5b/06/46b28f6ed012890c6e9bf090b2c4399123c2ae05636bfd42e83d2f932f0a/smoldot_light-0.1.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7c39fc756a6c0a977dab23be098d0d226dfb48c8b185a3a82b2a323aac227fff", size = 8009623, upload-time = "2026-01-14T15:46:33.3Z" }, + { url = 
"https://files.pythonhosted.org/packages/68/f8/b228182db0642844131dcb4a4e25301f995110fc5c0764fae0ac7c83ea5e/smoldot_light-0.1.0-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:242f1853393009876ab834d3662d3d3c23d5ed289d56000fac80408416979cbc", size = 4676554, upload-time = "2026-01-14T15:46:52.867Z" }, + { url = "https://files.pythonhosted.org/packages/8a/9b/50fd548183a61d76b41f9436e5f12ed3fb0699b631182ec9342ff3238e75/smoldot_light-0.1.0-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b2b8c40fd16f9595cfbfc1c9d412f1df942c6ed95b81d1d71d77f40337143026", size = 5104908, upload-time = "2026-01-14T15:47:27.579Z" }, + { url = "https://files.pythonhosted.org/packages/35/c0/85e6af4a86c08326a6e115eff612810fbd2e74ed335a3942eadee8ab3edf/smoldot_light-0.1.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:292ef91c86e15fe064ad8fef2acf330b09c9964ed750b88e572ee7629d91b045", size = 5055477, upload-time = "2026-01-14T15:47:09.87Z" }, + { url = "https://files.pythonhosted.org/packages/e1/03/328dd5a20a22aa01bf9eea1394ca0c8ce39a33f21539cfe87304d30a62a6/smoldot_light-0.1.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5580bacd6ca1d0980ec9bfa1e1d28ae4ed5c991c00da18063643034dbc470e61", size = 8603265, upload-time = "2026-01-14T15:47:45.339Z" }, + { url = "https://files.pythonhosted.org/packages/9f/24/2d011f813942b15976c293d0c5676706df0d02dfd3c54ede9547294f8c98/smoldot_light-0.1.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:0ace3ac191c2a6e73a4d10fad899c3d6691cb29ec230e7e81c01d13bc6817b96", size = 4777583, upload-time = "2026-01-14T15:48:33.868Z" }, + { url = "https://files.pythonhosted.org/packages/68/d6/37c9934e7677cb9f199553db3ace343576f0c6f51a80038bff3eb52b70bc/smoldot_light-0.1.0-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:e8919334111ace1bd23a1d4af72ffb3b245ebc5d28276a99934f7f0c802b1afe", size = 4943995, upload-time = "2026-01-14T15:48:44.636Z" }, + { url = 
"https://files.pythonhosted.org/packages/fd/94/f34bc682e3ccfa0bbc5d1104d7308ba0da06972a569b68284cdae90d2899/smoldot_light-0.1.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:6037e44bdeedca2d0f762fc1425d5d28a4ba5d28e7858481fbc5afb8641fbb17", size = 5118291, upload-time = "2026-01-14T15:48:56.375Z" }, + { url = "https://files.pythonhosted.org/packages/48/50/9d17badffa6533657d0f42dcde5dc881856396ecc57468fb7eb7c02689a2/smoldot_light-0.1.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:1f029a28435997e3997a2779d6164b0efda189ba851a9ca03808a83b50aef9bb", size = 4948131, upload-time = "2026-01-14T15:49:08.371Z" }, + { url = "https://files.pythonhosted.org/packages/ad/59/ec554c930e7e065ac80beeb3d28748e4aede51ed035852617f86bc9ec15f/smoldot_light-0.1.0-cp312-cp312-win_amd64.whl", hash = "sha256:85d8e3a320de2aa8fea999166e88176355f8529dfcf3d43835434d71733b8115", size = 7512861, upload-time = "2026-01-14T15:49:22.649Z" }, + { url = "https://files.pythonhosted.org/packages/ca/46/b8eac0687e682448dfbb6b8967e0e6038eaaaef33259e8bd3981f42c7eb1/smoldot_light-0.1.0-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:90ee9bb257eeb9cd5f3f8d0bd0c10f21feef8ded9849a8ba522df574a32ae507", size = 8245555, upload-time = "2026-01-14T15:48:19.154Z" }, + { url = "https://files.pythonhosted.org/packages/23/4e/1948e032afd46250e17f34eb8ce946a48468012d8382bbb7cf6dbed5de0d/smoldot_light-0.1.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:16f66a73c0fa4b61973d507d38649733d40d06f06242310ae98808d4908b5d08", size = 4347271, upload-time = "2026-01-14T15:48:04.998Z" }, + { url = "https://files.pythonhosted.org/packages/42/aa/87926fc95ec06569df06953ff4b4adb19156d690c121807db46dad901605/smoldot_light-0.1.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:65344b92341c186cb97141cb3f2c33bf6a00ca73cd40f5a9b55a12e6621464a9", size = 8009676, upload-time = "2026-01-14T15:46:35.159Z" }, + { url = 
"https://files.pythonhosted.org/packages/e1/cb/053a0e3825a5157d84bc77da406088e9abaa4ca7d59c3667763769e1bc6f/smoldot_light-0.1.0-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:c5315e6ceed119b5c4f2bf662847875f5e27e9de98eea1cae8f6be93528e2b19", size = 4676573, upload-time = "2026-01-14T15:46:54.582Z" }, + { url = "https://files.pythonhosted.org/packages/42/54/39a2baae4ba4eb3b009469933dd9562d11179d0c78d5262505d679a14f89/smoldot_light-0.1.0-cp313-cp313-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1e4fec476129e901316c13c4c9bdd0c771a2621f045e7978d2f9b49e4b29f0d2", size = 5104742, upload-time = "2026-01-14T15:47:29.068Z" }, + { url = "https://files.pythonhosted.org/packages/ad/ae/b40c8420c7c65b860931de0bcd03479d17c6c98f0806bfabd0a11e607d5f/smoldot_light-0.1.0-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:098d28daec6d3baf1c839a9438ce09a1b01622019b673567bf2887a10736d2f5", size = 5062839, upload-time = "2026-01-14T15:47:11.622Z" }, + { url = "https://files.pythonhosted.org/packages/8e/fc/42ab55a8cd244189c76770a6e14598c03024d305bb845cff464d76197228/smoldot_light-0.1.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:532dd6223abca07c5a0dfc3af6bac3bb9068551fcb496fecd48dabf47af01145", size = 8603081, upload-time = "2026-01-14T15:47:47.778Z" }, + { url = "https://files.pythonhosted.org/packages/96/cb/9fd82ea78316649bec06c9441ed669f37fb880fbfe75717567a999da25b9/smoldot_light-0.1.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:391e1d987efe90e626aec46841edb5d61d5a4c8a91522417b077ad695a5d1919", size = 4776994, upload-time = "2026-01-14T15:48:35.368Z" }, + { url = "https://files.pythonhosted.org/packages/33/21/bf6598baef5e57661db88b874b7011fe6f687c80562d745ab7a502c2d866/smoldot_light-0.1.0-cp313-cp313-musllinux_1_2_armv7l.whl", hash = "sha256:e11a2e744ff1b5e8f551fd6d14531f7f217fb65020d8fd41df477b7f837af364", size = 4944269, upload-time = "2026-01-14T15:48:46.343Z" }, + { url 
= "https://files.pythonhosted.org/packages/b9/fb/b0ddc45c31256668d1feafb3c3543e016e4af93ef49b660f8c22b2d719a8/smoldot_light-0.1.0-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:d3d3fc21474b869b57df1673e08a4cbd0df14ac9de9c4a05908dde1bb91bcf39", size = 5113516, upload-time = "2026-01-14T15:48:58.004Z" }, + { url = "https://files.pythonhosted.org/packages/13/73/b912dd95e84ef287ddf12aef267088a5c301e3e0a46ca4f905ed280c2425/smoldot_light-0.1.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:ef81679e11fab89f30670d181bf75ba4e9d754bfc22d48662fe02acec70d3be6", size = 4948353, upload-time = "2026-01-14T15:49:09.994Z" }, + { url = "https://files.pythonhosted.org/packages/07/73/1694c53d81a3b9f61a059024139e541b5003d8783edce8ba2a7d555ced3d/smoldot_light-0.1.0-cp313-cp313-win_amd64.whl", hash = "sha256:62f8b461e660017f263570f466fcc61e52e79b2010b735f61046a866063ed884", size = 7513019, upload-time = "2026-01-14T15:49:24.304Z" }, + { url = "https://files.pythonhosted.org/packages/13/d3/7dc606c2134de918b674c8b80934d11a8669835d209d4f48057a4cf4d169/smoldot_light-0.1.0-cp313-cp313t-macosx_10_12_x86_64.whl", hash = "sha256:f9ccc8b2c9c611c8e82f6419b72843bc2345544161d5c4d0eec578190e721241", size = 8245633, upload-time = "2026-01-14T15:48:20.726Z" }, + { url = "https://files.pythonhosted.org/packages/95/07/103c6c1e9abcf07741d18fab1d084b1facdef9658954fa3c29b96eee413c/smoldot_light-0.1.0-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:52724c4e4ab43f7a680dfe3dab3a2afcbdbafbbc38afe9e569046849d5486358", size = 4347133, upload-time = "2026-01-14T15:48:06.415Z" }, + { url = "https://files.pythonhosted.org/packages/90/81/dbfb70b472cb085cfafa734999b0ac9f3d1d04b192d2565f0c4368f48b07/smoldot_light-0.1.0-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:22a7b267558a752d367962157d45f0010de819be4535f933f7e06e19a5345492", size = 8006857, upload-time = "2026-01-14T15:46:37.576Z" }, + { url = 
"https://files.pythonhosted.org/packages/2a/fb/6c77b41c525a5ce8593d0dda726a3c9aad17ae56cda7103f02335cd129f5/smoldot_light-0.1.0-cp313-cp313t-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:305e416190dc5662259b6913010b9c828e5530ebbf266a54ee0fe4bc7a5bb0e6", size = 4672894, upload-time = "2026-01-14T15:46:56.636Z" }, + { url = "https://files.pythonhosted.org/packages/c2/27/bcda9ad86659e2fbc822b61c50d1274b41d654d82543bab50c09631c7e6b/smoldot_light-0.1.0-cp313-cp313t-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:568b68a7e02412f1a85ec7015a7e69b288c26d2d149c2ea8d8c5f436ec15ad62", size = 5102917, upload-time = "2026-01-14T15:47:30.668Z" }, + { url = "https://files.pythonhosted.org/packages/8b/9c/774cd1cffe8f309877c6f73aae6fa182a1db807f49a7187b43cf68d8e0c6/smoldot_light-0.1.0-cp313-cp313t-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:24290dc5ab478834e693ef2256c581597260c1a57cfb8db46067bf432be6e7f4", size = 5053970, upload-time = "2026-01-14T15:47:13.283Z" }, + { url = "https://files.pythonhosted.org/packages/f9/07/7c467fdadadfe028a6110631af668defb0ec452c55d006187efce0b04ab1/smoldot_light-0.1.0-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fbc0346a2fbb192fd7544f52a3792e452f92e20291a0933a262520335e6ac367", size = 8603093, upload-time = "2026-01-14T15:47:49.595Z" }, + { url = "https://files.pythonhosted.org/packages/cd/90/c7927debe823f73df9dcf1ddfe12f2df66f46ca2557730f9332eb6b81865/smoldot_light-0.1.0-cp314-cp314-macosx_10_12_x86_64.whl", hash = "sha256:8b8980fcb5d020fa10849ed29c1ce866e20e97e56ad7933d81335a9f7202bf9e", size = 8245246, upload-time = "2026-01-14T15:48:22.506Z" }, + { url = "https://files.pythonhosted.org/packages/eb/b6/1ef1224a0edab0ca7651385f98fd1dff787d7e0e334fdc6c830ac59b71c1/smoldot_light-0.1.0-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:516f1cac31ce7143d1728d1b86d7c53fa44b1b07a7b755d07c87b3852b534eec", size = 4347750, upload-time = "2026-01-14T15:48:08.031Z" }, + { url = 
"https://files.pythonhosted.org/packages/93/aa/8e70fabeacb46a1df552479b9202307851126ab7906f6fb87f4a7b158a64/smoldot_light-0.1.0-cp314-cp314-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9cfe959813d5e5f31d0435b1a7f87a0a9bc07302116bf16529393b30ddc5f5aa", size = 8004920, upload-time = "2026-01-14T15:46:39.423Z" }, + { url = "https://files.pythonhosted.org/packages/d8/f3/ce83f714b0cd478b533b87b155c1ec70dfaa1315d1798afcb6b6c73d31ec/smoldot_light-0.1.0-cp314-cp314-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:215e0ff9b184d359b1b2e75d530db3015954a96d6fd402498155a55cd4126d4b", size = 4676448, upload-time = "2026-01-14T15:46:58.256Z" }, + { url = "https://files.pythonhosted.org/packages/5b/39/9599b0c7943ae316af77142f3e00dd651f048b79d79b1b48be4116be34e0/smoldot_light-0.1.0-cp314-cp314-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:50a2a1f2ff5a23693c44c2b87971a6e6e6f1931f3bf4c0c436e00255a7ff1cb8", size = 5104371, upload-time = "2026-01-14T15:47:32.199Z" }, + { url = "https://files.pythonhosted.org/packages/cf/8c/38bdc8bbe3d35bacba01bb132c295aeeb67daaaddf725488cef5a1844dd2/smoldot_light-0.1.0-cp314-cp314-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f152f785ca7fc40e4971bba2dcfc5ad80fac123a9d6c83b0124250f0261c81de", size = 5062571, upload-time = "2026-01-14T15:47:15.225Z" }, + { url = "https://files.pythonhosted.org/packages/fd/3e/319834bd36215bbbdbf97ab7673a5c96147fb337aadf556d2916095cf3f2/smoldot_light-0.1.0-cp314-cp314-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8df901221b28007ba8f4663cb0b9bfa5210db6abd14de5e50190ed84b8feb8d5", size = 8602031, upload-time = "2026-01-14T15:47:51.669Z" }, + { url = "https://files.pythonhosted.org/packages/63/d4/d11d28751929067d356faae8241394680c5dde3ca9d205f46ce372920418/smoldot_light-0.1.0-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:c8b1c3554997f862fe3e46cfbf604ae2dd95fa64e274344fd1fc4ba391583117", size = 4776599, upload-time = 
"2026-01-14T15:48:37.007Z" }, + { url = "https://files.pythonhosted.org/packages/bf/52/7dc3cde16e567332a8f8b681684885c1278c03983d69343627542753611c/smoldot_light-0.1.0-cp314-cp314-musllinux_1_2_armv7l.whl", hash = "sha256:ecd5b58cb33b28d30767761f26151e36685415ac44ef9afe68bc90729e6b04dc", size = 4944927, upload-time = "2026-01-14T15:48:47.857Z" }, + { url = "https://files.pythonhosted.org/packages/7b/f1/d24bb8b002f1a9b4a8d191543d9f36d5b6544dd17c362ccb49ceb0b4b80a/smoldot_light-0.1.0-cp314-cp314-musllinux_1_2_i686.whl", hash = "sha256:1e34f6b9a4fb64fcee25663e56e8e020b441e1fc0f1479523dab6c03f31394ab", size = 5114365, upload-time = "2026-01-14T15:48:59.588Z" }, + { url = "https://files.pythonhosted.org/packages/56/e3/f7e1f858880f6edf68bdfe252fbbc32fd795c0a9e82f45ca0388fabb5f99/smoldot_light-0.1.0-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:29c7ad3e19621bc65d200b867822899fac987bbc0c51c4b43afdf57247412f0c", size = 4948548, upload-time = "2026-01-14T15:49:12.053Z" }, + { url = "https://files.pythonhosted.org/packages/0e/4b/693e184b6355375262c7d56909f519d5a8c9ecf48b2d0ebad79dd0df4400/smoldot_light-0.1.0-cp314-cp314-win32.whl", hash = "sha256:68fde41f9e9ef692fb15211d806a3cc4dd6132290ae56959a540fc522fafff77", size = 3942384, upload-time = "2026-01-14T15:49:30.291Z" }, + { url = "https://files.pythonhosted.org/packages/d6/27/d91c5584e782df2fd048031e40a5dcb21498e3ed99ff5953d4876f0c10b5/smoldot_light-0.1.0-cp314-cp314-win_amd64.whl", hash = "sha256:5fee9bae7bc3f2c98e8575482bc03d135624b210a477ff22c024d143823f250c", size = 7512974, upload-time = "2026-01-14T15:49:26.489Z" }, + { url = "https://files.pythonhosted.org/packages/15/7b/b341e5debdfe5c56b41830ed7872784825d2dfc1edc2b0d8ffa7f94a9d75/smoldot_light-0.1.0-cp314-cp314t-macosx_10_12_x86_64.whl", hash = "sha256:f6866811c3adab7bd95900140de4fbfda59a9216bb8e360f9d9b8c77f6c6da42", size = 8243296, upload-time = "2026-01-14T15:48:24.198Z" }, + { url = 
"https://files.pythonhosted.org/packages/3d/e7/e5e0dba4b3cb8da60101e434a768b868241fa9a1cf4ccd26f13748add0bf/smoldot_light-0.1.0-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:2eefea25756c7ce0ed5afee3d77105423a6c6328d88305edce183676d065c835", size = 4348576, upload-time = "2026-01-14T15:48:09.76Z" }, + { url = "https://files.pythonhosted.org/packages/2f/79/c7b8a238bf094739ebf1e55c75f436f460100d64020c1065f9ae42e63787/smoldot_light-0.1.0-cp314-cp314t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9b9347c258b453e38dd8d980f9ef0c12f7958e24f7ef02e16e60ef612079b549", size = 8007792, upload-time = "2026-01-14T15:46:41.503Z" }, + { url = "https://files.pythonhosted.org/packages/86/c5/54f80ebe63e51598ae6f7199f5c28e4c26dc6f91d43ff22ee9c68bd16e40/smoldot_light-0.1.0-cp314-cp314t-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:6c0f3d7058014194895ca8830544f838a4be54004f31a6434f5856c1cb147570", size = 4674417, upload-time = "2026-01-14T15:47:00.119Z" }, + { url = "https://files.pythonhosted.org/packages/7b/7d/1cd2799596926f76d0c1fa5ebb92741d7f1340fac71a3c608173046f3f0a/smoldot_light-0.1.0-cp314-cp314t-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1e3fee7e40983878a62ebc02d55b3c64edf426a0df602b6806aa46d705dbdfcd", size = 5104447, upload-time = "2026-01-14T15:47:35.429Z" }, + { url = "https://files.pythonhosted.org/packages/d1/37/1921e1ae83448b1213250467141fc17e7415b6fb6b3940e1748a4655917d/smoldot_light-0.1.0-cp314-cp314t-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4d661fd6e346e4f831a0ca14dc49e5ffc19e09dbf21b99237db442ede76a7be8", size = 5053295, upload-time = "2026-01-14T15:47:17.223Z" }, + { url = "https://files.pythonhosted.org/packages/37/d8/98abe1a0d0ddcf46d18f246d382c76e2c4e7218281922c67e620dfacfb75/smoldot_light-0.1.0-cp314-cp314t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f10a77912e05bf8f90edec684449bc2a01b2bcce219541fad7f2ec12e97830fc", size = 8603933, upload-time = 
"2026-01-14T15:47:53.549Z" }, +] + +[[package]] +name = "starlette" +version = "0.37.2" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "anyio" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/61/b5/6bceb93ff20bd7ca36e6f7c540581abb18f53130fabb30ba526e26fd819b/starlette-0.37.2.tar.gz", hash = "sha256:9af890290133b79fc3db55474ade20f6220a364a0402e0b556e7cd5e1e093823", size = 2843736, upload-time = "2024-03-05T16:16:54.267Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/fd/18/31fa32ed6c68ba66220204ef0be798c349d0a20c1901f9d4a794e08c76d8/starlette-0.37.2-py3-none-any.whl", hash = "sha256:6fe59f29268538e5d0d182f2791a479a0c64638e6935d1c6989e63fb2699c6ee", size = 71908, upload-time = "2024-03-05T16:16:50.957Z" }, +] + +[[package]] +name = "substrate-interface" +version = "1.8.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "base58" }, + { name = "certifi" }, + { name = "ecdsa" }, + { name = "eth-keys" }, + { name = "eth-utils" }, + { name = "idna" }, + { name = "py-bip39-bindings" }, + { name = "py-ed25519-zebra-bindings" }, + { name = "py-sr25519-bindings" }, + { name = "pycryptodome" }, + { name = "pynacl" }, + { name = "requests" }, + { name = "scalecodec" }, + { name = "smoldot-light" }, + { name = "websocket-client" }, + { name = "xxhash" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/c5/3f/e1483635a892b84727187cf8ba8477a20e4036361202650a0bdb93eacf8b/substrate_interface-1.8.1.tar.gz", hash = "sha256:821be590fa4f3d9bc731b4c370341b57cd98ef3419c81289cd369bdeefc008de", size = 1455960, upload-time = "2026-01-20T15:26:28.371Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/c7/2e/d6e2bb34473bfc8b6e915a21ee1a5dbf67c57dff8abea407e4e46cee5d83/substrate_interface-1.8.1-py3-none-any.whl", hash = "sha256:07737b0dc86fb8962ec09c3518577556534f9ffa58ef1164fc70ffe8a5c54837", size = 1461481, upload-time = "2026-01-20T15:26:26.663Z" }, +] + +[[package]] 
+name = "toml" +version = "0.10.2" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/be/ba/1f744cdc819428fc6b5084ec34d9b30660f6f9daaf70eead706e3203ec3c/toml-0.10.2.tar.gz", hash = "sha256:b3bda1d108d5dd99f4a20d24d9c348e91c4db7ab1b749200bded2f839ccbe68f", size = 22253, upload-time = "2020-11-01T01:40:22.204Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/44/6f/7120676b6d73228c96e17f1f794d8ab046fc910d781c8d151120c3f1569e/toml-0.10.2-py2.py3-none-any.whl", hash = "sha256:806143ae5bfb6a3c6e736a764057db0e6a0e05e338b5630894a5f779cabb4f9b", size = 16588, upload-time = "2020-11-01T01:40:20.672Z" }, +] + +[[package]] +name = "toolz" +version = "1.1.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/11/d6/114b492226588d6ff54579d95847662fc69196bdeec318eb45393b24c192/toolz-1.1.0.tar.gz", hash = "sha256:27a5c770d068c110d9ed9323f24f1543e83b2f300a687b7891c1a6d56b697b5b", size = 52613, upload-time = "2025-10-17T04:03:21.661Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/fb/12/5911ae3eeec47800503a238d971e51722ccea5feb8569b735184d5fcdbc0/toolz-1.1.0-py3-none-any.whl", hash = "sha256:15ccc861ac51c53696de0a5d6d4607f99c210739caf987b5d2054f3efed429d8", size = 58093, upload-time = "2025-10-17T04:03:20.435Z" }, +] + +[[package]] +name = "tree-sitter" +version = "0.24.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/a7/a2/698b9d31d08ad5558f8bfbfe3a0781bd4b1f284e89bde3ad18e05101a892/tree-sitter-0.24.0.tar.gz", hash = "sha256:abd95af65ca2f4f7eca356343391ed669e764f37748b5352946f00f7fc78e734", size = 168304, upload-time = "2025-01-17T05:06:38.115Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/e9/57/3a590f287b5aa60c07d5545953912be3d252481bf5e178f750db75572bff/tree_sitter-0.24.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = 
"sha256:14beeff5f11e223c37be7d5d119819880601a80d0399abe8c738ae2288804afc", size = 140788, upload-time = "2025-01-17T05:06:08.492Z" }, + { url = "https://files.pythonhosted.org/packages/61/0b/fc289e0cba7dbe77c6655a4dd949cd23c663fd62a8b4d8f02f97e28d7fe5/tree_sitter-0.24.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:26a5b130f70d5925d67b47db314da209063664585a2fd36fa69e0717738efaf4", size = 133945, upload-time = "2025-01-17T05:06:12.39Z" }, + { url = "https://files.pythonhosted.org/packages/86/d7/80767238308a137e0b5b5c947aa243e3c1e3e430e6d0d5ae94b9a9ffd1a2/tree_sitter-0.24.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5fc5c3c26d83c9d0ecb4fc4304fba35f034b7761d35286b936c1db1217558b4e", size = 564819, upload-time = "2025-01-17T05:06:13.549Z" }, + { url = "https://files.pythonhosted.org/packages/bf/b3/6c5574f4b937b836601f5fb556b24804b0a6341f2eb42f40c0e6464339f4/tree_sitter-0.24.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:772e1bd8c0931c866b848d0369b32218ac97c24b04790ec4b0e409901945dd8e", size = 579303, upload-time = "2025-01-17T05:06:16.685Z" }, + { url = "https://files.pythonhosted.org/packages/0a/f4/bd0ddf9abe242ea67cca18a64810f8af230fc1ea74b28bb702e838ccd874/tree_sitter-0.24.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:24a8dd03b0d6b8812425f3b84d2f4763322684e38baf74e5bb766128b5633dc7", size = 581054, upload-time = "2025-01-17T05:06:19.439Z" }, + { url = "https://files.pythonhosted.org/packages/8c/1c/ff23fa4931b6ef1bbeac461b904ca7e49eaec7e7e5398584e3eef836ec96/tree_sitter-0.24.0-cp312-cp312-win_amd64.whl", hash = "sha256:f9e8b1605ab60ed43803100f067eed71b0b0e6c1fb9860a262727dbfbbb74751", size = 120221, upload-time = "2025-01-17T05:06:20.654Z" }, + { url = "https://files.pythonhosted.org/packages/b2/2a/9979c626f303177b7612a802237d0533155bf1e425ff6f73cc40f25453e2/tree_sitter-0.24.0-cp312-cp312-win_arm64.whl", hash = "sha256:f733a83d8355fc95561582b66bbea92ffd365c5d7a665bc9ebd25e049c2b2abb", 
size = 108234, upload-time = "2025-01-17T05:06:21.713Z" }, + { url = "https://files.pythonhosted.org/packages/61/cd/2348339c85803330ce38cee1c6cbbfa78a656b34ff58606ebaf5c9e83bd0/tree_sitter-0.24.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:0d4a6416ed421c4210f0ca405a4834d5ccfbb8ad6692d4d74f7773ef68f92071", size = 140781, upload-time = "2025-01-17T05:06:22.82Z" }, + { url = "https://files.pythonhosted.org/packages/8b/a3/1ea9d8b64e8dcfcc0051028a9c84a630301290995cd6e947bf88267ef7b1/tree_sitter-0.24.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:e0992d483677e71d5c5d37f30dfb2e3afec2f932a9c53eec4fca13869b788c6c", size = 133928, upload-time = "2025-01-17T05:06:25.146Z" }, + { url = "https://files.pythonhosted.org/packages/fe/ae/55c1055609c9428a4aedf4b164400ab9adb0b1bf1538b51f4b3748a6c983/tree_sitter-0.24.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:57277a12fbcefb1c8b206186068d456c600dbfbc3fd6c76968ee22614c5cd5ad", size = 564497, upload-time = "2025-01-17T05:06:27.53Z" }, + { url = "https://files.pythonhosted.org/packages/ce/d0/f2ffcd04882c5aa28d205a787353130cbf84b2b8a977fd211bdc3b399ae3/tree_sitter-0.24.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d25fa22766d63f73716c6fec1a31ee5cf904aa429484256bd5fdf5259051ed74", size = 578917, upload-time = "2025-01-17T05:06:31.057Z" }, + { url = "https://files.pythonhosted.org/packages/af/82/aebe78ea23a2b3a79324993d4915f3093ad1af43d7c2208ee90be9273273/tree_sitter-0.24.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:7d5d9537507e1c8c5fa9935b34f320bfec4114d675e028f3ad94f11cf9db37b9", size = 581148, upload-time = "2025-01-17T05:06:32.409Z" }, + { url = "https://files.pythonhosted.org/packages/a1/b4/6b0291a590c2b0417cfdb64ccb8ea242f270a46ed429c641fbc2bfab77e0/tree_sitter-0.24.0-cp313-cp313-win_amd64.whl", hash = "sha256:f58bb4956917715ec4d5a28681829a8dad5c342cafd4aea269f9132a83ca9b34", size = 120207, upload-time = "2025-01-17T05:06:34.841Z" }, + { url = 
"https://files.pythonhosted.org/packages/a8/18/542fd844b75272630229c9939b03f7db232c71a9d82aadc59c596319ea6a/tree_sitter-0.24.0-cp313-cp313-win_arm64.whl", hash = "sha256:23641bd25dcd4bb0b6fa91b8fb3f46cc9f1c9f475efe4d536d3f1f688d1b84c8", size = 108232, upload-time = "2025-01-17T05:06:35.831Z" }, +] + +[[package]] +name = "tree-sitter-c-sharp" +version = "0.23.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/22/85/a61c782afbb706a47d990eaee6977e7c2bd013771c5bf5c81c617684f286/tree_sitter_c_sharp-0.23.1.tar.gz", hash = "sha256:322e2cfd3a547a840375276b2aea3335fa6458aeac082f6c60fec3f745c967eb", size = 1317728, upload-time = "2024-11-11T05:25:32.535Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/58/04/f6c2df4c53a588ccd88d50851155945cff8cd887bd70c175e00aaade7edf/tree_sitter_c_sharp-0.23.1-cp39-abi3-macosx_10_9_x86_64.whl", hash = "sha256:2b612a6e5bd17bb7fa2aab4bb6fc1fba45c94f09cb034ab332e45603b86e32fd", size = 372235, upload-time = "2024-11-11T05:25:19.424Z" }, + { url = "https://files.pythonhosted.org/packages/99/10/1aa9486f1e28fc22810fa92cbdc54e1051e7f5536a5e5b5e9695f609b31e/tree_sitter_c_sharp-0.23.1-cp39-abi3-macosx_11_0_arm64.whl", hash = "sha256:1a8b98f62bc53efcd4d971151950c9b9cd5cbe3bacdb0cd69fdccac63350d83e", size = 419046, upload-time = "2024-11-11T05:25:20.679Z" }, + { url = "https://files.pythonhosted.org/packages/0f/21/13df29f8fcb9ba9f209b7b413a4764b673dfd58989a0dd67e9c7e19e9c2e/tree_sitter_c_sharp-0.23.1-cp39-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:986e93d845a438ec3c4416401aa98e6a6f6631d644bbbc2e43fcb915c51d255d", size = 415999, upload-time = "2024-11-11T05:25:22.359Z" }, + { url = "https://files.pythonhosted.org/packages/ca/72/fc6846795bcdae2f8aa94cc8b1d1af33d634e08be63e294ff0d6794b1efc/tree_sitter_c_sharp-0.23.1-cp39-abi3-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:a8024e466b2f5611c6dc90321f232d8584893c7fb88b75e4a831992f877616d2", size = 402830, upload-time = "2024-11-11T05:25:24.198Z" }, + { url = "https://files.pythonhosted.org/packages/fe/3a/b6028c5890ce6653807d5fa88c72232c027c6ceb480dbeb3b186d60e5971/tree_sitter_c_sharp-0.23.1-cp39-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:7f9bf876866835492281d336b9e1f9626ab668737f74e914c31d285261507da7", size = 397880, upload-time = "2024-11-11T05:25:25.937Z" }, + { url = "https://files.pythonhosted.org/packages/47/d2/4facaa34b40f8104d8751746d0e1cd2ddf0beb9f1404b736b97f372bd1f3/tree_sitter_c_sharp-0.23.1-cp39-abi3-win_amd64.whl", hash = "sha256:ae9a9e859e8f44e2b07578d44f9a220d3fa25b688966708af6aa55d42abeebb3", size = 377562, upload-time = "2024-11-11T05:25:27.539Z" }, + { url = "https://files.pythonhosted.org/packages/d8/88/3cf6bd9959d94d1fec1e6a9c530c5f08ff4115a474f62aedb5fedb0f7241/tree_sitter_c_sharp-0.23.1-cp39-abi3-win_arm64.whl", hash = "sha256:c81548347a93347be4f48cb63ec7d60ef4b0efa91313330e69641e49aa5a08c5", size = 375157, upload-time = "2024-11-11T05:25:30.839Z" }, +] + +[[package]] +name = "tree-sitter-embedded-template" +version = "0.25.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/fd/a7/77729fefab8b1b5690cfc54328f2f629d1c076d16daf32c96ba39d3a3a3a/tree_sitter_embedded_template-0.25.0.tar.gz", hash = "sha256:7d72d5e8a1d1d501a7c90e841b51f1449a90cc240be050e4fb85c22dab991d50", size = 14114, upload-time = "2025-08-29T00:42:51.078Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/1f/9d/3e3c8ee0c019d3bace728300a1ca807c03df39e66cc51e9a5e7c9d1e1909/tree_sitter_embedded_template-0.25.0-cp310-abi3-macosx_10_9_x86_64.whl", hash = "sha256:fa0d06467199aeb33fb3d6fa0665bf9b7d5a32621ffdaf37fd8249f8a8050649", size = 10266, upload-time = "2025-08-29T00:42:44.148Z" }, + { url = 
"https://files.pythonhosted.org/packages/e8/ab/6d4e43b736b2a895d13baea3791dc8ce7245bedf4677df9e7deb22e23a2a/tree_sitter_embedded_template-0.25.0-cp310-abi3-macosx_11_0_arm64.whl", hash = "sha256:fc7aacbc2985a5d7e7fe7334f44dffe24c38fb0a8295c4188a04cf21a3d64a73", size = 10650, upload-time = "2025-08-29T00:42:45.147Z" }, + { url = "https://files.pythonhosted.org/packages/9f/97/ea3d1ea4b320fe66e0468b9f6602966e544c9fe641882484f9105e50ee0c/tree_sitter_embedded_template-0.25.0-cp310-abi3-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:a7c88c3dd8b94b3c9efe8ae071ff6b1b936a27ac5f6e651845c3b9631fa4c1c2", size = 18268, upload-time = "2025-08-29T00:42:46.03Z" }, + { url = "https://files.pythonhosted.org/packages/64/40/0f42ca894a8f7c298cf336080046ccc14c10e8f4ea46d455f640193181b2/tree_sitter_embedded_template-0.25.0-cp310-abi3-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:025f7ca84218dcd8455efc901bdbcc2689fb694f3a636c0448e322a23d4bc96b", size = 19068, upload-time = "2025-08-29T00:42:46.699Z" }, + { url = "https://files.pythonhosted.org/packages/d0/2a/0b720bcae7c2dd0a44889c09e800a2f8eb08c496dede9f2b97683506c4c3/tree_sitter_embedded_template-0.25.0-cp310-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:b5dc1aef6ffa3fae621fe037d85dd98948b597afba20df29d779c426be813ee5", size = 18518, upload-time = "2025-08-29T00:42:47.694Z" }, + { url = "https://files.pythonhosted.org/packages/14/8a/d745071afa5e8bdf5b381cf84c4dc6be6c79dee6af8e0ff07476c3d8e4aa/tree_sitter_embedded_template-0.25.0-cp310-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:d0a35cfe634c44981a516243bc039874580e02a2990669313730187ce83a5bc6", size = 18267, upload-time = "2025-08-29T00:42:48.635Z" }, + { url = "https://files.pythonhosted.org/packages/5d/74/728355e594fca140f793f234fdfec195366b6956b35754d00ea97ca18b21/tree_sitter_embedded_template-0.25.0-cp310-abi3-win_amd64.whl", hash = 
"sha256:3e05a4ac013d54505e75ae48e1a0e9db9aab19949fe15d9f4c7345b11a84a069", size = 13049, upload-time = "2025-08-29T00:42:49.589Z" }, + { url = "https://files.pythonhosted.org/packages/d8/de/afac475e694d0e626b0808f3c86339c349cd15c5163a6a16a53cc11cf892/tree_sitter_embedded_template-0.25.0-cp310-abi3-win_arm64.whl", hash = "sha256:2751d402179ac0e83f2065b249d8fe6df0718153f1636bcb6a02bde3e5730db9", size = 11978, upload-time = "2025-08-29T00:42:50.226Z" }, +] + +[[package]] +name = "tree-sitter-language-pack" +version = "0.7.2" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "tree-sitter" }, + { name = "tree-sitter-c-sharp" }, + { name = "tree-sitter-embedded-template" }, + { name = "tree-sitter-yaml" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/9b/1e/2d63d93025fd5b527327c3fd348955cebaec02a3f1bcec88ab4d88ddfc39/tree_sitter_language_pack-0.7.2.tar.gz", hash = "sha256:46fc96cc3bddfee7091fdedec2ae7e34218679e58241e8319bf82026f6d02eae", size = 59264078, upload-time = "2025-04-19T10:10:16.372Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/da/9d/2c6272bf4fd18a22d8c07d3c983940dbece4f0e9e21f5c78f15a2740f435/tree_sitter_language_pack-0.7.2-cp39-abi3-macosx_10_13_universal2.whl", hash = "sha256:4036603020bd32060d9931a64f8c3d8637de575f350f11534971012e51a27a95", size = 28132977, upload-time = "2025-04-19T10:10:04.627Z" }, + { url = "https://files.pythonhosted.org/packages/2b/e2/0f2511019c27b870061f9ad719074095ef84cd7857a730765bfa066384be/tree_sitter_language_pack-0.7.2-cp39-abi3-manylinux2014_aarch64.whl", hash = "sha256:801926dbc81eeca4ce97b846cc899dcf3fecfdc3b2514a68eeeb118f70ac686d", size = 17576769, upload-time = "2025-04-19T10:10:07.32Z" }, + { url = "https://files.pythonhosted.org/packages/3a/88/7b38233def5c359503ad4d36533f96f9fe2943a8eeeced66b36312c49e1b/tree_sitter_language_pack-0.7.2-cp39-abi3-manylinux2014_x86_64.whl", hash = "sha256:77be80335fb585f48eb268b0e07ca54f3da8f30c2eab7be749113f116c3ef316", size 
= 17433872, upload-time = "2025-04-19T10:10:10.118Z" }, + { url = "https://files.pythonhosted.org/packages/f8/27/fc5dce240b68a1ed876bc80b2238fbaaa0f695dbaf88660728a0239a2b20/tree_sitter_language_pack-0.7.2-cp39-abi3-win_amd64.whl", hash = "sha256:d71c6b4c14b3370ca783319ede7a581a10e6dd1bdfe5d31d316d9216981a6406", size = 14316050, upload-time = "2025-04-19T10:10:12.426Z" }, +] + +[[package]] +name = "tree-sitter-yaml" +version = "0.7.2" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/57/b6/941d356ac70c90b9d2927375259e3a4204f38f7499ec6e7e8a95b9664689/tree_sitter_yaml-0.7.2.tar.gz", hash = "sha256:756db4c09c9d9e97c81699e8f941cb8ce4e51104927f6090eefe638ee567d32c", size = 84882, upload-time = "2025-10-07T14:40:36.071Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/38/29/c0b8dbff302c49ff4284666ffb6f2f21145006843bb4c3a9a85d0ec0b7ae/tree_sitter_yaml-0.7.2-cp310-abi3-macosx_10_9_x86_64.whl", hash = "sha256:7e269ddcfcab8edb14fbb1f1d34eed1e1e26888f78f94eedfe7cc98c60f8bc9f", size = 43898, upload-time = "2025-10-07T14:40:29.486Z" }, + { url = "https://files.pythonhosted.org/packages/18/0d/15a5add06b3932b5e4ce5f5e8e179197097decfe82a0ef000952c8b98216/tree_sitter_yaml-0.7.2-cp310-abi3-macosx_11_0_arm64.whl", hash = "sha256:0807b7966e23ddf7dddc4545216e28b5a58cdadedcecca86b8d8c74271a07870", size = 44691, upload-time = "2025-10-07T14:40:30.369Z" }, + { url = "https://files.pythonhosted.org/packages/72/92/c4b896c90d08deb8308fadbad2210fdcc4c66c44ab4292eac4e80acb4b61/tree_sitter_yaml-0.7.2-cp310-abi3-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:f1a5c60c98b6c4c037aae023569f020d0c489fad8dc26fdfd5510363c9c29a41", size = 91430, upload-time = "2025-10-07T14:40:31.16Z" }, + { url = 
"https://files.pythonhosted.org/packages/89/59/61f1fed31eb6d46ff080b8c0d53658cf29e10263f41ef5fe34768908037a/tree_sitter_yaml-0.7.2-cp310-abi3-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:88636d19d0654fd24f4f242eaaafa90f6f5ebdba8a62e4b32d251ed156c51a2a", size = 92428, upload-time = "2025-10-07T14:40:31.954Z" }, + { url = "https://files.pythonhosted.org/packages/e3/62/a33a04d19b7f9a0ded780b9c9fcc6279e37c5d00b89b00425bb807a22cc2/tree_sitter_yaml-0.7.2-cp310-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:1d2e8f0bb14aa4537320952d0f9607eef3021d5aada8383c34ebeece17db1e06", size = 90580, upload-time = "2025-10-07T14:40:33.037Z" }, + { url = "https://files.pythonhosted.org/packages/6c/e7/9525defa7b30792623f56b1fba9bbba361752348875b165b8975b87398fd/tree_sitter_yaml-0.7.2-cp310-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:74ca712c50fc9d7dbc68cb36b4a7811d6e67a5466b5a789f19bf8dd6084ef752", size = 90455, upload-time = "2025-10-07T14:40:33.778Z" }, + { url = "https://files.pythonhosted.org/packages/4a/d6/8d1e1ace03db3b02e64e91daf21d1347941d1bbecc606a5473a1a605250d/tree_sitter_yaml-0.7.2-cp310-abi3-win_amd64.whl", hash = "sha256:7587b5ca00fc4f9a548eff649697a3b395370b2304b399ceefa2087d8a6c9186", size = 45514, upload-time = "2025-10-07T14:40:34.562Z" }, + { url = "https://files.pythonhosted.org/packages/d8/c7/dcf3ea1c4f5da9b10353b9af4455d756c92d728a8f58f03c480d3ef0ead5/tree_sitter_yaml-0.7.2-cp310-abi3-win_arm64.whl", hash = "sha256:f63c227b18e7ce7587bce124578f0bbf1f890ac63d3e3cd027417574273642c4", size = 44065, upload-time = "2025-10-07T14:40:35.337Z" }, +] + +[[package]] +name = "typer" +version = "0.24.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "annotated-doc" }, + { name = "click" }, + { name = "rich" }, + { name = "shellingham" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/f5/24/cb09efec5cc954f7f9b930bf8279447d24618bb6758d4f6adf2574c41780/typer-0.24.1.tar.gz", hash = 
"sha256:e39b4732d65fbdcde189ae76cf7cd48aeae72919dea1fdfc16593be016256b45", size = 118613, upload-time = "2026-02-21T16:54:40.609Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/4a/91/48db081e7a63bb37284f9fbcefda7c44c277b18b0e13fbc36ea2335b71e6/typer-0.24.1-py3-none-any.whl", hash = "sha256:112c1f0ce578bfb4cab9ffdabc68f031416ebcc216536611ba21f04e9aa84c9e", size = 56085, upload-time = "2026-02-21T16:54:41.616Z" }, +] + +[[package]] +name = "typing-extensions" +version = "4.15.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/72/94/1a15dd82efb362ac84269196e94cf00f187f7ed21c242792a923cdb1c61f/typing_extensions-4.15.0.tar.gz", hash = "sha256:0cea48d173cc12fa28ecabc3b837ea3cf6f38c6d1136f85cbaaf598984861466", size = 109391, upload-time = "2025-08-25T13:49:26.313Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/18/67/36e9267722cc04a6b9f15c7f3441c2363321a3ea07da7ae0c0707beb2a9c/typing_extensions-4.15.0-py3-none-any.whl", hash = "sha256:f0fa19c6845758ab08074a0cfa8b7aecb71c999ca73d62883bc25cc018c4e548", size = 44614, upload-time = "2025-08-25T13:49:24.86Z" }, +] + +[[package]] +name = "typing-inspection" +version = "0.4.2" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "typing-extensions" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/55/e3/70399cb7dd41c10ac53367ae42139cf4b1ca5f36bb3dc6c9d33acdb43655/typing_inspection-0.4.2.tar.gz", hash = "sha256:ba561c48a67c5958007083d386c3295464928b01faa735ab8547c5692e87f464", size = 75949, upload-time = "2025-10-01T02:14:41.687Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/dc/9b/47798a6c91d8bdb567fe2698fe81e0c6b7cb7ef4d13da4114b41d239f65d/typing_inspection-0.4.2-py3-none-any.whl", hash = "sha256:4ed1cacbdc298c220f1bd249ed5287caa16f34d44ef4e9c3d0cbad5b521545e7", size = 14611, upload-time = "2025-10-01T02:14:40.154Z" }, +] + +[[package]] +name = "urllib3" +version = "2.6.3" 
+source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/c7/24/5f1b3bdffd70275f6661c76461e25f024d5a38a46f04aaca912426a2b1d3/urllib3-2.6.3.tar.gz", hash = "sha256:1b62b6884944a57dbe321509ab94fd4d3b307075e0c2eae991ac71ee15ad38ed", size = 435556, upload-time = "2026-01-07T16:24:43.925Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/39/08/aaaad47bc4e9dc8c725e68f9d04865dbcb2052843ff09c97b08904852d84/urllib3-2.6.3-py3-none-any.whl", hash = "sha256:bf272323e553dfb2e87d9bfd225ca7b0f467b919d7bbd355436d3fd37cb0acd4", size = 131584, upload-time = "2026-01-07T16:24:42.685Z" }, +] + +[[package]] +name = "uvicorn" +version = "0.32.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "click" }, + { name = "h11" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/e0/fc/1d785078eefd6945f3e5bab5c076e4230698046231eb0f3747bc5c8fa992/uvicorn-0.32.0.tar.gz", hash = "sha256:f78b36b143c16f54ccdb8190d0a26b5f1901fe5a3c777e1ab29f26391af8551e", size = 77564, upload-time = "2024-10-15T17:27:33.848Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/eb/14/78bd0e95dd2444b6caacbca2b730671d4295ccb628ef58b81bee903629df/uvicorn-0.32.0-py3-none-any.whl", hash = "sha256:60b8f3a5ac027dcd31448f411ced12b5ef452c646f76f02f8cc3f25d8d26fd82", size = 63723, upload-time = "2024-10-15T17:27:32.022Z" }, +] + +[[package]] +name = "wandb" +version = "0.21.3" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "click" }, + { name = "gitpython" }, + { name = "packaging" }, + { name = "platformdirs" }, + { name = "protobuf" }, + { name = "pydantic" }, + { name = "pyyaml" }, + { name = "requests" }, + { name = "sentry-sdk" }, + { name = "typing-extensions" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/2f/84/af6ccdf95e56f15aceb360e437fbfcca3dc91ad8ca335fe482083e29f7a5/wandb-0.21.3.tar.gz", hash = 
"sha256:031e24e2aad0ce735dfdcc74baf2f2c12c106f500ed24798de6ef9b9e63bb432", size = 40146972, upload-time = "2025-08-30T18:21:55.138Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/aa/e8/b5bfbbc7f76c11fd0665b92be8a38c6a83b27f353552233b9959b21be488/wandb-0.21.3-py3-none-macosx_10_14_x86_64.whl", hash = "sha256:f85bac45b4482742ec9ff190af38eb00a877ddeb4875475e7e487dc19300ff03", size = 18820209, upload-time = "2025-08-30T18:21:33.47Z" }, + { url = "https://files.pythonhosted.org/packages/59/a3/03f0fcde49609df1cb3a382fb5053f601b88da448bcd415ed7f75272eee7/wandb-0.21.3-py3-none-macosx_12_0_arm64.whl", hash = "sha256:8a2b3ba419b91d47edead2755f04cef54f9e3c4496ee0c9854c3cfeff4216dd3", size = 18310636, upload-time = "2025-08-30T18:21:37.405Z" }, + { url = "https://files.pythonhosted.org/packages/1d/c3/d6048db30ff2e3c67089ba0e94878572fd26137b146f8e3b27bbdf428b31/wandb-0.21.3-py3-none-macosx_12_0_x86_64.whl", hash = "sha256:35a1972881f3b85755befab004118234593792a9f05e07fd6345780172f4420e", size = 19053277, upload-time = "2025-08-30T18:21:39.389Z" }, + { url = "https://files.pythonhosted.org/packages/ea/7f/805c3d2fa9e3b8b6bf2bc534887c9ed97bdf22007ca8ba59424a1c8bb360/wandb-0.21.3-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2d9cf8588cb090a2a41f589037fda72c57c9e23edfbd2ad829e575f1305d942c", size = 18130850, upload-time = "2025-08-30T18:21:41.573Z" }, + { url = "https://files.pythonhosted.org/packages/5b/af/a3252e5afac98a036f83c65ec92cadf6677ccdaacbbb2151da29f694d136/wandb-0.21.3-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2ff24b6b8e0f9da840b6bd5c7f60b0a5507bd998db40c9c2d476f9a340bec8ed", size = 19570305, upload-time = "2025-08-30T18:21:43.811Z" }, + { url = "https://files.pythonhosted.org/packages/4d/f9/4404b5a24bfd4ba027c19d30152b0fc7ebca8c49b202dee6ecb7f316082c/wandb-0.21.3-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:4975dec19e2b343e23ed6e60f7e1290120553719f82e87a22205bede758416ad", size = 
18135806, upload-time = "2025-08-30T18:21:46.211Z" }, + { url = "https://files.pythonhosted.org/packages/ff/32/9580f42899e54f3d0b4ea619b6f6a54980a4e36fd0675d58c09f0a08d3f6/wandb-0.21.3-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:514a0aad40ecc0bdb757b1dc86e4ac98f61d2d760445b6e1f555291562320f2d", size = 19646760, upload-time = "2025-08-30T18:21:48.768Z" }, + { url = "https://files.pythonhosted.org/packages/75/d3/faa6ddb792a158c154fb704b25c96d0478e71eabf96e3f17529fb23b6894/wandb-0.21.3-py3-none-win32.whl", hash = "sha256:45aa3d8ad53c6ee06f37490d7a329ed7d0f5ca4dbd5d05bb0c01d5da22f14691", size = 18709408, upload-time = "2025-08-30T18:21:50.859Z" }, + { url = "https://files.pythonhosted.org/packages/d8/2d/7ef56e25f78786e59fefd9b19867c325f9686317d9f7b93b5cb340360a3e/wandb-0.21.3-py3-none-win_amd64.whl", hash = "sha256:56d5a5697766f552a9933d8c6a564202194768eb0389bd5f9fe9a99cd4cee41e", size = 18709411, upload-time = "2025-08-30T18:21:52.874Z" }, +] + +[[package]] +name = "websocket-client" +version = "1.9.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/2c/41/aa4bf9664e4cda14c3b39865b12251e8e7d239f4cd0e3cc1b6c2ccde25c1/websocket_client-1.9.0.tar.gz", hash = "sha256:9e813624b6eb619999a97dc7958469217c3176312b3a16a4bd1bc7e08a46ec98", size = 70576, upload-time = "2025-10-07T21:16:36.495Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/34/db/b10e48aa8fff7407e67470363eac595018441cf32d5e1001567a7aeba5d2/websocket_client-1.9.0-py3-none-any.whl", hash = "sha256:af248a825037ef591efbf6ed20cc5faa03d3b47b9e5a2230a529eeee1c1fc3ef", size = 82616, upload-time = "2025-10-07T21:16:34.951Z" }, +] + +[[package]] +name = "websockets" +version = "16.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/04/24/4b2031d72e840ce4c1ccb255f693b15c334757fc50023e4db9537080b8c4/websockets-16.0.tar.gz", hash = 
"sha256:5f6261a5e56e8d5c42a4497b364ea24d94d9563e8fbd44e78ac40879c60179b5", size = 179346, upload-time = "2026-01-10T09:23:47.181Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/84/7b/bac442e6b96c9d25092695578dda82403c77936104b5682307bd4deb1ad4/websockets-16.0-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:71c989cbf3254fbd5e84d3bff31e4da39c43f884e64f2551d14bb3c186230f00", size = 177365, upload-time = "2026-01-10T09:22:46.787Z" }, + { url = "https://files.pythonhosted.org/packages/b0/fe/136ccece61bd690d9c1f715baaeefd953bb2360134de73519d5df19d29ca/websockets-16.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:8b6e209ffee39ff1b6d0fa7bfef6de950c60dfb91b8fcead17da4ee539121a79", size = 175038, upload-time = "2026-01-10T09:22:47.999Z" }, + { url = "https://files.pythonhosted.org/packages/40/1e/9771421ac2286eaab95b8575b0cb701ae3663abf8b5e1f64f1fd90d0a673/websockets-16.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:86890e837d61574c92a97496d590968b23c2ef0aeb8a9bc9421d174cd378ae39", size = 175328, upload-time = "2026-01-10T09:22:49.809Z" }, + { url = "https://files.pythonhosted.org/packages/18/29/71729b4671f21e1eaa5d6573031ab810ad2936c8175f03f97f3ff164c802/websockets-16.0-cp312-cp312-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:9b5aca38b67492ef518a8ab76851862488a478602229112c4b0d58d63a7a4d5c", size = 184915, upload-time = "2026-01-10T09:22:51.071Z" }, + { url = "https://files.pythonhosted.org/packages/97/bb/21c36b7dbbafc85d2d480cd65df02a1dc93bf76d97147605a8e27ff9409d/websockets-16.0-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:e0334872c0a37b606418ac52f6ab9cfd17317ac26365f7f65e203e2d0d0d359f", size = 186152, upload-time = "2026-01-10T09:22:52.224Z" }, + { url = "https://files.pythonhosted.org/packages/4a/34/9bf8df0c0cf88fa7bfe36678dc7b02970c9a7d5e065a3099292db87b1be2/websockets-16.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = 
"sha256:a0b31e0b424cc6b5a04b8838bbaec1688834b2383256688cf47eb97412531da1", size = 185583, upload-time = "2026-01-10T09:22:53.443Z" }, + { url = "https://files.pythonhosted.org/packages/47/88/4dd516068e1a3d6ab3c7c183288404cd424a9a02d585efbac226cb61ff2d/websockets-16.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:485c49116d0af10ac698623c513c1cc01c9446c058a4e61e3bf6c19dff7335a2", size = 184880, upload-time = "2026-01-10T09:22:55.033Z" }, + { url = "https://files.pythonhosted.org/packages/91/d6/7d4553ad4bf1c0421e1ebd4b18de5d9098383b5caa1d937b63df8d04b565/websockets-16.0-cp312-cp312-win32.whl", hash = "sha256:eaded469f5e5b7294e2bdca0ab06becb6756ea86894a47806456089298813c89", size = 178261, upload-time = "2026-01-10T09:22:56.251Z" }, + { url = "https://files.pythonhosted.org/packages/c3/f0/f3a17365441ed1c27f850a80b2bc680a0fa9505d733fe152fdf5e98c1c0b/websockets-16.0-cp312-cp312-win_amd64.whl", hash = "sha256:5569417dc80977fc8c2d43a86f78e0a5a22fee17565d78621b6bb264a115d4ea", size = 178693, upload-time = "2026-01-10T09:22:57.478Z" }, + { url = "https://files.pythonhosted.org/packages/cc/9c/baa8456050d1c1b08dd0ec7346026668cbc6f145ab4e314d707bb845bf0d/websockets-16.0-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:878b336ac47938b474c8f982ac2f7266a540adc3fa4ad74ae96fea9823a02cc9", size = 177364, upload-time = "2026-01-10T09:22:59.333Z" }, + { url = "https://files.pythonhosted.org/packages/7e/0c/8811fc53e9bcff68fe7de2bcbe75116a8d959ac699a3200f4847a8925210/websockets-16.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:52a0fec0e6c8d9a784c2c78276a48a2bdf099e4ccc2a4cad53b27718dbfd0230", size = 175039, upload-time = "2026-01-10T09:23:01.171Z" }, + { url = "https://files.pythonhosted.org/packages/aa/82/39a5f910cb99ec0b59e482971238c845af9220d3ab9fa76dd9162cda9d62/websockets-16.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:e6578ed5b6981005df1860a56e3617f14a6c307e6a71b4fff8c48fdc50f3ed2c", size = 175323, upload-time = "2026-01-10T09:23:02.341Z" }, + { url 
= "https://files.pythonhosted.org/packages/bd/28/0a25ee5342eb5d5f297d992a77e56892ecb65e7854c7898fb7d35e9b33bd/websockets-16.0-cp313-cp313-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:95724e638f0f9c350bb1c2b0a7ad0e83d9cc0c9259f3ea94e40d7b02a2179ae5", size = 184975, upload-time = "2026-01-10T09:23:03.756Z" }, + { url = "https://files.pythonhosted.org/packages/f9/66/27ea52741752f5107c2e41fda05e8395a682a1e11c4e592a809a90c6a506/websockets-16.0-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:c0204dc62a89dc9d50d682412c10b3542d748260d743500a85c13cd1ee4bde82", size = 186203, upload-time = "2026-01-10T09:23:05.01Z" }, + { url = "https://files.pythonhosted.org/packages/37/e5/8e32857371406a757816a2b471939d51c463509be73fa538216ea52b792a/websockets-16.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:52ac480f44d32970d66763115edea932f1c5b1312de36df06d6b219f6741eed8", size = 185653, upload-time = "2026-01-10T09:23:06.301Z" }, + { url = "https://files.pythonhosted.org/packages/9b/67/f926bac29882894669368dc73f4da900fcdf47955d0a0185d60103df5737/websockets-16.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:6e5a82b677f8f6f59e8dfc34ec06ca6b5b48bc4fcda346acd093694cc2c24d8f", size = 184920, upload-time = "2026-01-10T09:23:07.492Z" }, + { url = "https://files.pythonhosted.org/packages/3c/a1/3d6ccdcd125b0a42a311bcd15a7f705d688f73b2a22d8cf1c0875d35d34a/websockets-16.0-cp313-cp313-win32.whl", hash = "sha256:abf050a199613f64c886ea10f38b47770a65154dc37181bfaff70c160f45315a", size = 178255, upload-time = "2026-01-10T09:23:09.245Z" }, + { url = "https://files.pythonhosted.org/packages/6b/ae/90366304d7c2ce80f9b826096a9e9048b4bb760e44d3b873bb272cba696b/websockets-16.0-cp313-cp313-win_amd64.whl", hash = "sha256:3425ac5cf448801335d6fdc7ae1eb22072055417a96cc6b31b3861f455fbc156", size = 178689, upload-time = "2026-01-10T09:23:10.483Z" }, + { url = 
"https://files.pythonhosted.org/packages/f3/1d/e88022630271f5bd349ed82417136281931e558d628dd52c4d8621b4a0b2/websockets-16.0-cp314-cp314-macosx_10_15_universal2.whl", hash = "sha256:8cc451a50f2aee53042ac52d2d053d08bf89bcb31ae799cb4487587661c038a0", size = 177406, upload-time = "2026-01-10T09:23:12.178Z" }, + { url = "https://files.pythonhosted.org/packages/f2/78/e63be1bf0724eeb4616efb1ae1c9044f7c3953b7957799abb5915bffd38e/websockets-16.0-cp314-cp314-macosx_10_15_x86_64.whl", hash = "sha256:daa3b6ff70a9241cf6c7fc9e949d41232d9d7d26fd3522b1ad2b4d62487e9904", size = 175085, upload-time = "2026-01-10T09:23:13.511Z" }, + { url = "https://files.pythonhosted.org/packages/bb/f4/d3c9220d818ee955ae390cf319a7c7a467beceb24f05ee7aaaa2414345ba/websockets-16.0-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:fd3cb4adb94a2a6e2b7c0d8d05cb94e6f1c81a0cf9dc2694fb65c7e8d94c42e4", size = 175328, upload-time = "2026-01-10T09:23:14.727Z" }, + { url = "https://files.pythonhosted.org/packages/63/bc/d3e208028de777087e6fb2b122051a6ff7bbcca0d6df9d9c2bf1dd869ae9/websockets-16.0-cp314-cp314-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:781caf5e8eee67f663126490c2f96f40906594cb86b408a703630f95550a8c3e", size = 185044, upload-time = "2026-01-10T09:23:15.939Z" }, + { url = "https://files.pythonhosted.org/packages/ad/6e/9a0927ac24bd33a0a9af834d89e0abc7cfd8e13bed17a86407a66773cc0e/websockets-16.0-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:caab51a72c51973ca21fa8a18bd8165e1a0183f1ac7066a182ff27107b71e1a4", size = 186279, upload-time = "2026-01-10T09:23:17.148Z" }, + { url = "https://files.pythonhosted.org/packages/b9/ca/bf1c68440d7a868180e11be653c85959502efd3a709323230314fda6e0b3/websockets-16.0-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:19c4dc84098e523fd63711e563077d39e90ec6702aff4b5d9e344a60cb3c0cb1", size = 185711, upload-time = "2026-01-10T09:23:18.372Z" }, + { url = 
"https://files.pythonhosted.org/packages/c4/f8/fdc34643a989561f217bb477cbc47a3a07212cbda91c0e4389c43c296ebf/websockets-16.0-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:a5e18a238a2b2249c9a9235466b90e96ae4795672598a58772dd806edc7ac6d3", size = 184982, upload-time = "2026-01-10T09:23:19.652Z" }, + { url = "https://files.pythonhosted.org/packages/dd/d1/574fa27e233764dbac9c52730d63fcf2823b16f0856b3329fc6268d6ae4f/websockets-16.0-cp314-cp314-win32.whl", hash = "sha256:a069d734c4a043182729edd3e9f247c3b2a4035415a9172fd0f1b71658a320a8", size = 177915, upload-time = "2026-01-10T09:23:21.458Z" }, + { url = "https://files.pythonhosted.org/packages/8a/f1/ae6b937bf3126b5134ce1f482365fde31a357c784ac51852978768b5eff4/websockets-16.0-cp314-cp314-win_amd64.whl", hash = "sha256:c0ee0e63f23914732c6d7e0cce24915c48f3f1512ec1d079ed01fc629dab269d", size = 178381, upload-time = "2026-01-10T09:23:22.715Z" }, + { url = "https://files.pythonhosted.org/packages/06/9b/f791d1db48403e1f0a27577a6beb37afae94254a8c6f08be4a23e4930bc0/websockets-16.0-cp314-cp314t-macosx_10_15_universal2.whl", hash = "sha256:a35539cacc3febb22b8f4d4a99cc79b104226a756aa7400adc722e83b0d03244", size = 177737, upload-time = "2026-01-10T09:23:24.523Z" }, + { url = "https://files.pythonhosted.org/packages/bd/40/53ad02341fa33b3ce489023f635367a4ac98b73570102ad2cdd770dacc9a/websockets-16.0-cp314-cp314t-macosx_10_15_x86_64.whl", hash = "sha256:b784ca5de850f4ce93ec85d3269d24d4c82f22b7212023c974c401d4980ebc5e", size = 175268, upload-time = "2026-01-10T09:23:25.781Z" }, + { url = "https://files.pythonhosted.org/packages/74/9b/6158d4e459b984f949dcbbb0c5d270154c7618e11c01029b9bbd1bb4c4f9/websockets-16.0-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:569d01a4e7fba956c5ae4fc988f0d4e187900f5497ce46339c996dbf24f17641", size = 175486, upload-time = "2026-01-10T09:23:27.033Z" }, + { url = 
"https://files.pythonhosted.org/packages/e5/2d/7583b30208b639c8090206f95073646c2c9ffd66f44df967981a64f849ad/websockets-16.0-cp314-cp314t-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:50f23cdd8343b984957e4077839841146f67a3d31ab0d00e6b824e74c5b2f6e8", size = 185331, upload-time = "2026-01-10T09:23:28.259Z" }, + { url = "https://files.pythonhosted.org/packages/45/b0/cce3784eb519b7b5ad680d14b9673a31ab8dcb7aad8b64d81709d2430aa8/websockets-16.0-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:152284a83a00c59b759697b7f9e9cddf4e3c7861dd0d964b472b70f78f89e80e", size = 186501, upload-time = "2026-01-10T09:23:29.449Z" }, + { url = "https://files.pythonhosted.org/packages/19/60/b8ebe4c7e89fb5f6cdf080623c9d92789a53636950f7abacfc33fe2b3135/websockets-16.0-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:bc59589ab64b0022385f429b94697348a6a234e8ce22544e3681b2e9331b5944", size = 186062, upload-time = "2026-01-10T09:23:31.368Z" }, + { url = "https://files.pythonhosted.org/packages/88/a8/a080593f89b0138b6cba1b28f8df5673b5506f72879322288b031337c0b8/websockets-16.0-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:32da954ffa2814258030e5a57bc73a3635463238e797c7375dc8091327434206", size = 185356, upload-time = "2026-01-10T09:23:32.627Z" }, + { url = "https://files.pythonhosted.org/packages/c2/b6/b9afed2afadddaf5ebb2afa801abf4b0868f42f8539bfe4b071b5266c9fe/websockets-16.0-cp314-cp314t-win32.whl", hash = "sha256:5a4b4cc550cb665dd8a47f868c8d04c8230f857363ad3c9caf7a0c3bf8c61ca6", size = 178085, upload-time = "2026-01-10T09:23:33.816Z" }, + { url = "https://files.pythonhosted.org/packages/9f/3e/28135a24e384493fa804216b79a6a6759a38cc4ff59118787b9fb693df93/websockets-16.0-cp314-cp314t-win_amd64.whl", hash = "sha256:b14dc141ed6d2dde437cddb216004bcac6a1df0935d79656387bd41632ba0bbd", size = 178531, upload-time = "2026-01-10T09:23:35.016Z" }, + { url = 
"https://files.pythonhosted.org/packages/6f/28/258ebab549c2bf3e64d2b0217b973467394a9cea8c42f70418ca2c5d0d2e/websockets-16.0-py3-none-any.whl", hash = "sha256:1637db62fad1dc833276dded54215f2c7fa46912301a24bd94d45d46a011ceec", size = 171598, upload-time = "2026-01-10T09:23:45.395Z" }, +] + +[[package]] +name = "wheel" +version = "0.46.3" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "packaging" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/89/24/a2eb353a6edac9a0303977c4cb048134959dd2a51b48a269dfc9dde00c8a/wheel-0.46.3.tar.gz", hash = "sha256:e3e79874b07d776c40bd6033f8ddf76a7dad46a7b8aa1b2787a83083519a1803", size = 60605, upload-time = "2026-01-22T12:39:49.136Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/87/22/b76d483683216dde3d67cba61fb2444be8d5be289bf628c13fc0fd90e5f9/wheel-0.46.3-py3-none-any.whl", hash = "sha256:4b399d56c9d9338230118d705d9737a2a468ccca63d5e813e2a4fc7815d8bc4d", size = 30557, upload-time = "2026-01-22T12:39:48.099Z" }, +] + +[[package]] +name = "xxhash" +version = "3.6.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/02/84/30869e01909fb37a6cc7e18688ee8bf1e42d57e7e0777636bd47524c43c7/xxhash-3.6.0.tar.gz", hash = "sha256:f0162a78b13a0d7617b2845b90c763339d1f1d82bb04a4b07f4ab535cc5e05d6", size = 85160, upload-time = "2025-10-02T14:37:08.097Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/9a/07/d9412f3d7d462347e4511181dea65e47e0d0e16e26fbee2ea86a2aefb657/xxhash-3.6.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:01362c4331775398e7bb34e3ab403bc9ee9f7c497bc7dee6272114055277dd3c", size = 32744, upload-time = "2025-10-02T14:34:34.622Z" }, + { url = "https://files.pythonhosted.org/packages/79/35/0429ee11d035fc33abe32dca1b2b69e8c18d236547b9a9b72c1929189b9a/xxhash-3.6.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:b7b2df81a23f8cb99656378e72501b2cb41b1827c0f5a86f87d6b06b69f9f204", size = 30816, 
upload-time = "2025-10-02T14:34:36.043Z" }, + { url = "https://files.pythonhosted.org/packages/b7/f2/57eb99aa0f7d98624c0932c5b9a170e1806406cdbcdb510546634a1359e0/xxhash-3.6.0-cp312-cp312-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:dc94790144e66b14f67b10ac8ed75b39ca47536bf8800eb7c24b50271ea0c490", size = 194035, upload-time = "2025-10-02T14:34:37.354Z" }, + { url = "https://files.pythonhosted.org/packages/4c/ed/6224ba353690d73af7a3f1c7cdb1fc1b002e38f783cb991ae338e1eb3d79/xxhash-3.6.0-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:93f107c673bccf0d592cdba077dedaf52fe7f42dcd7676eba1f6d6f0c3efffd2", size = 212914, upload-time = "2025-10-02T14:34:38.6Z" }, + { url = "https://files.pythonhosted.org/packages/38/86/fb6b6130d8dd6b8942cc17ab4d90e223653a89aa32ad2776f8af7064ed13/xxhash-3.6.0-cp312-cp312-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:2aa5ee3444c25b69813663c9f8067dcfaa2e126dc55e8dddf40f4d1c25d7effa", size = 212163, upload-time = "2025-10-02T14:34:39.872Z" }, + { url = "https://files.pythonhosted.org/packages/ee/dc/e84875682b0593e884ad73b2d40767b5790d417bde603cceb6878901d647/xxhash-3.6.0-cp312-cp312-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:f7f99123f0e1194fa59cc69ad46dbae2e07becec5df50a0509a808f90a0f03f0", size = 445411, upload-time = "2025-10-02T14:34:41.569Z" }, + { url = "https://files.pythonhosted.org/packages/11/4f/426f91b96701ec2f37bb2b8cec664eff4f658a11f3fa9d94f0a887ea6d2b/xxhash-3.6.0-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:49e03e6fe2cac4a1bc64952dd250cf0dbc5ef4ebb7b8d96bce82e2de163c82a2", size = 193883, upload-time = "2025-10-02T14:34:43.249Z" }, + { url = "https://files.pythonhosted.org/packages/53/5a/ddbb83eee8e28b778eacfc5a85c969673e4023cdeedcfcef61f36731610b/xxhash-3.6.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = 
"sha256:bd17fede52a17a4f9a7bc4472a5867cb0b160deeb431795c0e4abe158bc784e9", size = 210392, upload-time = "2025-10-02T14:34:45.042Z" }, + { url = "https://files.pythonhosted.org/packages/1e/c2/ff69efd07c8c074ccdf0a4f36fcdd3d27363665bcdf4ba399abebe643465/xxhash-3.6.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:6fb5f5476bef678f69db04f2bd1efbed3030d2aba305b0fc1773645f187d6a4e", size = 197898, upload-time = "2025-10-02T14:34:46.302Z" }, + { url = "https://files.pythonhosted.org/packages/58/ca/faa05ac19b3b622c7c9317ac3e23954187516298a091eb02c976d0d3dd45/xxhash-3.6.0-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:843b52f6d88071f87eba1631b684fcb4b2068cd2180a0224122fe4ef011a9374", size = 210655, upload-time = "2025-10-02T14:34:47.571Z" }, + { url = "https://files.pythonhosted.org/packages/d4/7a/06aa7482345480cc0cb597f5c875b11a82c3953f534394f620b0be2f700c/xxhash-3.6.0-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:7d14a6cfaf03b1b6f5f9790f76880601ccc7896aff7ab9cd8978a939c1eb7e0d", size = 414001, upload-time = "2025-10-02T14:34:49.273Z" }, + { url = "https://files.pythonhosted.org/packages/23/07/63ffb386cd47029aa2916b3d2f454e6cc5b9f5c5ada3790377d5430084e7/xxhash-3.6.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:418daf3db71e1413cfe211c2f9a528456936645c17f46b5204705581a45390ae", size = 191431, upload-time = "2025-10-02T14:34:50.798Z" }, + { url = "https://files.pythonhosted.org/packages/0f/93/14fde614cadb4ddf5e7cebf8918b7e8fac5ae7861c1875964f17e678205c/xxhash-3.6.0-cp312-cp312-win32.whl", hash = "sha256:50fc255f39428a27299c20e280d6193d8b63b8ef8028995323bf834a026b4fbb", size = 30617, upload-time = "2025-10-02T14:34:51.954Z" }, + { url = "https://files.pythonhosted.org/packages/13/5d/0d125536cbe7565a83d06e43783389ecae0c0f2ed037b48ede185de477c0/xxhash-3.6.0-cp312-cp312-win_amd64.whl", hash = "sha256:c0f2ab8c715630565ab8991b536ecded9416d615538be8ecddce43ccf26cbc7c", size = 31534, upload-time = "2025-10-02T14:34:53.276Z" }, + { url = 
"https://files.pythonhosted.org/packages/54/85/6ec269b0952ec7e36ba019125982cf11d91256a778c7c3f98a4c5043d283/xxhash-3.6.0-cp312-cp312-win_arm64.whl", hash = "sha256:eae5c13f3bc455a3bbb68bdc513912dc7356de7e2280363ea235f71f54064829", size = 27876, upload-time = "2025-10-02T14:34:54.371Z" }, + { url = "https://files.pythonhosted.org/packages/33/76/35d05267ac82f53ae9b0e554da7c5e281ee61f3cad44c743f0fcd354f211/xxhash-3.6.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:599e64ba7f67472481ceb6ee80fa3bd828fd61ba59fb11475572cc5ee52b89ec", size = 32738, upload-time = "2025-10-02T14:34:55.839Z" }, + { url = "https://files.pythonhosted.org/packages/31/a8/3fbce1cd96534a95e35d5120637bf29b0d7f5d8fa2f6374e31b4156dd419/xxhash-3.6.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:7d8b8aaa30fca4f16f0c84a5c8d7ddee0e25250ec2796c973775373257dde8f1", size = 30821, upload-time = "2025-10-02T14:34:57.219Z" }, + { url = "https://files.pythonhosted.org/packages/0c/ea/d387530ca7ecfa183cb358027f1833297c6ac6098223fd14f9782cd0015c/xxhash-3.6.0-cp313-cp313-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:d597acf8506d6e7101a4a44a5e428977a51c0fadbbfd3c39650cca9253f6e5a6", size = 194127, upload-time = "2025-10-02T14:34:59.21Z" }, + { url = "https://files.pythonhosted.org/packages/ba/0c/71435dcb99874b09a43b8d7c54071e600a7481e42b3e3ce1eb5226a5711a/xxhash-3.6.0-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:858dc935963a33bc33490128edc1c12b0c14d9c7ebaa4e387a7869ecc4f3e263", size = 212975, upload-time = "2025-10-02T14:35:00.816Z" }, + { url = "https://files.pythonhosted.org/packages/84/7a/c2b3d071e4bb4a90b7057228a99b10d51744878f4a8a6dd643c8bd897620/xxhash-3.6.0-cp313-cp313-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:ba284920194615cb8edf73bf52236ce2e1664ccd4a38fdb543506413529cc546", size = 212241, upload-time = "2025-10-02T14:35:02.207Z" }, + { url = 
"https://files.pythonhosted.org/packages/81/5f/640b6eac0128e215f177df99eadcd0f1b7c42c274ab6a394a05059694c5a/xxhash-3.6.0-cp313-cp313-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:4b54219177f6c6674d5378bd862c6aedf64725f70dd29c472eaae154df1a2e89", size = 445471, upload-time = "2025-10-02T14:35:03.61Z" }, + { url = "https://files.pythonhosted.org/packages/5e/1e/3c3d3ef071b051cc3abbe3721ffb8365033a172613c04af2da89d5548a87/xxhash-3.6.0-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:42c36dd7dbad2f5238950c377fcbf6811b1cdb1c444fab447960030cea60504d", size = 193936, upload-time = "2025-10-02T14:35:05.013Z" }, + { url = "https://files.pythonhosted.org/packages/2c/bd/4a5f68381939219abfe1c22a9e3a5854a4f6f6f3c4983a87d255f21f2e5d/xxhash-3.6.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:f22927652cba98c44639ffdc7aaf35828dccf679b10b31c4ad72a5b530a18eb7", size = 210440, upload-time = "2025-10-02T14:35:06.239Z" }, + { url = "https://files.pythonhosted.org/packages/eb/37/b80fe3d5cfb9faff01a02121a0f4d565eb7237e9e5fc66e73017e74dcd36/xxhash-3.6.0-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:b45fad44d9c5c119e9c6fbf2e1c656a46dc68e280275007bbfd3d572b21426db", size = 197990, upload-time = "2025-10-02T14:35:07.735Z" }, + { url = "https://files.pythonhosted.org/packages/d7/fd/2c0a00c97b9e18f72e1f240ad4e8f8a90fd9d408289ba9c7c495ed7dc05c/xxhash-3.6.0-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:6f2580ffab1a8b68ef2b901cde7e55fa8da5e4be0977c68f78fc80f3c143de42", size = 210689, upload-time = "2025-10-02T14:35:09.438Z" }, + { url = "https://files.pythonhosted.org/packages/93/86/5dd8076a926b9a95db3206aba20d89a7fc14dd5aac16e5c4de4b56033140/xxhash-3.6.0-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:40c391dd3cd041ebc3ffe6f2c862f402e306eb571422e0aa918d8070ba31da11", size = 414068, upload-time = "2025-10-02T14:35:11.162Z" }, + { url = 
"https://files.pythonhosted.org/packages/af/3c/0bb129170ee8f3650f08e993baee550a09593462a5cddd8e44d0011102b1/xxhash-3.6.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:f205badabde7aafd1a31e8ca2a3e5a763107a71c397c4481d6a804eb5063d8bd", size = 191495, upload-time = "2025-10-02T14:35:12.971Z" }, + { url = "https://files.pythonhosted.org/packages/e9/3a/6797e0114c21d1725e2577508e24006fd7ff1d8c0c502d3b52e45c1771d8/xxhash-3.6.0-cp313-cp313-win32.whl", hash = "sha256:2577b276e060b73b73a53042ea5bd5203d3e6347ce0d09f98500f418a9fcf799", size = 30620, upload-time = "2025-10-02T14:35:14.129Z" }, + { url = "https://files.pythonhosted.org/packages/86/15/9bc32671e9a38b413a76d24722a2bf8784a132c043063a8f5152d390b0f9/xxhash-3.6.0-cp313-cp313-win_amd64.whl", hash = "sha256:757320d45d2fbcce8f30c42a6b2f47862967aea7bf458b9625b4bbe7ee390392", size = 31542, upload-time = "2025-10-02T14:35:15.21Z" }, + { url = "https://files.pythonhosted.org/packages/39/c5/cc01e4f6188656e56112d6a8e0dfe298a16934b8c47a247236549a3f7695/xxhash-3.6.0-cp313-cp313-win_arm64.whl", hash = "sha256:457b8f85dec5825eed7b69c11ae86834a018b8e3df5e77783c999663da2f96d6", size = 27880, upload-time = "2025-10-02T14:35:16.315Z" }, + { url = "https://files.pythonhosted.org/packages/f3/30/25e5321c8732759e930c555176d37e24ab84365482d257c3b16362235212/xxhash-3.6.0-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:a42e633d75cdad6d625434e3468126c73f13f7584545a9cf34e883aa1710e702", size = 32956, upload-time = "2025-10-02T14:35:17.413Z" }, + { url = "https://files.pythonhosted.org/packages/9f/3c/0573299560d7d9f8ab1838f1efc021a280b5ae5ae2e849034ef3dee18810/xxhash-3.6.0-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:568a6d743219e717b07b4e03b0a828ce593833e498c3b64752e0f5df6bfe84db", size = 31072, upload-time = "2025-10-02T14:35:18.844Z" }, + { url = 
"https://files.pythonhosted.org/packages/7a/1c/52d83a06e417cd9d4137722693424885cc9878249beb3a7c829e74bf7ce9/xxhash-3.6.0-cp313-cp313t-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:bec91b562d8012dae276af8025a55811b875baace6af510412a5e58e3121bc54", size = 196409, upload-time = "2025-10-02T14:35:20.31Z" }, + { url = "https://files.pythonhosted.org/packages/e3/8e/c6d158d12a79bbd0b878f8355432075fc82759e356ab5a111463422a239b/xxhash-3.6.0-cp313-cp313t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:78e7f2f4c521c30ad5e786fdd6bae89d47a32672a80195467b5de0480aa97b1f", size = 215736, upload-time = "2025-10-02T14:35:21.616Z" }, + { url = "https://files.pythonhosted.org/packages/bc/68/c4c80614716345d55071a396cf03d06e34b5f4917a467faf43083c995155/xxhash-3.6.0-cp313-cp313t-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:3ed0df1b11a79856df5ffcab572cbd6b9627034c1c748c5566fa79df9048a7c5", size = 214833, upload-time = "2025-10-02T14:35:23.32Z" }, + { url = "https://files.pythonhosted.org/packages/7e/e9/ae27c8ffec8b953efa84c7c4a6c6802c263d587b9fc0d6e7cea64e08c3af/xxhash-3.6.0-cp313-cp313t-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:0e4edbfc7d420925b0dd5e792478ed393d6e75ff8fc219a6546fb446b6a417b1", size = 448348, upload-time = "2025-10-02T14:35:25.111Z" }, + { url = "https://files.pythonhosted.org/packages/d7/6b/33e21afb1b5b3f46b74b6bd1913639066af218d704cc0941404ca717fc57/xxhash-3.6.0-cp313-cp313t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:fba27a198363a7ef87f8c0f6b171ec36b674fe9053742c58dd7e3201c1ab30ee", size = 196070, upload-time = "2025-10-02T14:35:26.586Z" }, + { url = "https://files.pythonhosted.org/packages/96/b6/fcabd337bc5fa624e7203aa0fa7d0c49eed22f72e93229431752bddc83d9/xxhash-3.6.0-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = 
"sha256:794fe9145fe60191c6532fa95063765529770edcdd67b3d537793e8004cabbfd", size = 212907, upload-time = "2025-10-02T14:35:28.087Z" }, + { url = "https://files.pythonhosted.org/packages/4b/d3/9ee6160e644d660fcf176c5825e61411c7f62648728f69c79ba237250143/xxhash-3.6.0-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:6105ef7e62b5ac73a837778efc331a591d8442f8ef5c7e102376506cb4ae2729", size = 200839, upload-time = "2025-10-02T14:35:29.857Z" }, + { url = "https://files.pythonhosted.org/packages/0d/98/e8de5baa5109394baf5118f5e72ab21a86387c4f89b0e77ef3e2f6b0327b/xxhash-3.6.0-cp313-cp313t-musllinux_1_2_ppc64le.whl", hash = "sha256:f01375c0e55395b814a679b3eea205db7919ac2af213f4a6682e01220e5fe292", size = 213304, upload-time = "2025-10-02T14:35:31.222Z" }, + { url = "https://files.pythonhosted.org/packages/7b/1d/71056535dec5c3177eeb53e38e3d367dd1d16e024e63b1cee208d572a033/xxhash-3.6.0-cp313-cp313t-musllinux_1_2_s390x.whl", hash = "sha256:d706dca2d24d834a4661619dcacf51a75c16d65985718d6a7d73c1eeeb903ddf", size = 416930, upload-time = "2025-10-02T14:35:32.517Z" }, + { url = "https://files.pythonhosted.org/packages/dc/6c/5cbde9de2cd967c322e651c65c543700b19e7ae3e0aae8ece3469bf9683d/xxhash-3.6.0-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:5f059d9faeacd49c0215d66f4056e1326c80503f51a1532ca336a385edadd033", size = 193787, upload-time = "2025-10-02T14:35:33.827Z" }, + { url = "https://files.pythonhosted.org/packages/19/fa/0172e350361d61febcea941b0cc541d6e6c8d65d153e85f850a7b256ff8a/xxhash-3.6.0-cp313-cp313t-win32.whl", hash = "sha256:1244460adc3a9be84731d72b8e80625788e5815b68da3da8b83f78115a40a7ec", size = 30916, upload-time = "2025-10-02T14:35:35.107Z" }, + { url = "https://files.pythonhosted.org/packages/ad/e6/e8cf858a2b19d6d45820f072eff1bea413910592ff17157cabc5f1227a16/xxhash-3.6.0-cp313-cp313t-win_amd64.whl", hash = "sha256:b1e420ef35c503869c4064f4a2f2b08ad6431ab7b229a05cce39d74268bca6b8", size = 31799, upload-time = "2025-10-02T14:35:36.165Z" }, + { url = 
"https://files.pythonhosted.org/packages/56/15/064b197e855bfb7b343210e82490ae672f8bc7cdf3ddb02e92f64304ee8a/xxhash-3.6.0-cp313-cp313t-win_arm64.whl", hash = "sha256:ec44b73a4220623235f67a996c862049f375df3b1052d9899f40a6382c32d746", size = 28044, upload-time = "2025-10-02T14:35:37.195Z" }, + { url = "https://files.pythonhosted.org/packages/7e/5e/0138bc4484ea9b897864d59fce9be9086030825bc778b76cb5a33a906d37/xxhash-3.6.0-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:a40a3d35b204b7cc7643cbcf8c9976d818cb47befcfac8bbefec8038ac363f3e", size = 32754, upload-time = "2025-10-02T14:35:38.245Z" }, + { url = "https://files.pythonhosted.org/packages/18/d7/5dac2eb2ec75fd771957a13e5dda560efb2176d5203f39502a5fc571f899/xxhash-3.6.0-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:a54844be970d3fc22630b32d515e79a90d0a3ddb2644d8d7402e3c4c8da61405", size = 30846, upload-time = "2025-10-02T14:35:39.6Z" }, + { url = "https://files.pythonhosted.org/packages/fe/71/8bc5be2bb00deb5682e92e8da955ebe5fa982da13a69da5a40a4c8db12fb/xxhash-3.6.0-cp314-cp314-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:016e9190af8f0a4e3741343777710e3d5717427f175adfdc3e72508f59e2a7f3", size = 194343, upload-time = "2025-10-02T14:35:40.69Z" }, + { url = "https://files.pythonhosted.org/packages/e7/3b/52badfb2aecec2c377ddf1ae75f55db3ba2d321c5e164f14461c90837ef3/xxhash-3.6.0-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:4f6f72232f849eb9d0141e2ebe2677ece15adfd0fa599bc058aad83c714bb2c6", size = 213074, upload-time = "2025-10-02T14:35:42.29Z" }, + { url = "https://files.pythonhosted.org/packages/a2/2b/ae46b4e9b92e537fa30d03dbc19cdae57ed407e9c26d163895e968e3de85/xxhash-3.6.0-cp314-cp314-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:63275a8aba7865e44b1813d2177e0f5ea7eadad3dd063a21f7cf9afdc7054063", size = 212388, upload-time = "2025-10-02T14:35:43.929Z" }, + { url = 
"https://files.pythonhosted.org/packages/f5/80/49f88d3afc724b4ac7fbd664c8452d6db51b49915be48c6982659e0e7942/xxhash-3.6.0-cp314-cp314-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:3cd01fa2aa00d8b017c97eb46b9a794fbdca53fc14f845f5a328c71254b0abb7", size = 445614, upload-time = "2025-10-02T14:35:45.216Z" }, + { url = "https://files.pythonhosted.org/packages/ed/ba/603ce3961e339413543d8cd44f21f2c80e2a7c5cfe692a7b1f2cccf58f3c/xxhash-3.6.0-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:0226aa89035b62b6a86d3c68df4d7c1f47a342b8683da2b60cedcddb46c4d95b", size = 194024, upload-time = "2025-10-02T14:35:46.959Z" }, + { url = "https://files.pythonhosted.org/packages/78/d1/8e225ff7113bf81545cfdcd79eef124a7b7064a0bba53605ff39590b95c2/xxhash-3.6.0-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:c6e193e9f56e4ca4923c61238cdaced324f0feac782544eb4c6d55ad5cc99ddd", size = 210541, upload-time = "2025-10-02T14:35:48.301Z" }, + { url = "https://files.pythonhosted.org/packages/6f/58/0f89d149f0bad89def1a8dd38feb50ccdeb643d9797ec84707091d4cb494/xxhash-3.6.0-cp314-cp314-musllinux_1_2_i686.whl", hash = "sha256:9176dcaddf4ca963d4deb93866d739a343c01c969231dbe21680e13a5d1a5bf0", size = 198305, upload-time = "2025-10-02T14:35:49.584Z" }, + { url = "https://files.pythonhosted.org/packages/11/38/5eab81580703c4df93feb5f32ff8fa7fe1e2c51c1f183ee4e48d4bb9d3d7/xxhash-3.6.0-cp314-cp314-musllinux_1_2_ppc64le.whl", hash = "sha256:c1ce4009c97a752e682b897aa99aef84191077a9433eb237774689f14f8ec152", size = 210848, upload-time = "2025-10-02T14:35:50.877Z" }, + { url = "https://files.pythonhosted.org/packages/5e/6b/953dc4b05c3ce678abca756416e4c130d2382f877a9c30a20d08ee6a77c0/xxhash-3.6.0-cp314-cp314-musllinux_1_2_s390x.whl", hash = "sha256:8cb2f4f679b01513b7adbb9b1b2f0f9cdc31b70007eaf9d59d0878809f385b11", size = 414142, upload-time = "2025-10-02T14:35:52.15Z" }, + { url = 
"https://files.pythonhosted.org/packages/08/a9/238ec0d4e81a10eb5026d4a6972677cbc898ba6c8b9dbaec12ae001b1b35/xxhash-3.6.0-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:653a91d7c2ab54a92c19ccf43508b6a555440b9be1bc8be553376778be7f20b5", size = 191547, upload-time = "2025-10-02T14:35:53.547Z" }, + { url = "https://files.pythonhosted.org/packages/f1/ee/3cf8589e06c2164ac77c3bf0aa127012801128f1feebf2a079272da5737c/xxhash-3.6.0-cp314-cp314-win32.whl", hash = "sha256:a756fe893389483ee8c394d06b5ab765d96e68fbbfe6fde7aa17e11f5720559f", size = 31214, upload-time = "2025-10-02T14:35:54.746Z" }, + { url = "https://files.pythonhosted.org/packages/02/5d/a19552fbc6ad4cb54ff953c3908bbc095f4a921bc569433d791f755186f1/xxhash-3.6.0-cp314-cp314-win_amd64.whl", hash = "sha256:39be8e4e142550ef69629c9cd71b88c90e9a5db703fecbcf265546d9536ca4ad", size = 32290, upload-time = "2025-10-02T14:35:55.791Z" }, + { url = "https://files.pythonhosted.org/packages/b1/11/dafa0643bc30442c887b55baf8e73353a344ee89c1901b5a5c54a6c17d39/xxhash-3.6.0-cp314-cp314-win_arm64.whl", hash = "sha256:25915e6000338999236f1eb68a02a32c3275ac338628a7eaa5a269c401995679", size = 28795, upload-time = "2025-10-02T14:35:57.162Z" }, + { url = "https://files.pythonhosted.org/packages/2c/db/0e99732ed7f64182aef4a6fb145e1a295558deec2a746265dcdec12d191e/xxhash-3.6.0-cp314-cp314t-macosx_10_13_x86_64.whl", hash = "sha256:c5294f596a9017ca5a3e3f8884c00b91ab2ad2933cf288f4923c3fd4346cf3d4", size = 32955, upload-time = "2025-10-02T14:35:58.267Z" }, + { url = "https://files.pythonhosted.org/packages/55/f4/2a7c3c68e564a099becfa44bb3d398810cc0ff6749b0d3cb8ccb93f23c14/xxhash-3.6.0-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:1cf9dcc4ab9cff01dfbba78544297a3a01dafd60f3bde4e2bfd016cf7e4ddc67", size = 31072, upload-time = "2025-10-02T14:35:59.382Z" }, + { url = 
"https://files.pythonhosted.org/packages/c6/d9/72a29cddc7250e8a5819dad5d466facb5dc4c802ce120645630149127e73/xxhash-3.6.0-cp314-cp314t-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:01262da8798422d0685f7cef03b2bd3f4f46511b02830861df548d7def4402ad", size = 196579, upload-time = "2025-10-02T14:36:00.838Z" }, + { url = "https://files.pythonhosted.org/packages/63/93/b21590e1e381040e2ca305a884d89e1c345b347404f7780f07f2cdd47ef4/xxhash-3.6.0-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:51a73fb7cb3a3ead9f7a8b583ffd9b8038e277cdb8cb87cf890e88b3456afa0b", size = 215854, upload-time = "2025-10-02T14:36:02.207Z" }, + { url = "https://files.pythonhosted.org/packages/ce/b8/edab8a7d4fa14e924b29be877d54155dcbd8b80be85ea00d2be3413a9ed4/xxhash-3.6.0-cp314-cp314t-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:b9c6df83594f7df8f7f708ce5ebeacfc69f72c9fbaaababf6cf4758eaada0c9b", size = 214965, upload-time = "2025-10-02T14:36:03.507Z" }, + { url = "https://files.pythonhosted.org/packages/27/67/dfa980ac7f0d509d54ea0d5a486d2bb4b80c3f1bb22b66e6a05d3efaf6c0/xxhash-3.6.0-cp314-cp314t-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:627f0af069b0ea56f312fd5189001c24578868643203bca1abbc2c52d3a6f3ca", size = 448484, upload-time = "2025-10-02T14:36:04.828Z" }, + { url = "https://files.pythonhosted.org/packages/8c/63/8ffc2cc97e811c0ca5d00ab36604b3ea6f4254f20b7bc658ca825ce6c954/xxhash-3.6.0-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:aa912c62f842dfd013c5f21a642c9c10cd9f4c4e943e0af83618b4a404d9091a", size = 196162, upload-time = "2025-10-02T14:36:06.182Z" }, + { url = "https://files.pythonhosted.org/packages/4b/77/07f0e7a3edd11a6097e990f6e5b815b6592459cb16dae990d967693e6ea9/xxhash-3.6.0-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = 
"sha256:b465afd7909db30168ab62afe40b2fcf79eedc0b89a6c0ab3123515dc0df8b99", size = 213007, upload-time = "2025-10-02T14:36:07.733Z" }, + { url = "https://files.pythonhosted.org/packages/ae/d8/bc5fa0d152837117eb0bef6f83f956c509332ce133c91c63ce07ee7c4873/xxhash-3.6.0-cp314-cp314t-musllinux_1_2_i686.whl", hash = "sha256:a881851cf38b0a70e7c4d3ce81fc7afd86fbc2a024f4cfb2a97cf49ce04b75d3", size = 200956, upload-time = "2025-10-02T14:36:09.106Z" }, + { url = "https://files.pythonhosted.org/packages/26/a5/d749334130de9411783873e9b98ecc46688dad5db64ca6e04b02acc8b473/xxhash-3.6.0-cp314-cp314t-musllinux_1_2_ppc64le.whl", hash = "sha256:9b3222c686a919a0f3253cfc12bb118b8b103506612253b5baeaac10d8027cf6", size = 213401, upload-time = "2025-10-02T14:36:10.585Z" }, + { url = "https://files.pythonhosted.org/packages/89/72/abed959c956a4bfc72b58c0384bb7940663c678127538634d896b1195c10/xxhash-3.6.0-cp314-cp314t-musllinux_1_2_s390x.whl", hash = "sha256:c5aa639bc113e9286137cec8fadc20e9cd732b2cc385c0b7fa673b84fc1f2a93", size = 417083, upload-time = "2025-10-02T14:36:12.276Z" }, + { url = "https://files.pythonhosted.org/packages/0c/b3/62fd2b586283b7d7d665fb98e266decadf31f058f1cf6c478741f68af0cb/xxhash-3.6.0-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:5c1343d49ac102799905e115aee590183c3921d475356cb24b4de29a4bc56518", size = 193913, upload-time = "2025-10-02T14:36:14.025Z" }, + { url = "https://files.pythonhosted.org/packages/9a/9a/c19c42c5b3f5a4aad748a6d5b4f23df3bed7ee5445accc65a0fb3ff03953/xxhash-3.6.0-cp314-cp314t-win32.whl", hash = "sha256:5851f033c3030dd95c086b4a36a2683c2ff4a799b23af60977188b057e467119", size = 31586, upload-time = "2025-10-02T14:36:15.603Z" }, + { url = "https://files.pythonhosted.org/packages/03/d6/4cc450345be9924fd5dc8c590ceda1db5b43a0a889587b0ae81a95511360/xxhash-3.6.0-cp314-cp314t-win_amd64.whl", hash = "sha256:0444e7967dac37569052d2409b00a8860c2135cff05502df4da80267d384849f", size = 32526, upload-time = "2025-10-02T14:36:16.708Z" }, + { url = 
"https://files.pythonhosted.org/packages/0f/c9/7243eb3f9eaabd1a88a5a5acadf06df2d83b100c62684b7425c6a11bcaa8/xxhash-3.6.0-cp314-cp314t-win_arm64.whl", hash = "sha256:bb79b1e63f6fd84ec778a4b1916dfe0a7c3fdb986c06addd5db3a0d413819d95", size = 28898, upload-time = "2025-10-02T14:36:17.843Z" }, +] + +[[package]] +name = "yarl" +version = "1.23.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "idna" }, + { name = "multidict" }, + { name = "propcache" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/23/6e/beb1beec874a72f23815c1434518bfc4ed2175065173fb138c3705f658d4/yarl-1.23.0.tar.gz", hash = "sha256:53b1ea6ca88ebd4420379c330aea57e258408dd0df9af0992e5de2078dc9f5d5", size = 194676, upload-time = "2026-03-01T22:07:53.373Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/88/8a/94615bc31022f711add374097ad4144d569e95ff3c38d39215d07ac153a0/yarl-1.23.0-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:1932b6b8bba8d0160a9d1078aae5838a66039e8832d41d2992daa9a3a08f7860", size = 124737, upload-time = "2026-03-01T22:05:12.897Z" }, + { url = "https://files.pythonhosted.org/packages/e3/6f/c6554045d59d64052698add01226bc867b52fe4a12373415d7991fdca95d/yarl-1.23.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:411225bae281f114067578891bc75534cfb3d92a3b4dfef7a6ca78ba354e6069", size = 87029, upload-time = "2026-03-01T22:05:14.376Z" }, + { url = "https://files.pythonhosted.org/packages/19/2a/725ecc166d53438bc88f76822ed4b1e3b10756e790bafd7b523fe97c322d/yarl-1.23.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:13a563739ae600a631c36ce096615fe307f131344588b0bc0daec108cdb47b25", size = 86310, upload-time = "2026-03-01T22:05:15.71Z" }, + { url = "https://files.pythonhosted.org/packages/99/30/58260ed98e6ff7f90ba84442c1ddd758c9170d70327394a6227b310cd60f/yarl-1.23.0-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = 
"sha256:9cbf44c5cb4a7633d078788e1b56387e3d3cf2b8139a3be38040b22d6c3221c8", size = 97587, upload-time = "2026-03-01T22:05:17.384Z" }, + { url = "https://files.pythonhosted.org/packages/76/0a/8b08aac08b50682e65759f7f8dde98ae8168f72487e7357a5d684c581ef9/yarl-1.23.0-cp312-cp312-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:53ad387048f6f09a8969631e4de3f1bf70c50e93545d64af4f751b2498755072", size = 92528, upload-time = "2026-03-01T22:05:18.804Z" }, + { url = "https://files.pythonhosted.org/packages/52/07/0b7179101fe5f8385ec6c6bb5d0cb9f76bd9fb4a769591ab6fb5cdbfc69a/yarl-1.23.0-cp312-cp312-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:4a59ba56f340334766f3a4442e0efd0af895fae9e2b204741ef885c446b3a1a8", size = 105339, upload-time = "2026-03-01T22:05:20.235Z" }, + { url = "https://files.pythonhosted.org/packages/d3/8a/36d82869ab5ec829ca8574dfcb92b51286fcfb1e9c7a73659616362dc880/yarl-1.23.0-cp312-cp312-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:803a3c3ce4acc62eaf01eaca1208dcf0783025ef27572c3336502b9c232005e7", size = 105061, upload-time = "2026-03-01T22:05:22.268Z" }, + { url = "https://files.pythonhosted.org/packages/66/3e/868e5c3364b6cee19ff3e1a122194fa4ce51def02c61023970442162859e/yarl-1.23.0-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:a3d2bff8f37f8d0f96c7ec554d16945050d54462d6e95414babaa18bfafc7f51", size = 100132, upload-time = "2026-03-01T22:05:23.638Z" }, + { url = "https://files.pythonhosted.org/packages/cf/26/9c89acf82f08a52cb52d6d39454f8d18af15f9d386a23795389d1d423823/yarl-1.23.0-cp312-cp312-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:c75eb09e8d55bceb4367e83496ff8ef2bc7ea6960efb38e978e8073ea59ecb67", size = 99289, upload-time = "2026-03-01T22:05:25.749Z" }, + { url = 
"https://files.pythonhosted.org/packages/6f/54/5b0db00d2cb056922356104468019c0a132e89c8d3ab67d8ede9f4483d2a/yarl-1.23.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:877b0738624280e34c55680d6054a307aa94f7d52fa0e3034a9cc6e790871da7", size = 96950, upload-time = "2026-03-01T22:05:27.318Z" }, + { url = "https://files.pythonhosted.org/packages/f6/40/10fa93811fd439341fad7e0718a86aca0de9548023bbb403668d6555acab/yarl-1.23.0-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:b5405bb8f0e783a988172993cfc627e4d9d00432d6bbac65a923041edacf997d", size = 93960, upload-time = "2026-03-01T22:05:28.738Z" }, + { url = "https://files.pythonhosted.org/packages/bc/d2/8ae2e6cd77d0805f4526e30ec43b6f9a3dfc542d401ac4990d178e4bf0cf/yarl-1.23.0-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:1c3a3598a832590c5a3ce56ab5576361b5688c12cb1d39429cf5dba30b510760", size = 104703, upload-time = "2026-03-01T22:05:30.438Z" }, + { url = "https://files.pythonhosted.org/packages/2f/0c/b3ceacf82c3fe21183ce35fa2acf5320af003d52bc1fcf5915077681142e/yarl-1.23.0-cp312-cp312-musllinux_1_2_riscv64.whl", hash = "sha256:8419ebd326430d1cbb7efb5292330a2cf39114e82df5cc3d83c9a0d5ebeaf2f2", size = 98325, upload-time = "2026-03-01T22:05:31.835Z" }, + { url = "https://files.pythonhosted.org/packages/9d/e0/12900edd28bdab91a69bd2554b85ad7b151f64e8b521fe16f9ad2f56477a/yarl-1.23.0-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:be61f6fff406ca40e3b1d84716fde398fc08bc63dd96d15f3a14230a0973ed86", size = 105067, upload-time = "2026-03-01T22:05:33.358Z" }, + { url = "https://files.pythonhosted.org/packages/15/61/74bb1182cf79c9bbe4eb6b1f14a57a22d7a0be5e9cedf8e2d5c2086474c3/yarl-1.23.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:3ceb13c5c858d01321b5d9bb65e4cf37a92169ea470b70fec6f236b2c9dd7e34", size = 100285, upload-time = "2026-03-01T22:05:35.4Z" }, + { url = 
"https://files.pythonhosted.org/packages/69/7f/cd5ef733f2550de6241bd8bd8c3febc78158b9d75f197d9c7baa113436af/yarl-1.23.0-cp312-cp312-win32.whl", hash = "sha256:fffc45637bcd6538de8b85f51e3df3223e4ad89bccbfca0481c08c7fc8b7ed7d", size = 82359, upload-time = "2026-03-01T22:05:36.811Z" }, + { url = "https://files.pythonhosted.org/packages/f5/be/25216a49daeeb7af2bec0db22d5e7df08ed1d7c9f65d78b14f3b74fd72fc/yarl-1.23.0-cp312-cp312-win_amd64.whl", hash = "sha256:f69f57305656a4852f2a7203efc661d8c042e6cc67f7acd97d8667fb448a426e", size = 87674, upload-time = "2026-03-01T22:05:38.171Z" }, + { url = "https://files.pythonhosted.org/packages/d2/35/aeab955d6c425b227d5b7247eafb24f2653fedc32f95373a001af5dfeb9e/yarl-1.23.0-cp312-cp312-win_arm64.whl", hash = "sha256:6e87a6e8735b44816e7db0b2fbc9686932df473c826b0d9743148432e10bb9b9", size = 81879, upload-time = "2026-03-01T22:05:40.006Z" }, + { url = "https://files.pythonhosted.org/packages/9a/4b/a0a6e5d0ee8a2f3a373ddef8a4097d74ac901ac363eea1440464ccbe0898/yarl-1.23.0-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:16c6994ac35c3e74fb0ae93323bf8b9c2a9088d55946109489667c510a7d010e", size = 123796, upload-time = "2026-03-01T22:05:41.412Z" }, + { url = "https://files.pythonhosted.org/packages/67/b6/8925d68af039b835ae876db5838e82e76ec87b9782ecc97e192b809c4831/yarl-1.23.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:4a42e651629dafb64fd5b0286a3580613702b5809ad3f24934ea87595804f2c5", size = 86547, upload-time = "2026-03-01T22:05:42.841Z" }, + { url = "https://files.pythonhosted.org/packages/ae/50/06d511cc4b8e0360d3c94af051a768e84b755c5eb031b12adaaab6dec6e5/yarl-1.23.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:7c6b9461a2a8b47c65eef63bb1c76a4f1c119618ffa99ea79bc5bb1e46c5821b", size = 85854, upload-time = "2026-03-01T22:05:44.85Z" }, + { url = 
"https://files.pythonhosted.org/packages/c4/f4/4e30b250927ffdab4db70da08b9b8d2194d7c7b400167b8fbeca1e4701ca/yarl-1.23.0-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:2569b67d616eab450d262ca7cb9f9e19d2f718c70a8b88712859359d0ab17035", size = 98351, upload-time = "2026-03-01T22:05:46.836Z" }, + { url = "https://files.pythonhosted.org/packages/86/fc/4118c5671ea948208bdb1492d8b76bdf1453d3e73df051f939f563e7dcc5/yarl-1.23.0-cp313-cp313-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:e9d9a4d06d3481eab79803beb4d9bd6f6a8e781ec078ac70d7ef2dcc29d1bea5", size = 92711, upload-time = "2026-03-01T22:05:48.316Z" }, + { url = "https://files.pythonhosted.org/packages/56/11/1ed91d42bd9e73c13dc9e7eb0dd92298d75e7ac4dd7f046ad0c472e231cd/yarl-1.23.0-cp313-cp313-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:f514f6474e04179d3d33175ed3f3e31434d3130d42ec153540d5b157deefd735", size = 106014, upload-time = "2026-03-01T22:05:50.028Z" }, + { url = "https://files.pythonhosted.org/packages/ce/c9/74e44e056a23fbc33aca71779ef450ca648a5bc472bdad7a82339918f818/yarl-1.23.0-cp313-cp313-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:fda207c815b253e34f7e1909840fd14299567b1c0eb4908f8c2ce01a41265401", size = 105557, upload-time = "2026-03-01T22:05:51.416Z" }, + { url = "https://files.pythonhosted.org/packages/66/fe/b1e10b08d287f518994f1e2ff9b6d26f0adeecd8dd7d533b01bab29a3eda/yarl-1.23.0-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:34b6cf500e61c90f305094911f9acc9c86da1a05a7a3f5be9f68817043f486e4", size = 101559, upload-time = "2026-03-01T22:05:52.872Z" }, + { url = "https://files.pythonhosted.org/packages/72/59/c5b8d94b14e3d3c2a9c20cb100119fd534ab5a14b93673ab4cc4a4141ea5/yarl-1.23.0-cp313-cp313-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = 
"sha256:d7504f2b476d21653e4d143f44a175f7f751cd41233525312696c76aa3dbb23f", size = 100502, upload-time = "2026-03-01T22:05:54.954Z" }, + { url = "https://files.pythonhosted.org/packages/77/4f/96976cb54cbfc5c9fd73ed4c51804f92f209481d1fb190981c0f8a07a1d7/yarl-1.23.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:578110dd426f0d209d1509244e6d4a3f1a3e9077655d98c5f22583d63252a08a", size = 98027, upload-time = "2026-03-01T22:05:56.409Z" }, + { url = "https://files.pythonhosted.org/packages/63/6e/904c4f476471afdbad6b7e5b70362fb5810e35cd7466529a97322b6f5556/yarl-1.23.0-cp313-cp313-musllinux_1_2_armv7l.whl", hash = "sha256:609d3614d78d74ebe35f54953c5bbd2ac647a7ddb9c30a5d877580f5e86b22f2", size = 95369, upload-time = "2026-03-01T22:05:58.141Z" }, + { url = "https://files.pythonhosted.org/packages/9d/40/acfcdb3b5f9d68ef499e39e04d25e141fe90661f9d54114556cf83be8353/yarl-1.23.0-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:4966242ec68afc74c122f8459abd597afd7d8a60dc93d695c1334c5fd25f762f", size = 105565, upload-time = "2026-03-01T22:06:00.286Z" }, + { url = "https://files.pythonhosted.org/packages/5e/c6/31e28f3a6ba2869c43d124f37ea5260cac9c9281df803c354b31f4dd1f3c/yarl-1.23.0-cp313-cp313-musllinux_1_2_riscv64.whl", hash = "sha256:e0fd068364a6759bc794459f0a735ab151d11304346332489c7972bacbe9e72b", size = 99813, upload-time = "2026-03-01T22:06:01.712Z" }, + { url = "https://files.pythonhosted.org/packages/08/1f/6f65f59e72d54aa467119b63fc0b0b1762eff0232db1f4720cd89e2f4a17/yarl-1.23.0-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:39004f0ad156da43e86aa71f44e033de68a44e5a31fc53507b36dd253970054a", size = 105632, upload-time = "2026-03-01T22:06:03.188Z" }, + { url = "https://files.pythonhosted.org/packages/a3/c4/18b178a69935f9e7a338127d5b77d868fdc0f0e49becd286d51b3a18c61d/yarl-1.23.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:e5723c01a56c5028c807c701aa66722916d2747ad737a046853f6c46f4875543", size = 101895, upload-time = "2026-03-01T22:06:04.651Z" }, + { 
url = "https://files.pythonhosted.org/packages/8f/54/f5b870b5505663911dba950a8e4776a0dbd51c9c54c0ae88e823e4b874a0/yarl-1.23.0-cp313-cp313-win32.whl", hash = "sha256:1b6b572edd95b4fa8df75de10b04bc81acc87c1c7d16bcdd2035b09d30acc957", size = 82356, upload-time = "2026-03-01T22:06:06.04Z" }, + { url = "https://files.pythonhosted.org/packages/7a/84/266e8da36879c6edcd37b02b547e2d9ecdfea776be49598e75696e3316e1/yarl-1.23.0-cp313-cp313-win_amd64.whl", hash = "sha256:baaf55442359053c7d62f6f8413a62adba3205119bcb6f49594894d8be47e5e3", size = 87515, upload-time = "2026-03-01T22:06:08.107Z" }, + { url = "https://files.pythonhosted.org/packages/00/fd/7e1c66efad35e1649114fa13f17485f62881ad58edeeb7f49f8c5e748bf9/yarl-1.23.0-cp313-cp313-win_arm64.whl", hash = "sha256:fb4948814a2a98e3912505f09c9e7493b1506226afb1f881825368d6fb776ee3", size = 81785, upload-time = "2026-03-01T22:06:10.181Z" }, + { url = "https://files.pythonhosted.org/packages/9c/fc/119dd07004f17ea43bb91e3ece6587759edd7519d6b086d16bfbd3319982/yarl-1.23.0-cp313-cp313t-macosx_10_13_universal2.whl", hash = "sha256:aecfed0b41aa72b7881712c65cf764e39ce2ec352324f5e0837c7048d9e6daaa", size = 130719, upload-time = "2026-03-01T22:06:11.708Z" }, + { url = "https://files.pythonhosted.org/packages/e6/0d/9f2348502fbb3af409e8f47730282cd6bc80dec6630c1e06374d882d6eb2/yarl-1.23.0-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:a41bcf68efd19073376eb8cf948b8d9be0af26256403e512bb18f3966f1f9120", size = 89690, upload-time = "2026-03-01T22:06:13.429Z" }, + { url = "https://files.pythonhosted.org/packages/50/93/e88f3c80971b42cfc83f50a51b9d165a1dbf154b97005f2994a79f212a07/yarl-1.23.0-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:cde9a2ecd91668bcb7f077c4966d8ceddb60af01b52e6e3e2680e4cf00ad1a59", size = 89851, upload-time = "2026-03-01T22:06:15.53Z" }, + { url = 
"https://files.pythonhosted.org/packages/1c/07/61c9dd8ba8f86473263b4036f70fb594c09e99c0d9737a799dfd8bc85651/yarl-1.23.0-cp313-cp313t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:5023346c4ee7992febc0068e7593de5fa2bf611848c08404b35ebbb76b1b0512", size = 95874, upload-time = "2026-03-01T22:06:17.553Z" }, + { url = "https://files.pythonhosted.org/packages/9e/e9/f9ff8ceefba599eac6abddcfb0b3bee9b9e636e96dbf54342a8577252379/yarl-1.23.0-cp313-cp313t-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:d1009abedb49ae95b136a8904a3f71b342f849ffeced2d3747bf29caeda218c4", size = 88710, upload-time = "2026-03-01T22:06:19.004Z" }, + { url = "https://files.pythonhosted.org/packages/eb/78/0231bfcc5d4c8eec220bc2f9ef82cb4566192ea867a7c5b4148f44f6cbcd/yarl-1.23.0-cp313-cp313t-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:a8d00f29b42f534cc8aa3931cfe773b13b23e561e10d2b26f27a8d309b0e82a1", size = 101033, upload-time = "2026-03-01T22:06:21.203Z" }, + { url = "https://files.pythonhosted.org/packages/cd/9b/30ea5239a61786f18fd25797151a17fbb3be176977187a48d541b5447dd4/yarl-1.23.0-cp313-cp313t-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:95451e6ce06c3e104556d73b559f5da6c34a069b6b62946d3ad66afcd51642ea", size = 100817, upload-time = "2026-03-01T22:06:22.738Z" }, + { url = "https://files.pythonhosted.org/packages/62/e2/a4980481071791bc83bce2b7a1a1f7adcabfa366007518b4b845e92eeee3/yarl-1.23.0-cp313-cp313t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:531ef597132086b6cf96faa7c6c1dcd0361dd5f1694e5cc30375907b9b7d3ea9", size = 97482, upload-time = "2026-03-01T22:06:24.21Z" }, + { url = "https://files.pythonhosted.org/packages/e5/1e/304a00cf5f6100414c4b5a01fc7ff9ee724b62158a08df2f8170dfc72a2d/yarl-1.23.0-cp313-cp313t-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = 
"sha256:88f9fb0116fbfcefcab70f85cf4b74a2b6ce5d199c41345296f49d974ddb4123", size = 95949, upload-time = "2026-03-01T22:06:25.697Z" }, + { url = "https://files.pythonhosted.org/packages/68/03/093f4055ed4cae649ac53bca3d180bd37102e9e11d048588e9ab0c0108d0/yarl-1.23.0-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:e7b0460976dc75cb87ad9cc1f9899a4b97751e7d4e77ab840fc9b6d377b8fd24", size = 95839, upload-time = "2026-03-01T22:06:27.309Z" }, + { url = "https://files.pythonhosted.org/packages/b9/28/4c75ebb108f322aa8f917ae10a8ffa4f07cae10a8a627b64e578617df6a0/yarl-1.23.0-cp313-cp313t-musllinux_1_2_armv7l.whl", hash = "sha256:115136c4a426f9da976187d238e84139ff6b51a20839aa6e3720cd1026d768de", size = 90696, upload-time = "2026-03-01T22:06:29.048Z" }, + { url = "https://files.pythonhosted.org/packages/23/9c/42c2e2dd91c1a570402f51bdf066bfdb1241c2240ba001967bad778e77b7/yarl-1.23.0-cp313-cp313t-musllinux_1_2_ppc64le.whl", hash = "sha256:ead11956716a940c1abc816b7df3fa2b84d06eaed8832ca32f5c5e058c65506b", size = 100865, upload-time = "2026-03-01T22:06:30.525Z" }, + { url = "https://files.pythonhosted.org/packages/74/05/1bcd60a8a0a914d462c305137246b6f9d167628d73568505fce3f1cb2e65/yarl-1.23.0-cp313-cp313t-musllinux_1_2_riscv64.whl", hash = "sha256:fe8f8f5e70e6dbdfca9882cd9deaac058729bcf323cf7a58660901e55c9c94f6", size = 96234, upload-time = "2026-03-01T22:06:32.692Z" }, + { url = "https://files.pythonhosted.org/packages/90/b2/f52381aac396d6778ce516b7bc149c79e65bfc068b5de2857ab69eeea3b7/yarl-1.23.0-cp313-cp313t-musllinux_1_2_s390x.whl", hash = "sha256:a0e317df055958a0c1e79e5d2aa5a5eaa4a6d05a20d4b0c9c3f48918139c9fc6", size = 100295, upload-time = "2026-03-01T22:06:34.268Z" }, + { url = "https://files.pythonhosted.org/packages/e5/e8/638bae5bbf1113a659b2435d8895474598afe38b4a837103764f603aba56/yarl-1.23.0-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:6f0fd84de0c957b2d280143522c4f91a73aada1923caee763e24a2b3fda9f8a5", size = 97784, upload-time = "2026-03-01T22:06:35.864Z" }, + 
{ url = "https://files.pythonhosted.org/packages/80/25/a3892b46182c586c202629fc2159aa13975d3741d52ebd7347fd501d48d5/yarl-1.23.0-cp313-cp313t-win32.whl", hash = "sha256:93a784271881035ab4406a172edb0faecb6e7d00f4b53dc2f55919d6c9688595", size = 88313, upload-time = "2026-03-01T22:06:37.39Z" }, + { url = "https://files.pythonhosted.org/packages/43/68/8c5b36aa5178900b37387937bc2c2fe0e9505537f713495472dcf6f6fccc/yarl-1.23.0-cp313-cp313t-win_amd64.whl", hash = "sha256:dd00607bffbf30250fe108065f07453ec124dbf223420f57f5e749b04295e090", size = 94932, upload-time = "2026-03-01T22:06:39.579Z" }, + { url = "https://files.pythonhosted.org/packages/c6/cc/d79ba8292f51f81f4dc533a8ccfb9fc6992cabf0998ed3245de7589dc07c/yarl-1.23.0-cp313-cp313t-win_arm64.whl", hash = "sha256:ac09d42f48f80c9ee1635b2fcaa819496a44502737660d3c0f2ade7526d29144", size = 84786, upload-time = "2026-03-01T22:06:41.988Z" }, + { url = "https://files.pythonhosted.org/packages/90/98/b85a038d65d1b92c3903ab89444f48d3cee490a883477b716d7a24b1a78c/yarl-1.23.0-cp314-cp314-macosx_10_15_universal2.whl", hash = "sha256:21d1b7305a71a15b4794b5ff22e8eef96ff4a6d7f9657155e5aa419444b28912", size = 124455, upload-time = "2026-03-01T22:06:43.615Z" }, + { url = "https://files.pythonhosted.org/packages/39/54/bc2b45559f86543d163b6e294417a107bb87557609007c007ad889afec18/yarl-1.23.0-cp314-cp314-macosx_10_15_x86_64.whl", hash = "sha256:85610b4f27f69984932a7abbe52703688de3724d9f72bceb1cca667deff27474", size = 86752, upload-time = "2026-03-01T22:06:45.425Z" }, + { url = "https://files.pythonhosted.org/packages/24/f9/e8242b68362bffe6fb536c8db5076861466fc780f0f1b479fc4ffbebb128/yarl-1.23.0-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:23f371bd662cf44a7630d4d113101eafc0cfa7518a2760d20760b26021454719", size = 86291, upload-time = "2026-03-01T22:06:46.974Z" }, + { url = 
"https://files.pythonhosted.org/packages/ea/d8/d1cb2378c81dd729e98c716582b1ccb08357e8488e4c24714658cc6630e8/yarl-1.23.0-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:c4a80f77dc1acaaa61f0934176fccca7096d9b1ff08c8ba9cddf5ae034a24319", size = 99026, upload-time = "2026-03-01T22:06:48.459Z" }, + { url = "https://files.pythonhosted.org/packages/0a/ff/7196790538f31debe3341283b5b0707e7feb947620fc5e8236ef28d44f72/yarl-1.23.0-cp314-cp314-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:bd654fad46d8d9e823afbb4f87c79160b5a374ed1ff5bde24e542e6ba8f41434", size = 92355, upload-time = "2026-03-01T22:06:50.306Z" }, + { url = "https://files.pythonhosted.org/packages/c1/56/25d58c3eddde825890a5fe6aa1866228377354a3c39262235234ab5f616b/yarl-1.23.0-cp314-cp314-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:682bae25f0a0dd23a056739f23a134db9f52a63e2afd6bfb37ddc76292bbd723", size = 106417, upload-time = "2026-03-01T22:06:52.1Z" }, + { url = "https://files.pythonhosted.org/packages/51/8a/882c0e7bc8277eb895b31bce0138f51a1ba551fc2e1ec6753ffc1e7c1377/yarl-1.23.0-cp314-cp314-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:a82836cab5f197a0514235aaf7ffccdc886ccdaa2324bc0aafdd4ae898103039", size = 106422, upload-time = "2026-03-01T22:06:54.424Z" }, + { url = "https://files.pythonhosted.org/packages/42/2b/fef67d616931055bf3d6764885990a3ac647d68734a2d6a9e1d13de437a2/yarl-1.23.0-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:1c57676bdedc94cd3bc37724cf6f8cd2779f02f6aba48de45feca073e714fe52", size = 101915, upload-time = "2026-03-01T22:06:55.895Z" }, + { url = "https://files.pythonhosted.org/packages/18/6a/530e16aebce27c5937920f3431c628a29a4b6b430fab3fd1c117b26ff3f6/yarl-1.23.0-cp314-cp314-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = 
"sha256:c7f8dc16c498ff06497c015642333219871effba93e4a2e8604a06264aca5c5c", size = 100690, upload-time = "2026-03-01T22:06:58.21Z" }, + { url = "https://files.pythonhosted.org/packages/88/08/93749219179a45e27b036e03260fda05190b911de8e18225c294ac95bbc9/yarl-1.23.0-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:5ee586fb17ff8f90c91cf73c6108a434b02d69925f44f5f8e0d7f2f260607eae", size = 98750, upload-time = "2026-03-01T22:06:59.794Z" }, + { url = "https://files.pythonhosted.org/packages/d9/cf/ea424a004969f5d81a362110a6ac1496d79efdc6d50c2c4b2e3ea0fc2519/yarl-1.23.0-cp314-cp314-musllinux_1_2_armv7l.whl", hash = "sha256:17235362f580149742739cc3828b80e24029d08cbb9c4bda0242c7b5bc610a8e", size = 94685, upload-time = "2026-03-01T22:07:01.375Z" }, + { url = "https://files.pythonhosted.org/packages/e2/b7/14341481fe568e2b0408bcf1484c652accafe06a0ade9387b5d3fd9df446/yarl-1.23.0-cp314-cp314-musllinux_1_2_ppc64le.whl", hash = "sha256:0793e2bd0cf14234983bbb371591e6bea9e876ddf6896cdcc93450996b0b5c85", size = 106009, upload-time = "2026-03-01T22:07:03.151Z" }, + { url = "https://files.pythonhosted.org/packages/0a/e6/5c744a9b54f4e8007ad35bce96fbc9218338e84812d36f3390cea616881a/yarl-1.23.0-cp314-cp314-musllinux_1_2_riscv64.whl", hash = "sha256:3650dc2480f94f7116c364096bc84b1d602f44224ef7d5c7208425915c0475dd", size = 100033, upload-time = "2026-03-01T22:07:04.701Z" }, + { url = "https://files.pythonhosted.org/packages/0c/23/e3bfc188d0b400f025bc49d99793d02c9abe15752138dcc27e4eaf0c4a9e/yarl-1.23.0-cp314-cp314-musllinux_1_2_s390x.whl", hash = "sha256:f40e782d49630ad384db66d4d8b73ff4f1b8955dc12e26b09a3e3af064b3b9d6", size = 106483, upload-time = "2026-03-01T22:07:06.231Z" }, + { url = "https://files.pythonhosted.org/packages/72/42/f0505f949a90b3f8b7a363d6cbdf398f6e6c58946d85c6d3a3bc70595b26/yarl-1.23.0-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:94f8575fbdf81749008d980c17796097e645574a3b8c28ee313931068dad14fe", size = 102175, upload-time = "2026-03-01T22:07:08.4Z" }, + { url 
= "https://files.pythonhosted.org/packages/aa/65/b39290f1d892a9dd671d1c722014ca062a9c35d60885d57e5375db0404b5/yarl-1.23.0-cp314-cp314-win32.whl", hash = "sha256:c8aa34a5c864db1087d911a0b902d60d203ea3607d91f615acd3f3108ac32169", size = 83871, upload-time = "2026-03-01T22:07:09.968Z" }, + { url = "https://files.pythonhosted.org/packages/a9/5b/9b92f54c784c26e2a422e55a8d2607ab15b7ea3349e28359282f84f01d43/yarl-1.23.0-cp314-cp314-win_amd64.whl", hash = "sha256:63e92247f383c85ab00dd0091e8c3fa331a96e865459f5ee80353c70a4a42d70", size = 89093, upload-time = "2026-03-01T22:07:11.501Z" }, + { url = "https://files.pythonhosted.org/packages/e0/7d/8a84dc9381fd4412d5e7ff04926f9865f6372b4c2fd91e10092e65d29eb8/yarl-1.23.0-cp314-cp314-win_arm64.whl", hash = "sha256:70efd20be968c76ece7baa8dafe04c5be06abc57f754d6f36f3741f7aa7a208e", size = 83384, upload-time = "2026-03-01T22:07:13.069Z" }, + { url = "https://files.pythonhosted.org/packages/dd/8d/d2fad34b1c08aa161b74394183daa7d800141aaaee207317e82c790b418d/yarl-1.23.0-cp314-cp314t-macosx_10_15_universal2.whl", hash = "sha256:9a18d6f9359e45722c064c97464ec883eb0e0366d33eda61cb19a244bf222679", size = 131019, upload-time = "2026-03-01T22:07:14.903Z" }, + { url = "https://files.pythonhosted.org/packages/19/ff/33009a39d3ccf4b94d7d7880dfe17fb5816c5a4fe0096d9b56abceea9ac7/yarl-1.23.0-cp314-cp314t-macosx_10_15_x86_64.whl", hash = "sha256:2803ed8b21ca47a43da80a6fd1ed3019d30061f7061daa35ac54f63933409412", size = 89894, upload-time = "2026-03-01T22:07:17.372Z" }, + { url = "https://files.pythonhosted.org/packages/0c/f1/dab7ac5e7306fb79c0190766a3c00b4cb8d09a1f390ded68c85a5934faf5/yarl-1.23.0-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:394906945aa8b19fc14a61cf69743a868bb8c465efe85eee687109cc540b98f4", size = 89979, upload-time = "2026-03-01T22:07:19.361Z" }, + { url = 
"https://files.pythonhosted.org/packages/aa/b1/08e95f3caee1fad6e65017b9f26c1d79877b502622d60e517de01e72f95d/yarl-1.23.0-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:71d006bee8397a4a89f469b8deb22469fe7508132d3c17fa6ed871e79832691c", size = 95943, upload-time = "2026-03-01T22:07:21.266Z" }, + { url = "https://files.pythonhosted.org/packages/c0/cc/6409f9018864a6aa186c61175b977131f373f1988e198e031236916e87e4/yarl-1.23.0-cp314-cp314t-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:62694e275c93d54f7ccedcfef57d42761b2aad5234b6be1f3e3026cae4001cd4", size = 88786, upload-time = "2026-03-01T22:07:23.129Z" }, + { url = "https://files.pythonhosted.org/packages/76/40/cc22d1d7714b717fde2006fad2ced5efe5580606cb059ae42117542122f3/yarl-1.23.0-cp314-cp314t-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:a31de1613658308efdb21ada98cbc86a97c181aa050ba22a808120bb5be3ab94", size = 101307, upload-time = "2026-03-01T22:07:24.689Z" }, + { url = "https://files.pythonhosted.org/packages/8f/0d/476c38e85ddb4c6ec6b20b815bdd779aa386a013f3d8b85516feee55c8dc/yarl-1.23.0-cp314-cp314t-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:fb1e8b8d66c278b21d13b0a7ca22c41dd757a7c209c6b12c313e445c31dd3b28", size = 100904, upload-time = "2026-03-01T22:07:26.287Z" }, + { url = "https://files.pythonhosted.org/packages/72/32/0abe4a76d59adf2081dcb0397168553ece4616ada1c54d1c49d8936c74f8/yarl-1.23.0-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:50f9d8d531dfb767c565f348f33dd5139a6c43f5cbdf3f67da40d54241df93f6", size = 97728, upload-time = "2026-03-01T22:07:27.906Z" }, + { url = "https://files.pythonhosted.org/packages/b7/35/7b30f4810fba112f60f5a43237545867504e15b1c7647a785fbaf588fac2/yarl-1.23.0-cp314-cp314t-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = 
"sha256:575aa4405a656e61a540f4a80eaa5260f2a38fff7bfdc4b5f611840d76e9e277", size = 95964, upload-time = "2026-03-01T22:07:30.198Z" }, + { url = "https://files.pythonhosted.org/packages/2d/86/ed7a73ab85ef00e8bb70b0cb5421d8a2a625b81a333941a469a6f4022828/yarl-1.23.0-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:041b1a4cefacf65840b4e295c6985f334ba83c30607441ae3cf206a0eed1a2e4", size = 95882, upload-time = "2026-03-01T22:07:32.132Z" }, + { url = "https://files.pythonhosted.org/packages/19/90/d56967f61a29d8498efb7afb651e0b2b422a1e9b47b0ab5f4e40a19b699b/yarl-1.23.0-cp314-cp314t-musllinux_1_2_armv7l.whl", hash = "sha256:d38c1e8231722c4ce40d7593f28d92b5fc72f3e9774fe73d7e800ec32299f63a", size = 90797, upload-time = "2026-03-01T22:07:34.404Z" }, + { url = "https://files.pythonhosted.org/packages/72/00/8b8f76909259f56647adb1011d7ed8b321bcf97e464515c65016a47ecdf0/yarl-1.23.0-cp314-cp314t-musllinux_1_2_ppc64le.whl", hash = "sha256:d53834e23c015ee83a99377db6e5e37d8484f333edb03bd15b4bc312cc7254fb", size = 101023, upload-time = "2026-03-01T22:07:35.953Z" }, + { url = "https://files.pythonhosted.org/packages/ac/e2/cab11b126fb7d440281b7df8e9ddbe4851e70a4dde47a202b6642586b8d9/yarl-1.23.0-cp314-cp314t-musllinux_1_2_riscv64.whl", hash = "sha256:2e27c8841126e017dd2a054a95771569e6070b9ee1b133366d8b31beb5018a41", size = 96227, upload-time = "2026-03-01T22:07:37.594Z" }, + { url = "https://files.pythonhosted.org/packages/c2/9b/2c893e16bfc50e6b2edf76c1a9eb6cb0c744346197e74c65e99ad8d634d0/yarl-1.23.0-cp314-cp314t-musllinux_1_2_s390x.whl", hash = "sha256:76855800ac56f878847a09ce6dba727c93ca2d89c9e9d63002d26b916810b0a2", size = 100302, upload-time = "2026-03-01T22:07:39.334Z" }, + { url = "https://files.pythonhosted.org/packages/28/ec/5498c4e3a6d5f1003beb23405671c2eb9cdbf3067d1c80f15eeafe301010/yarl-1.23.0-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:e09fd068c2e169a7070d83d3bde728a4d48de0549f975290be3c108c02e499b4", size = 98202, upload-time = "2026-03-01T22:07:41.717Z" }, + 
{ url = "https://files.pythonhosted.org/packages/fe/c3/cd737e2d45e70717907f83e146f6949f20cc23cd4bf7b2688727763aa458/yarl-1.23.0-cp314-cp314t-win32.whl", hash = "sha256:73309162a6a571d4cbd3b6a1dcc703c7311843ae0d1578df6f09be4e98df38d4", size = 90558, upload-time = "2026-03-01T22:07:43.433Z" }, + { url = "https://files.pythonhosted.org/packages/e1/19/3774d162f6732d1cfb0b47b4140a942a35ca82bb19b6db1f80e9e7bdc8f8/yarl-1.23.0-cp314-cp314t-win_amd64.whl", hash = "sha256:4503053d296bc6e4cbd1fad61cf3b6e33b939886c4f249ba7c78b602214fabe2", size = 97610, upload-time = "2026-03-01T22:07:45.773Z" }, + { url = "https://files.pythonhosted.org/packages/51/47/3fa2286c3cb162c71cdb34c4224d5745a1ceceb391b2bd9b19b668a8d724/yarl-1.23.0-cp314-cp314t-win_arm64.whl", hash = "sha256:44bb7bef4ea409384e3f8bc36c063d77ea1b8d4a5b2706956c0d6695f07dcc25", size = 86041, upload-time = "2026-03-01T22:07:49.026Z" }, + { url = "https://files.pythonhosted.org/packages/69/68/c8739671f5699c7dc470580a4f821ef37c32c4cb0b047ce223a7f115757f/yarl-1.23.0-py3-none-any.whl", hash = "sha256:a2df6afe50dea8ae15fa34c9f824a3ee958d785fd5d089063d960bae1daa0a3f", size = 48288, upload-time = "2026-03-01T22:07:51.388Z" }, +]