From 58ee03b82849b2e31a172c3b8ec8c134a81aa79f Mon Sep 17 00:00:00 2001
From: aquariouseworkman
Date: Mon, 27 Apr 2026 02:53:15 -0400
Subject: [PATCH 1/2] Record: SmearGate BOS Fix + PR #1787 Base + SmearGate +
 LQER Asymmetric + Phased TTT

val_bpb = 1.06128 | ~15.95 MB | 8xH100 SXM

Key Change: SmearGate BOS Document Boundary Fix

Builds on the PR #1797 stack (PR #1787 base + SmearGate + LQER Asymmetric)
but fixes the SmearGate cross-document leakage bug identified by
@cocohearts in the PR #1797 audit.

The bug: SmearGate's 1-token causal lookback does not mask BOS positions,
so the final token of document N smears into the BOS of document N+1.

Credits

@nprime06 -- PR #1787 base stack
@romeerp -- CaseOps transform (PR #1729)
@dexhunter -- SmearGate + LQER (PR #1797)
@cocohearts -- Identifying the SmearGate BOS bug
@abaybektursun -- Score-first TTT (PR #549)
@clarkkev -- GPTQ SDClip + SP8192 (PR #1394)
---
 .../README.md                                      |   54 +
 .../lossless_caps.py                               |  833 ++++
 .../prepare_caseops_data.py                        |  177 +
 .../submission.json                                |   30 +
 ...pe_lossless_caps_caseops_v1_reserved.model      |  Bin 0 -> 366510 bytes
 .../train_gpt.py                                   | 3555 +++++++++++++++++
 .../train_seed42.log                               |  833 ++++
 7 files changed, 5482 insertions(+)
 create mode 100644 records/track_10min_16mb/2026-04-27_SmearGateBOSFix_PR1787Base_LQERAsym_PhasedTTT/README.md
 create mode 100644 records/track_10min_16mb/2026-04-27_SmearGateBOSFix_PR1787Base_LQERAsym_PhasedTTT/lossless_caps.py
 create mode 100644 records/track_10min_16mb/2026-04-27_SmearGateBOSFix_PR1787Base_LQERAsym_PhasedTTT/prepare_caseops_data.py
 create mode 100644 records/track_10min_16mb/2026-04-27_SmearGateBOSFix_PR1787Base_LQERAsym_PhasedTTT/submission.json
 create mode 100644 records/track_10min_16mb/2026-04-27_SmearGateBOSFix_PR1787Base_LQERAsym_PhasedTTT/tokenizers/fineweb_8192_bpe_lossless_caps_caseops_v1_reserved.model
 create mode 100644 records/track_10min_16mb/2026-04-27_SmearGateBOSFix_PR1787Base_LQERAsym_PhasedTTT/train_gpt.py
 create mode 100644 records/track_10min_16mb/2026-04-27_SmearGateBOSFix_PR1787Base_LQERAsym_PhasedTTT/train_seed42.log

diff --git a/records/track_10min_16mb/2026-04-27_SmearGateBOSFix_PR1787Base_LQERAsym_PhasedTTT/README.md b/records/track_10min_16mb/2026-04-27_SmearGateBOSFix_PR1787Base_LQERAsym_PhasedTTT/README.md
new file mode 100644
index 0000000000..12cfbc0460
--- /dev/null
+++ b/records/track_10min_16mb/2026-04-27_SmearGateBOSFix_PR1787Base_LQERAsym_PhasedTTT/README.md
@@ -0,0 +1,54 @@
+# Record: SmearGate BOS Fix + PR #1787 Base + SmearGate + LQER Asymmetric + Phased TTT
+
+**val_bpb = 1.06128** | **~15.95 MB** | 8xH100 SXM
+
+## Result
+
+| Seed | Pre-TTT BPB | Post-TTT BPB | Artifact (bytes) |
+|------|-------------|--------------|------------------|
+| 42 | 1.07406 | **1.06128** | 15,952,086 |
+
+Merged SOTA (PR #1493): **1.0810 BPB**. Delta: **-0.0197 BPB**. Clears the 0.005-nat threshold.
+
+## Key Change: SmearGate BOS Document Boundary Fix
+
+Builds on the PR #1797 stack (PR #1787 base + SmearGate + LQER Asymmetric) but fixes the **SmearGate cross-document leakage bug** identified by @cocohearts in the PR #1797 audit.
+
+The bug: SmearGate's 1-token causal lookback does not mask BOS positions, so the final token of document N smears into the BOS of document N+1.
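+
+A minimal sketch of the mechanism (illustrative only; the tensor names and the additive gate form are assumptions, not the exact train_gpt.py code; BOS id 1 matches the fix snippet below):
+
+    import torch
+
+    def smear(h: torch.Tensor, g: torch.Tensor, input_ids: torch.Tensor) -> torch.Tensor:
+        # h: (B, T, D) hidden states; g: (B, T-1, 1) learned gate in [0, 1].
+        # 1-token causal lookback: token t mixes in a gated copy of token t-1.
+        bos_mask = (input_ids[:, 1:] == 1).unsqueeze(-1)  # True at each document's BOS
+        g = g.masked_fill(bos_mask, 0.0)  # the fix: zero the gate at doc boundaries
+        out = h.clone()
+        # Without the mask, doc N's final token would smear into doc N+1's BOS here.
+        out[:, 1:] = out[:, 1:] + g * h[:, :-1]
+        return out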
+
+The fix (applied in both forward_logits and forward_ttt):
+
+    bos_mask = (input_ids[:, 1:] == 1).unsqueeze(-1)
+    g = g.masked_fill(bos_mask, 0.0)
+
+## Technique Stack
+
+| Component | Origin |
+|-----------|--------|
+| CaseOps bijective case transform | PR #1729 / PR #1736 |
+| SparseAttnGate | PR #1787 (nprime06) |
+| SmearGate + BOS fix | PR #1797 + this submission |
+| LQER asymmetric rank-4 | PR #1797 |
+| Phased TTT (score-first, 3 phases) | PR #1394 / PR #1736 |
+| PolarNS + MIN_LR=0.1 + FusedCE | PR #1787 |
+| Full Hessian GPTQ + Brotli | PR #1019 / PR #1530 |
+
+## Architecture
+
+11L x 512d x 8H/4KV, MLP 4x, LeakyReLU(0.5)^2, Partial RoPE (16/64 dims), layerwise LN scale, tied embeddings, logit softcap=30.0. Depth recurrence: layers 3-5 looped x2 (activated at frac=0.35). Parallel residuals from layer 8. XSA on all 11 layers. SmearGate window=12.
+
+## Compliance
+
+- Artifact <= 16,000,000 bytes: 15,952,086 bytes
+- train_time <= 600s: 599.6s
+- eval_time <= 600s: 519.5s
+- Issue #1017 Conditions 1-4: All satisfied. SmearGate BOS mask ensures no cross-document leakage.
+
+## Credits
+
+- @nprime06 -- PR #1787 base stack
+- @romeerp -- CaseOps transform (PR #1729)
+- @dexhunter -- SmearGate + LQER (PR #1797)
+- @cocohearts -- Identifying the SmearGate BOS bug
+- @abaybektursun -- Score-first TTT (PR #549)
+- @clarkkev -- GPTQ SDClip + SP8192 (PR #1394)
diff --git a/records/track_10min_16mb/2026-04-27_SmearGateBOSFix_PR1787Base_LQERAsym_PhasedTTT/lossless_caps.py b/records/track_10min_16mb/2026-04-27_SmearGateBOSFix_PR1787Base_LQERAsym_PhasedTTT/lossless_caps.py
new file mode 100644
index 0000000000..98e472f824
--- /dev/null
+++ b/records/track_10min_16mb/2026-04-27_SmearGateBOSFix_PR1787Base_LQERAsym_PhasedTTT/lossless_caps.py
@@ -0,0 +1,833 @@
+"""Lossless capitalization pre-encoding helpers.
+
+This module provides a narrow, reversible transform that only touches
+ASCII capital letters `A-Z`. Each uppercase ASCII letter is rewritten as
+`sentinel + lowercase(letter)`, where `sentinel` is a private-use Unicode
+character that is escaped by doubling if it appears literally in the
+input text.
+
+Example with the default sentinel `\\uE000`:
+
+    "The NASA Launch" -> "\\uE000the \\uE000n\\uE000a\\uE000s\\uE000a \\uE000launch"
+
+The transform is intentionally simple for v1:
+
+- lowercase ASCII letters are unchanged
+- uppercase ASCII letters become sentinel + lowercase letter
+- non-ASCII characters are left untouched
+- literal sentinel characters are escaped as sentinel + sentinel
+
+This makes the transform exactly invertible while allowing a downstream
+tokenizer to reuse lowercase subwords across case variants.
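+
+A round-trip doctest (encode and decode are exact inverses, so this holds
+for any input):
+
+    >>> s = encode_lossless_caps_v1("The NASA Launch")
+    >>> decode_lossless_caps_v1(s) == "The NASA Launch"
+    True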
+""" + +from __future__ import annotations + +import json +from pathlib import Path +from typing import Callable, Iterable + +LOSSLESS_CAPS_V1 = "lossless_caps_v1" +LOSSLESS_CAPS_V2 = "lossless_caps_v2" +LOSSLESS_CAPS_V3 = "lossless_caps_v3" +LOSSLESS_CAPS_V4 = "lossless_caps_v4" +LOSSLESS_CAPS_V5 = "lossless_caps_v5" +LOSSLESS_CAPS_V6 = "lossless_caps_v6" +LOSSLESS_CAPS_V7 = "lossless_caps_v7" +LOSSLESS_CAPS_CASEOPS_V1 = "lossless_caps_caseops_v1" +IDENTITY = "identity" +DEFAULT_SENTINEL = "\uE000" +DEFAULT_V2_TITLE = "\uE001" +DEFAULT_V2_ALLCAPS = "\uE002" +DEFAULT_V2_CAPNEXT = "\uE003" +DEFAULT_V2_ESC = "\uE004" +DEFAULT_V5_TITLE_MIN_LEN = 7 +DEFAULT_V6_ALLCAPS_MIN_LEN = 3 +DEFAULT_V7_ALLCAPS_MIN_LEN = 4 + + +class LosslessCapsError(ValueError): + """Raised when a transformed string is malformed.""" + + +def _is_ascii_upper(ch: str) -> bool: + return "A" <= ch <= "Z" + + +def _is_ascii_lower(ch: str) -> bool: + return "a" <= ch <= "z" + + +def _is_ascii_alpha(ch: str) -> bool: + return _is_ascii_lower(ch) or _is_ascii_upper(ch) + + +def _validate_distinct_single_chars(*chars: str) -> None: + if any(len(ch) != 1 for ch in chars): + raise ValueError("all control characters must be exactly one character") + if len(set(chars)) != len(chars): + raise ValueError("control characters must be distinct") + + +def encode_lossless_caps_v1(text: str, *, sentinel: str = DEFAULT_SENTINEL) -> str: + """Encode ASCII capitals reversibly using a one-character sentinel.""" + if len(sentinel) != 1: + raise ValueError("sentinel must be exactly one character") + out: list[str] = [] + for ch in text: + if ch == sentinel: + out.append(sentinel) + out.append(sentinel) + elif _is_ascii_upper(ch): + out.append(sentinel) + out.append(ch.lower()) + else: + out.append(ch) + return "".join(out) + + +def decode_lossless_caps_v1(text: str, *, sentinel: str = DEFAULT_SENTINEL) -> str: + """Decode the `lossless_caps_v1` transform back to the original text.""" + if len(sentinel) != 1: + raise ValueError("sentinel must be exactly one character") + out: list[str] = [] + i = 0 + n = len(text) + while i < n: + ch = text[i] + if ch != sentinel: + out.append(ch) + i += 1 + continue + if i + 1 >= n: + raise LosslessCapsError("dangling capitalization sentinel at end of string") + nxt = text[i + 1] + if nxt == sentinel: + out.append(sentinel) + elif _is_ascii_lower(nxt): + out.append(nxt.upper()) + else: + raise LosslessCapsError( + f"invalid sentinel escape sequence {sentinel + nxt!r}; " + "expected doubled sentinel or sentinel + lowercase ASCII letter" + ) + i += 2 + return "".join(out) + + +def encode_lossless_caps_v2( + text: str, + *, + title: str = DEFAULT_V2_TITLE, + allcaps: str = DEFAULT_V2_ALLCAPS, + capnext: str = DEFAULT_V2_CAPNEXT, + esc: str = DEFAULT_V2_ESC, +) -> str: + """Encode ASCII word capitalization with cheap word-level markers. 
+ + Rules over maximal ASCII alphabetic runs: + - lowercase words stay unchanged + - TitleCase words become `title + lowercase(word)` + - ALLCAPS words become `allcaps + lowercase(word)` + - mixed-case words use: + - optional `title` when the first letter is uppercase + - `capnext + lowercase(letter)` for subsequent uppercase letters + - literal control characters are escaped as `esc + literal` + """ + _validate_distinct_single_chars(title, allcaps, capnext, esc) + controls = {title, allcaps, capnext, esc} + out: list[str] = [] + i = 0 + n = len(text) + while i < n: + ch = text[i] + if ch in controls: + out.append(esc) + out.append(ch) + i += 1 + continue + if not _is_ascii_alpha(ch): + out.append(ch) + i += 1 + continue + + j = i + 1 + while j < n and _is_ascii_alpha(text[j]): + j += 1 + word = text[i:j] + lower_word = word.lower() + + if word.islower(): + out.append(word) + elif len(word) >= 2 and word.isupper(): + out.append(allcaps) + out.append(lower_word) + elif _is_ascii_upper(word[0]) and word[1:].islower(): + out.append(title) + out.append(lower_word) + else: + if _is_ascii_upper(word[0]): + out.append(title) + out.append(lower_word[0]) + for orig_ch, lower_ch in zip(word[1:], lower_word[1:], strict=True): + if _is_ascii_upper(orig_ch): + out.append(capnext) + out.append(lower_ch) + i = j + return "".join(out) + + +def decode_lossless_caps_v2( + text: str, + *, + title: str = DEFAULT_V2_TITLE, + allcaps: str = DEFAULT_V2_ALLCAPS, + capnext: str = DEFAULT_V2_CAPNEXT, + esc: str = DEFAULT_V2_ESC, +) -> str: + """Decode the `lossless_caps_v2` transform back to the original text.""" + _validate_distinct_single_chars(title, allcaps, capnext, esc) + out: list[str] = [] + pending_escape = False + pending_word_mode: str | None = None + active_allcaps = False + pending_capnext = False + in_ascii_word = False + + for ch in text: + if pending_escape: + if pending_word_mode is not None and not _is_ascii_alpha(ch): + raise LosslessCapsError("escaped control char cannot satisfy pending word capitalization mode") + out.append(ch) + pending_escape = False + if _is_ascii_alpha(ch): + in_ascii_word = True + else: + in_ascii_word = False + active_allcaps = False + continue + + if ch == esc: + pending_escape = True + continue + if ch == title: + if pending_word_mode is not None or in_ascii_word or pending_capnext: + raise LosslessCapsError("invalid title marker placement") + pending_word_mode = "title" + continue + if ch == allcaps: + if pending_word_mode is not None or in_ascii_word or pending_capnext: + raise LosslessCapsError("invalid allcaps marker placement") + pending_word_mode = "allcaps" + continue + if ch == capnext: + if pending_capnext: + raise LosslessCapsError("duplicate capnext marker") + pending_capnext = True + continue + + if _is_ascii_alpha(ch): + at_word_start = not in_ascii_word + if at_word_start: + if pending_word_mode == "allcaps": + out.append(ch.upper()) + active_allcaps = True + elif pending_word_mode == "title": + out.append(ch.upper()) + elif pending_capnext: + out.append(ch.upper()) + else: + out.append(ch) + pending_word_mode = None + pending_capnext = False + in_ascii_word = True + continue + + if pending_word_mode is not None: + raise LosslessCapsError("word capitalization marker leaked into the middle of a word") + if active_allcaps: + out.append(ch.upper()) + elif pending_capnext: + out.append(ch.upper()) + else: + out.append(ch) + pending_capnext = False + continue + + if pending_word_mode is not None or pending_capnext: + raise LosslessCapsError("capitalization 
marker not followed by an ASCII letter") + out.append(ch) + in_ascii_word = False + active_allcaps = False + + if pending_escape: + raise LosslessCapsError("dangling escape marker at end of string") + if pending_word_mode is not None or pending_capnext: + raise LosslessCapsError("dangling capitalization marker at end of string") + return "".join(out) + + +def encode_lossless_caps_v3( + text: str, + *, + title: str = DEFAULT_V2_TITLE, + allcaps: str = DEFAULT_V2_ALLCAPS, + esc: str = DEFAULT_V2_ESC, +) -> str: + """Encode only common word-level capitalization patterns. + + Rules over maximal ASCII alphabetic runs: + - lowercase words stay unchanged + - TitleCase words become `title + lowercase(word)` + - ALLCAPS words become `allcaps + lowercase(word)` + - all other mixed-case words are left unchanged + - literal control characters are escaped as `esc + literal` + """ + _validate_distinct_single_chars(title, allcaps, esc) + controls = {title, allcaps, esc} + out: list[str] = [] + i = 0 + n = len(text) + while i < n: + ch = text[i] + if ch in controls: + out.append(esc) + out.append(ch) + i += 1 + continue + if not _is_ascii_alpha(ch): + out.append(ch) + i += 1 + continue + + j = i + 1 + while j < n and _is_ascii_alpha(text[j]): + j += 1 + word = text[i:j] + + if word.islower(): + out.append(word) + elif len(word) >= 2 and word.isupper(): + out.append(allcaps) + out.append(word.lower()) + elif _is_ascii_upper(word[0]) and word[1:].islower(): + out.append(title) + out.append(word.lower()) + else: + out.append(word) + i = j + return "".join(out) + + +def decode_lossless_caps_v3( + text: str, + *, + title: str = DEFAULT_V2_TITLE, + allcaps: str = DEFAULT_V2_ALLCAPS, + esc: str = DEFAULT_V2_ESC, +) -> str: + """Decode the `lossless_caps_v3` transform back to the original text.""" + _validate_distinct_single_chars(title, allcaps, esc) + out: list[str] = [] + pending_escape = False + pending_word_mode: str | None = None + active_allcaps = False + in_ascii_word = False + + for ch in text: + if pending_escape: + if pending_word_mode is not None and not _is_ascii_alpha(ch): + raise LosslessCapsError("escaped control char cannot satisfy pending word capitalization mode") + out.append(ch) + pending_escape = False + if _is_ascii_alpha(ch): + in_ascii_word = True + else: + in_ascii_word = False + active_allcaps = False + continue + + if ch == esc: + pending_escape = True + continue + if ch == title: + if pending_word_mode is not None or in_ascii_word: + raise LosslessCapsError("invalid title marker placement") + pending_word_mode = "title" + continue + if ch == allcaps: + if pending_word_mode is not None or in_ascii_word: + raise LosslessCapsError("invalid allcaps marker placement") + pending_word_mode = "allcaps" + continue + + if _is_ascii_alpha(ch): + at_word_start = not in_ascii_word + if at_word_start: + if pending_word_mode == "allcaps": + out.append(ch.upper()) + active_allcaps = True + elif pending_word_mode == "title": + out.append(ch.upper()) + else: + out.append(ch) + pending_word_mode = None + in_ascii_word = True + continue + + if pending_word_mode is not None: + raise LosslessCapsError("word capitalization marker leaked into the middle of a word") + out.append(ch.upper() if active_allcaps else ch) + continue + + if pending_word_mode is not None: + raise LosslessCapsError("capitalization marker not followed by an ASCII letter") + out.append(ch) + in_ascii_word = False + active_allcaps = False + + if pending_escape: + raise LosslessCapsError("dangling escape marker at end of string") + if 
pending_word_mode is not None: + raise LosslessCapsError("dangling capitalization marker at end of string") + return "".join(out) + + +def encode_lossless_caps_v4( + text: str, + *, + allcaps: str = DEFAULT_V2_ALLCAPS, + esc: str = DEFAULT_V2_ESC, +) -> str: + """Encode only ALLCAPS ASCII words, leaving all other case untouched.""" + _validate_distinct_single_chars(allcaps, esc) + controls = {allcaps, esc} + out: list[str] = [] + i = 0 + n = len(text) + while i < n: + ch = text[i] + if ch in controls: + out.append(esc) + out.append(ch) + i += 1 + continue + if not _is_ascii_alpha(ch): + out.append(ch) + i += 1 + continue + j = i + 1 + while j < n and _is_ascii_alpha(text[j]): + j += 1 + word = text[i:j] + if len(word) >= 2 and word.isupper(): + out.append(allcaps) + out.append(word.lower()) + else: + out.append(word) + i = j + return "".join(out) + + +def decode_lossless_caps_v4( + text: str, + *, + allcaps: str = DEFAULT_V2_ALLCAPS, + esc: str = DEFAULT_V2_ESC, +) -> str: + """Decode the `lossless_caps_v4` transform back to the original text.""" + _validate_distinct_single_chars(allcaps, esc) + out: list[str] = [] + pending_escape = False + pending_allcaps = False + in_ascii_word = False + active_allcaps = False + + for ch in text: + if pending_escape: + if pending_allcaps and not _is_ascii_alpha(ch): + raise LosslessCapsError("escaped control char cannot satisfy pending allcaps mode") + out.append(ch) + pending_escape = False + if _is_ascii_alpha(ch): + in_ascii_word = True + else: + in_ascii_word = False + active_allcaps = False + continue + + if ch == esc: + pending_escape = True + continue + if ch == allcaps: + if pending_allcaps or in_ascii_word: + raise LosslessCapsError("invalid allcaps marker placement") + pending_allcaps = True + continue + + if _is_ascii_alpha(ch): + if not in_ascii_word: + active_allcaps = pending_allcaps + pending_allcaps = False + in_ascii_word = True + out.append(ch.upper() if active_allcaps else ch) + continue + + if pending_allcaps: + raise LosslessCapsError("allcaps marker not followed by an ASCII letter") + out.append(ch) + in_ascii_word = False + active_allcaps = False + + if pending_escape: + raise LosslessCapsError("dangling escape marker at end of string") + if pending_allcaps: + raise LosslessCapsError("dangling allcaps marker at end of string") + return "".join(out) + + +def encode_lossless_caps_v5( + text: str, + *, + title: str = DEFAULT_V2_TITLE, + allcaps: str = DEFAULT_V2_ALLCAPS, + esc: str = DEFAULT_V2_ESC, + title_min_len: int = DEFAULT_V5_TITLE_MIN_LEN, +) -> str: + """Encode ALLCAPS words and only sufficiently long TitleCase words.""" + _validate_distinct_single_chars(title, allcaps, esc) + controls = {title, allcaps, esc} + out: list[str] = [] + i = 0 + n = len(text) + while i < n: + ch = text[i] + if ch in controls: + out.append(esc) + out.append(ch) + i += 1 + continue + if not _is_ascii_alpha(ch): + out.append(ch) + i += 1 + continue + j = i + 1 + while j < n and _is_ascii_alpha(text[j]): + j += 1 + word = text[i:j] + if len(word) >= 2 and word.isupper(): + out.append(allcaps) + out.append(word.lower()) + elif len(word) >= title_min_len and _is_ascii_upper(word[0]) and word[1:].islower(): + out.append(title) + out.append(word.lower()) + else: + out.append(word) + i = j + return "".join(out) + + +def decode_lossless_caps_v5( + text: str, + *, + title: str = DEFAULT_V2_TITLE, + allcaps: str = DEFAULT_V2_ALLCAPS, + esc: str = DEFAULT_V2_ESC, +) -> str: + """Decode the `lossless_caps_v5` transform back to the original text.""" + return 
decode_lossless_caps_v3(text, title=title, allcaps=allcaps, esc=esc) + + +def encode_lossless_caps_v6( + text: str, + *, + allcaps: str = DEFAULT_V2_ALLCAPS, + esc: str = DEFAULT_V2_ESC, + allcaps_min_len: int = DEFAULT_V6_ALLCAPS_MIN_LEN, +) -> str: + """Encode only ALLCAPS words with length >= allcaps_min_len.""" + _validate_distinct_single_chars(allcaps, esc) + controls = {allcaps, esc} + out: list[str] = [] + i = 0 + n = len(text) + while i < n: + ch = text[i] + if ch in controls: + out.append(esc) + out.append(ch) + i += 1 + continue + if not _is_ascii_alpha(ch): + out.append(ch) + i += 1 + continue + j = i + 1 + while j < n and _is_ascii_alpha(text[j]): + j += 1 + word = text[i:j] + if len(word) >= allcaps_min_len and word.isupper(): + out.append(allcaps) + out.append(word.lower()) + else: + out.append(word) + i = j + return "".join(out) + + +def decode_lossless_caps_v6( + text: str, + *, + allcaps: str = DEFAULT_V2_ALLCAPS, + esc: str = DEFAULT_V2_ESC, +) -> str: + """Decode the `lossless_caps_v6` transform back to the original text.""" + return decode_lossless_caps_v4(text, allcaps=allcaps, esc=esc) + + +def encode_lossless_caps_v7( + text: str, + *, + allcaps: str = DEFAULT_V2_ALLCAPS, + esc: str = DEFAULT_V2_ESC, + allcaps_min_len: int = DEFAULT_V7_ALLCAPS_MIN_LEN, +) -> str: + """Encode only ALLCAPS words with length >= 4.""" + return encode_lossless_caps_v6( + text, + allcaps=allcaps, + esc=esc, + allcaps_min_len=allcaps_min_len, + ) + + +def decode_lossless_caps_v7( + text: str, + *, + allcaps: str = DEFAULT_V2_ALLCAPS, + esc: str = DEFAULT_V2_ESC, +) -> str: + """Decode the `lossless_caps_v7` transform back to the original text.""" + return decode_lossless_caps_v6(text, allcaps=allcaps, esc=esc) + + +def get_text_transform(name: str | None) -> Callable[[str], str]: + """Return the forward text transform for the given config name.""" + normalized = IDENTITY if name in {None, "", IDENTITY} else str(name) + if normalized == IDENTITY: + return lambda text: text + if normalized == LOSSLESS_CAPS_V1: + return encode_lossless_caps_v1 + if normalized == LOSSLESS_CAPS_V2: + return encode_lossless_caps_v2 + if normalized == LOSSLESS_CAPS_V3: + return encode_lossless_caps_v3 + if normalized == LOSSLESS_CAPS_V4: + return encode_lossless_caps_v4 + if normalized == LOSSLESS_CAPS_V5: + return encode_lossless_caps_v5 + if normalized == LOSSLESS_CAPS_V6: + return encode_lossless_caps_v6 + if normalized == LOSSLESS_CAPS_V7: + return encode_lossless_caps_v7 + if normalized == LOSSLESS_CAPS_CASEOPS_V1: + return encode_lossless_caps_v2 + raise ValueError(f"unsupported text_transform={name!r}") + + +def get_text_inverse_transform(name: str | None) -> Callable[[str], str]: + """Return the inverse transform for the given config name.""" + normalized = IDENTITY if name in {None, "", IDENTITY} else str(name) + if normalized == IDENTITY: + return lambda text: text + if normalized == LOSSLESS_CAPS_V1: + return decode_lossless_caps_v1 + if normalized == LOSSLESS_CAPS_V2: + return decode_lossless_caps_v2 + if normalized == LOSSLESS_CAPS_V3: + return decode_lossless_caps_v3 + if normalized == LOSSLESS_CAPS_V4: + return decode_lossless_caps_v4 + if normalized == LOSSLESS_CAPS_V5: + return decode_lossless_caps_v5 + if normalized == LOSSLESS_CAPS_V6: + return decode_lossless_caps_v6 + if normalized == LOSSLESS_CAPS_V7: + return decode_lossless_caps_v7 + if normalized == LOSSLESS_CAPS_CASEOPS_V1: + return decode_lossless_caps_v2 + raise ValueError(f"unsupported text_transform={name!r}") + + +def 
normalize_text_transform_name(name: str | None) -> str: + """Normalize empty/None transform names to the identity transform.""" + return IDENTITY if name in {None, "", IDENTITY} else str(name) + + +def get_text_transform_control_symbols(name: str | None) -> list[str]: + """Return reserved control symbols used by a transform, if any.""" + normalized = normalize_text_transform_name(name) + if normalized == IDENTITY: + return [] + if normalized == LOSSLESS_CAPS_V1: + return [DEFAULT_SENTINEL] + if normalized == LOSSLESS_CAPS_V2: + return [DEFAULT_V2_TITLE, DEFAULT_V2_ALLCAPS, DEFAULT_V2_CAPNEXT, DEFAULT_V2_ESC] + if normalized == LOSSLESS_CAPS_CASEOPS_V1: + return [DEFAULT_V2_TITLE, DEFAULT_V2_ALLCAPS, DEFAULT_V2_CAPNEXT, DEFAULT_V2_ESC] + if normalized in {LOSSLESS_CAPS_V3, LOSSLESS_CAPS_V5}: + return [DEFAULT_V2_TITLE, DEFAULT_V2_ALLCAPS, DEFAULT_V2_ESC] + if normalized in {LOSSLESS_CAPS_V4, LOSSLESS_CAPS_V6, LOSSLESS_CAPS_V7}: + return [DEFAULT_V2_ALLCAPS, DEFAULT_V2_ESC] + raise ValueError(f"unsupported text_transform={name!r}") + + +def infer_text_transform_from_manifest(tokenizer_path: str | Path) -> str: + """Best-effort lookup of a tokenizer's text transform from a local manifest.""" + tokenizer_path = Path(tokenizer_path).expanduser().resolve() + manifest_candidates = [ + tokenizer_path.parent.parent / "manifest.json", + tokenizer_path.parent / "manifest.json", + ] + for manifest_path in manifest_candidates: + if not manifest_path.is_file(): + continue + try: + payload = json.loads(manifest_path.read_text(encoding="utf-8")) + except (OSError, json.JSONDecodeError): + continue + tokenizers = payload.get("tokenizers") + if not isinstance(tokenizers, list): + continue + for tokenizer_meta in tokenizers: + if not isinstance(tokenizer_meta, dict): + continue + model_path = tokenizer_meta.get("model_path") or tokenizer_meta.get("path") + if not model_path: + continue + candidate = (manifest_path.parent / str(model_path)).resolve() + if candidate == tokenizer_path: + return normalize_text_transform_name(tokenizer_meta.get("text_transform")) + return IDENTITY + + +def surface_piece_original_byte_counts( + surfaces: Iterable[str], + *, + text_transform_name: str | None = None, + sentinel: str = DEFAULT_SENTINEL, +) -> list[int]: + """Return exact original UTF-8 byte counts contributed by each surface piece. + + `surfaces` must be the exact decoded text fragments emitted by SentencePiece + in order, e.g. `piece.surface` from `encode_as_immutable_proto`. 
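+
+    An illustrative doctest (hand-built pieces; ``\\ue000`` is the default
+    ``lossless_caps_v1`` sentinel, and "The " / "NASA" are 4 original bytes each):
+
+    >>> surface_piece_original_byte_counts(
+    ...     ["\\ue000the ", "\\ue000n\\ue000a\\ue000s\\ue000a"],
+    ...     text_transform_name="lossless_caps_v1",
+    ... )
+    [4, 4]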
+ """ + normalized = normalize_text_transform_name(text_transform_name) + if normalized == IDENTITY: + return [len(surface.encode("utf-8")) for surface in surfaces] + if normalized == LOSSLESS_CAPS_V1: + if len(sentinel) != 1: + raise ValueError("sentinel must be exactly one character") + sentinel_bytes = len(sentinel.encode("utf-8")) + pending_sentinel = False + counts: list[int] = [] + for surface in surfaces: + piece_bytes = 0 + for ch in surface: + if pending_sentinel: + if ch == sentinel: + piece_bytes += sentinel_bytes + elif _is_ascii_lower(ch): + piece_bytes += 1 + else: + raise LosslessCapsError( + f"invalid continuation {ch!r} after capitalization sentinel" + ) + pending_sentinel = False + continue + if ch == sentinel: + pending_sentinel = True + else: + piece_bytes += len(ch.encode("utf-8")) + counts.append(piece_bytes) + if pending_sentinel: + raise LosslessCapsError("dangling capitalization sentinel across piece boundary") + return counts + if normalized not in {LOSSLESS_CAPS_V2, LOSSLESS_CAPS_V3, LOSSLESS_CAPS_V4, LOSSLESS_CAPS_V5, LOSSLESS_CAPS_V6, LOSSLESS_CAPS_V7, LOSSLESS_CAPS_CASEOPS_V1}: + raise ValueError(f"unsupported text_transform={text_transform_name!r}") + + title = DEFAULT_V2_TITLE + allcaps = DEFAULT_V2_ALLCAPS + capnext = DEFAULT_V2_CAPNEXT + esc = DEFAULT_V2_ESC + if normalized in {LOSSLESS_CAPS_V2, LOSSLESS_CAPS_CASEOPS_V1}: + _validate_distinct_single_chars(title, allcaps, capnext, esc) + elif normalized in {LOSSLESS_CAPS_V4, LOSSLESS_CAPS_V6, LOSSLESS_CAPS_V7}: + _validate_distinct_single_chars(allcaps, esc) + else: + _validate_distinct_single_chars(title, allcaps, esc) + pending_escape = False + pending_word_mode: str | None = None + active_allcaps = False + pending_capnext = False + in_ascii_word = False + counts: list[int] = [] + for surface in surfaces: + piece_bytes = 0 + for ch in surface: + if pending_escape: + if pending_word_mode is not None and not _is_ascii_alpha(ch): + raise LosslessCapsError("escaped control char cannot satisfy pending word capitalization mode") + piece_bytes += len(ch.encode("utf-8")) + pending_escape = False + if _is_ascii_alpha(ch): + in_ascii_word = True + else: + in_ascii_word = False + active_allcaps = False + continue + if ch == esc: + pending_escape = True + continue + if normalized in {LOSSLESS_CAPS_V2, LOSSLESS_CAPS_V3, LOSSLESS_CAPS_V5, LOSSLESS_CAPS_CASEOPS_V1} and ch == title: + if pending_word_mode is not None or in_ascii_word or pending_capnext: + raise LosslessCapsError("invalid title marker placement") + pending_word_mode = "title" + continue + if ch == allcaps: + if pending_word_mode is not None or in_ascii_word or pending_capnext: + raise LosslessCapsError("invalid allcaps marker placement") + pending_word_mode = "allcaps" + continue + if normalized in {LOSSLESS_CAPS_V2, LOSSLESS_CAPS_CASEOPS_V1} and ch == capnext: + if pending_capnext: + raise LosslessCapsError("duplicate capnext marker") + pending_capnext = True + continue + + if _is_ascii_alpha(ch): + at_word_start = not in_ascii_word + if at_word_start: + piece_bytes += 1 + active_allcaps = pending_word_mode == "allcaps" + pending_word_mode = None + pending_capnext = False + in_ascii_word = True + continue + if pending_word_mode is not None: + raise LosslessCapsError("word capitalization marker leaked into the middle of a word") + piece_bytes += 1 + pending_capnext = False + continue + + if pending_word_mode is not None or pending_capnext: + raise LosslessCapsError("capitalization marker not followed by an ASCII letter") + piece_bytes += 
len(ch.encode("utf-8")) + in_ascii_word = False + active_allcaps = False + counts.append(piece_bytes) + if pending_escape: + raise LosslessCapsError("dangling escape marker across piece boundary") + if pending_word_mode is not None or pending_capnext: + raise LosslessCapsError("dangling capitalization marker across piece boundary") + return counts diff --git a/records/track_10min_16mb/2026-04-27_SmearGateBOSFix_PR1787Base_LQERAsym_PhasedTTT/prepare_caseops_data.py b/records/track_10min_16mb/2026-04-27_SmearGateBOSFix_PR1787Base_LQERAsym_PhasedTTT/prepare_caseops_data.py new file mode 100644 index 0000000000..5c3f13e69c --- /dev/null +++ b/records/track_10min_16mb/2026-04-27_SmearGateBOSFix_PR1787Base_LQERAsym_PhasedTTT/prepare_caseops_data.py @@ -0,0 +1,177 @@ +"""Prepare CaseOps-tokenized FineWeb shards + per-token byte sidecar. + +CaseOps (``lossless_caps_caseops_v1``) is a bijective, character-level text +transform that introduces four operator tokens in place of explicit +capitalization: TITLE, ALLCAPS, CAPNEXT, ESC. The transform is fully +reversible — no information is lost relative to the untransformed UTF-8 +text, so BPB stays computable on TRUE byte counts. + +Forward pipeline: + 1. Read the canonical FineWeb-10B doc stream (``docs_selected.jsonl`` + produced by ``data/download_hf_docs_and_tokenize.py`` in the root repo). + 2. Apply ``encode_lossless_caps_v2`` (the caseops_v1 alias) to each doc. + 3. Tokenize with the shipped SP model + ``tokenizers/fineweb_8192_bpe_lossless_caps_caseops_v1_reserved.model`` + (reserves TITLE/ALLCAPS/CAPNEXT/ESC + sentinel as user_defined_symbols). + 4. Write uint16 train/val shards (``fineweb_{train,val}_XXXXXX.bin``). + 5. For the VAL stream only, emit per-token byte sidecar shards + (``fineweb_val_bytes_XXXXXX.bin``, uint16 parallel arrays) that record + each token's ORIGINAL pre-transform UTF-8 byte count. BPB is computed + from these canonical bytes so the score is on the untransformed text + (not the transformed representation). + +Output layout — matches what ``train_gpt.py`` expects under +``DATA_DIR=./data`` with ``CASEOPS_ENABLED=1``: + + data/datasets/fineweb10B_sp8192_caseops/datasets/ + tokenizers/fineweb_8192_bpe_lossless_caps_caseops_v1_reserved.model + datasets/fineweb10B_sp8192_lossless_caps_caseops_v1_reserved/ + fineweb_train_000000.bin + fineweb_train_000001.bin + ... + fineweb_val_000000.bin + fineweb_val_bytes_000000.bin + +Usage: + + python3 prepare_caseops_data.py \\ + --docs ./fineweb10B_raw/docs_selected.jsonl \\ + --out ./data/datasets/fineweb10B_sp8192_caseops/datasets \\ + --sp ./tokenizers/fineweb_8192_bpe_lossless_caps_caseops_v1_reserved.model + +Requirements: sentencepiece, numpy. CPU-only. Runs once; reused across seeds. +""" +from __future__ import annotations + +import argparse +import json +import pathlib +import struct +import sys + +import numpy as np +import sentencepiece as spm + +# Local import — lossless_caps.py ships next to this script. 
+sys.path.insert(0, str(pathlib.Path(__file__).resolve().parent)) +from lossless_caps import ( # noqa: E402 + LOSSLESS_CAPS_CASEOPS_V1, + encode_lossless_caps_v2, + surface_piece_original_byte_counts, +) + + +SHARD_MAGIC = 20240520 +SHARD_VERSION = 1 +SHARD_TOKENS = 10_000_000 # tokens per shard — matches the main pipeline +BOS_ID = 1 # SP model's control token; train_gpt.py:_find_docs requires BOS per doc + + +def _write_shard(out_path: pathlib.Path, arr: np.ndarray) -> None: + """Write a uint16 shard in the standard header-prefixed format.""" + assert arr.dtype == np.uint16 + header = np.zeros(256, dtype=np.int32) + header[0] = SHARD_MAGIC + header[1] = SHARD_VERSION + header[2] = int(arr.size) + with out_path.open("wb") as fh: + fh.write(header.tobytes()) + fh.write(arr.tobytes()) + + +def _iter_docs(docs_path: pathlib.Path): + """Yield doc strings from a jsonl file (one json object per line).""" + with docs_path.open("r", encoding="utf-8") as fh: + for line in fh: + line = line.strip() + if not line: + continue + obj = json.loads(line) + # Support both {"text": ...} and raw strings. + yield obj["text"] if isinstance(obj, dict) else obj + + +def _token_original_byte_counts( + sp: spm.SentencePieceProcessor, + original_text: str, + transformed_text: str, +) -> np.ndarray: + """Per-token canonical (pre-transform) UTF-8 byte counts. + + Delegates to ``surface_piece_original_byte_counts`` in ``lossless_caps.py`` + — the canonical exporter used by the PR #1729 / HF-hosted CaseOps dataset. + Operator pieces (U+E001..U+E004) contribute 0 original bytes; letter pieces + contribute their pre-transform UTF-8 byte count. + """ + proto = sp.encode_as_immutable_proto(transformed_text) + byte_counts = surface_piece_original_byte_counts( + (piece.surface for piece in proto.pieces), + text_transform_name=LOSSLESS_CAPS_CASEOPS_V1, + ) + return np.asarray(list(byte_counts), dtype=np.uint16) + + +def main() -> None: + ap = argparse.ArgumentParser(description=__doc__, formatter_class=argparse.RawDescriptionHelpFormatter) + ap.add_argument("--docs", required=True, type=pathlib.Path, help="Path to docs_selected.jsonl") + ap.add_argument("--out", required=True, type=pathlib.Path, help="Output datasets dir") + ap.add_argument("--sp", required=True, type=pathlib.Path, help="Path to CaseOps SP model") + ap.add_argument("--val-docs", type=int, default=10_000, help="Validation docs count") + args = ap.parse_args() + + sp = spm.SentencePieceProcessor(model_file=str(args.sp)) + print(f"loaded sp: vocab={sp.vocab_size()}", flush=True) + + train_out = args.out / "datasets" / "fineweb10B_sp8192_lossless_caps_caseops_v1_reserved" + train_out.mkdir(parents=True, exist_ok=True) + + val_buf_tokens: list[int] = [] + val_buf_bytes: list[int] = [] + train_buf: list[int] = [] + val_written = 0 + train_written = 0 + n_docs = 0 + + for text in _iter_docs(args.docs): + transformed = encode_lossless_caps_v2(text) + token_ids = [BOS_ID] + sp.encode(transformed, out_type=int) + if n_docs < args.val_docs: + # Validation doc — also compute byte sidecar + byte_counts = _token_original_byte_counts(sp, text, transformed) + val_buf_tokens.extend(token_ids) + val_buf_bytes.append(0) # BOS contributes 0 original bytes + val_buf_bytes.extend(int(b) for b in byte_counts) + if len(val_buf_tokens) >= SHARD_TOKENS: + _write_shard(train_out / f"fineweb_val_{val_written:06d}.bin", + np.array(val_buf_tokens[:SHARD_TOKENS], dtype=np.uint16)) + _write_shard(train_out / f"fineweb_val_bytes_{val_written:06d}.bin", + 
np.array(val_buf_bytes[:SHARD_TOKENS], dtype=np.uint16)) + val_buf_tokens = val_buf_tokens[SHARD_TOKENS:] + val_buf_bytes = val_buf_bytes[SHARD_TOKENS:] + val_written += 1 + else: + train_buf.extend(token_ids) + if len(train_buf) >= SHARD_TOKENS: + _write_shard(train_out / f"fineweb_train_{train_written:06d}.bin", + np.array(train_buf[:SHARD_TOKENS], dtype=np.uint16)) + train_buf = train_buf[SHARD_TOKENS:] + train_written += 1 + n_docs += 1 + if n_docs % 10_000 == 0: + print(f" processed {n_docs} docs train_shards={train_written} val_shards={val_written}", flush=True) + + # Flush tail buffers into final (possibly short) shards. + if val_buf_tokens: + _write_shard(train_out / f"fineweb_val_{val_written:06d}.bin", + np.array(val_buf_tokens, dtype=np.uint16)) + _write_shard(train_out / f"fineweb_val_bytes_{val_written:06d}.bin", + np.array(val_buf_bytes, dtype=np.uint16)) + if train_buf: + _write_shard(train_out / f"fineweb_train_{train_written:06d}.bin", + np.array(train_buf, dtype=np.uint16)) + + print(f"done. docs={n_docs} train_shards={train_written + (1 if train_buf else 0)} val_shards={val_written + (1 if val_buf_tokens else 0)}") + + +if __name__ == "__main__": + main() diff --git a/records/track_10min_16mb/2026-04-27_SmearGateBOSFix_PR1787Base_LQERAsym_PhasedTTT/submission.json b/records/track_10min_16mb/2026-04-27_SmearGateBOSFix_PR1787Base_LQERAsym_PhasedTTT/submission.json new file mode 100644 index 0000000000..76b8351b61 --- /dev/null +++ b/records/track_10min_16mb/2026-04-27_SmearGateBOSFix_PR1787Base_LQERAsym_PhasedTTT/submission.json @@ -0,0 +1,30 @@ +{ + "author": "aquarious dante workman", + "github_id": "aquariouseworkman", + "name": "SmearGate BOS Fix + PR #1787 Base + LQER Asym + Phased TTT", + "date": "2026-04-27", + "track": "10min_16mb", + "val_bpb": 1.06128, + "seeds": [42], + "seed_results": { + "42": {"val_bpb": 1.06128, "artifact_bytes": 15952086} + }, + "hardware": "8xH100 80GB SXM", + "pytorch_version": "2.9.1+cu128", + "technique_summary": "PR #1787 base (CaseOps + SparseAttnGate + PolarNS + MIN_LR + FusedCE) + SmearGate with BOS document boundary fix + LQER Asymmetric rank-4 correction + Phased TTT (3 phases, score-first)", + "compliance": { + "train_under_600s": true, + "artifact_under_16mb": true, + "eval_under_600s": true, + "smeargate_bos_fix": true, + "score_first_ttt": true + }, + "attribution": { + "pr1787_base": "@nprime06 (PR #1787)", + "caseops": "@romeerp (PR #1729)", + "smeargate_lqer": "@dexhunter (PR #1797)", + "smeargate_bos_audit": "@cocohearts (PR #1797 audit)", + "ttt_framework": "@abaybektursun (PR #549)", + "gptq_sdclip": "@clarkkev (PR #1394)" + } +} diff --git a/records/track_10min_16mb/2026-04-27_SmearGateBOSFix_PR1787Base_LQERAsym_PhasedTTT/tokenizers/fineweb_8192_bpe_lossless_caps_caseops_v1_reserved.model b/records/track_10min_16mb/2026-04-27_SmearGateBOSFix_PR1787Base_LQERAsym_PhasedTTT/tokenizers/fineweb_8192_bpe_lossless_caps_caseops_v1_reserved.model new file mode 100644 index 0000000000000000000000000000000000000000..fffc8bb3062a77df55030b36cb0d85f2c6a9c211 GIT binary patch literal 366510 zcmZ6Ud4O!&Rn`wAOqURvAf6Pl7e(&*uTQ9oxf>)jm=pAmo@RhH49xC$1$us5iTPpHzbYGUFFAVzf z{||k||A)S^qIbOY*00=t_4anj=&f%R!o6d{ePhD?W5NSt!h>VNLu10jW5Oe2!lPrt zV`IYOW5N?-!jogdQ)9x@Bf`C7ecU_N$Gu~H+&k9Cy<>gcJJ!d&V}0B^*2leLecU_N z$Gu~H+&k9Cy<>gcJJ!d&V}0B^*2jHgecU(J$9-dc+&9+8ePeyxH`d2}V}0B=*2jHg zecU(J$9-dc+&9+8ePeyxH`d2}V}0B=*2n#0ecV6R$Ngh{+&|XG{bPOHKi0?nV}0B| z*2n#0ecV6R$Ngh{+&|XG{bPOHKi0?nV}0B|*2e>5eLOJM#{*-1JTTVB17m$WFxJNd 
zV|_d@*2e>5eLOJM#{*-1JTTVB17m$WFxJNdV|_d@*2jZmeLOhU$Ae>iJUG_JgJXR> zIM&C5V|_e0*2jZmeLOhU$Ae>iJUG_JgJXR>IM&C5V|_e0*2hC*eLOVQ$3tU%JT%tF zLt}kBG}gyMV|_d{*2hC*eLOVQ$3tU%JT%tFLt}kBG}gyMV|_d{*2lwReLOtY$HQZN zJUrIN!()9sJl4m+m$amBdx_pHCdgw5iz~1Ag^&MJY3jb5j#p zyT4P>Qmi+xms*ab;1|BWs7nRCt?4s~^dbirQovS?avSijrY}|Cd%WJFx^mv<^_4|M znk2Er_#g+*q}30r`H229(@!PPw)2^vD9!^WKP%22snMSUys`q@M=i`I@?`xa1%7-5 z(*GwrvzcVrad568KI`>GPtPr70iWkDM=Iot%t&l%YVtx_*-ti#al58HuuUCRMix@p zsv>LTyrNyP{OaT(wY}ECrOdN)@JtcDJXA6Gm5HmOzbaHI?5iDINUpDOa8Gf+HdN*N zb)icCmsJZA@arwrvu-%wm?}G0q;GQk3+3|7l~>o(xA?Dym_>&3U; z20OgFa1QGfE=&B5`sw_(o7i{ylVg2X%2BNEc5qMfeormZE%LX;fEc3Mg{(eGg@AVj~_L8CSCR>&rel>udlVcvR%Hh z=6YA^`tgd~Jbt1^o!JI|vVOd)qJFBP+v?+f+J!A;>DPq^>HKHHr5OHM2UqGCelApT z^YaeQ)OY-X)6LG6`7egyGe!BOT99?VsXXnuw%+tSlzIGejcd#O6=!}b{(jZLx#Ij< z__IjAo)`XJL zOH{S|z9YU$M%%o>J(SwOTf9NtOggohHu^44!C z9n<{PM0=Z(mn3;x7qJqDZWl%BiBfeJ$cyz>5;eebYPC4`TbpNnvw+$2XKF za(m{8TmH6mZtvvqg_(cnHOnS-JNi$ zF77?*hcng9dxl#pF}7W3XiBx(75!nsF?_pRmJU09MoP&wT5$n<_S;hu7zSbnTP zT6+KbX;=FD02k)Y`~xHWu^9cJ6m_Z^{otDQ$x`V*#3^jtAL`(RB7IoR)x*?Fo&0pJ zFCXqmZfPG;bGmD*epw2%hF|Vj3zdD3gNF*fqUO3%_pn`&TiLx9itb;PdS8;O;_o;8 znT+WVxQK<7|6pWtaSu71Vc}tiSI*~=lr5z_>fk;15d28TIQ0PZQ6?XVyH}dLaG&C-h6^{a&ez%@`XE#PcnH=I{jpmmyYr&4$o&&=%-d)@2e;LG$%My zaHp~@c|JX5In&RmDK2EWpIOmNJbhN;xo7|Egk8U%<9we{g3ql9Y=58U4-0idpI=2? zdhq*#3U>{^&=F5PA8>G{G+%5csS44*q~?5~PhVQo-}O*7bxa0K@h~%Md+s53otvj{ zd&zCrroCU&rxTnNP2INwPm;#^TGoiJqgB4R?Z3+5w35>r%T<5&2DUZdHq-Kj>ipbMmo~hM zggr5SHu9glIp6h%UALB3*Zj^jGCgO$SL!2P<47~J_6JQ~yaw`Xs|skf66gKdkttR+oVN)j85so~w-I&UkJBJyD+7a}qq+0%pgf*sh zCG7U~m4&Xn2xsoZ;4FduXoNt_F2Pgr^;J&c@w+1&z7`r5{+PckRPkL2AId1cy2erm z(t)xtx_etq>yJmzC*r09!13vvaPu`a-tvIiR0otWuHHXUKOL*z?|>{7c4duU>)cM~ znrw6clAL%N`zI?8hZNd@!o$g2bKI|s&XBGnn5EwbK>t%I^EoMpaFS%myG#=L-i{!} zEZo=q>8OkeQAaS^d@h~K*VhbYnniU$n3ibGH`Fhl8+1TepqrLXzp?VIDMtsQsQIb7 z{6FLHQbTx`@XW31o1A>*@w@|2L3?F#{AZoy;z-INOetjm|7LUGIkaRAq>*HN_;V>~ zDVs`o;a=fes)Vy6wZIM>6&-pJ^XHvxe}>U_grkh}h1q= z(sKcO9!$Th#-vIqy8|PDCrW?C8O}~s0SRaJ|J{|n3pE2!)W7dZ*GjB8cX;9IAROI!!uoyrqQSi* zTpEwN(7#b5YSitXN9z5rDjN({k_zm?G@|~^noet<4upc1Y75_AtwM!W;Q9aGa`Khz zwIiImxRVL)4@ARiSOoLwZ07a!-wyNAewXm_uI%LpQ+=nJPjvuTn|ZkSI}V?kWx~nq zWbV=9hg^>OhmN46@zUb&hJW=H1f!obtL}&WMRT5xV7{z8p8ma>^BtAUz`ca4HnhbP=NnlZb%w#(tI>M2NIV@r{y`yP@*%8bq zR@Ktb>%zO*3qjfo)~q)D`pW6uO!ztoN%%~9=r=g_iKnm~Ky|C}@{Ojmu#`Xn2L{bQ zsJgkW`Bn#v`OVaE{w=>*GH% zGkZsx{&xVBxH!ianxCxkZmYp`zzD~@N8J2lM_sCe?g-}_twtl@PdU$JY!pDA_jm^W zPaMt|M>u@Esrv@f{B+IZ_WVey2BYeeeK)Uv>U0MevLC`Rkb{Mdwz;mtx{-sD?ZO@J zKeO8Js9p)mLY5j;f2PKo?MrSTm2)7m|9R!(c;<-;xOATySAW)dc+T1pj?{ErYO?>L z#?-jofuzb48Q0G_JUc<(9pSB8Z|9LH3TpnPzs}`9>Dj z38JXQ(rbiYGQgbG+yby>wVZ!j3!7(t+(C%SmM5OmZ>n&+vc(Qac{3^c-wnzXlb}@U zvBuu!P4(l6dQDI=-OcLk-@oD3JKehF330V~NK_)T0rzab>|(Tx=m;YHg@>#E5TO_i z`2c_w4U)~TIF9F49pPAmhRNnX){^heJX{R;cPF@hQH?GclC)NMXoaaY5ubj4U-OX`(1~<{Oky$(Or#7|GTPc z{%X%PI|zJxwHoE`RpHLg-Q9xWZZEEf|Kt2F)Nne&`jDpl`~GmIIblb57#%(2{$GDw zx>XXUq;ql6ywO4IrzMyqnzhTIAV}~d!aZRMdNu{U#T$$6Sqdp7I4LjE4R;Wt%?~0e zKy9jcXx`GFW=C>9w}6=Xu2*!xG#YiNJt#F?*32(3s|Rr15`?o0_n(B5$C3POJus;b zv#@<(&GU@Gs0ERb886}yPC^gDeLn5k)ZVIQ!U%*#wZUE_s0Zl4$TvIF9H$2)pSy0a zZ(ZT(oJm~^B*(0v^&?^&ptZ@bfk`)w#UB?Evq34FMz*oH?or0A0TpN!9cJ zwl(p-W;iX7QkZ+l(cFQ=>xmpNJwR0|p_;d=wRV_AV+(236kHUOrdW% z*3$Fd7Lb}hJNG2NgFpduP567lQOto|m~UU9*F`OmT3cG_H=c14Ut{@p$(T(Cb1w3h!1^#vIxK@6NrB0aFZq zI5lKInBQ}$?_Nupq3RYy%;k~%bRB>aUduSrC+L;^d(`v?nufOnoOv)E;F(6yet_rR zdwNgjsIj9Zh|&)2-${63_6Uc)V>hSwN-b#^XbG23Zt_5Qeby7X9+28S_dKL*o zP1X`7;@z42{v9B-xj#Q!$a~d8DGKo%@_nLL#>AE&30LVV05E)^MO&XZUQN8O;~bt^ z83AYKZm)oMzV-I1zl$?WQ0nQSh0Ws&JG%bB8*jbj){BX=IC-6YpT{0 
zrbcG+I=)|mRDVk_nI6qt-C$BLG-&n1H=xGciId6U)e?Cq zdw+kRTPA}xfR+d70GKEpw479ZKF!=AHy==QKlT!>1!o${+%}V=4iaV9Thsz8 z`{V3fRnY<{<;Zdoj*;xnGcD_(AnEcb?vf92T<;vUgp=v{TsF|5$Ow+?V$3%NcqPnQ(lY`aAKQ`(PdL9nu`2{=K7oyn2a6HZZQ=_c+Cn;ErRkl4hz zL9%?hIPrkqgOU($l1jU;;-jU!yDgZc&sv}k5DCw3+hNt`D{F0ucfa}ByRDv=aKt-S z`|5yjt<`2vFx7IVmHh)wb124I!pR*Eu)T8-!rOuU|NVfE?Bi}89Bc^(f_i}#3M#SB zqa#e}+2UyCfwl)zFnbZ+5`3r{&)G#;8n%#R^oPSkIQ5KPs{{Cbes zHGi+Hd>nat*@8>;YZt8pl5nOzii9Bayg1K>acX6GBGi6#tQ&qif~3K-p!I4GilBS9 zJqh`kDC9NRT}zn!Sl`Kw2`D;uS-_GmM_xYG>FoPy36t*B{Za>@IMm|`2PK!e_mx|J zWYBfJfnuk7*r~*WfF@GsiOLephtM9Y`Ouo+bb$LFG=Q;~gECC?1BH26c5NT8Ih?w~ z0cKFd{gf$02PwInJ(qbz4@C+-wz)nrWT3&L9pH)1ZFIn*AMfM#U__bQ_4lMJLmS^M zLDjVkq6_%~Cv{J-^ap5By~k52nYjyz0Qq{5_Rv8{VMjg-(+?5UKazFxbd7K@*K(%? zm;7;psEt8*ms>BQB4*D%->@OV+>z?0`M6Z$`MElf7L@ddtjjcGfq-Vp)N1(3^2t8G zk9S53E1EDdnNX>@0FsY(lGI=lGwZics0!XU&p^1DooJ*^c;Thn5bZ$q(0pRek{Od% zVGyKwtvtGfZ)@MbC!9v-V^5z{3p?g{KgBU$mI)RBX3xDqA)F#sM>CBBeLfz$oqlR9ZqF40lvaHZ zuLFczMiaZ%`yuv$eXXBn13aYST7qFMQ}7NPt;p}uC*mC}wb`i=ckRt+!Qt-wwg-_8 zg6c^HCDO`tB0b>_A<8lT!UYW!dN}mqjTWqLcK;3E&it(h zr%Tp>mbj3<(D~v9M!pun0D8{{AS%Haqt;72PcR*L_GglOTC{oXflyG@B8;Q+sRo2N|&gK?e z*PrLb;2^;OK0QzGKxp#}udr9OsRgtbYb$Euei;(E*^$~DIPBS9OE}!2 zekBFRN{;1y?tyA@nLiyGFu_$107%5UUTFiv15+1mEc6g4Fx%9(><WFQr=uNql}v zui6c8U$6(tWDvYr)T+}NBo)*`Nm3u9>3|fo=lLFCm}Pn)d$>~tlc$7q&tp4fx&gD0 z*1!dhk<5-V@HfxY9EvXkOi^wlUBV|i!r2p+j~rr z+w{WHYYTxo$|Zmf01sy};LvBM`zr3d7I$b?fkhOLjvYAq-NUIy{2l@g7O#cZn-|fH zSZfKV6ZCX4eP{`Sgn3SvNXKPu<_PDg*l*RPK4m=1^mIhe$;5`(Xou`S- zU5m8R07|g*3wSBLan?af$vz+414+qbNfq?!TJp+5wP4hE;NFQa0%GU(<$=P}`O+I1 z&2yC}+KvyHwNR4aOoIwYO zzlIcDu`{R}D*cdTPaWiDNWvPjoUr&Zb7KeFgmB6~o_qBL#x`EYoM}ohg=y8`1Fan- zB#gsz4UkYeCDjv7HgWlE2E1!$R`cbxODcn+M2Dv4u5OPY1_gWS6sAF#_g(d7plHM^ zyETwfeVU?A_(=NR5Do{a;N~l8O|jheVc`jsWR#skpjz8Z=OLzm!aoxQ3LWC^YJ6G` z@XTAf1N5P-4G=-@>hQ-`7S+P|B;kct`z_(*zh`&Rbij=Cm|X{kw}qVD2(wPoxi>or zqZ{TH`m#wxbn~X}8!7LPy1a#IdXg~HM9L!^J7pivli?|pNbfGa8zL-Da7@9%8cLZU zdaCJjAw5L2ZFliz$RTU?ud>wpDmlSjI+T`RF&c(sPco;FP&8-S&}0Y5*rbC5X#;C$ zQO@1D&W-fo(x(=0&L&xuvnhYfJRNF6G9gUf476B&3rJHq$t-RPAu^w-F?GZDqwLGA z^PRn6ckVq9VNVrzLpVGgaHn zN^(B*A&@@lHN~P>04&_L|LBj`ylAP~M41c(22Yt~3t`CIM>eKIl;eolL4Z3ZwP+NuUBn__`{OV8B;V$gES$Ds3FOD2p8K;zduWla#tFQl!W zaIqhs`q+SBc5!x=A?r`N2uSWL{geDn6i6p(y4qz0=UST-u(?>Z6j1wvf z!v~_m7(LWO0wP(w0L^7htYOTZ1{BzpVYu4>7Qc7T$gw=O-$B=BMAdM_0kU* zW*N~o;Am}?-L2*uYF;aCo=t$1gEI}#2ITU^w0yA}vt{?oD9}2&q)f>C#dT1iE=mpw~ktEH03}2g5V9 zAO->!Ht*)o#*B1sa6*_^c)>Kw2T(4m(Z-G0oF=ZTzf0IB&enuek>|_WTldgV3=Knu zw*jHP6?WBpvkRdUl@S3V?92x&S}^hC{+$v`W!T-_0iy;4%rS#CBvr(@`nw)Ld7OHa zp>2X@?4dbdFGWDr!}07#z9HSEJ22t@aEBGGepxgWl+PTejy z5YRY-vrl_|YgHH9eUh^FZ_V2z$pl!-v9mSlAcc{9|SeAhEi%m z8Bua#E4U#@mS(T{3zdhscu6{el%)8bb>6atfWR!{y%9JCii<1^I}j4F4TpH^Au0Q0 zT4D7NDC#-6i8sT?*^y5qHESm=Lp&gAJhQ*31&0f!yz*a8A*il(ZJ|qexrcvdJw&7{ z(s{5R0vw@n4LJb|@8LIpvDVyyPQy)rDH69Vxwc?Ru)APNSVGd{qv|eSmKrP8gkvwb z(Y(LYLrFd--ecH+!#XQq8}fGy5$pq+|L2|tCu}s1#P7PDd6hiHEY3061koYsjUlT^}4Ba+b z5IivdrV=)Qw7cC4Oso0J{fLCWc1s06qX(K#eIooPvw7&2^Y&fVbtESp!kitTYZ#s@tQUx;iLm z_Xxc-f3406b8N(RJ%O0g?)s=>3r>x423f1i=@5b2%IfgIVaJ~L^$=gzwj?p-|3oyiu#%OVZhx$&?XVwtp~yx4r>bQA;8?? 
zHRblg226(KEL`(9sxY&?Y{yTal(Bwsl8I_NM4F%EM8yDxEn_7dVFX^sN zc;P`V;id0yUDam85{xChI)Mo^()mbcOBmBU#x$n{qtF9y0p-h{8Z*Jt&W`HL1yXop zOA?#f3>fvx#{Q-?9+~fcQEP z8wy>^ouxX(4Il#CRff&?*W`@5+KHJ!NEgET;;dYQfEGDWBRZaf$uQ2O4p3{UY!s}A z_*{uoO9Q7&Z?J8EsCMQfgMTXqsN=vB!c`QHdo2VEq<9gh!)NBF>VG;2Dfvuw?XDYA zO3JR~06kQ1fRSU)x*p+wpcaVNUk;E71S;BR?g%2IHs%o+0LMP5*yY2;i8|LcAUW>k z)&w}^b7B`|15;i2vvY^*Z(F1?$(eviu9coI7`9*vWnG}w4M<{+hiBh9pINmd{Th&> zI2nSDf#`~!PlNI%F_+#JZ+_69$|Z>j2wBpZJ9jk9tpO=- z*C)+^QN9Ni^#Gehr$3Wn{hcAY4hBwuWVPo7Yzs(5?`Bw=4upfu2092xL*Fb-tcTBZ zyvn?XKyG*yQN#ui#^8j4f2hJ#DNy3dlh6`QZQBC>5Ux4&MGZ{@7?H@M14IO_HBgspu-cXnMG!8A zk!iwah|HNna%z60M&LA;8sG#%s^QH0KY=

g^^VO~^xMs<4AXX-joWYr^4V*Oz;G zFg%~)u0Yg)az*6t*O)k#wL}1u%X5;aB^;jTxg9zkA|L0~kzjnV*>+ol>11X)1VWH} zPPFm6A&kk>3u`*j{HS^13_4Oz0_c!6?Es`(;JmC|Q&T8pn70fnGy6FYy5<`t~%ylzNocr5)h0oS;)+x`Vrnv|SQ16p?TZ$gs(@ZRkjQsHxhO zZ+vgS7Ke_kd0j2)L?hq?7*-jiy`j-UNsa361W~3#6zv;&=z&6hIE>+dFwB)pdp!sv z!cl-28$cPMbW2hxo7dOO&tQ!>69{N+MOV-gB-caDET)7@Gx3S44jeYMAEL$9I-~j1R$9$)RBi%t6j5;W(a;*r~ps@Fx zPbWYB5w&eV-)c1O@Z)u6Ku8K)OeS#zUx4YYHv3O&6pJ=?21HybWsz z#h4ay2lPV(ot>0zLqL+5wndsBuh|_VvMd_{taBzU-RKXe` zW02AGNrX4hv;0ls;S<&*<{vuVt~(0CGR0hZYJ(_Bo9R;!wI)AkN034}qpm4Tz+G>9 z^@((X$~Od~m*Y$TnxCk~%q%$<&nJ*lEzkG{22d4Y-|KXUjy0TnBdvpkNIuEEPF$G# zaOBH}mA7^XC(k2K5dTqSgRw-*yvab2H&q=XgjCO6yGEwqWRN?DKy)HswYm!sES7B@ zK-Ojq9+j0qR+hz-|#znIsqYxmp)(7CGK--G6ZGox+mTf zhM`sIgo9=$-n47}am|8uj1f%0G6LN(Y4c6KYR8>|W6V1H$_x(vyCFSuBb5tA++4iv z1Ii2$K+ZmP(fm}E5*4csCJ;zQ<>)RYplFFHq&!E{fkFppUOFh@MZ1=G|1r2Vw5X$8 zH5@R9DUoLbrk=Fv~NR9q?_Ri|N4WXmu&iud%Hm z$rw+x7VQ&Xn@n?;4Mut_#^+SskTWvwv{GNhHx9?}V2&YUIv37OfpLo{5(m?1nsfuZscMnLXH ztplFRG<8U*s|aF#1BEvFidP`>&(xxPj*w{&wKRc9#<>~X0wNjyZLM>r`MR{29YD>U zHnAocm9R7i_lcK_=^Fqxv%B=kyylj0kMtx+(o^CG6(z*8~E2%`bd}6@W~7jBQE~ zCO%TC>V`zpxCG9}nKp|0g#ARrhA@iIodWl_&A)IacRc_RjzOxXbjYKH66SX2hweNQ zeJMdY!tizKO`vrE?+mFxK(x(bml^jasnS#YT(xWZHL4Sc5DPsC&?dZG!T*%7`dRpt zIzh>hAsf6Na5?V}E(h|dvggXV|4s5>?WJz9`IohqqZ-P+y$Ljx?&H^lsTWV-DPkH( zFM2+sAMJo?b6Qk*#8@YWdT;e|J+QViH-Qa=Fr8DK&Ck0q?t3D3!n?joK^ReWonPgG z&?>eexf3ntk`i9XoO{Bh>!2ttr8fEMTX@aCazQh>ye5PblXnC>|7f95+*vu6l(;^y z-4QOWDd)l05Ksw+(mbB_Kx|L@4VwhTWAh7D_rBIqu0Kqmrd$pr;<%=Ep+tiida88_ zDJ%8)$(t|pAgAlwm)m>|gkffgnlSW3Hk_!B^KAnuWicH2tU&XxqX9jyF(DkOxJe;Z zwGc{GYNOvrJRngb4uEVx0o8ivvdVP;K6;f7z+h&2{bNHo^_#um<`?~K;mvBoDHLPT zhH{&a?&hb%M{XZ4v?hW;d1t;+w+19zE-|afAvQ-h++#op|7ej#s_Nk1RFbA^ZrBq@ zQKI+GS|CNlRHuLp`1DET%0>sR6zdJPb<%69s6zYUGY4oj80NE@$8!w3%`er`I2VV` zfoi?u>r4qpF`OaMc+x?LG+M9NL%D`DoGanVk|!yBGQq<{;e;dGxvvNO zTSFWjK1nbMv}X%bZGkEANM03KfPkc$x@7t@M4(M*ezAs-^5ujrla4;IvR1MNAfVB` zjFX$xy|;M?hdWOjT40pzn+ZU5+QhL*ApTLF%Gih zV5sIrkSK6fuQ306L$zDaeStP}H}n9Q;eC-QAbB%u_G82ON~eN$)_{m{EQ{)avftaT zjSVQO8yHB7c4xV}R(t@%1Gu_Xe4)HB$x|n=lc=Lg9SffU?KioqI zH(nTS0x?A!PxBw@*E2nqGXbhU*EmGEpp1l3be`gw4oNbcCJEOJZ2%w{a<+N$Ne9%C zY6BX&npIxk{*_v5yc*!!a1$t`&ryID5IwM5(p)qj56yW8s788Zw={$Z8Xw;CiMeh@ z@nHA1gq#29@X8Zl!pR4JmlW1QAfI|mbqa)uMLinSCGBl3#MXr2R_2MK0~R^D4G1&d z^J{*!3M#ju>llixBKL?}P@*5}M!|G|9CgCDfngD~`a}){WbrEiJ>jY(tvxqMs~MhO zQ~gh2(a+~j0EsJ;N7MnI`;fqNzz3d{bl|YHuldy)h`5Y$$le1GU?KN1r4A|C#pP@B zYyQefG5cmg5kPk?G=4*nMazXV&Bt8PQM>CPWF%*ZV)e>33wdMKNyEu=3MULFXYOJ* zfV6#S$9q}-xi%{9sFn%b1X^OzHTot<3nkLs)^WrskQs><=H2sgaZ2iD-Wm!8;@n~u z&<_ds)g(8-^hbF$$@2fY1>E%jMmWvL7e>m96)jY9VBLK=iC-aVAbhV3Z;GB4ZzS?gUjvGKcAM30Tl}YP8WW{{=YivET>K;Ac}Ek=E1y$fDV=%tw7~~ z=wRme6@g^qKl@lG9*?7te-9{jbAU0b#6W+b1z&&Y9j--2hWfI@aK=_U6A?=W_f`2ucrnc%~(6+Y$@Y zfHPn9?*LIfSNG&qTMqojoD-k>>5@s=xLt!{S~`+aj^pHksT`g}$q6ke z$#J+&jS3)ACpwra+XkU2J1WF@+N4l!JjPHkrV;$FKE}h|@Ku^ib5J=78b`ltNZ{T>pRgtLJsF0E%Wgww`gh zg``UNx?~zaox*Pi2!9OwUY)I>paMVt($N|Fd$i=WVnJ zm~Xye)DkZCtb@8}9s=y;xIveg3$@xcVf5((Q)>S`I81Obl>&h>ku&%3&F{E~eIIlr zoQmKPX*KC!UCZ(2*)(yFH86LCDV^=wdnge=mi3lk&_zzwY(U|gLoXUYoB!p=>|EI~ z213LGjs7iR@?<`b6jMMdh~>oGE9oxZxu*{UYkRSZrp z*k*3j{hk@mRCoeN^k)`QWeW}$RFLikP9dagDHl6ESs7C8dpWrVqZ&=VHEHRAwFf*d z!v`c-r$^g|$>#r5RkA*ybpg{{iu)Rkw$M;7#wR8cpYATNTLB{zbB{gkoUfro4BTfN zb?Ao}bH?30hp%Gn0JIz*4a#N$*sXr z83+C3*6tyMJ!;CWXEP)@_i2IV|JE4n!NvBWgaI~E07t}HU_b*EkI(Hb7~U^vfgsgZyOj1 zbH*N994ND+Tn^oV0>>$u!J(K=69@}92a!7AQ?<$sVWeJU8gK(j<*>q&H8(G;lQd-qZKcYCs2sZ?gJ_q zVjuYUNtKP9E+GGHvmVmwp)y_R zxvFu!!3Th=xlH()a7t!%U5q|)IU$9nZ-(!=ETC5ZwpHvq_F-*-v5b3OO2rc_-8V1C 
zN@xhlo)(w`_AMkzpY7Av>?G{1gel<|5zqg?@(Nsfn_L>`1_JXKy;pb%A(`uvxodMh zWWqTI$qkMccjwGS;JzPX?D?3-H8`zF+sV0+w1I|1yG-tG2&&(w3NY;E?TqM-HJ)sN zQoaWBOr;=Uww&%U+zuqZ_YGShqO*64=q0R4+mvv)Oh1b7P?EK7TsW@|64kxy=fN%k zQBLVm)k4F7-J2B5lHt*qahe8XEn z_RZUu?%EF%ZGj@=U9ajU;MAA43uFr0P$CN_qvQ>2lYxd;INJds8RwW-yiEaU&u+*S z;b@DAFikqC7JF_ym!M>dm(UX!2xOB%;tCM0@J>%T;nqV-c0Pc29jJp4;$j1&U@s_d z2q$mtG|&Y#FRJ>|0sXR;+=3W0^vh4G-w7m%;seFo0CGIlgPLtVuw#lob^ysJ1NT5rG8uq5oQ%!l+U&Q)1)*>|IwE)&$WND$l{`tE9QTVC?M) zml~Gu`Z^G$$9o8bpYz+kPjUmQ{gV#tQG3mctDHPlL7%2+)fO~ZT516_A)M@e=6xFo zCma|_qin%&%nX6Nc9QN?4)Q5sT7;^%n9eFmQB8R}864dmbGe*iFGDCtqSd4!q{V6! zE8gcTh^obNZA&0NF!hv2Xw@pN6GfGvE@lHvV?FSu{|y+Pb6K@{$NDkD<`y_A(q_JW z^AkwK-?J-x8-Q+LnaU;{-5hJ@ZU;y?9LUeQX$p>j=XoG%1tEDe5h?dMlLl6`;z0*P z6wd5$$WdAtNW%%@D@f^X&Yt0D9Fck`(#o0cwxEF_f{rZNi5rSs(BCmwylApSC#bg-vo-xLoXt16QtDf9>Z-%&@hTi~dL(Q?U zcd`(!(=EcXRg_R{bpnNwI7a1j?b|?dUU>VXO>Cs7lg}L}!*HZ$vZ;{i5Q~dSo=mR> zineBDryZ0CaPE!yOK@osIoh&L1a)loiClq856@qNh+k?xiF9TqkM~?dpn7~UXhS$N zV0A#xmJ<>;P!We78;fh+x#mfaP$qm^&|;{0MDjk51HoIHy z2O8Hj>~6+7ps5aR4x1sL;u2?m11ZL}=l>Xcm)a1RPS`QO1qD9cmBvjsL}3Hg!}0b& z!4c^F@-~Su7P)&6ZfA(XW{j6rQz(&1&)Zo=D@bt?=Zb!@%5KO^w}Hv*5<;4=#3&E0 ztRZFyk}vWKgxL9`oj%~glZk79nz4I_gl9V4cayM3g63VF*Rwi*%eVkajC z{p#(+E+_R{5c&JRp>~Ft9RI=Srr>m7Y(Jjs>6{f5RgceOc8RXD>pqKj2~OSk!0Z~B zDlc0*^b}V^6hAN311TU4*tK;%B*GO^4xn#_MC?!^IS|d&7kXdfLUZ2=;ejw%u66-p zvs9}_?rjJpooB(+2J!BSJPtcUP(w=v-I_m z>@6S+aU74XfG}lfL5wz!>nrpi8Oj?jB42i#RNFCCb3!wG13E_)Yu8spVF#Rn*vLhiuc&t+AB!X zsScxe!)L8r-3eSmLNPP{3Di0uXTPoh43-}E38qQ%wh5EQYcM?O-4oQg0a1eAOTHl- z1=dMI^B$GqXEaUP0@jYy&p$D^OcJHeUblfMjnxyD-GZY7o>5jU?FsRKbb^>ofiBSB<6$MVf)Z=26x%M_twH~u*K}BM0iH5%efxL5OC@IxC z_1x78NXCOJyGvAWdtDN)*5jT1HH27cJhe7~D~Pbde<WyUWtOBUrLyUWD0QLP(Kb?WxMwkdQNf9eI_shotQZ!YgBf z__zkudXCe2lXMK&#mMWg=Dq94s(*Iyj87i-Ey(YFYm1u8HDM39Vr9Yy ziY))lxDBGReR!o!*xl9+;Ud51B-4CbXmfQ%7-8_@(M@##DwHc*_FrEDox2|hT7$?% zZQQ(G1;Z4MDcW`qCa3waCgIn^NB-7}mbw9#seljryH~t{7#rcnZq>m1q%I^i!LU6$ z)m&i$h8-3?`UA`Dpz_jP3raOHO^j=62U4n%rIUBYAtb*8pS)RtOP8bZh&t<_AU`7Q z>cr0_kUARad<|6#UwF78h{C;h)f0w0`Wf96yatgIo9J#Sn?&^EIX8sE7boN7+iKp| zio^3snOlGe!d-FoO2`c?0$%_ zXUAak2WllS6Z74UEhwcD^nlO=D826Q+gmq8XZPIR+!jh2Qt`gnICq9j&gAr)0;wp! z!?sG8?G`GWaQNp%5M6A(3_*i>o~&3yh}QBbug1H8=GP4b-7=wu6ItZx+UZ8bzx*U?^;_n(e1N}UC*-;XNUqFbJzJaPGrW>? 
[GIT binary patch payload elided: base85-encoded data for the binary tokenizer model file; not reproducible as readable text]
zKMvx}htjP7bo*oLU)SsZN9TO~_vm_6|NSfa?-%O7C!6n@|NqeZ|C8qWqqRf-KXNJk z|24)34AH-j6Ub+*4wDU7)LmoNl&c0&&LOjk& z8&1*Bptwi>{|^2C&gpXhXN6U79Uab-7g2l3ehTDO)FeDt=}+Ri<6bnK_J8kq&OM&9 zbL+f=`W@!Ez&fScH_Ux`%Q)p0Yn9@rYi^?t_wWFZ@C5z-S^i^VJJ{MzGR}qRBD)V6onVExi{EIGa2=jzh?=fD7T!grH z#u9QF;vO0e>hQ@s+8iCPMANPJ^Y4B26ndeeUsx@CE!JT@HeeIB;DhZ4+Z^x2Zd82Q zIC(NN+5SRgm2n4^U$D*%Jtr)S!}NUg?KUUEQF=9s-SRHFJLKiX{P%B;3CD%^{)_L6 zOryuR%S;UWsxpquqlmum^b4oRGf{6G6V8(7aS@ks71z;=o9O$y(cw1P^RLDN#PE-e z4)@4Mc!GXM%~yI&KJ>1uZ>Xbs|250~n((U+k3D*LM6VeNFq{c>ag)IWw03fh=-pLup)_`rsyUZ9(xg9~<55e;ntQInqCD z6TTBwh~xiq{k3mh>mPPIF0I?kw(q6yN7D}V29(y(*r(l4ST&#cFnJWUFKcrqkE7;L z|M2F%|6)w(e+fy)X;gOnZK%36-2PjC8+IM~+whOa{wC~3#&2!-C*|(mS4W5M9vT|< z3>h81|Hgj^`)~Y*koQ~PZ$A-!c-NQ>@9qb}4tVcBo@fmp=_&gC6waXMlOx01*VH>c zJ~$j5YYj*B+9T72O&%Ez+ZXG{566T-|YKS zhlhPF!^8I`t_yn?uM54#4^)&Hr~fhIQ3iQm)4i|R_5$)>=W!8*kJ}fIypFyv3<#B9 z((dtP`{FsiiQ6$875^|e+>5&QuD*)*$Nx36`@b>%?(C;VeIT(mS;46Gi z-!FYAhGQhgU>qi35~|gIeu`tt-k-=rYsGUJQ~Wj!Gw|jiY1kLc4P!s`-_;+n2HXL2 z7{>FDXY-Hwl}$*ac`N^xuYRIGUw*Rw?U{U6l-6-`{#ltPoKwAd5Idl*wB5X9 zKKl}3%TRNH|Lfdj$~yjzSE5N=X*6b~!+o6SKV84nr$h7WpOTL0VYPGCqBZeJ?KBg^ zI{JESz$R?LHta+de)>vWL(4u9^kWMXp;o)k@x*7s&&)@x*{WYH@!61kWp=2WKQq+d zn5oR28q!;*hQ<}ddUuYWSJ|~+eFW&t)h{HIFY8*!r?W456 zc+Q>9&68cTJsWvG_VEYa)aiSKTMw}Nq5U6S`{sPM=ky@!#%d>|*IIk@lsL|y=92FQNnvLl zpGQ;H|5;nG(KV@V`#-Ax*T2I5SfQT2RJpNPy&R?Xhqw3}uDOh>xQ@7YO)q&9x6$sL zjy38Suk%01zu14657L4Y$eiOd0GB*<}?geiz)8?r6xMfAM1b)vNX-p{lN(hGRHe{tLovgal1e}33H82b53 z;~_36KQP1fvrvw?n1=;egud@+6MM=0kzwjfj(7cHQdmYVN9|1GALQ?q^yF#J7OUxN zk)}5xrF@URUfm$}kxeSK6E9_d*sm0tI@l>paLqcb#|CV|7Hq>#v@c|zm$J``+2_~T zXFh%R8us~h_L*#chyBNiYkb{H{NHoz|3>zGEBj8yuqyHF#$N14?xAyHIZn^JzfP3e z|6h{-WBdPe>~DhqOU6AntM~E4URGwIw%eF%*BwP^{h#^lDykhHN7F*-y00EUre0D1 zXmNiyanAjnbbrXcqkiK5@xl3jy^hbahrnqjS54HX0XuFyd?$MKPxVP8lyqEv^Q_t2Pc-6hG znG+s3C$q0SJR+YU=fC?^s}~{LQLbIeeQhld104@W!9B(MD&6N&_jkbk9dUp4?$3X= z9&>-lp$+Xfr0o ze8tmr-}4n$wRomDZyIJ`7HUUHzjMmz$(gR5Iw#Df&qKPuYmpiv{mzSPYsNa~0>8yE z28+lgD9!(S!MM{n|1TJaDr_ZIV=dNUJ^o_;pE`dErFA;ki4D%#ge};HSpVNiR$({V zN3p-`=>Kv4-&@-MaAKnTk5;xRiyXa8yFhIJ+bf>^IEcf@y~@APws(}CU&wxM$K0YO^@IT8!rMCL2FIaQ*d49kz z)z#y*=Kq~1FXA$;;yNl`Qs*FV;x^*^zdrIF;`(0?$VYgBen0d64!yVj*8uv{^}hzv z2P3TvXo}?kV*C40VZ$*JV=xXAFbRF1^lgwmpPdt?k-eWugzf(?W%nQ7_cb^E|D|tMUJL{@>T#)AJmcZ-4XUc^&wker#cEb{J+fqd#7~XgZJSBXkCAL{@;h`XXgKXjQ&YHdj8)gaVdQVpC-3qaQd9^ z8S*Bi*>$6~#sAyfB77UNZ2la^_iCq*!|U~5V061ac8se_Gl)KR`T2|Uv=2(P57faa z`wggb(-@==tyO=nQomO|to;<*73$#bYV|DM==nXo{MraEgxh%G^wBIdH=tc zvH9t(Y2z0S&PVTbEgkk5qK~2Hq_mMlSG{W#7w7+dh5R~V-^I7c@8aR}|9&9+BmC3+ zzs&!A{@;H(=I5BiuMzYA@5m|q31{d3vH2e{|F4hDjsKVVe}9qA-|!F2Ot}Br)ARq% zp`V$5^BDRCc=Y_g`VI0o9%oyeLzjKa`Qb_QOE4AJ*4I9mc(OT$!jpeLCp?{e1{#0N z{|K|VqEc-qO&%+C_952R8@d`Y`|8L*J`uFL%J?yWv@-L(O^L$cx^!$%kOC$Cj zypDV$vQIugyqTP`zTVV55840!#y{YL+In9wH^#Pi;yrjDqW{(h$PeRV_$1Ew|81h5 z@&EfY{WJJy|G%Q}1o7K&6K+9%{dacSZS*tizvuh^jQ{i$vrUm?*L7jDHTLVRv$tip z_5Txd!yS%E?`B_`YkxPr1z*DGO8x=-k;lo5bpoEiDTAUqn}xS;OF#7^yr)DW~ZN7f8f{R9-hB{Cp?8e zAz>~pKRw&eY3uqq;IGDgt7oz@6ktZ z-^btrJPuF5lW+-!|F{0X75e}1Kl&fcn-QKSou}g&coqg%sekbt`dEYd7tf==06BUV znI`>%Xt?&`5dF)R+xB9#8f$wg`3fZH9eddIc(w5B&`s~bnRT{fo8^t-&$KJwOn)2R ziT5F{|Mvm%!}u6JiB0%4wqfcw{*y@N#~)s!PoI8Ej5n|DHgdu{xPzShcYXL#1~zky zzs^5W$hK0gRNcsqV3m%6oxQ{9KM3#4G*&qwFk%Nun)`Ghh_5r zA^Bfhst^74ORdxYTBmJJ#__?SO5+D_Hi?h9Q_7l2GF``ix<;Ft{yR)zv_koearQ#qKZVR4W8>KN zBXsn+PvrgLO4Tvyl%67M)7bVG{0%LY)ek)Xq@iDe{QrY@s$&!C+IID)bG0u}$KmY!dwSkZC7xpo8YD?k6xA_CF+XZ$vud z-dC~DDz(3b--hf`&mH3{y$7<*b+v0Vd=x_x(vT{u_RfKD1l=3ka7@ENpDtA9@v 
zhMVYd&6^JS_!jzYc!d90fxdNdccTSgLjJpDk95-Gcicm!kiAs@9GSyc(D-A11=``4s=#qECFIApD4&dNx0H^5>Yuukkxf;ZKPDZ+{{GhJRpYo9}PrbpP8q z^sx%I$tryp^a~JuzT|yuVVrxf;U5 z#sEjEJpZ+xKgQ>I{>UP(5s+X1`=I=PSp829>{qr{d;S>0#J@k}|HqLb`u{ymn$JMv z6Fr~TmxgE2pM&S&1<3yO$zeG;^+)|YzwrGw@$(UWDPDnBxW`8Wsrjr2F8 z=eW8>IR5)Q&cSZmVE%c{{Lo3kcg)-HPP_;2!w2wTdHnXs)BjgBKWw%BL+*RV-{>7H-Ph`e z=HGw8y$kmUcU8Ji@(yXIvq^d`-6y5v+Iz+K zHMu{e67DbK+8_3Q^WR5>?*nlyrbm~*sfF(6Tw{^Wm5S%Z-~wrsd7kvKYWKgMF9Q85 zh(3j9|9`O7{~5%!kl!M|i`I4X!w<+GA>r8kx(L$xPvM^<|Nnyn+8NFp|9>#?`isM) z_{q1N7k*9t4paCO{(`?D>mNA(AZNB~BQCijJcgY5>fgczBY~6RB_S|{KWX=t7UG#gW{X6Xl z-yhe%rQjLi(|7{?NoZX1ENjs}D{S5VjL@+1%5Z~w`Ap&C^gqSo*F+)^JX^wZfPoK40;q(6sd8aC@C^OnrQbw4a80WyR*of^cV* zensCy_8V*x@z21s@Ekl3FTm9QnID#u6PNl&1UY#f|041gcr{*!vGv+#Rq{9e&B&E1 zvydrK*HZ(ZBE0j!9SP-(RBNiT)Yfgi&?q7{-x77CCik zoBFbSMEzHz{%=sH?ozj^$Gi2>_mr^P9eW$@z};xUm(YnGJYs%9q4Z0o@7!7B&}&Sh z5B;^q8ZbbA`22(x=SoRu2xG{hwT}H=%%-MyCS4o8PX89-KLNUhqmN`4zANqr_!0gS zXX^)b-_M07@jK-IKWtoSivB131%E@^4*oyQ+W&{N|6{(K*8WFaVtT60-jKuds&tV)H^zZO|v~_yZo}>2@*HjwPz8aYJ9MPeT)miC1t}>4j-G{xm zy>WcSdpu}vqiehbPs7vk3_J_Z!Sj$hU>-5K9P$5mFD3`4eV-@T5%e?bkGz8ZYCL-V zk-Yz-I`FLjW7@nM`@I>7%;~x>u0N8Wf8XWbb@Yd?Kk_gCKisGNj~>MJ_ueM0cj7&G zA3lH&<6{_JqW;GyVqg0|&Hp=~{f~acdmWhP`C|}6m@u~gN$E_+zKR!K5H``{_~WNz z+?S1wk)Od$m@*FVd;KrBMEzaX&LHo=-Dp8-#M~<~-LCyFk9X30Vmx9QF@|yXpF!R~ zy;=LO#=HsJhcJd5TKkGZVx4hZdS}z=vH$EBwbg{yPq z{#|;bKErK~*9K!xkFod1mCYG;c$Ph$BipLjzxob3(1|W2(TyJbKpH>7f8ytu#INx? zOkucy{f|+Ml}f)z`eo8DkLmmVT}vN(zn`t2TFw5iWdE;mee|If?EgjP5J=}Q_#6I# z)&=Z;@iRN!KYPDJf5tiV#~}I+#I*sFWc-KR1>zouC*Vn#y0j=N&z~2ju@l^8X_FA8}2YB)aK6$SU(+V82G~;kf4aa{7z$QoI8H za{gbv{15*w*Z(H}qyL!lS@|#^->1nzwXWIu5=bn6+{1_6)<-bF|Lwr(r6COSPZ}_P43#03mUl^}deywx; z+I2ayw~zng2>(U*;=k?24_!Y7-OrGFXfx*DUc*nY&R75$=kzBJ@Sj8vKJA>_@EQEN zT|Odj!ELw$n@?KrZ;tm-62jfWEy(_xYbJXT=O3iVZGTmU=nEL4k0FPzAmtvvPJRpD z#SakIc8P18{fIus-XE>8t{eU5$i=<^WV8XsG4Vn6zxc^vb5y=;&cHYLd#Q!)Np z-zPbtJ^L4O>qpKBe~a6E7ylqum0cjP`|tQd_R1jV-n`MJI~Yc3_J^iNon9Y^s!@( z#q;PdKx{K)kvSqQ@yo@vsJC8Bz7(xT&k3&}UyTI4quq7mb;55%ceCq8{yzh|&k1i9 z_cpu}@4@@<0el!A!Ob6Id}{%z~k@)JPDUzGMEEHPW+-EJe?d(>sQE_i%5SK;@ZABj3?Om zLfw&!7RC%mMNB&$CLNUL$X>kjIg$WB(Vc|4Y^XWNM%K|B$j3gY+RxK85|S4G{lt zHu>1ukMJLSv-IDFcjA5c0ODFgA0|H*^;h~nK6^5JlHSrhBWxlQ+xZ`m+t8X(W_|hZ z{O10yZ`nQCCt2wp?GyjC)Qz^fQ=z^7lx?R%C%V{KNpz2#^6pNB_eke6xCyu5Hr#=` z(SqUhsW6f`rT^m}`ak{=#*sl5IrQ#375dPB_>}e!8%~;O4DLJ?hLDs8orS1z|_DilfWqzyjUmd&!DcAL%_DQ?W!Gov5&*_bCm(MXOe~!tQSa@Hn(B^8C?J?mEj{mwQQ8 zdH$}i$MrwKK2O3Wcp9FLXW&_Q4jwW8SiN($|F6XL75o17s{h?%KbbnL{ZHoqFG>#8 z7KGnc=m@C z*t2W+pO?%GZ*a_;&^k|^{_-r}pSrn79gSpxI=Yk{Np?G?cY=Qq`Vr^y4Y148e0=ij zgO~faPa*#T$BmL>jvFU4!dWtx3E{2I^A5Zl@5Nt~CD)T5!bj0It$lF9x!YX>x<*_B zy6HU$=Ug!(e8PSku?3Ad<9~1i{fz(Kjr5z5Tcu2JOl(7s%Rje@yB&AoUQB(~KTyaH zbYt@U?v0#yjW!E8itNV=!+qq|53*adVd{^nuTO;VHF1sVjBT$k4Bwh>LJB9K4!zy2TFz7PM? 
z`hP8lW{3IGc`7c$6^Ls9Cb0ER-yB=K1F`@3O51wW+1(@BAleRHxAAxW<0V@FKhft;Qx2`Z8Wl z?>yrC_IVZkwdgtK*@#OX_pC7S{KD`C@spR%3U4Ccig)1McrUKUhwxE+0voXfQ-7Wn z8p*a3`akA4$2`w}u{4)Td!cjCd&nE?b0coXtvEAI7VU@Ig@=!@KaY9GY7vN9jij1Bm}+j{g`OLfZlLpZX}~%}#kL=FKG8jh-_3>|OfjoTn8X=te&V zF^b`Z^5$XEva%(h?RSNJWazA-cWfxI6NU~*t)c#w?k?^9&-wW_c2 zz14*xQd*3=In4a8OKJV{eKO88D4~!pha8$<>adn=l;Exd;=2nj%w}7 z+Jf*V;kP1hSE?7fv@Mg1j6d-OZ|6JSLH^VFe@%14T*tfv@5Xy^J+|!f46qr)#_>nk z()scID)#p(_BZnX*<*9}PV+xM!T+4CoEkAdp#8LO|KKk3|LfTQ`T*Jwv;XI?|It;# z{x4+zv*UZn4>{jQ@d;%4W^Rn<$F`6ACir0u7dwBc^DkikV_cus+4(PQ#9lUjpX=*q z^QYMU1N3wm`+o`hAC1zQvKHbEd)?!Kx#64iv3<4+e~12kCqt7>w^N@YEH7^#)`z6W%3>V0s@?RDi#QA-3 z&Tm}%Kdu29%Yvb$^1k@=3T1)tT7j2OF}Ezfo@Fc^XVt!`u~IE$7L&ZLGXMpnjz| z2k>j+;~cJepmlz*Q?*r zE3H0#3H{m%DQ$)U=SX9)R{sQMOXEDu#f6wMjx+J)IpHGuRYu*#1;phAS|*P90k_J6uU0Q`e4qzE{z&My}YiK}Ovk=kLe% z$4$+D57*fCBD@6o_J?Qla(d@c?`DF3LHs|!o$Tp@r`u-9Biag{?Ohw@guC7EokiAu zJT@oXZf?#UON*=t`Nz<#483(?I^1@2I(&Zbabt3SGFJDOaLe@Z@VS#G!p>tS_{9Fh z_}7EsRnGNVG!{P)wk>@iY@PF9Xqf(UxM9y9!)J`Gy}`CO;l_#c!%f9=!)MFx4?7B^ zmz*1JsXsTo)wXxw)}yn+Z8hfK)t(ocrq2tvA21(7S@Uk&-i!LunPD^TtkO?#Tz)Qi zto(dYXy0*BXhToamB#a)X$6_$t@O|X_X{waR70P3BOu0Ks{?N9cU>UmcGyD?2!f){h z+>f@T^#Rh>aYyGuR0|-B(&4!2JFknJO&}4>-?*IE5K0_hbLG<7d;y_Brn?NM*{QjA0y^eaiP~Z9Me4 zmp*Cd{R5Q2XZ^p_4@1Hoi#^w}(r~r3u0gkOPt%iqhyKH`uvGt!F&h79SZ3Rc&^oU) zyo7u?5+s`WQL7bbfdT{oTk-xHk1r#?(H=^%)KRJle7{-108m4}FN%$riefYjZPCmIHe4qRweuAH2 ztj71eUYkRH`K549*^)&DoxLy9o+5W#O&sN=^Go| zmBt`_Xt#e3UC!6xx%{3!B7Z;8cP`DjXdFJ>53u$5=Y$J|FT%yhnrl3toccE3h;Q(p zI6fmhRroSofh*CvMt|&)8R05=oKqS7f3BuqgGbCiSgHSSE&CH^))DWbCvj#S@l)<~ znd4rBm*C}i6<&)sU|5|#f>Deizy3|1Iu1GXs;BzYQ~hM>r23ys3lAPr|EtTQ|KOXX z^H#hA@5UMb!S~Y7)SuVWKZKn6G_I|iA;*_IwEkaGzv)Wf;TrirF*|(JexE=>e8)O= z^#&_#MYQ z+`sty!au}M@JkG9OO7DFzKnc*wtcBw%2g_V7JB}RT_YLSR2rbCOFjQ$&)+xQxY=|0 zuNmQ2(rV%N@mn&{$G;PQpx=)N&~c1^=n+02(f5PwM$bX(*lEK*X#3W$%2O+6hEw!7 zmM|kNe<1qWX!$5hsio6=vU>RP7=tG+y-%~!lL^#_!GrXMK_I%~jPtOmprN04h!dvkU zOuc1xcsKc8WPf9B?&oKR>**7Gkv>Fz6raFGq$>0sla06mHzNA>rE3eq&Ga+n_pZ)`mN6M9s2k2L&UW|I?=UV`ybt|ug7)%%=>zk?LWcKki{?YEBqF1 zyUhQO{R2z&4=gd}%`VN)|L3dU!*{*6gnf^G+fo=f!1g`FUfIvSXZH@DG(TN59){LJ;44)KT;S-v;Q;fe{#riDfabT>0F45a53`h4~m~pAJf(y#Z&2*Ay@0# ztK9>ed>ncIlht$o+y7+M+;D~cvXA9|D88e@bHmn4_2r1~F7w>VJ;xQEBifege?dpp z>HWH-okaIe?+1RP7m*C}iHn7ES`lav3YpQdY8 z7^IJ4>U?4HzNo+5{D1!|_!|A2*!pj?LTc@d@Ev-Z9K`qOKg3TkxaAsnBRTd%V ztMFR90dK-v@eaHj@5S|)`i*DK*3FN1YbVBg|B(2&c0hWMb_jh;-9M_0@CkaB{SfCw zo~{4Y{Vi$7+m@gI_e%Ml-nzv2LxH}3dgoftf8OkHBmHLdRCxaDJ%7Zt8g3Pr%IHTT z@4}#R{a&&au?`xQ&&SrVGlaVl$9{50loKm`2PgT@RQvvspZ`KW`v10De{XKR;|4Jr zkH>xZ8or6)HTwRvv;W8SHx6q59yb0>rZ%vDYqb;XKS&=Uuk_t~M>^j}Y|H+TOf2Eg zf}hZThF_wiRQ@b>eMRzTTAQGceXbplw?F2n|4KsmmHmE;`;mR@wc!D>@ypkSZN9q) z>8CJbSlxm1Fc<&Z{J#tBa}h4ad^{DG;R;-dN0z_x*CEe!ul$8r*Z=Rk#D)mCKaB%R-m^ zl4uidHo0L?YMpnViZ~2hp%CH<#PQm=J}h;KeoZVe)Ih^=KE*K zoN(`+<)IJ#NMQhJ3}OhIN0je~&+ePjo7lW8e4m`WygdAnY}CiQZ5F@XbIZd|gnx!# z;#c@BQtivbcgR2BemsD|-OeHYLHgLrWnmPj=rcwCB^U*_(YcnL9JS#j^T*^DSjJyJa3zmf|$*YjIZosIzYwScpxLWuc z!t% zTj}q>yYXIJj}PIa_yjg$3*x+@MzT$r((WEQT>shspDcDR;piiCgMDtq&A1h}<1TFN zofYmShYz^_L+;=8jbVJJ``_jMo813)_b;tJ_u7wCq5D^krKjCLhA{E*l$`qUvM}{+-x2*jq&(;R{73g6|2^G1gzI#T%fxl#hxiG8hF{`W_$~75=c&UwkY67-sqU#)2eEJC zSa@#}`*Iiil1$k)pgxQ9-r_vCA>m>1BhBhS{q+B|{=c{`;}1!;W!|2g!{&6(`_aL+ z?A%`%9-t@8Gib$w^i!BIstiYqxRiFtdGxus5Eo%kota)aJ6ud3Tk72E)9kxKm@k}P zKUaO4DPeyvQ2%Z>cR`t5kInk-o@$@`{J(~C!e#W%6WS)z?5=b90ewoF{E)WE0c{kr zYvP=6g?&=T_4SiiVX(niEqM*no=bjR#0>>unedB{U8i0U9AV-&m&xDOrEA@z{dcG^TraM3k-V|5FnoyqQS=CRFHi@Q zT}$}Rh}(!QXvD+)6K)XRs+_o)%=`C!`}}Y#{dU}id(md>uf19S`#!e4GA93D?mfzU 
z^yt%WwND4S(T_olB8&TQ`v2n(`9Jda9oN3eAls~+gWetb-&dX|2!!6sFhntI^A9j{6 z4WGkJ>lX5XwLV^$6}H_|5bB(#9u1iKO+naBPVjNvLB?`^XOw(dnqvI9MPc{ug3wHF zQTE-dO`O<2D|~UDH3UnI1*3g;sXl@db@P+?u<3i>IWKe>GwUgMy!SUR^qnZy$6Bl( zbv;}Cx{xk-zc%*zuuGb|kw80==tK0=ySK!7PZouHuw~(8;nRiJgtToVXl&PBF4OKR zy(To2zaZSOeVOsUt3t-MJ-D&&x#6ZISBKA*Uln#NzAD_j@ak~Ofpf#we^Yj-e{L;) zOt{VS{`?x_f0f4nd|!8LpBwf%b|3cR0Pd_0;UGDzZW_tRW8}XaKdUJUeRV~le-&Q~ z45+iBkMbZnw8QtC(LbdRp`+3|gX_&nv2W7cmF^AJ8Jt%XYMf(>^3G66-ab1dd>A+S zuCx08Q@hSL#$wD@+)*6EVB&o3-}A#Xeat`cMyLII<8t%=o?`vJ68AYT`Zt^(#&O&} zEr-qzC&-g%H9lG}9yS*u;kXXzbY{*EbA*e~U8!7SGj%neACj0St{5dK#R4qEB20}J zgvI2*`^*`2X9~fuZrZ3?U{U-~o zt5m7)`=B{|d->8~jbmEtl>fDbVJ*G$g!zBF3&T44di1Clx(}$c$*zgQut8j^>YPwb z)*#x|wPYQlojuB)A49!x1G4JV9LABs5znkr{C4cXPBftzyU=FrKHoo()<1Aeo$Ht+ zy6HVfjPJLz@6o@CeUE`E_B{saLl~~*AF$GOtZ*G`oR^+i%|C$5E#?1LYA&erB+!l| zTC3RK>ft_m=W2B`()1DZXxnrni7q_+|HB#E_h9Q6)W7Vmz4XRly!NXFVIO@z4&WdT z;V|0PsQ;VT=jcr6pVc-`9#l?goA+SCK1XmA$1siKIDwNGu2TPFR6oerUiIH$^&go< zoa^43*8e2U{(AL425L|DO%JYD|0AxyRgm$$BD?v?VGcQ1>H8_05sK(zwen{5j4+R0 zjGS*Pi!v?{DW~i{O5>;4(wOEJA_sxjUIY8lIX&5ajApz!wK>v2928+ zWPMLa8$Tc2ZQLJogo}_(n9JU7?A};=24#*(V4iJTXX($0?;CM_vtn^=3yja>pZ-r! zV0<3E=tDnJ7$`LMJ%|5+vHhVk{s%?;4~kt!sp}xeg)>L^9~|RB$PN$~LnETDJVrvnS=i_qh^?g{@8IPPBpSS&8Jcz#$ymLd8*EG5ek{UAo& z;}}*5uSB*+ySz?2eYfjC+X44?$o)<5KR{On{{wWR2df;j8WpHS71m%a)?xUt{I7n? z`9uoz1)==eB(UPd1)PzKs(DZLyy;cN-}eIEOr zTp_MSTVy4<3bCJHHCchgw6;rHKSHK3R0>z2yTZ6N;@SmC#DC|;zT^D=jV21h8vC@a zv;I%rIbkimv!4BLpSb={wo9EX9On=wcWR4}>+F+iXNUbp9q>%~N?p8O+y4HHf}9Ippnhvh9H9>;Ce73d;Mg4W92V^?#Fd$9|y3Fy%5LWAmv$ ze~*87{=SE_?;+p*a@@pq+8=*p|Ht)z{_H!KemPcP6;`7Hm8e42*#8=G>iyaum#e?q zXN0xFjo0xDu#Vt5dTX6`hxPP0r?I1AM%X~FMqE>_ds-Wl>{{Xe8`%Fl*vVx6|9|u( z+rK-_PG+;ObW9CuQHOdoU^{kTIB9&}zN3yABgc&yXX@DKWR88`+s^(M-*0=09I$^} z+duFBPsacM?UYUvnz0Lm<@PC@8+Oyj*z=_wZzdj_4uXikRUuDMMtzDlM*S*+%6&rsilM9W%?{fbM`z7s1cDc@^euM7A_S^5j z1NscMy~{gxYzBL<7yGau!?m_ybie$!*ZV*0{ge6eA9=4=dVM?Ge}j8n<^5NA{}`kX zA?-R2Nar91m(C4`$iqmLTMv$mW3}V%FOJp5|Nb2jchEOJ<~fXd1}*F6hNI$+VH(HL zDn3y&H=Lm7=MPBtB)wpd_wV|;kwo<6C=@pbQ{THF6p`~#j1rV$0kYT442#Icn0P%~ zl$?Cm+)zfwy1yPFi*;C!4X8#9YEg$a zzCrEi&_~#bE_xE(%7-{VsaGA|hy4FV>(uYX-gBw?og6AMC&7Ma=MNrJX0VC!{ewrf z4^ZzM4cLxUxqccly;%Q({{Nlyta1Kka&$!h1jdu;39>X|2gRD9;hu%5t8uXWFA7Fr< zMqJ~62&we!P$Zp313S?_Zyr7V8!wKZ7t_b~OB*HhQsn4aWH62e;##J)A;?8&-7z~X zCYKh$QvqQOKR$wJoVKpjHi7E^q zcmLWdqvSuWA6)-`@qbkB54rv|(m3NUxRxG$B-fGau>rZI?stj%CF6L-nemK;-b=OZ zElK_ZwWq(q&G8$23*Rqs(f6^|_W19==-b)&vQ7N_%pVYrzMXmhf$G_z&OTE=)jvZv zU^{kTCz{ZVt@rqb$=s#2VK+H>o%Rtq@e%Lufclf3um3mKg(Q8DeILiT`si`&X;dAR zrjH;eub%BcHwM*S=o^%;;@Ey15GjB74Ze>BB837(VI%AvKYGG4||8_lQziGrW{y2F8 zW8z0ws~f~c-;3NL<@r+W72BqMIy)46MVke4P=tAydbT;R&)gU$jP(}_$Fi7q73jUF6veM=ozjulvmN6i0QyfKVSYz(6q!#Fa?B8T1;8$;jfjiJA4V@OqQ z3HtE0J6jnLkYE+;SjeoR0 z$#4i&^fef^&q&FpFuGuq`G1>Em+vRF5sxe1mG`~Xn?fJ@ky^Pa46N{uR&NS}<(tA# znK;+ARyymj9vd*&Za#!x4&rF3JcCmYauU}M-$ z?m%Lx=Q{1VVyAEuy7zdl`#opRCV9~F(T42Px7E*AFNFbmdcXO5)7H1aFq?V=qZq4G zf2~u0ZE$~N&bQhu&0W}y8t*DWwj+r?v}tR!qr$`;3$q^8pm-0CsFWK-}IL^ zhC*@NBB|GR)bCZCO}@3WUG+1+No<>aR}hLyq- zn+w7!ay2S2^K|!by zSBpB-qXFBo15;n!6n2smf8Q9I$jLu$^3ar1HQF1#g=TTPup2EWq#Mo+an5hMa1yP} z%A~f!&_|EHza1m;{Z9G5ooz{W+kUqGSHEz8!Q2x1*QwpSf>SY1VCk(*Zkm8t*B)&FFwNc~@|ZZB2;lS6aNecS#1 zkYY>c``@fDIJjGXz4K&{?$d8@M8AQ){yoBbk)_8qfX0vO$EV+Y;^X12BOkY(WNo-} z!KcC<)1L~rudOqFR~^1^=!4<5i4TTbYd;d2Hhd&}9=9ad`)AyBVQ1}i;d7> zgijy&K-eek{b*c$UD(!C6Sg+54-L2>Q4>BhQWFl?b`UpKe=yus|EciVrPW~vZg#F) z+Uvq0+YaN_^|j$P&*$?K9}i7+wbs7$?>qgq6Sf_}Q5?ga?IBE)E&ic(ulKPV$A!Pt zHz%}Or!wKcYhP?OrnR$}ZB`uGjrX-x&j~%Hj}6^N=dsaFj{~O6gX^Dua_HNmuSkE< z-ml1S(n(J|QD00+I7u(qt8T#@SxabcK8HvR|uQCqcy-n!8EPg!9o 
zrFX8^hqAUXETAt$PnA3*E{U${!mvo(Vr-ptepo`5VdDMlSaS0BpA6;X3arFb+w8E4 zT#X7;qFw)XTfIJ-r1LaOceiv$^wA`or`?*G?C&c3t-)HX!+LB$_RsnoR`~`!<1yDY zN@j$|$((D;*7^3O(JRfAH2cZ4a}0;tlQy><}J`adnz7Q)~>Z4flx z6OoP&`j zM=(gwkb5w8#5E#w*fka5iAMMZz?YE}6;kbQIAhFAJkGSpzcD!}fwy`m~ z>s>dxY)jhrr2SGm=Z1o>$s-s{%nftMBBYbfJ>j}BPq-M_qipem&VAf9&|4Dcg%a_l zSb&9S-EaLwETVUAFb0gp^d;yiw+1DWmFMeA_5X=Eo-aB|U1zcDTHw0SjUJRaW+}?C z0xPi!t5JbrZMczx>KOL?7&$JSsZbXnhhAy)p?{(G<2?>6@@$=F(D{b+PgF{y3Tx1! zyje>o5=CqX|M~kVpE~{b>&0~(RTiO(&p^^Y0K3t1m_N%(&waJ>#Pe_Qy={ocui&?X zYI=VCpP(LItR7w{f0fH$WH)+>tvOJ|257Lo-gZ1X|J6vN7Immc1GZxac4BxP`+kEq z0y$Q9dd^=)I4hhxto@f@10?kikSXn@0eX6udZtPLz#4ILW`ri`G-DTbW3WgbMS`AR zUzeTSPER7|db9F#2627eK5=P`AcG0xj(f=X@87*-v?=16kFj4M+R^*O?Z*K`|AV+L zMk3s3#k`W0{2KAWX{Yf{<lrSQrm(%> z>+XYINX|iAdwR6Ww{ct>a+Pm`jBVt+{aeXKL|p5(E3N*|sQ<|x;e*mDa{N3LqXeZ` zfQ1-dufE2aasKfV^)j+W>gfXZ|3US?HbFm`vfn_h@p;7cd4|>+f46OsbQWU?%FtTQ ze#KIHoD&!Q-^=MM(38>sjwHGYX8Uk`f7-)a)@eUiIPZ_W-)iY&rTrU!A>Zqd8eO9u zu~fTZk?Z4^kS%q6hunXY{*PVGLH?`zUnPyzs6ZvEum)?f4#U;*A4V~Tv+KVe^o-@j z{QdX2f24M3|2R*&LjNCzuwEK1?r{TIjaF@`b)I<*Jwfj%c3stMdEq+b+hseo;i3({ zW_G9-*MRNVft_eVGj?G&5@<&f53j%agtuy{WySwIE2HPz!9Y6xufJ6|Ndk2X&lFBw9${qpC{Ex$K+3K^Bhjtb`k~OQ2t^L ziZBnwXj3M%<81jrPof(==v^Uyua>`8%FC7Vcdhr2!4ml!!;6#;7+vc87^mmgC$cU2 z^5n;T<|(@toL;vuL6$ggDOwlJ2n)zX=-jP;Lf>Dqo$WuOf1*kM1hyJO$iA#JYgq*r_W+)?V*9-`Onl1sAsqh! zR87{P7IjEzkJpn8*p3~DzMbjge9h=%yM3F7)Cd3dR1_ihs2g+~7;+GZ~0@E+sq7{wUIkwF$Y^sX^} zDV=`l#kKAS78t*Do;XK+2ravF2;vLP7p^u>Fm}@&?jC0zx zo^UR4spd<<9&#@RGna&YzK8|Mk+Wl{UKR zJvii;!RQGye$+J~YwSPA7ofMqeV|`l3Ip~@V~{?Cmcysl zFv!;vr6HVq2J8kbkI_8mmvHjrM5Z5kgNza%W8PdtBq zSWGTK8J40PE6}o^9~zm^wp>N7Myt3AvJ&y%I`LmXN$uh;^vF-~A3N>Zply5*Ity%{ zV>{VZV!JkVH+}2-3qzIT*I+HyVaqPxJT_xQnKF$0I$>l6NV!_fMlNI<7qF4B-uX758nuZ34y+^V(STw5kCb}Oi#%sM z`u`63_17!;U#;P5MGkl$=`!Vix$@uhZ#1WH+cyfsc4a(iKZC-toQp&Yk>T5 zerOh+eDeITi=1E+?2ch$fGN)^L2pMATlE19?sbmC#(jmy4!E}c!t@d3`g{ZXy!$<_ zm25dw7;404um^k5TB!alogMbkJJr1%?DqZi1Bm~$>t?qn)xBNhL2(c7KRP6Q7!x>x z{QBQtRWHzwVH(GA0wrJ zeTkeY1|28(|DlVX#F_Q~H*2HSpVYqD6GEjl;+kjMKBCWwUg-J#kMkdQvX$_^t^Yq| z%z2Hp)?yvvyqNXm22^A67w3c;GWLJf#<20Ix)}D&)|1iyt%2N*TlvB5AkX-}?WCXa ze`}&Q4rM_sBf1dFh*&oC;NkuSBlgQ+53;X6 zKkOqLzs$bAlur-+01o014r2mw?4eyg??5NI_Aiw_d(pdi%g(LPmies3@ah$+O z3@?`d<+rorzw%Xn{TJ^t`g!EnU#^h<<^9wm`Cppph4Md!irMyj4hz2Jx$-3}Bk23lytVU~*@nclbqfHwBOXVCI`(f&lS~SClRYCBgj&bcp&kv`jvd&E zCJdWnK4K2SXr=S4moDFfjJXI|z6ZIr{15h-w~qcp`Wi4mPh-%2@&A9#(unie2eFI3 zSz9o_KEH9HRI&6*r6-*%a*o+;p9Ds28=K?*0ZHo@2 zK@UC7K}aIbL1?RX|L8y`x^_vU!S{!rw6gm5$}`99!CvgcejLC-ob^9htNy__GRPu_ zUS)q@!uPk^_lE&`I_dk{>H90@j48>yF%(WjAHue~cS zQ^Wqnad9nt@K2B@(Yk^^@wb&tNK~j-SIf8R-8sTV$j_@=EFUX3lBhq%KE~Ad%)!+D znP;C8G(MkQ`VX$V>@=t7~c|XhwWx$<$x2m-X(lbJ{?fLb8+*{+zJkM{>3TOR)+;qBQb9i=Wkj5_TMgr|fq7P~0*Z-|y|JSjv$?*;B#=Y$8{p@S9 zS2}&t?pN2v{{s!EPh;IW$Q~b3FQ*o=H>H!o9_+>70{fI{f6&L)tAE#Of6xygSE>F* zW}W(XgZ78G7TXVzhtVp2f;@smwfmBOTvzv~@G*4Hb6?1hb2v|1JNq9UuCKFC{C@Yp zME*k$Ht*BcM4TTy?fC4+wMlWDeggli|80f*hknGhVjk^(8~;goQaX*7tJi;85DNCY z|32Ty%GsfiKGx*h!W?=L;=cp4OMGJuzBLp(wxwovm}grtT8|qeCQA|55a>9dUc&<6 zh3NL(#dR(6{~6s=5EhABj3p?;Qj}u_R$>)aqXLzv!Wyha_Jh{n4c8jmFi!n%#>zf> zjt#y=#BXv+A6Ub`+5T8<3j0s~_!*(`HRWNOF}4lDQ(rW1B+3`g2sLCa>QIjcY{%AX zuMIoMooGTcCXGk#BIEdXs=~j+=n1qViNV^~2c+#!k2e445@GrX&h&d_^gWKp{>O6p zzh3@tmj8Fk6WRm$|9{G}J=p3$;#p)Iy9ax*5BqTd!xid(j3V~2jaPd&$RdY-n*T2@ zjWhrM+nn~iGM*QCNP3OE=H>t5+HjaY^?H4D&onRi^}cE0$^Qu92zeC8FpZW9-y)g& z@nzuzc@nL)@_)5*_&f6dQunrGMku7uLH_@LzL8|Hd&TD#UmA86ULI~Px;)%+=<@LS zhQ*<&?3v-#z0VA{rI&;+6g)dL7e71P-c%OuSoGX*=Z5EoUCm|TuJY%HyUU&*);Ztc z!DYrAmW6}#)PZH8$a&_W7`a2sLe}<-?c*4uj~-nXN^HAn&84CJ$Q*qT=Z4Ng{p3aZ 
z#m#N&^3S%O6~?(Tj}5yg_&V6PrNDe^dSaLPaZUcCF>-Exoa~mMM6 z0i;)3|G<7jWy?cL{k5S+I<=@nJz5*C4T+{}Lj%2&-Z64**iPSpp6%Can_O$`4h^cC0+?w5L@~^#>MOe_*lo2gshP@<+7q+Z~@oA4XzY!)$-~bwqn)w8Z!u#m-kfn?}nK`It@Q}9g^p=_ ztiIwe@UfsT#<2Q%B-YXDn&_)PUZ`#^I_fd_yfAd4@|BkT#(S3;hfAE~J#5u~a(se8)%dsMcu?oZ55F^?Wqs9})FmB8tgR}o1 zNV5O+EA%7P!2aK5{6RZqP*&FW^JG|X1H z#|rnVt+8HQi}uI{vKsNxApkU8?vcWMA?_b$2yaPLt{a`^DuIC@LL;pvE{*Pw;AKTdr>J#g@u~*ptYqUR< zoBc>(0O_Oriw^KFLcaenF(c&H!@K-+zG#;AF2sL;#eaW|E%5%|ubz$Pr04XZ$F;pP zXqiygh;K&{eTcSu{P$Oy-YLJt|NV^6Gw9hM|APeV$D(QY$=zRqmY} zsB-V*pzvX7OyCHP;uxmP!=EOPBWsLp(m%IO&?i1Mi*cg9o+Cdw=Sgt|-;@8>YoC!@ zzv}sw>VKurL%P^C&CxDE<8S;20NKN*uPyH1QH|0C`G(l)ly(l(`NqoplcY@?+%ZCT5#tTH1zW)#QF zlvPG?Wi+Eg1V~E?0a8dIKnh6{@|*l1zdnhKWkzPmXjD|jW;K)I6g8V>ETfD~ z`+eR}x}BNb?>-*i?;o$n`+d$m_uPBVJ->h5l~{$k0kqO8x0-{p&L4%IU-Rmg=9ENBwi@Q@kZGNFPFRwfYeG zo)xvR3&?dSL@_o+b5q)jrSvkCqXLx?;=iLxAD&)~8q}gY&pV)j-oMn~|GijWog86z8fP$y zx6ZGPF_##JIZJMSNg4>r;T(MeQ#g-$X{14#IYKsO`8GNKHveDXW*SK&>H(x84e7{0 z&q?_oeP`qW`T0Mbf5FdoWyw*pgWsL>=-)8io_FkmI)NXVn1_xEdw0o8=hM!oFTg_d z8Iz0FkPJ+#x5z<6d)exyVjH930e~Bs&Z}3T6GuwNSAgtxzx2~ zcyqs}Z@-**P##uNN(pMswu%O#PWx^wYm|RX85Gxl=ld&JS3m zk1*R>|ChYaBPrnpW1yG*UwV?eesL7F0fy-#7%EUMA#p-n;&kK}f6tKPAGa1JGUF`H zVFFV)j|-Sa()XkVOul$^s9R-hg`S4mHTG$fhnlVHA?KiFOkPKH=0`J*EEfOBbZs8$ ztJJ?Z(x9F~Q}4xb=@xn`vbcF`{WG6=0TyELYuaDU>i;fvq;Z3Oa)3EutbGteXsglx z$5C{k6Vbl7?ppP~=jIAeGzJizAA5q{onx$>--{590YrWOer&p2|5*QLkUoTge9uAD z_s`*WF_vH{>eiXRaBUgAk>Al6z;b#nqJQeNpqby%S-Ug)@7Hj%O8Cp!d8g)5Zs@J# z3isq;C01cI)*v4R=;5Z9+dlWb`Tv7vZG>9yS*ZUvE&L~he^~g*&QakfyP1!X9TVpE z`BR8ul%hL@oAk><8GUB|<0AXg=@t0*`yVUW9rB&3$ZFIrcAf!QBHh>K3Rj_UE%83e zp$7IXdBVlqL^iu#U#0(hT)IFbn$Rr&w#e75C^lDLgl6|{xL}RIcKh#r_jcwk^qeri zKCF*T_K#{8&=cnP2hVFKRKGDlZ;t*bI?##e-yhvLhK{A$2K-6jI8LCOT^CN$`%C$a zVfqM$=z~a9dKONzJCtV43^|HA`WSf@we;wpKaDuYJb{^Wg_xU0gb7idI%XUqI%j_z z(YW~(cjMm;;XHW()0n*1{@L%7uc$N0)JXrW^(X4GXzn)3BPAg-q_I!O;5C_H4!P-@ z;;Sh$WYXs$3-hr63(E#%;Iuc^5hn zoym0s(cb=gv=&OcXhsvVJ!27auoz3Q6w9z2^%KtbH#dJ|q4{sjoQq#=j@jIGYx&I~ zgPUBeKps|N6;@*ndK%1sqtAK%{r-DCkZ)ccg9YZgXfnY1Gn>qzG4^pwf}=#$s`qdeIwZ(>ls7(&}w zWrgzNC^_?gKYEvPqWiS`pHQ|m_-^_t8xq>+wegVj193ohH``-IFZr4JjqF>%|1AFJ z@ShySP=@};IqR5R-}D9bueM@3y>84oO6)V}jjM%irFG5pOth{LHg?Tu%8!S6?6NQ) z3$PGl_sqY43llz^oOkxeBtI$&OCmNH9ce{s!)w+Ou2@v zMFW~Kv;RQd(@u}(B>K=r@5RuxIuy|ydGxRNnf|x>CHlu*V}&xKOW84^3^}PRIc_bQ zdlEQ~6POuKX!pHvlDWqkzFzd9U!QtlmHxLmfI)I7Q~h72?7~rf^Z&5^kDEj4xncf{ zAX;LGZ+>9YQ`{yhfo&9r;oItns#}s*9I_+QV z9rgPYOS~h75S;;3zfReSOZx<8>VI)@X`kSE_g}y?l1^)PAr)yz$D98@s#O2iYL_IU zcF9}De=i8Xy8h@Hbv-)8YZos4|L9PXy zsHGn{pn7k{56XZz@ zV+5OiW_`gc+VS)=7{wSSPhA$yMtYm{NJjtmogk-h9v3i;q#sICNJSdbk%2jg&W|g- z-udAC%48nDNqb|yvL@5}>MzY>H`D((<@~uXI0w-D{@8cF7w?{u-!GWkalQYH{68WM zyl4$NbJQjsL;}&jYU-z)pN1o7MAJfjTeP4RUEVRveG8F|MaaQoEJ4pX>-*8CFWg^h zUA%sCLf?20LtXw4kZb*Up7rNs2X~zp_=#@%u`%oC*QJG}{8@(O$VFYd{&(+HVFkUh zO8>jUIs^JjL}%x>pcze`v5H-XF^ARU8g!qt)_^QP7roDzMZd9%CBm_e-Jr3FA!7yu z>=QN4t)FN99Y^pV{(tD53*A$UQj{So<7dWy64tsf_bd{A^qmxbM0*kvi2m_6gti63 zpDp~}cjo^H8NxqD_)~=+hZ>xJz@LljaD8uM1a-~Ak1F)5(XJ5@t$Amh7tVOi;vw>_zbk66@y!0CBIWl9AK9uL%M}1Iwy*2y|*7YBmC;y`f z&4|wUY(=ko5;%?%csAXbISzX78vWXyGHx(4PFSf=t8bqukVgyUf3oeY`R@ty-(<(M zwRiNcQR8IpJ;rTxe#N#EH-wY?9>xgjYK$MUKTV%Gf7E-Np^u`~73J#aGcgZYn2!ZmhnezeGX&+$b{Jku7Z@M}-P5r+# zC1iWfBIIB(>XzAmh9&gIQuQB}(wCuCUD$%??6M{-XP1i=$iqsk!fLER6j#NF%FBG_ zXwIkv>*$53KcW6V?HP#v_1iSf?@VK(8OBZ0?9b$;7@O?9-h7k%NiRb=D)8q2cgnQ? 
zF;FF6VURwAHuUlyt~hN^3iAof{P$&@@gU_yGg*H@ z{g0XbIqLhSEcN07ZDca~_vT6W3}XbR@z(a=8Rni!^?$WGx<>tvfqZRC3}Ogv=hXi= zdR|?x?(G~^|97eX+tvRVJQWY4{29YJOkfJglbMk>2B45jmPo%TV#cUgS#nI$j18c%mE19Q-=-tR&t{czH&@!G7{<8^0Vk3X06 z+xR?oH5txG&D6KZQO2bICf=O%a=c~1uj8#rzmC_Z|1RE8^}G0yq*vmNnZJp5CcPBz zTJ_8L;PbD>OVF0|t9X0vWc=u|$#_Tp|BPqxZ$1`aA+oUuIarLD^MCJ2Hm+!V|E0^q z5_U_m49iiJE&b+5zvK$$Jgh|2H<r%Y|*F^*`mW#TzPLiyx_eE#62rwZ9f`PP`UxX?QK(id^@sLm`S$iZYa= z0zKlccfsrNKC(aO_4ojpU>+oghF_1j@uM9_i(iX(pc7qXuf@ApzaBr9`FgyPA7i(? z8m}V9AAL1mP1c|m4QNI?y3mW6`d7NzfXUg@@s8Zr;tBT0G5C=78QERv8S7q)pQQIM zcrD(Cne};1(q!6e@xc`LWV{w1z@f_53=i+{2uU;HD*J*h}TIx;W^nV5&3rSdz!`tm%79N=f7;9?su8f$J-_O}l! zf5?teuJE}{>bA8%!j*4{Rg z(HWj^J^v=!|ChsEEm?=f^qKvC*Xe`L*ACB8z9?hf9AA*nmT~1GUiyJ@oDeTt9B)rjiOHDo@bJ%D}kNWc73z`PF8-oK$d?VJMb9C{(UVw9o`n-AudBTI$W$8ymPYIW(|qMtkJG-nSyhzV=$YmmGgt zx+Ra}1WsZYBRGwYillIcytKb;ls<;D=<}`nrH6qv(#N#&`MkVvPXB+t`Y%^Kftm5| zO7&m0I+l#i39WVSIZR**=WzkkNcsoqPhY<`Mc-Oo-cJtb^C!re`S%?C|MTi5b?Q;F zW0m>$Li6v&3c3-k{~Nz1C8Y3Y?B(Q;O1{~?R%fR%r{mK4gADqlJ&tq8&BkLlX%8Rr zu9?j9kcIiEQ>WHq0ljgZu;uF~)3Xts8QgNg8iOf&4YXJ4D}*0MFth)!SNLm%zghUn zMefPLVl2T@EW>i-qGy%-Cr)SD+2VNSTtDVP_CxC8wo&2d&r$w$kTdfi4cbRl7w12s zGm%#CCl4#J3f)=SPz$Xspht5becY^}=VNG%vHVpR=ROA3S!=-Vkg%>J3sJ{?F&Ujf zJhT45wP+4xP?{`b-!f0QeDkIp;mj8H6Y79=ZGZ9zqP~9<*?L|+N4a|{P>Cv3qXxBT zK-Bl|IWBLVk;llW??1pU>iZ8)$(!<7GzJizy&vt-i}t5SV*tkxjR6!bwFeIyqgnw~U&cZNQSF^n5GIUDI)lfpT20#mqvX%wwdKNqNv z3XLCF$GGWb;{lJRg`}Toryvz+NJj?dAQSVDh51;3g~-MtnHU8 z^r4REr#6yJQQw-JIVX=iFN_<^>ldHBT0wAiSjCTgY}#W!&Ysbk^Z%CUXD`#wCI^t9 z50XRctPjZ3|Gl99t3TUOtN)+S|0Sb+_Qxu(3I+U{6vlOAA&OCoGL&Ob{8x~bs6sWy zt)r+R$DBEF=&Zgny#dW=M_puh-gD^Do`NI#HK@6cSUHEYn9q63$Ug*X#ba=Nj{Fzz* z+haWnJ-QZ+k&My%PxBjR>E|$%&2J=5cortu9jey9B+sL+%ejB#G-}0lG{Tx!YJ(4qcDjxKv_`ICkDSb*+1+-O@Yr1yKTK4jAu zA^NBGpz8_OqknDZu#3*XTTCuR^zYkc>1k*+ z573gQ{%6i-G#|FrsF8)#;H#AL-LvV`r~5fVI}>)K~+EqJ+GZD*etep3A=q41UUb z$m{Iq-{=49U&;)pm8DhetD|cJH-#Fq77ggQpd2CF(S=?_XFEh^ybiu#PF&pgt&`u_ zAID7no5PF&ZpOYcE1Y0Ie!2ZFQm=BpT-&ZnR7#9hBNQeoPBS8zXh#mOP_0f+1zl{_-V(3gp6zqz63 z#N5!!zVEcSXs|w@*ZKeq(uZo?V~>8i=VV|GGSNN4k6L@s>HX2Y=bgV#pN}EW8C=AV z9DZ@PfZal5V-a#N_Q>3@m>loCDJ&tEVi_hwW>`+SWDEsEHl3az=Sc5wA>a|(c zCD0qqvmYriPF$}K$lO|MZ9uQN^MpBc?uwR)yIko99VhLP<)%x#cGC;#n+|FxB})V9 zg#KpVyvw&2_k(0~7DIiN{Es7P@_&kNlIh!!Gy6}orA>a6q73DzKqabBjh-y|e*ym& z@gD>8`H#U->HoC!zfAt;<|sEEWM_`FNp{bZ|2?aQAGK&e^be`#$Xq2~)7$A?=*1Cv zy;0t6I?v7Y#lAr+bHcUbIDx4Bdy*W+2u@>Wyiodz*2kP-9>o|ocSh%bC(n%konxNB z6eeG?4xYS#X^g*W4g60prvDT&6=_IE20B)n+b1(I4_Uah|7Siu>ht$u0evBc#*_`p zgiG_^ZN0*Yqp9KtopZ!biul1Xan(c4{9kR2_+2G_SBhU;+Fx1vnOPy5zl)HAEct0M zxdcm5ztH%_BIVvm@rR}};twtKRzzp^j1}D+mT@~i;9msJSXdtE(#_yC$zcUO50kT{ zQ*sqnV@+iDwVSPfv;QxugBL5yl$rVLFYW&+pf~1uHfH|+bH6eBF==aB8bcH6OZ5TE z^?%96D)pbbEIJprC83|#Zham<)}au^C`B2{QGp(HWN)+juh#r`s``D7`W-Xp|D@>u zkEs7}6dm~Y{|AiD1gzvw6{=B#?o|EXBz<^#e+IwNKySv-I(2KlIyOaJi*|Nn2a`h= zIsO^#ku-6UExqNa7uY3m93_wFGk!^)cv;(RyKzExlY5;1Up_08T&`{n`fXdSX<%Ph zru|SnD-6>cE42+ULO+dGV+So$+7G4L4>-fF6jV+TPpvrkUrHr1M8$u3`)l{ z|NlGj|1$o^T?RkqAQOkIEt^Nyl6AxBnGOVa(JW{$DT$lo+&D=AGFk8#y6s5;V6=bykFOQ7Q|6WP1!fLER z^zYAnvHHTH;PYHbldKqTUFI%qMUZE{lq5VHUEtIpbKqabBmuDZr zinLHoZ(M3_xKMwa-hkFBXTMcA1CDH3p7w9s{|l_8=BA76K51V7eWv|?xiXl!|Ag^@ z59?Ek6O{$)-u50XQPH`@Q|yrXBePt<#z6WYI{+P~Tc z$6Sy4j|=&ejYY^ocd7JKsZULh*4#yB-!Gvr#mxWj<{N`UvMWKrQp9UORr2UU3dNW$8wf`$aSmmD8Sc7~N zU_*mEBi@S8QziV(!e6WXzee~AguhVu$sy)8;cY)J{OC9#{KDQP+}&NmuRk#Ms5ZEM zb=1xtzh6I_|AiR5E;$sFB~7tVMs9x8`26gYP)@HvbXL#gNA$m^tUY3`LN#g-{e!n_ z%orp+IuovMwQo!>IhY)V7Rl!;l+jUHtsT)E*>6n_?U4-kx=%hp0>^Pg`is(FeXo4b zF6~VlzZsMEj1M)Re`8#2m2oCxJngl{m5g^CW9}ro3XC1)MdMOwp@;0vjmD+YjQgiq 
z`1U|Y_g9yGk&6{5%F!N)`j?p@CP0oz}J>;BddfgdqpAl!h(Hq6_kzwQf^mT~F?xQjMW-=P9C}h`>kRQlW zbgR?L$Z~Yi`*O7l^2|puSK`vRKy(Iu^dF4+eC<;lK_mXd|8JV^4^{4~Mh$AwfM&F# zXSMk?^rae~$3UjCAw{{6X8e!*H~+uk?#=W0=*N@`UHs@p0*CZ1j+3%po%|4_TOx1z3n|EW)I{3OQtPp>XGiu$UgLaal?(i>|9T z20!8Kh3!dUIrF$P1arw1$it=|i=S8QnXk?Xr-f??yOmgly3;wv|8l}=dgBFqv!`;x z8hSoj=V(`@KMB3?PB}+y_F# zst2r5cpx;AO@;hk?HRrHzoWuEm8e2BYEX*?G@~ci{`Wll-^u_t?^^yAI;KYa*o({E}2t1mlyg#+F6W7?h_XLG_i z{!CyB=g~c8kG#G67wFMHdHWW{!ZbbUMeko|><@``+K=hZm0)+MCMTqlX{f8s3F%}8 zYMZ4UZX1_5Q15C##o@c`}dv z*4|fgA+oUuIarJ(Sc>TUpJn8w^M97pbFl*dU(f%E&L7Nk|4OXFYOF!D_b;FP|9$?? zLl1^=ZHe*A?c@K_1EGMQ>rjYdl%fpfsKB7|D>@fsa^C}?ih1VzpVi`UrE-9*W_RiQ zpBnm`=l^^=+4&U7`bGNxQ`#L%d;`?i>VK-sX3oFqRj1IK(V`A&MI}FK(ST;OqYJ%A zpeNt_KW6^#H>&@CLjT{G`XD(pt^d!jc6HoQ{IBzG)xXQtzlhGiInJLGIEi6&&sYDd z<3{NH`Rd;_+8^{Y7}6IXR5vG9sDD>#f3SLR;wOdV95i2IP{-p%DiW8l%* z^!fZ9l>fTb!RzRS=vVjlA)4D8XgBuXpd28JU8`HIt!o^rME~_rx%MQxO}{ms67AQR zGFG5H+^jv^L`G#nbS79CH|3~6C8|)3adBNkjw$zQ$>?mL2C^CL=t3_NIF61fbNghL z?{<HPn5{F}fO&f@~6k@Rz6|A8~0$f*CHN{%0PHp1+TkVa2O2BNvoIbE^$)JZru^0_+ZX)}}4;sWU2~wexxGR-(36-+i@kVHNXg zwB!nxzEIN|;nc_PD0Ai-`+O8&9lDD>pM4>{ztZ!|t-Gd|V(5bB3rE5;2T;bY92Ka< z`0TV$MUGu>{DhornjLD$S~Q>;?dU=;>XY>A@aF#&=*?(BwAZ0%RNTCg{u10DN6G#2 zw{sm&(t9$+zxe6P694Dq{|iyO-gWvA+Lns{<>DV5=*$*2{&thc+;^xfEe!K#lezKe z{~sgtI{zf8Wq+F9SZVyH+!=xNQM6t#{$rm=bE)wkjIryemiIp*UpmTWj+CN7MjGxe# zqBUFDf@tkQ)CO9{ZaH$X0(n@8RhZP~8T?YR`2lBYGLK)Ue@Nz|0P9eQVw56U-&sa> zY4b(@cP^(_pc2s<^8QlkqgwjN=5LPlK{lkAgZ6KfM)l$m^f2W_SEB~C=v_Q3L~{T$`|FH%&g`!<_IYW4UG(q2=pVt+KXQ*Isn@kJI@`^C zH=6?|4^`Ovzt#N2ZSt@7UjskuxT$rmncjHPdrfOY(7P}*{*dq5I`27R?txuLmFJSj z5w$B$kS8(o{~&$+=v=~K<`K;NmvD)83j2gHfYa>GU=(9Gi*uO3WPM7QBFFAY3Fpai z|8%@SP9y0h{XiVb^#8pD7uyg~{G_t0JMI0oInwBjQ{MlaJ|4Z~Hg)=O@jB}L$GkuL z|Ly(*-=c-Tt>oaZjQzQHX8+?Y(k6W#vM?V#h2p?B>`U_v$N_PjNOj*L=MSJwd#JrY z+;i7KcCPRZmWX@b_ZV4UA%9d!qc!pennt9}QEBw_#rdfPp0N?Kx zdvBinsXyPpO8(TRPhb#3n3-QISO1l&|BUx_7RcX)@;7;mc_}}Zp+i1hPIk#VC0msB z^ltg%L{w(nc9Zm_jxA8PwtI%Ud59eBwf=8)EUe%*4=b?>b(!+t9A^&FXVx$0Skp_- zN2{_V%KOb^w0^l>9T=TKek39MWK*;7%NH&3M{AT%?z|}!xNjW_QH(Nd{(|)ZH)Vz% zd8)Tu`-|+Cw+85M&hM@>{&e{lrblLpN#Td4G28=)%w{VL;-vcf+OqK)tRd zaL7NzOUdInfs?2+Cth2d8HVYN8Row+LO+exuA4&(np4bw;|#k|jNvTaI{$W#c>+`D zQeOV$4cv8#;~#4SF@J-(-u!<<##`1GEVRDB+?tTnZ82`l}hS0{3c77g32Rc)&Ex@J!>&^J@KR1(~$PA()Y>u8W;F;g`j`PhA zWclA&g|n|RGQ+_%^HoJRhbOriAD}-kO`o<`U;P|~Y1|oE7b@1w58JW(LV9?5 zDm_$|-f8dJ0&4+`f$uVgHudhXvHbQ>Qh9qQ9lJAZ%6xCwyzafB$#eI5k0)~97oME| zHvc`dA8yXI;i;+X!nVw}&xGrM>pODa5q4HwA9m&65GpU+5T3qpQ`lYTe1@OJ!_!xq z|H}2QzSBPGbkl5U7CmKGgr=lqZ3*RF@nzwZb3#w)GxWN4h@U6OZ{zO}=SOkv`(s1& z4C(2aO7m&C?}`mnDDT>%@2$L3r}QXOdtK{eevTiV%G<7C#t_U4;%TbGtkz3Es4&{fF!l{>I;dxVCe}l5?xSMlDcye|Kw`iWN zzySAug`2LBcfXkw{+`~m=87;v{s^P^8OG7i-}mz89DRsBcKGJ7ka_$Q&Sl8@YB=Q# zwtseQ66c{CRrEWYDvE_&_7A*9-*KJv`iMP@!P*G-rlDu*if|42ta!SXESrC2cstqZ z83T8z>z`K+M0E+fQ}%K6oVX(NJ*<70>b=?h?Pa0pr}$^QllfoCEb@w3`m?wm7w|r` zvcHe)BL7Pym2+PtkLBMUQ`Hu=8;kf$vLkJNY&)4aCm)f91Bz^iF6y0gN9?c3{uS57 z-g&vQ+dDi+YLXWH3Asp7Xih#F)-3v4@(%J{+~kmdN`9DJNq)q&HCW0VC;yOK&wd4Y zn|J$DvOn#eu^-Y;UH5_5T;^y^*r0qC{X^4{okc~f_3sMIzvd@}jid8oC97wJQgi>C zu$kSK8f~iPr0|6K=qJl(g{{UP%4?ItAMk4%xqZyo`oyfT!;dC*3g9koDwkg-k0piO z`rUg*l0ubw_Zju?Ught;)ynd!>tg%4Ie>!&*TrgjFAL8uwHK#!LG186<@y5SknEr9 zQa{(E#~PMj7mNN+bZnk*3}nZK@6V2T{h}kf?oDXLw_N*MjNv7mbp2}d)W{3I)Wjr?Y95( z7`L70ok25UUucfD<8lAdBO9`v8#!!^iEB+{^NE!3Ec-(#DdC`J)GV^DATuRA$9$Nq z&2Zk0ci8Lx2gqFPA594d7ABh~kM{AWguh@{hQr?Z1Mdt)KM{t5^c}w46wYI(Z=T}& zr^0sJqTS3#<;A~n?I}EiS~N$vmSicrZS_T_kBd$v-yIunyDN6;7V>B0HTVKPbysY7 zYosrX4PUn~cIpR_{6Or~S8j<7fA>AHQ+LsCWk37=*zgw~jGemv!PxLUk<5t=fAjsZ 
zQxD%yUlcoa@B3rJ^~_ty-`*WNwcc|cB5xxvC%-)}Hhk;5W2c(%(7mx!d$MA~-@Y$) z>ZV&`!`IvzJ5}-__kYNB_5ah1c$o;+Vj{R*i^S$>;Km5C9QEd2;2V$p0=3tJwz==C!KXkocnL0}x_2#@Y z_8orsHFQxe`#OFfCBH1)XpZ>bvpxvDHQ|4DRqu^8*Uk=q#oTz}ow2y<&%6FE^1GyA zv7-6(JRC~8K6a4Z4gAf-fxOva|MJ<^hs+M|Vcxr9UhF8@iDT%;?J#6l^nL7H@b1_T z$uIlXe}Qc&v%^#Q%IQ_J!+Pc?m)sKjtoJX=eqZd*$S>k?_FpB}!cbSyW#XmSbBk6Y z$p7{cZ3xNX3ic(}$p6_0{PgsgaP`~x^(D33n!sqFY>yyKbJNM@^pHgoZknOv^5qh*idW@r-dR4o{ zyRVC`Ym*d`(KGs9@{i1t-xn)ea9`|+dH2Ph%(^eOb&)h!5(`f)yf3yb+co}IoVz-d zM$bDX4C13Q(qFJP=&bb@^S#$X?Ya=uLumoM3>s-4$ z=QpyeZ@4ek(0pGkYUedG_hg&TkKBG-|Gmq*+!+t2t~0*JuEY9{PU|~P1^tUu>pu|v zKfFg_*;C-T#wZ5nUmebHJ1FxF)+bFJ&ElbmvOAB2p z&uDBX`d{=EH|=w*jhSx@?!5eMolatfIsfe0=7Q5gA9~S)qQn)USX*F2MsnDA@(S%Q zfW1Dv}aO6RnFz%8Ew72?%(JBYWJU#uTI^fURZTSsL@t>mOM3} zUT9AiUh}_XkMV^1;^c7SyVP}6&Ir%bUi2Q#4as3HyVmT>tuMShv`;06@5lNzDD?-n_D?@MA<-w3}QNK3PKw4%jF)CdwyfO@-NLU-x@kiQ|LQJ`I zBTDC_hD}weVRL0_*fN$H%CfRz?`6LjpTOtwb(CX!^A(|DX^QnWVNZ4x{*>?xxi>8(?8~qgPTjYkJdi5<4XL4KT0MDQJ((o@>c(2`>d5EZS5G!D zA0Zp(s8`EUg+Dr<*}s6NQ$t%)YG^NXzH)(j+WnoYQ>~p;Pji3F{XJywSXQi$T|Wj! zvSNvx)G)X%)jrnLu<@>xQ1j@l@M~ciRkt3d*M4kPxHd_<5Kk&|ZXs{QmX9O{-Oi#r zp-EYE4|KVTevYHF&EDiJbOp1Xj(qFa7S$ByZiPfV^`=0 zN%!LWNo+wGo+?cZh3?%tCLffih8?|k#C9%zPi%YD9kGfNSB9sZ`?0&=J+WOU(wsYb zYwQ{3y=QKZ?IEky%?jJy`!CoxePuW(e1}mF(>z6Ouq3Rg2Lt#f_W4hJN56o%Hc z!q8SwDE$>me}(pMJ{CIj9t&N|9t+*{W9uH1haU^QxsNGx9t-{CfNP2D$Fw;f3quS1 z*IZrR$NesH8944o;EDb|MKoV8)Y(zRjZlC_~^@!C+jY;D+d;cH=Y;adG4 zb9Ky5tXS(m=wA<8xht>!dU&ecct_^f!}f|lidD>C8+H`04LgU|hFxdYhRSnm<%6|h z_sELao|?6xYSpJ>&rGcidzXDWwr^x@s9yQ?u)p|^Vh2vI)&D6BHRYd*rC+5 z;qbh*p>~1vSN7@Hb5);?)%Sin)=;q48uq^ozvp}X5&i`K4S$X&@D%EB1V`~#_>%?N zhd7GA#^2+6_(!C?SFn+Ts?}G7JaRSGARlEYM=v(K`t`8!rLTt*^kHoJ$~VGhoTi_} zD|i*Ry-&G_dvGrv!bh;VMJi_-O13dE#d5#dA1@Z{a)m5succ4;}ak{ihhm zKcVyC^`Yxi>kVqo3dN^cCiH*Ft@^m-=S$%3S}!$kYEN_pFKfze)PPNntO!kGY!te%B9>2lcIMx{R?U zqO&NH!eQpx)B3-oN#QxNe!?8ZdG+(O^|53Vb2IxE*IUUp?%TVJue!g3>}2lBHNKj6 zaejMde!Rf=Y@xAFa_0O4*9To6A`iS@eBjsk4St8$G3)M7bS38EL-;T@Vl%d3C-&k1 zzJnP5o!I5yEtR=f`Tp9*g zTU>7?+qiG|d8`y=b!aqRF9~JjP91W^!mxOAbdDrTc8c}<4`x5zWnfylX zWZs%3KQ53T$!$6EYl{4uCclzTGMBI~b$t`LnYn0*{OUdOC^r7OG-e}K>8^Y}9M;s9Flef$)Yn6*fKjBD`@+=%z!4&04Dz#rn1_%gnV zZ(tjCVmA)p5S~LLS}}wl;V1Yf{1TJ+HGYTJ5!Y6@8t=edybE{WeYhJRLXq;KxXbvT z@?#@e!d!aVy8BVFj9Fh)>@0;1FJ2ZlZb=R$Ke#cJ?z+L* zCdTpg1GhZy4B~K2c%t5Ub4}@CYq%|xcis`6>bX5^8<_9k6kiMD2mO1ihu`|234RZL z&AE7~)`;mNZupWlwd|_A)40ap{_0fd55-)@6#a+5O zD^r^Ij5f|g^!dK|m(&@L#KM#2VD>L^?Rsu6H^w0ztCsubby4Y$-$2svt=Lc;ixJ#R-upAOYbuW`!V`5+1I(}B)gBf2hW&m*?-2kHZ}~n z9=+3f`U}RDx46#jeseAdM%eYZCyJAW&Ru^+`hC^)MXqIW6ETmD!Rxuv!FwiyKAY~n zCU2E~f5tt1e-d}8{D|I#oAEpO8@)68F9`4C%j{F$c<;2!!dKXB_?r8hxZC9nMgxoE zFB>EIICtk}g#*LP_ZXl0Of2j>IVve(^Za+;hHJbxzXreLTCHoJ zV4s*ZqetIyd^U3hvvyTvj`HK=DelF^hVA4YWADkqe4RCv(n!^Ob6B5|Ctor49@&eJ zjvo2tHfzWpRYwcgvHQ6bwysy>^0+;lAMx(tjr=dZT5r1OuK6MM4|~2aN8fXBpES2n zdMnbls`noJ+Mts;ei%BC<1b3rBmB{MtUAkm6o>M}zA6Y;S>au+>! 
z4tsI8;ZyEcZf!_*&x_X1Mb8(9Gv%B9Vbv{RV zarz8B`gR+dtV>vOd)V-bw$HbP`M2WgF7s~uA6MR$a5H$XdCF@pzUvm(_U8rdl@Ok3 zSDv}P&3CU_>>CN^ffL?wYb@-z-d>;|$3y%*+F2*r;~4`Wd-%Wik$9*Q<_*fmEZ6Ts z5q4obdfqMgxO~g}d}Q~tSeSjAe9r&_sI>&?X&M&9}$t zV>gBU%RG0L`8Va>xH$g?`DT6avUGL-O*83XXn3yonHwH|%k?M^MQLRtyC~jg(oeQ` z=1){_ZHwG~MVu%@_ly7NTSW2y`t|DCIiA;+8Qyb)I-hut>}2jLF#cU=eGS>e+{-@N1K&>$FekF5clSr@@cvF5dZph7xnW%p6^tvVh4Akx|YLG*p_Nxb}# zSomY|Pf@Ww7CsxquoHddr^9Y?FAm_# z{Q4>m(KpC1&ykIo#qLU6jhFA$hI!$8v1{q$pZM$8JIEU`7w1o;ksf$aYPDQIc;>Mxpt)g3H3@HJ61gx&DKreey)J{!ESW z2bAL}C?bpA>plL8+~MDzJN@T#m-@TXr0~u#dS~oxHzX zT|fW2P?cqj0(K)sRNfKrrTGd7{ORvtw^`fzDo`Vf5(?Px;>jw1RuMkkJ;8x7is^~#_| z@`!e0^BQ9$%&nQmNKivRs9Zbb`m^RaYAdue**!d&$!9Qlu5bL*cX%u`TB%km#52> z9|!am>%Cu}btKPQ2RLjE-4CrHa&E$Z>i5hR^+T;POP+n&*za@XFY)`v!mh#}d*6T5 zPrFwB-mMSwW#-$N50H)Q=H45M{>*(l*v&FN^ua~0Gxwp<{U?R_hpzn@me3dBckX+4 zej89Uou+YPPM1({*%i5o-t)Q#b(!fV2|u^Ym6_7zK^|&3IP?W9>j1Aa`l7+_qunC*7CEfTRp1_mXns59MPhlIj zryKvn4(+9#8OHz8oO!Fvep;Emo7|%ee{=m)!DYt(tbfYZZ_CkdBM%Nc=lrzwPs-Lq z3RF1)diOaG`zpQsrv!`eS5l#PjtXN(@zu8GbV?bObR&KT`xj?NhE zDb)TU`xYx>3$2e{Wxb$xh|W+QTF&nbX-xWPls=E3Sz2vEYlD3b?asX@Fi*Gg%5do1 zmEqZC(&pHe;qV1%b5XQL;quU{q1;oE=4>nT>SUtVJiGfoC~gPI_A_arZ6wY5_q1@7 z>~g)6{W0cl@&WFCA1l#=zrt=jhaaN&Y?=ZfP5t4F5|p9@o9*M@wBUMqD=qBd=U(y| zR4+{n`^W?NY5EOmp~7B{?XK@6ca)ivBzIM%`6ohJc$)d=(#N|!XD7ag7jcy|dN)3T ze_;;f+b|a%44SdWk6OZXZ#VF#W;175_l=s`2Sg@3?HnDrjzG3Fr`doU5{()nlc zdHi?$3%B>Me;B`y`B;I~D8X9n!?*DrG~o=!(TAJmrE4%nzk~cO86y|r_waFi8lS_X zD8*JBMk9JKf}dalv%J%Fn2Y(i4~ww^-%=+0JeeOU=45g0WuAkNLy{{h!Cs7Wr~6)% z!R>AM5Plyk@ilD5GYI^+8aHDB?#G{9Zu}IQB}K2#Qf8&{@7`;}2gn8N@4`K})wOf# zpdXnt`nfvi+vI;$PuQbKgMjzSXzl<}CZ$aR7ge>)rpa>|dh41JB}@xB;h7i%Hy!@1Ozyir~H{ z`CCq3LB{bAyNB^HEW(X=jo-iF<}Uix_yzN6<_bLGp1aw-4gZt{QrjDL*03oqf-Yr4RI7A+1u4P~6`g3GG_YK+hk-NW4$LGwOZ8CpF{nk2SWpn&>PX@S~FVc zt!P{4yaF7hcc9a?E_Bn6p=Xiv5zt5P$AD`I4AO^CbV2yBVQEg-h>{gKp%k0wo3Uk` z?~fv|4ciy_{+Ox%mtE#RJNig@>VI-K^PU3rzk2r>axe2f_SLTMCl4?m z>{9$+v5+6`#%GldNZMSS7yKh;Q<+Z(X~@ z_gLn8kO!G-QhYDp`Vcu#Ek0|-r~8M<9_HRs@m(&y$<9&VgMGK_$H+G3_7lF>ub8py9Zg)=wc3>yIr40HBEN3eEHP$JgvXqzW$r5~$evtfo zd=t+r+g?yNeM4RJ_b>!q^gjF^K8vrT0zXo}8CEY!<@TM(#z*kS_;VCv7ycEN!4+MP z_uvCqg5Sp{@d#F<68j^wcl^v%#)$Ay(b1D?j1a9?@Z(DTLUw#Wb|5DtqBfQmwbu;b$G58i?+3=f<`*GYa;$F@*9dMLz zF5sP16ZdLUlzh|qeNufz9kb`F^G{z@ zm)=+RS5B$tYaht3lL{=ERB*xX71CeSyYh^}EC0&)_}|p8Kj}llZzBADp37y#r}hEb z@00{#7NSyYW zydizJlD_9*F=5{aJtiMWe-HX&Mjj>aHxpJ0X}pbi z^}`Ri{ywg`i))|7z6U?HQ}ln%(EnL}qY@_R|9~BmAbIje`oA|Sm2ua!U1tB2#r`LL z69KZ&XGdGg+09%Z&-O1P&7MUsv_LDg?Pd%e+MxqFH!b4($23z@-g@G@sfHlm;qpaCL=3>r4<~MwiH65(gILDfe?er&*RZx1KH63SJ%W;}D z9Z35O_U1z}*C(-OJbiT@1zW#gWOOvRjnOy~MEWS5UMJB&y5AiEjA z@`V@hPG>C-=61}cuikrls;}O6p1qcs{i&?wK?X6qSFvUU>c9gIr~wx^!F!JJFldDq zFl&kG!K@`Rzm4??WvzrVa}DLFh4N8G8JtBKd66=8ALVEm_kY0k__+?d`;j5a(k#kQ z7Q6*d;y#P(FTwp0uGvXhyq&QA1xqMrom`VnS^l0W>$s&;Ki`9o;a|9QaQ!D-_Yvjw z4gCKJUcvrVt{H}xxnDWg-o`y#@F9L*B5|orKd* z_#bfn3c|S>zyFQDF7B0K{Lo*HJA19-*}t0riF+ykkc2)NQnu3nhqNQ#< zr_Y^*%*LE^g#P~t`v1s$>QTWt`u`W0|3DUFF2TMO_cCNT{wune|KR#6WHsiREzEzE zGXFte+y%9}d4GTh>Y#ozeROEtGh0pgZQjKEM;FgB%rix{9i+@0p{$&s%pf~4cb%gw zU7$=M{g?yT2XPM}dohQ*82{n={_th`9(l48UL{}4;YITEA8-lJ<~H~i`ISxHJ%QbE zs3p%n=DqP>*l!^Z_dqRqJWjqJC!A!S%ToOP(L7h=S9vb6n${2#&{!2fp%x0En%!{6U{jz7e{8Sdix*Kz+8_dgP*9Y2S-$0F>Xhf7S@g!wu= zO_-nO+IjE+*F1y&$8i5M@+k6E{1g+Gqm>k2%=DGi5B>oPAYC7#_lMBWnrU zg>+(eAZxa9h9Gfh0WWcCh9>ll&=6R{*(avVE-gM4X?wk*d2v+g!Ni=54f5-&tRw8xG{y6m?nT$DQ^g5+d57Q1` zr}RD4|6SC7+&Sz

PfLe;M^3O6F7lq3ksEA1cmM|DkH~ zI=zhFnz_tDBF*pZeTaO5ym8N={?E8Xw~-%D!V}Oqlk-8LnXtUjg1!~n_A>ql?dUt8 z6SpqtM&ASeHH`nkf6ga7wOnED)psTHpWG*$c@1Y4@GY#oMtu;-HD}iItwaV(*(<(< zJ>$>=-Qa^Z=zw#tD_W*5@A z@fuZf&V2<`J6P}4z?{7O8kImfltCeO1-KQ@pihO&ZJ-U$+<1QA8rtw*qwm0)w1c-O zJ5I{l!F<%?tXbR4TD1+VS=+>1wwF0=>Pj+oDv5Vq+#DurAYq8LAdpO)o1h<#!`+nU z@4=Ih4#(j#%H+#%4drq*?1bl`1A5>x%HxM{BjxjM$f0a`{!BUN+J4@Z2PmUyrkui0 zp$cxGtlkS>r+nT3_kst`!UdR5nOz27A*==P0K5P-;D>tXg@40Cl*1pvPfU4)FH;`B z2|FoIPeMK{CT)K+lJfODq(dv1-*SlqbEa$pBnHScNJ5_sDY&H``I^$mzjR~y zf^5jiWsY$9*O(h*{t>?gsjLOF$Es-WS5zGHRh5wEW*t`9l{{12D$eoDE9p-VUUepO zb7#LM$8P54zy-CreE-06=4+}eL;d}5PJOQu5KVUIs;W2m>hT$6X-hy&iOS!lOo`DaRQFbUF z^WZO-=YPn%36|1tTMhTpr`Q6I!Y0@ZKcGMQ@fT?$!5R2B{2ex5#-12RhE?z**adsQ z1tZW01JDLNFdb~vk+==K|F-h{A#o$mACmU+{I~G_YheB5dg{`4=07%2*S7Kg!(Z0W zt;*)woK-ykwXFX{<{#ww^Bf9KFgJ`W#$0la=YN6oX%6JYGon|}-=3qA+=0Sey=X5#p8i)-|cE##dy(j#*;9+!NWjB9n>sjylL9) za;~|9v(Fi*a1bB3gRm3a%*}^Cr|+UqYG95YQc78WaEiHo;%(;mGcHWc@n>N+bNo4# zpJV?IYXk6`?`8bUM}BpYUxZT}VEoI@_*Wd`Ur>&|0>719Q&q_L7Xu|VyNJ&Y#=rJ5 z{?ii7)nb*w=3%J)0T-f+qCM8yWxF#Q4_+#=q7x{N4}IT#^nv%%_nko>m~#U1ArE~a=k65H_bkS*=>wM%MhRh+(`PQD z4_dj9J}~{}YV519cTCU+K0)92usQdizVD6)t=e4=T6KJb%~|3N7w7|@=WHI#jhM~Z zMCLa%S_#XXCF)!Dpw+hgLF*x&*>_+w`~ZGMIsSk;u=;YwRNz`z49g)F=EEcSORV^! z{(`@=Fax`*jh_^^t%J?*V|WSnLJcs4ZwmucH@4Go$CnsSgH5m%w!<^< zQ}`v6KqXB4@Bzz>Y=lVnt z>}6w5hkG{dklF0+cC&1o@pE$0GM<$%R zOo@zP*ujitBtr_MLK>t)24q4O{FlQrkxIUNfV;S{cCn%lN?D1>TgfDZVC z_XtCrwoBnvut6Gp2p>TdVXT35a1T5}otQ^>UxAyT4w_*Q{zX`qaZNTX!tV+0y#fEL zaN7Z+unGHTuy=zWuA*Kph8rQD_XN{}wxjSnxCFZ|!1MTf3BnN1T80F$otdRLWYX5@ zN`}Pc)0qdFt~9=>>0bKkO!B7gp3c0_bmo1gD`V4iW%?K^IKr60cJ=}8W6XeWbJ1Sb z-EE$wg8hsspfB6S{%5|^C9I(+Jg=g*!d!oD7VBVVsfB&itSR`TkGx{ol*?e?EH! zpcA_;=w8G87x>W!!2E`A2zt>+&IdeC`yXDT42)2QBEK8Hk$U2rsSx;A(Qa67_EFAM z&m8I%vTMsswO`A@2obrJeUgu2c>s+h(W~iZ-+F4GPwiuaJj6A9Bvq?#4X} znGG4!SeK2A?9)0$zm2wz*|(L1Odg_LkF;ZsM<(EIL&jnL=z)tmNcteK;u`o0e2n=z2` zGFQ?6TT8h^zE2*UfV>TS{~qW2hb-L0_YYaT@Bicbhg%hXs*yEe>H+&0oCi50EtBzY z`uy%n`v1r}%=H2K|InC9{~wx5>Hk9u`c`N=&-fR#qwj#uX`k}_|5V+KA@qQsetrOg z5Q5%@sWrxZgkgT8`^%Kc%b*aRf-K0L{&}sX?7a;Ia4%(R24!F!l=J+vAq@SPb17S& zNB<^%4xoPryAJpi{Q&GFycYNwcK?O>C*;+ggn0%t#|YY9#_k^MZi5``KEUnY`0vDx zpAxfug!yCmDR!44JG4i1b91ugA ze+V9U3;qfm-Du+oN83E|;>*xR`SDSX+9^*R$WF>&7qT0357JK=3?PG;L&#p-_j27Z z+)N(V6W_1|LJ#H)!)L|4Y z2+;0f&I0=cNaUND&b^<9t(4G4Dm~JnvJ)LDM_-Zq+5Q7$^+x*t0s8;oT-Y(S{v~&6jlO4n zyH*fh{VBfxv?UsmO&5qCd6Wz}!0|J-wnD!Dm81`uhS{90{CCQKHwCe$i|;?rCvqMp zenR-^#T?#4{_G=vkny=LD*awzE z?`ON&2ZeiP41NB?e2*b#f_BgezSpCCuMhIQhQcBCvD02Cr=3u8mhU-K;kWt-``%B@ zP$mAI(d>g?I72nd*cT6;HB;vv)vjgV{3^~pnq$tU<{Tux?+ws0gYyNkYo0knUTEJ( zdt@)?6Yi!x0^NkyLwKFrXQ+$tg7Z1|D2j8CAV4^MT-(oey@VBp#DDX9ql@@nlV=w4 zJY0a>0_Ng5P_z0M?5TT-GpoM+1x>tBW4-ZfJ7;g$t$*SF+@JW4mb|V1L6-lYw{;Qe zTX{m$PxDzRsIl5BKH;oG&apA(#2PF4dge;m(DOyi8MEm}bz1re;%h9;!_NFRQyzPw zirMEEV9#J8n}k044*jcNSc<~^E8xy}TZ@pHY{vL#3G@FuxW>;se<$zxcj&7VUJ+?( zUCX(24P)qS2+<^IhOV^_hua}CrrjGNkREowINakH6^o6VXw&eNO8d3v0y zmjMYevlY*|VR36_^WvMO)zUe5Dl zzP1AL*UwhohS{8vHJh~(j3;a%j++@z;QYM;XbQ|`Y@0rwZ#Lh$*=pVRsahIlvnFe{ zn!%MjTh2nx&@Y{>nz-4jv$M7Y>CT+3S_oku#804--(rCNi4^N2exIT1Ypiv^ZwFzu zduPk%n60)|pQ;Dh&3(GMW~&q8_RrEsxA5*?OCMk}>#HB94*M)cmZ zVKrxl)p|P2dWNuk3;Wep8CJ)}QPLS!mm{psaF}$4)xFWDz>ZP&0QwXf;=I>`K85M) z^wQ7hUr1kwc-SuVF&{F7vPsy2mO7Z4esi*pswk*#|@9N|H zw>&Geuuoab+m$_3W#ydiQ?9Rz{(qj8f415xIMSy={1;93vHma5D%qZAmF~;4$^w1l z0q4J6VEm(yGvmI*{QnuoKdxl_gY)5>d%hqShjZ3Of8n$7kGECFILrU5 zTPlPi^uzbyv5%4x9A7IrHaX;s@%o+&nm`$CQ1L{3C2H;GyZX0t`o;qJ9J#` zgU8wbd0cfDj?>mX&U(b-{07`{#z2p&89!e9wm|FNVWqp_W{x0?XU8SQ> zgHL(CwMBCt_KDvq3G9#v2@}6l{E^=&hWE(La2wnMTcCpL%XhuY``}&u81wT`yyaci 
zg1$@N?OhdYe3!o4yUN?~u6}`ADl|h&E`9k@`t))1^;4zBO8WYY1=M4ghr4I@Ovc11 z8*^vMwY|i0&f&~FsDV(JJ;V43@6WKRk^Q_o`{uGwm_}Id<1{XK|GtL(o0rng;1%{1 zMgAFh0eDT=>}*>66*Box%y}DL}=RABxxz}^%=R@=b9%X)(^=*|uxDx$KoS*T}pyIFWW<6<;@z0>_3%eQr z3@UkTP$@HmjDH4Mvk|0h1lj)+ROY@Q;~zm~Zx1TR;a6^Skn%sKd}P7GpbGbQs|Z&t8|2zn=QA>XY{D7E5w@ZVyna+ zQt`rKtLRjSb36M~5DqCnGo(CZZXl$bb0K9f4=Jm#+{)b1!``?h(2CO?ZnGh8B$weNUhknoCwK#p@(-vNKMc<+M|ZUA=N(~P#yR2Tqw5O zXNs*_q-$eH&J7_s_JvflzRa?f)+#PK#fopJRl@vwaomZ`9+O1Sa ztG|E;C}TdR_dwT%DP zs%&em%D2?2BBoZAYipVNsa17!t!lP;BVs+!MXS81a`H5D5 z_ywa{6*`q-^=`6T;oY^21JfB+qp#CB{wL zs6lB+hLvp`6=Tc9K5M31*)!bCeHnYV^5(nQ|K?`?uTJ;lr)c7^idVTA>u}TVZBQAq ze5ir-FNgI2?o|zTs~WqSjc(SHyX7PtSBzV=Nca3S%d^$3y2@0mevO;-r&^8NuPM;L z_@CYK62F#BEv$b_wc7S2TE6vee*f6bn1@@P`%|s1!>Lv`vWIy2x79KK=N7LdTWHd) zUI_1RP#^M9>}<WXnC*!&(`S>Y_cW_oZ14jx?(g*|g82W}dq@ z>`{xmQLR1?_1`1k*+%x(dDL;nqt5dlb)EI7oBQ;f^2qP-DB$%l{_jz!%ftHDT7?Te z%sIWKeq?c=PDNdHD$K0o*^!3%=~mukGi{nW<(x{ks+ZTPYE6?k{>N6~f2tYFm?0=;t-lsr z*Xb4X9>R6w_ZR3n-o@rTNL-L9(3)DOmhIz=kMgahJTx`bsnK0WdzSXJz_ol!d=+dyR57XIQ!G zGpsyher1gc4m)Yv*D(Io%(!ihN_N+%baS)HkmZ|d*#BFjO6;pP)-e86!~9Q;90zOU zoT!oONDb>hopPV6k!SvIRfnuUT%(4VW;G(4W;8Pv_*;1ko7J+zX0>A1HnWEDj~cbF zuVMXjjXIHCTeGe1`8BkEGA#ez8U=6<`oI~*e zhZ4FR0D z_c&BA*TMP64i(LGux87lk~o)2k!7ckFt_SZ#WaU1C%v3cz}f$`ltk}VWCrv_-ne-sb)u(<@IG*Eyz~l)yDJo9dxq(sfFK?bMpRk z@~v#4{p)0HsZ;(@rvm7M6HeCuI_dj36%ISqN4)yo#QDlr<+)wTU6yU-6uLM=B-<*Q ztXHAWrGirlR(YvQW!RT`T`I|qx2h*yv=Otd%C*^61v24utK!$X6nCPP@41UMlS@hV z1k1j+Rf$LFGn{a-c7!r?+NBiklf66J%Ib0{ld_kAeFygKGmgrKY~wlhtZ7v@_FZ#Y z`QE!|v$z!8lVAl>Tjh^F%9=$NC^KhW@?huY`(2A{ znsBM{h>N*1m+FUHlo=QAPM5r-uQ}|JpE?paMg2JIWlgs84fuS$1kSY=x>tnytMR>cmlDxqp_i>e`KAMck9=KbQO@8DIz znids8(fk&kRf|&g_gblI0#@4QE0qr0_(m7-UdvkURrWIKH}9>uJy$BSzHjf9N<80d z*=Gc-r1L>5d1k=ch5r)#zlf|r(j~Xu$M|D9lHp2QAM$m^Cf`I}hWr9;@?zvykqqzI zzJW~o5p(7?#*Xi2fg5%s@IClGJPtXGvrH1!l}i{Kjb{G#2Kqj)u(weBHt_X~voEI~ zwVF$>V;uP=`Y^GqlfeE*H}Y=Bt{J=kK`vdzI1~PY`2RcZFXHcV+`bK0v(D}&PY#IHZ51aAuyRLzP*CN;Duc^~gFq=T6ZWscfrwl}knTRu#@nunO>(ACq9^ooi*> z$7bbtTa|sGRapmHl{wnVxKJx+Zni4To^7S>rM@3IuH;eP2eduy7hFn2Cd}u3aqhU{ zkT&W>KMM@{8osQsN|V!>TAvWbOwjZw{fGpCebVyv}Q&NB&? 
z<3fT}gS5@`TXAdWDxNXZgbQIS@m#&^SN2&+(Q}o2mNp8LqN$L^IBNRgu$2+xw=&oE zSy{fh%7z@qUUPk6E05=xe_}4*KgNBT>@3>sw~D=Ct7KcBRhk>N%69mza%6>LI`h1j ztIF4;>eR~_(_`Fs-{o>5U4?yCEz-T?a(OD}GS)X&_2Id+f9G;OKjXmLn$%23-0SVL zTHJkBYw29IWd>d`mL0P%asagbEhkPqSwlp)1plG z<*a>U9R&6{8v|BuSFe?~vERxM^jZa*16E-}uT_-FIJkE%-@itc=FU~wj_E3AlC6U4 zDtGl;RhX;e=BkFga8#21!pj-|x?HuX)8*dU$oVH`th3*$5BFOQahI#{l;3K?Z!>kdqujQZ7punkqD|qB`h3Mz>?!H`M z=o_7`{%O6IZCa4u|LC>ik8$?H7GxDg$g#a?!$Wp&!2T58+xsh-OPWKUcs2qJkI}}$GVSs zwEyR+ig2ra4U9{0{!3-AtY#TT?%nGkPduEn>i-)dh@`{!V<)p=%~y4Ivw-CO6W2mFW4dQ#GVI%tJX^;*3w zs0%|M>r?t?@*Jjxt?ILVR>icCRY`d$+srymCg4k=n^nqWd=YCViVyc$1&%9J2zfCf zD}Qa+%9%;Jsjpf4uTVB*?BRFOXsf4n(N5kHwo-O8Z$vpw+7YsnAHPD0>zieV$T?S~ zl)KG!j5}UI-EUSu^foXrLtP2d9|>_EKY1D;AG^;4tsdgh*%h|BQm%w~amGk6W$R3#)^Hg&_ zWI3kwS@Dj=dYJd<4h>kd%3REt#}XCeHxv6j z?2_>FF7CNqi`9+3_`+hwz!$3z{Z&xDcZq(3EX`f4z=y1t+P_?X#ay}Ux5Q^L=WuaO z6yFC&)M8zVtczK!TE`N78GU2w64vQ54;EOWhEq#e1F%@F-X&_r-V5#6eHVXire;tgoy!+9&$n2?b4zIVFVXk$6PQb!)-Tu3(f4jyt|X*wPPF0*7tuagqy)zz z%73)%kc2)tu!#PDH0|GL72!91S~UHrXxjhL%CawF|7WyvwnZy9yhwTY$v?kH1@1*E zJh(_jLyH*yTBMT0izxq#IR7tN<-4O*ML16QJNf0}yWEZ5apfX8mo4JmyoleMWR9mH zS|0RuYZlS}jb{EYS|8)TDK45Z(r9_1WyT`4x}&N8(ekZbq;~vt>|LbJX!d_@TEsf2 zMe5nJNdC=>DF2HX|Ba@77p((?{}vnri?)C*aHHZNew6*MkT`Lp?2v>$8B)%({}s}( z`zzP{4}1!rld?EjBZ)sYxgkH#oZG8dl|03G|Wwif`X#W@S{x75bUqt)Ai1vRG?f)X$|3$R_ zi)jBB(f%)@-WAdQFVYa_t~P91s>X#&SyQr9&ESO=_X5Tjm+GtB!_l=s&eR23guZs> zQhft?Gt`wX(6^9*zyjTc3~gSjXOPdq_u&W7wtgvNbqkoEU#f1*o%RL%ZrTEMpIV@v zBMX$0yOegxQl&YTDjhN)6SATg(7#?Pn|G<=RxMEcO!m}HTcBSOhCOvDjjKeE>PKtrSz|ss$$;_s*F3TsvS2lzH$TOb4yv1vp{*=BY*1x)~7Du_h}ZWX!`=@ z;ubLBxt96wwTj=kP6?2>b)D>xggzNk_O4Sr_e_GHLl&fg6aGz^%!yjZ{+D&CzvoKz zVfPvwfOlaU?Odmr!+VNzzJ9_!&u1kMW8QTy(H3EExce8gH0{rnC76TXxo{=Sgea(^ z-Zw!s`sFYSe;bjTU@KgXc^h&&+(}!r6~BAY7eX$4757TyHOK~}8@v#P{m_Ab+pgOb zxA!*1J8q-?bN&a|y(3EEyoO{qZFJ6aNTqF*Rye}=2lL*{OJ^dp&W}+4nfHF2_Rn_K zrEOy#d=K;9%rO^omPq8hcIKK(@LxKEdGKk>gVWcqKvu?`qW{l4IBk@gnY4en#<~65 za?y6GT}Aun%n^AWA5qGc1Z z4=29%g)@|dKDqR5=7eV`bvx-N4(ar)0{~8NK`pq!19eaj4bTYx`Tg!0to@%u z+jrZq)yBPi&_0v>@6b7)w(`U&b+iAV2S5IEr&#mF{&xt$|9k&?wR@eapt5kCDxiGJ z8!C%ir_$7Q^!L}%hWV$8yz6MwtW&|{2dsZTt-LPE{HArv+3>y&Q;sI!1WZB?WoilK zErgs-nfwCGh7bAf-GKcEtLU?A`$Taw*HPft>D##V<99P|i}4fMzD_|1>{-Y7-a7Sc zdtcp-k9eoAQ|In=>e#VP?c3JLhir5IjXC^vYDxV_-pPNe+5Qo~5w(u-|9`4s*9ZJ| z%sR%9*2zPCc5hzC*wQ+=)~}QE;P2(wvW~si>vTW&e+UMM!%vW}z%L*X3gC6@hhYWr ztH9g_oiGURz?<+}_#J!%|Ag7}LB9k`;8EVMw;*rl`jyCQ;X7~-{1EQO{wd_6@Bkd* zUe6+5f?vQ3FpgaT@_zjO8fnGwenGy6{c$)7jkG6P;B&bB6}J%ie8)F=2VlMdz5(00 z{tx)Q9sSMl1lUqp2MY0}tO0;T^ma&M&MX;HP7EqFlXazb=FTcvXNt_koQ2GuHmn@x z(Q+XV@}U3cAQ3k^BtbIxPZK`~o+p0Li#`l} z^NAm{Z5@`c;SlXS(nmTwkzHpv-vZe)!TVv)F*Teef1qjYu$po6k_Rp1Q7f{BYaBzo zUk>tqnK7(du5p8Bl>7x7<@0&U#ZJn?z3?Ku46nkkARS)9Jsl8UFd_b8vWhy31Qubyoh`f z`82!;zk*A?$vu(%`0qdtBA3A-^rOfz7>Cb)lk~v}^zXp$;RCpgaj1E44P5n2?hj|N z|1a`=g|Wx{FU+5Sc>lSuTZ~;RvIAydcPYG%ekt~sp??qkAK*I7U&4G5{f)+rI{$gN z3cim0jmQ<)--^5g9)#7Huf=US+=|{d=RL)l`F_?#CLj}8Cu!e9{ohCZr(UMSyvO** zdrDhR{f7+5gsctJ|5GE%+4`Pxhp7JtssGbnWBi-?4@D<_t^e%*+;&1`XQ}^CahUoK zk#p*gQ2&=x|5s7}*D_C!ti|l!!n%3lW7f^r@8ZmoJ*=xIK26AG%wFtUaBoGn;orB4 z^`Bhdf$YTGb(s1AJ>aKa1R!|sghIR{dZ{1bBh;@G)GuTe;~~{h0hLe&lJC5o?>;iOkaoZc+5x9%7a%i_&`#J-JE7}cr5vW+07;XyBOsBsjUD2n zhG@sKP=R(&T+FX&hY%0wg)jsgcn^So_lN?>?o84_J?=zy*?G@-c@JQ2+w`t{Zt6K{ zY=vg(xOYGA0n*e&+UlcOFNpNewyKNz>~|HBPRuT3&3xKjbKi~B|5(bxPE$6JFC!D+ z+vwLrHTt`db;yVR%s4G&rXT$P+>L%1xsdPvefW6*9>dR%;6v;$VVw0o{9JW2bpWo0 zZ_szT2DuoP!i}(n>sBD|hWlVOd=H+4JKzy`44!}=;P)wb0=`Gszrrp7(%}{CvXMKG z590O+Jcs^8cnp0jZaol!VHkiz@H&jcQFsrY!0!jh58+AlpCI=U#w^C3FN3GCyBb-} 
z`yw9uWY~$_^Kduz8JMe)E(pOe6k+!#-cKya(KzNqa2(EH_W`mT_ZnD1p4|p_gB$aG z$d@4jlHpg755>@ky9@au_iaM9VtyI@AJF@FCZ8ZXvFn3ZvHuk;UPHPuC!ile+UE29 zhxldGe@MLWPuZzUNz|oe>Qc&;>?4Wd`;Sbg?q%$u{_msyBeOB*%p6cI_?7bKXMf7 zF@}o+)Lp;V-k2vo?^~m~+mtuJ!_JaFO|#3$QP& z{6s}8`Y*PBq7pB^Q_{unfP~rCgsi}=vVq@+q8uKi{5^(DrY!D6z6`%IW=O~GE!=z1 z4?s5Nd>BT56#HWIze7&KAE6xc=Q%TB7R-aI;UnycI%l}?`xy`fqQP%K^ZOal2U?&7 zZ^S_YB!V51AQ@5&Z`dNL@sn~e9yJZpA>R;j&5wkTA8{?fwjg3#5V0+Y*w$fN4;@-y z%m=y41Yt~M7$$O2GYt`47P_p6E*o8TM3;juC!#Zcb0^wR^R(a(@ufy7(?Tue|28es zVl7d$zM-Z3P}a3E%it!XM0Bx6x56kZjk3xp9I$51j;c55@?0{!rCp-(g;5m36UVz>3Q+N?xf<5pvco|-Sz3_9GOteMR_=uVi zQSA{mDWWDv)YOQY9#JzQYF0$ej;J{iH8-N>p}y}oVZQG-p}p@n*S;U9G-}XX`+l&% zs3Dh8d*dRix%QO3DWZCfn$~I5Oyhbg)A&D?lWEjke*5g&Q{{Yuu02&zZB(xZ^^e7F zqgJFuRCC8aR@57{rqQTvCbSRYO!yz928>#6?*2h-pHbZ=ZXdYKegBkf{QoJr!>H{J zqjs1aIGt=__~$m0t`Fl&jGA6+)JhYd59`c*KWsDc{7ZbZQ414|T2lL$n$I+6$|IU8 zlUILnnXCTNW@7l4fQj3g1QYU^43j@+3JHIizNzb21G-!{=mztDgKpGKy6Jk|bc1gC zeO%%S-Sk0xKrxEBUNJW)<|h7)ASaAkk%AiWfDhvy6LE-%IK)I8VigmMLu|w$HsTN) zaaf_46*#PjIIM_RtyIiPtX4*>Rz|E=DTWE{m{k$0RS~P36>~FIH%F{)j##Z$%xbJw zN32#ytZq@vEm++WvAQK(uv!zbS`)D%iHbon2}2OsNTQ8OWKg+3Dz`-CE~o^I zN?4I#A_+1<#VYoCkS1C4<_}H5j8%c8PYQ;tp8heXkBgukE^{w8{nyR6WP z>qUOAxQTz`v?7N8h$dDma9ybtt3*3-#cI(?TCqke2nNeYU`E9<;@t!piRntMyk0yn z23}UiXl1NcuFy&>S83(VTDe*)Z_&y%S{Vr-lA@L7(nzXSY1Q@OxiVb83iq2M>Me>S zbd~WHdD1uQ=Ih0yjwJhLV;y*NX=q$(UP=b>!Kv z(VFYU!;fUs8e<)K{@>EKzO`1jvQlEL)|&sdTBqA|+d|!To4%vf1gsgOSkLx z-MT|}+^IXBzd?6kxASS;sXOo0oj=f>Ki8c@Vd&Ea-KD$MKdQTR_wBm-o}cUP2e#?% zy}C#DJfM3X);&)P%Z=J-{^4G2+@g&SYvZH3SNG|@+jSr6Cf%?5@74Ve-KqPZ(fzOW zXp=VGu1$An)1BHx0GkMP)AzOMVQqRuo5mXSU48dnefJrC_j!F!59on=^uPm~;9))R zeLe8d264e=eP7>yQ4i|DJN4iuJ&5LKdPom#xL*$u=0lsbMGx!Yjr>PIiKP7zJ#vR0 z`GFq!u^!c<8~NX;t@?o;(+~By{!35jN7|-sH|)AW+irXkUevY&efqI}yzQrYQcvFa zJnYev+n&{v7@yLUyZDcV6?#ff-L9v$>8WS+)Gq#G0o$MJDKc}rwm+=xKYd#Nt^YRv zJK$Lvfx1gi>uG}giGD)bezHwFw8Q+vGurW-cKl2`UePn!sh#V!ll1Mx+s=oz^AYVd zQQ`URMBT-IEUeJZm$Z|dd{)msqi1*N+2{1^^H1y9SM)6Tzf1VS)2{W}^{{q5qFq1M zuBWwY$FthSKhDP8cz7P;i`qrbeWu3hIX$;t&)ucx9@cY@=(%U~+%7%$qMq0D59}V^jfoCYr6yP5@{N+X&_Dm@ft|bK%xfh8c5PW zvIbH#kg9<+4Ww%zLj#!_$kITz268o!r-6J86lkDO14SAr)j*jB$~91-fl3Y3XuzQX zrv_XasMUa519cjx*Fb{?8a2?Qfo2VOHPE7gRt>aiz^8$B4RmOrQv+QZ=+;0$13?Xh zG|;Poum<`yXwzVv2IDoDput29+BKM@!DJ1lXfRcSX&Ow|V1@=WHJGKrYz^jVFkgcO z8Z6Xckp_!3SfasF4VG!JT!R%FtkhtY2CFq#qd|uTof>p$uvUX^4SF zgH0N2)}U8|EgEdqV4DVg8tl|ymj=5v*rUOK27?+5X|Pv=VX=sRC_zJs8nSCBNkhpR zO3_fdhB7pisi7XvnXj zfY^yJ6w*+ycu7&Mev_r&wZ&`6?2>>5eZNU}y!G?J>3 zG>xQdBts*a8p+Z~wnlO^lBjKqG}3Dbh%>MoKhNs*y5{lxw6)Bh?ylYQ&?F zI*rt8q(LK%8fnre8rNqaLyTf3!iPjT&vzs8=il8g0|4PowP`?a*kaM!PiX*JwbaAsvX*fp{HA z(1BDPNYjCI9mvpuOdZJ4fm|KP(}6-AsMLXK9SG<^?{lJ)Ll1`z5=y?tk~Eg0u~dzv zX)HrySsKgMSdPYWHI}C_JQQfGP-8_JE7n+v#!59-rm=F3RcNeIV^tcf)>w_k92#?J ztX5-gjd?Uyr?GnR(i>~kSd+$@HRjb=i^f_t)}}F^#@aR3p|MVlb!n_yV?7%4Yb>C# zpvFQP>*aqv+yxKABk(jl3%lS&jfFMVr?Gw=vguHq4#n$Gf(|9>kX?t8bSPPeQgkR) zhthN?U57GsC{u^Bbf`jyD)pvKZ^r4(1ihK8H#78Rrrylbo27a)pf|%hoTkGWI-H}! 
zxjLM$!^JvWp~ICrTqSno9PZJ0oW|odo}=+xjpu1RU*km@FV=XO#>+Kcq47$MS82Rj z<4%pcG+wK5x5hmhuhV#=#+x+WtZ}c#TQuIP@ivY7G~TZ94vlweyi4PLjR$nZ{dT;L zbRKaVrO7B>qjc*?AIi}j{@?B~ir*-`C`T)FG?#y4$bGwCNAvhMhJ5~wEa2bB!nXrP zL04py5~Gy9-DwnbWkxCIKO#*y75p1RrH)n^Sz{E3QJgyJGO~6`!LP^Y>Wos)zmW}} zDM#ZX`lIoPh~DTL`8V$Eq~#rl-f`+3m)@z>J8r$>;s2dFqcj`EYm^qF_>9tyGGRAL zl2MY4l46upqoko6Hx9>*!*SzqJjW=7GC|f&B)n5^!l|1`z4Z%5&s?jV7Lr=6;P4#|eAnGK2{PVJ@7oN3KAa;7yboO_Um^Y_V(N-(1&Z zLTfUiHJQ+wjMpaPwaIvGGMD44*@TT^?2ItB6Nw9q63H_BN0NmS@i}#SOjAD1RYpf3 zku;)2jOLD{)`V(=@iC=GM=Dy(ty|1hk!*-$LL>`X^-dfAjGvg;``+o8NSR1P6R|j+ zVWK3Hqz?W~7$_zbgbBW5N*{?yhq({Rlzk-d4ijdFi5g1e>ZwFwnzAIZ9TEQ~!W58B z;|V2VG+}p|)JzHE!`!0N1lk#~oq9s(r%Z&_X+rBXmv`}RLO}UUL~Kn6T_%Js@;hLz zLt`$Ahy{^HK&f4TTjE3-nu!#mnE08;<9WtocHR7&$akBYp%@1vjFY)1 zin*@a+`2oWi}*0^RL>rBRgbYo`Aq+rDdNNU?J+t(|HdJ}zquVs#5fgM%u}XFe{*fn_(l0&fnQ@XK8)WWx(OR~7bPpAOynAo9Z{zAIYw`W;SdoePZM$c z6FiF7C@m3%drZWArkHT!xC08hc>ayg_=!Yv7lBWdDJv5IC=t(}DG|?8ku@*ANVzf(M#kN4?qn)~QH)O`2m>+Y9Jyg6-6&K3Oxec+;+@th zjR|Y&DpRA4e#(ofZc~XSoe<$CPkRi3YqiO=(-Fgb$leo%A?PdI-bgoD!`OQJ1chbs0KYUv#n_$NJ(a zQ86W&rbPRc@JB?m<76{Fo1GEiP0>j&VR#9nr9dZJ(*ScTW*=@HHb@4e$E^dmE@YRH z_~|Kw3aErCoeX52^tbC|u<2y5bV@jMGL(5Th-EL=gl#ALx)AXv`+5)=QzCat6itcp zDd9UA=Atly#3TBMN?%Uo^1i%?m%fTA;g}MhDbX|~+W&J+`_!c!|FP&W7LzuQCKD>e zs5itxJS0FO*dYm$Aq7$)4bmY4G9e4HAqR3H4+@|V%D{wO2~|)74se1C>YxFdpc%Z- z0izdo8$u^wD{iMZj?74@ZIQhAcpJ(Xj zUVfgTpZocFhJNnp=NbBWhJMoQ=l*`;>?hrRo}-_8`nji{y!Vp^KhM=)4#dGv9Q@>s zpEUV-CVukB&olA!O#D0(Kk@VPO#D0(e?9O_{N$mZXW}Ox{p6#cXX59X_{mQ{`9j^9 z^pl@{^3zX#`Y8wg9`Hi|f-uDX^an_Pfb<7Qe}MD{NPmFz2S|T_ z^an_Pfb<7Qe}MD{NPmFzb4=G{fb<7Qe}MD{NIyr)Oa@4Qkn{&he~|PCNq>;^2T6aB z^an|Qkn{&he~|PCNq>;^2T6aB^an|Qkn{&he~|PCNq>;^2T6aB^an|Qko5CHn+%f9 zAn6Q}&LHUwlFlIM43f?u=?s$2An6Q}&LHUwlFlIM43f?u=?s$2AnD|FI2k0JLDCr{ zok7wWB%MLh86=%S(itS3LDCr{ok7wWB%LAB86uq_(itM1A<`Klogva0BAp@986uq_ z(itM1A<`Klogva0BAp@986uq_(itM1A<`KlouU6r)qP;Mk!4Yv#T?HfZluW zZPHgI-2{3PRib_84x%@dS$}u0m2Zg#48#U#fQb7cIUhJ5I3GA4+|Lj0=Lh%mgZufx z{rup5e&B!Lf8c-Mf8c-Mf8c-Mf8c-Mf8c-Mf8c-Mf8c-Mf8c-Mf8c-Mf8c-Mf8c-M zf8c-Mf8>1ReB^xOeB^xOeB^xOeB^xOeB|_J_0j$M=ze{4zdpKOAKkA{yidGOyidGOyidGOoKKujoKKujoKKuj zd{2B&d{2B&d{2C`hW|XdAD`t*o>`t*o>`t*o>`vVXU{y(?z3m6XZP7N*R$`*XSQdy zXSQdyXSQdyXSQdyXSQdyXSQdyXSQdyXSQdyXSQdyXZPJR-?RJfnemy?1C;+fb3VJ@ zo>`w+Ul?B)Ul?EbUie=4Uf5o^URYjOURYl2|BL;9vHvgj|Hb~l*w+{P`eI*S?B|R9 ze6gP|_VdMlzSz$f`}txYU+m9|{duuJFZSof{=C?q7yI(^*!k}tlh%wiYhC&8pMU;; zZ~gtw?SKEc_1XSEzuy`->il=em-{pSUw`KRJ7hCM%hrl@-uds(F>BnK==^th!1`|e zv_`B^%hwJ2y5U`G-#YQ%KmYvy-unBU^ZyRp=y3m^-*1h4>-=}bCP!>?#3n~Jt!?Yf za>rd;_y0Ps1$iJ+AcmCV`!}9fAU*G*<4O_O+J+*v&*VlJ_ea|*~TbAwicC2H| zKK5L$=W?@H^DeW{>-=}$cJ_xl|2BnKuqLf3 zYucLaum9_p7X{_1EY>Z@u-_{`c1T|33RZGV{;J-scwn_txs^KW{l{{lB+X z{`qWw>+{w^=hruT^Ua=ovlrj&!8d#G&F8)?TFcg|wQg-%+t!|SU>#Y1T9?+fbz|LG z_tw9yzpRH}-+;e8T2Gx{1H-=t+y(=#!N5&_-sabUEf2V@25fm?``6&quffM(gKn_? 
[GIT binary patch body for tokenizers/fineweb_8192_bpe_lossless_caps_caseops_v1_reserved.model elided: base85-encoded SentencePiece model data, not human-reviewable]
literal 0
HcmV?d00001

diff --git a/records/track_10min_16mb/2026-04-27_SmearGateBOSFix_PR1787Base_LQERAsym_PhasedTTT/train_gpt.py b/records/track_10min_16mb/2026-04-27_SmearGateBOSFix_PR1787Base_LQERAsym_PhasedTTT/train_gpt.py
new file mode 100644
index 0000000000..740f060bb0
--- /dev/null
+++ b/records/track_10min_16mb/2026-04-27_SmearGateBOSFix_PR1787Base_LQERAsym_PhasedTTT/train_gpt.py
@@ -0,0 +1,3555 @@
+import base64, collections, copy, fcntl, glob, io, lzma, math, os
+from pathlib import Path
+import random, re, subprocess, sys, time, uuid, numpy as np, sentencepiece as spm,
torch, torch.distributed as dist, torch.nn.functional as F +from torch import Tensor, nn +from flash_attn_interface import ( + flash_attn_func as flash_attn_3_func, + flash_attn_varlen_func, +) +from concurrent.futures import ThreadPoolExecutor +import triton +import triton.language as tl +from triton.tools.tensor_descriptor import TensorDescriptor + + +# ===== Fused softcapped cross-entropy (Triton) — training-only path ===== +# Replaces the eager +# logits_softcap = softcap * tanh(logits / softcap) +# F.cross_entropy(logits_softcap.float(), targets, reduction="mean") +# sequence with a single fused kernel that reads logits_proj once, applies +# softcap in-register, and computes (LSE, loss) in one streaming pass. The +# backward kernel mirrors the forward so there's no stored softcapped logits. +# Numerically identical to the eager path up to fp32 accumulation differences. +_FUSED_CE_LIBRARY = "pgsubmission1draft7fusedce" +_FUSED_CE_BLOCK_SIZE = 1024 +_FUSED_CE_NUM_WARPS = 4 + + +@triton.jit +def _softcapped_ce_fwd_kernel( + logits_ptr, losses_ptr, lse_ptr, targets_ptr, + stride_logits_n, stride_logits_v, + n_rows, n_cols, softcap, + block_size: tl.constexpr, +): + row_idx = tl.program_id(0).to(tl.int64) + logits_row_ptr = logits_ptr + row_idx * stride_logits_n + max_val = -float("inf") + sum_exp = 0.0 + A = 2.0 * softcap + inv_C = 2.0 / softcap + for off in range(0, n_cols, block_size): + cols = off + tl.arange(0, block_size) + mask = cols < n_cols + val = tl.load( + logits_row_ptr + cols * stride_logits_v, + mask=mask, other=-float("inf"), + ).to(tl.float32) + z = A * tl.sigmoid(val * inv_C) + z = tl.where(mask, z, -float("inf")) + curr_max = tl.max(z, axis=0) + new_max = tl.maximum(max_val, curr_max) + sum_exp = sum_exp * tl.exp(max_val - new_max) + tl.sum(tl.exp(z - new_max), axis=0) + max_val = new_max + lse = max_val + tl.log(sum_exp) + tl.store(lse_ptr + row_idx, lse) + target = tl.load(targets_ptr + row_idx).to(tl.int32) + target_val = tl.load(logits_row_ptr + target * stride_logits_v).to(tl.float32) + target_z = A * tl.sigmoid(target_val * inv_C) + tl.store(losses_ptr + row_idx, lse - target_z) + + +@triton.jit +def _softcapped_ce_bwd_kernel( + grad_logits_ptr, grad_losses_ptr, lse_ptr, logits_ptr, targets_ptr, + stride_logits_n, stride_logits_v, + stride_grad_n, stride_grad_v, + n_rows, n_cols, softcap, + block_size: tl.constexpr, +): + row_idx = tl.program_id(0).to(tl.int64) + logits_row_ptr = logits_ptr + row_idx * stride_logits_n + grad_row_ptr = grad_logits_ptr + row_idx * stride_grad_n + lse = tl.load(lse_ptr + row_idx) + grad_loss = tl.load(grad_losses_ptr + row_idx).to(tl.float32) + target = tl.load(targets_ptr + row_idx).to(tl.int32) + A = 2.0 * softcap + inv_C = 2.0 / softcap + dz_dx_scale = A * inv_C + for off in range(0, n_cols, block_size): + cols = off + tl.arange(0, block_size) + mask = cols < n_cols + val = tl.load( + logits_row_ptr + cols * stride_logits_v, + mask=mask, other=0.0, + ).to(tl.float32) + sigmoid_u = tl.sigmoid(val * inv_C) + z = A * sigmoid_u + probs = tl.exp(z - lse) + grad_z = grad_loss * (probs - tl.where(cols == target, 1.0, 0.0)) + grad_x = grad_z * (dz_dx_scale * sigmoid_u * (1.0 - sigmoid_u)) + tl.store(grad_row_ptr + cols * stride_grad_v, grad_x, mask=mask) + + +def _validate_softcapped_ce_inputs( + logits: Tensor, targets: Tensor, softcap: float, +) -> tuple[Tensor, Tensor]: + if logits.ndim != 2: + raise ValueError(f"Expected logits.ndim=2, got {logits.ndim}") + if targets.ndim != 1: + raise ValueError(f"Expected targets.ndim=1, got 
{targets.ndim}") + if logits.shape[0] != targets.shape[0]: + raise ValueError( + f"Expected matching rows, got logits={tuple(logits.shape)} targets={tuple(targets.shape)}" + ) + if not logits.is_cuda or not targets.is_cuda: + raise ValueError("softcapped_cross_entropy requires CUDA tensors") + if softcap <= 0.0: + raise ValueError(f"softcap must be positive, got {softcap}") + if logits.dtype not in (torch.float16, torch.bfloat16, torch.float32): + raise ValueError(f"Unsupported logits dtype: {logits.dtype}") + logits = logits.contiguous() + targets = targets.contiguous() + if targets.dtype != torch.int64: + targets = targets.to(dtype=torch.int64) + return logits, targets + + +@torch.library.custom_op(f"{_FUSED_CE_LIBRARY}::softcapped_ce", mutates_args=()) +def softcapped_ce_op(logits: Tensor, targets: Tensor, softcap: float) -> tuple[Tensor, Tensor]: + logits, targets = _validate_softcapped_ce_inputs(logits, targets, float(softcap)) + n_rows, n_cols = logits.shape + losses = torch.empty((n_rows,), device=logits.device, dtype=torch.float32) + lse = torch.empty((n_rows,), device=logits.device, dtype=torch.float32) + _softcapped_ce_fwd_kernel[(n_rows,)]( + logits, losses, lse, targets, + logits.stride(0), logits.stride(1), + n_rows, n_cols, float(softcap), + block_size=_FUSED_CE_BLOCK_SIZE, num_warps=_FUSED_CE_NUM_WARPS, + ) + return losses, lse + + +@softcapped_ce_op.register_fake +def _(logits: Tensor, targets: Tensor, softcap: float): + if logits.ndim != 2 or targets.ndim != 1: + raise ValueError("softcapped_ce fake impl expects 2D logits and 1D targets") + if logits.shape[0] != targets.shape[0]: + raise ValueError( + f"Expected matching rows, got logits={tuple(logits.shape)} targets={tuple(targets.shape)}" + ) + n_rows = logits.shape[0] + return ( + logits.new_empty((n_rows,), dtype=torch.float32), + logits.new_empty((n_rows,), dtype=torch.float32), + ) + + +@torch.library.custom_op(f"{_FUSED_CE_LIBRARY}::softcapped_ce_backward", mutates_args=()) +def softcapped_ce_backward_op( + logits: Tensor, targets: Tensor, lse: Tensor, grad_losses: Tensor, softcap: float, +) -> Tensor: + logits, targets = _validate_softcapped_ce_inputs(logits, targets, float(softcap)) + lse = lse.contiguous() + grad_losses = grad_losses.contiguous().to(dtype=torch.float32) + if lse.ndim != 1 or grad_losses.ndim != 1: + raise ValueError("Expected 1D lse and grad_losses") + if lse.shape[0] != logits.shape[0] or grad_losses.shape[0] != logits.shape[0]: + raise ValueError( + f"Expected row-aligned lse/grad_losses, got logits={tuple(logits.shape)} " + f"lse={tuple(lse.shape)} grad_losses={tuple(grad_losses.shape)}" + ) + grad_logits = torch.empty_like(logits) + n_rows, n_cols = logits.shape + _softcapped_ce_bwd_kernel[(n_rows,)]( + grad_logits, grad_losses, lse, logits, targets, + logits.stride(0), logits.stride(1), + grad_logits.stride(0), grad_logits.stride(1), + n_rows, n_cols, float(softcap), + block_size=_FUSED_CE_BLOCK_SIZE, num_warps=_FUSED_CE_NUM_WARPS, + ) + return grad_logits + + +@softcapped_ce_backward_op.register_fake +def _(logits: Tensor, targets: Tensor, lse: Tensor, grad_losses: Tensor, softcap: float): + if logits.ndim != 2 or targets.ndim != 1 or lse.ndim != 1 or grad_losses.ndim != 1: + raise ValueError("softcapped_ce_backward fake impl expects 2D logits and 1D row tensors") + if ( + logits.shape[0] != targets.shape[0] + or logits.shape[0] != lse.shape[0] + or logits.shape[0] != grad_losses.shape[0] + ): + raise ValueError("softcapped_ce_backward fake impl expects row-aligned tensors") + return 
logits.new_empty(logits.shape) + + +def _softcapped_ce_setup_context( + ctx: torch.autograd.function.FunctionCtx, inputs, output, +) -> None: + logits, targets, softcap = inputs + _losses, lse = output + ctx.save_for_backward(logits, targets, lse) + ctx.softcap = float(softcap) + + +def _softcapped_ce_backward( + ctx: torch.autograd.function.FunctionCtx, grad_losses: Tensor, grad_lse: "Tensor | None", +): + del grad_lse + logits, targets, lse = ctx.saved_tensors + grad_logits = torch.ops.pgsubmission1draft7fusedce.softcapped_ce_backward( + logits, targets, lse, grad_losses, ctx.softcap + ) + return grad_logits, None, None + + +softcapped_ce_op.register_autograd( + _softcapped_ce_backward, setup_context=_softcapped_ce_setup_context, +) + + +def softcapped_cross_entropy( + logits: Tensor, targets: Tensor, softcap: float, reduction: str = "mean", +) -> Tensor: + losses, _lse = torch.ops.pgsubmission1draft7fusedce.softcapped_ce( + logits, targets, float(softcap) + ) + if reduction == "none": + return losses + if reduction == "sum": + return losses.sum() + if reduction == "mean": + return losses.mean() + raise ValueError(f"Unsupported reduction={reduction!r}") + + +class Hyperparameters: + data_dir = os.environ.get("DATA_DIR", "./data/") + seed = int(os.environ.get("SEED", 1337)) + run_id = os.environ.get("RUN_ID", str(uuid.uuid4())) + iterations = int(os.environ.get("ITERATIONS", 20000)) + warmdown_frac = float(os.environ.get("WARMDOWN_FRAC", 0.75)) + warmup_steps = int(os.environ.get("WARMUP_STEPS", 20)) + train_batch_tokens = int(os.environ.get("TRAIN_BATCH_TOKENS", 786432)) + # Fused softcapped CE (Triton). Training-only — forward_logits eval path still uses + # eager softcap+F.cross_entropy. Default ON since validated as at-worst neutral. + fused_ce_enabled = bool(int(os.environ.get("FUSED_CE_ENABLED", "1"))) + train_seq_len = int(os.environ.get("TRAIN_SEQ_LEN", 2048)) + train_log_every = int(os.environ.get("TRAIN_LOG_EVERY", 500)) + max_wallclock_seconds = float(os.environ.get("MAX_WALLCLOCK_SECONDS", 6e2)) + val_batch_tokens = int(os.environ.get("VAL_BATCH_TOKENS", 524288)) + eval_seq_len = int(os.environ.get("EVAL_SEQ_LEN", 2048)) + val_loss_every = int(os.environ.get("VAL_LOSS_EVERY", 4000)) + vocab_size = int(os.environ.get("VOCAB_SIZE", 8192)) + num_layers = int(os.environ.get("NUM_LAYERS", 11)) + xsa_last_n = int(os.environ.get("XSA_LAST_N", 11)) + model_dim = int(os.environ.get("MODEL_DIM", 512)) + num_kv_heads = int(os.environ.get("NUM_KV_HEADS", 4)) + num_heads = int(os.environ.get("NUM_HEADS", 8)) + mlp_mult = float(os.environ.get("MLP_MULT", 4.0)) + skip_gates_enabled = bool(int(os.environ.get("SKIP_GATES_ENABLED", "1"))) + tie_embeddings = bool(int(os.environ.get("TIE_EMBEDDINGS", "1"))) + logit_softcap = float(os.environ.get("LOGIT_SOFTCAP", 3e1)) + rope_base = float(os.environ.get("ROPE_BASE", 1e4)) + rope_dims = int(os.environ.get("ROPE_DIMS", 16)) + rope_train_seq_len = int(os.environ.get("ROPE_TRAIN_SEQ_LEN", 2048)) + rope_yarn = bool(int(os.environ.get("ROPE_YARN", "0"))) + ln_scale = bool(int(os.environ.get("LN_SCALE", "1"))) + qk_gain_init = float(os.environ.get("QK_GAIN_INIT", 5.0)) + num_loops = int(os.environ.get("NUM_LOOPS", 2)) + loop_start = int(os.environ.get("LOOP_START", 3)) + loop_end = int(os.environ.get("LOOP_END", 5)) + enable_looping_at = float(os.environ.get("ENABLE_LOOPING_AT", 0.35)) + parallel_start_layer = int(os.environ.get("PARALLEL_START_LAYER", 8)) + parallel_final_lane = os.environ.get("PARALLEL_FINAL_LANE", "mean") + min_lr = 
float(os.environ.get("MIN_LR", 0.0)) + embed_lr = float(os.environ.get("EMBED_LR", 0.6)) + tied_embed_lr = float(os.environ.get("TIED_EMBED_LR", 0.03)) + tied_embed_init_std = float(os.environ.get("TIED_EMBED_INIT_STD", 0.005)) + matrix_lr = float(os.environ.get("MATRIX_LR", 0.026)) + scalar_lr = float(os.environ.get("SCALAR_LR", 0.02)) + muon_momentum = float(os.environ.get("MUON_MOMENTUM", 0.97)) + muon_backend_steps = int(os.environ.get("MUON_BACKEND_STEPS", 5)) + muon_momentum_warmup_start = float( + os.environ.get("MUON_MOMENTUM_WARMUP_START", 0.92) + ) + muon_momentum_warmup_steps = int(os.environ.get("MUON_MOMENTUM_WARMUP_STEPS", 1500)) + muon_row_normalize = bool(int(os.environ.get("MUON_ROW_NORMALIZE", "1"))) + beta1 = float(os.environ.get("BETA1", 0.9)) + beta2 = float(os.environ.get("BETA2", 0.95)) + adam_eps = float(os.environ.get("ADAM_EPS", 1e-08)) + grad_clip_norm = float(os.environ.get("GRAD_CLIP_NORM", 0.3)) + eval_stride = int(os.environ.get("EVAL_STRIDE", 64)) + adam_wd = float(os.environ.get("ADAM_WD", 0.02)) + muon_wd = float(os.environ.get("MUON_WD", 0.095)) + embed_wd = float(os.environ.get("EMBED_WD", 0.085)) + ema_decay = float(os.environ.get("EMA_DECAY", 0.9965)) + ttt_enabled = bool(int(os.environ.get("TTT_ENABLED", "1"))) + ttt_lora_rank = int(os.environ.get("TTT_LORA_RANK", 96)) + ttt_lora_lr = float(os.environ.get("TTT_LORA_LR", 0.0001)) + ttt_chunk_size = int(os.environ.get("TTT_CHUNK_SIZE", 48)) + ttt_eval_seq_len = int(os.environ.get("TTT_EVAL_SEQ_LEN", 2048)) + ttt_batch_size = int(os.environ.get("TTT_BATCH_SIZE", 64)) + ttt_grad_steps = int(os.environ.get("TTT_GRAD_STEPS", 1)) + ttt_weight_decay = float(os.environ.get("TTT_WEIGHT_DECAY", 1.0)) + ttt_beta1 = float(os.environ.get("TTT_BETA1", 0)) + ttt_beta2 = float(os.environ.get("TTT_BETA2", 0.999)) + ttt_k_lora = bool(int(os.environ.get("TTT_K_LORA", "1"))) + ttt_mlp_lora = bool(int(os.environ.get("TTT_MLP_LORA", "1"))) + ttt_o_lora = bool(int(os.environ.get("TTT_O_LORA", "1"))) + ttt_optimizer = os.environ.get("TTT_OPTIMIZER", "adam") + ttt_eval_batches = os.environ.get("TTT_EVAL_BATCHES", "") + val_doc_fraction = float(os.environ.get("VAL_DOC_FRACTION", 1.0)) + compressor = os.environ.get("COMPRESSOR", "brotli") + gptq_calibration_batches = int(os.environ.get("GPTQ_CALIBRATION_BATCHES", 16)) + gptq_reserve_seconds = float(os.environ.get("GPTQ_RESERVE_SECONDS", 4.0)) + phased_ttt_prefix_docs = int(os.environ.get("PHASED_TTT_PREFIX_DOCS", 2000)) + phased_ttt_num_phases = int(os.environ.get("PHASED_TTT_NUM_PHASES", 1)) + global_ttt_lr = float(os.environ.get("GLOBAL_TTT_LR", 0.001)) + global_ttt_momentum = float(os.environ.get("GLOBAL_TTT_MOMENTUM", 0.9)) + global_ttt_epochs = int(os.environ.get("GLOBAL_TTT_EPOCHS", 1)) + global_ttt_chunk_tokens = int(os.environ.get("GLOBAL_TTT_CHUNK_TOKENS", 32768)) + global_ttt_batch_seqs = int(os.environ.get("GLOBAL_TTT_BATCH_SEQS", 32)) + global_ttt_warmup_start_lr = float(os.environ.get("GLOBAL_TTT_WARMUP_START_LR", 0.0)) + global_ttt_warmup_chunks = int(os.environ.get("GLOBAL_TTT_WARMUP_CHUNKS", 0)) + global_ttt_grad_clip = float(os.environ.get("GLOBAL_TTT_GRAD_CLIP", 1.0)) + global_ttt_respect_doc_boundaries = bool(int(os.environ.get("GLOBAL_TTT_RESPECT_DOC_BOUNDARIES", "1"))) + matrix_bits = int(os.environ.get("MATRIX_BITS", 6)) + embed_bits = int(os.environ.get("EMBED_BITS", 8)) + matrix_clip_sigmas = float(os.environ.get("MATRIX_CLIP_SIGMAS", 12.85)) + embed_clip_sigmas = float(os.environ.get("EMBED_CLIP_SIGMAS", 2e1)) + mlp_clip_sigmas = 
float(os.environ.get("MLP_CLIP_SIGMAS", 10.0)) + attn_clip_sigmas = float(os.environ.get("ATTN_CLIP_SIGMAS", 13.0)) + # AttnOutGate (per-head multiplicative output gate, PR #1667 MarioPaerle). + # Zero-init weight: 2*sigmoid(0)=1 -> transparent at start. Source defaults to + # block input x ('proj'); 'q' uses raw Q projection output. + attn_out_gate_enabled = bool(int(os.environ.get("ATTN_OUT_GATE_ENABLED", "0"))) + attn_out_gate_src = os.environ.get("ATTN_OUT_GATE_SRC", "proj") + # SmearGate (input-dependent forward-1 token smear, modded-nanogpt @classiclarryd + # via PR #1667). x_t <- x_t + lam * sigmoid(W*x_t[:gate_window]) * x_{t-1}. + # lam=0 + W=0 -> transparent at init. + smear_gate_enabled = bool(int(os.environ.get("SMEAR_GATE_ENABLED", "0"))) + # Window: first GATE_WINDOW dims of the source feed the gate projection. + gate_window = int(os.environ.get("GATE_WINDOW", 12)) + # Gated Attention (Qwen, NeurIPS 2025 Best Paper, arXiv:2505.06708; + # qiuzh20/gated_attention). Per-head sigmoid gate on SDPA output, BEFORE + # out_proj. Gate input = full block input x (paper's headwise G1 variant + # driven from hidden_states). W_g shape (num_heads, dim), plain sigmoid. + # Near-zero init gives g~0.5 at step 0 (half attention output); per-block + # attn_scale (init 1.0) compensates during training. Name contains + # "attn_gate" so CONTROL_TENSOR_NAME_PATTERNS routes it to scalar AdamW. + gated_attn_enabled = bool(int(os.environ.get("GATED_ATTN_ENABLED", "0"))) + gated_attn_init_std = float(os.environ.get("GATED_ATTN_INIT_STD", 0.01)) + # Dedicated int8-per-row quantization for `attn_gate_w` tensors. These are + # small ((num_heads, dim) = (8, 512) = 4096 params) and bypass GPTQ via the + # numel<=65536 passthrough branch -> stored as fp16 (8 KB/layer, ~65 KB total + # compressed). int8-per-row cuts the raw tensor in half with negligible BPB + # impact: scales per head (8 values), symmetric quant over [-127, 127]. + # No Hessian needed (gate weights not in collect_hessians()). + gated_attn_quant_gate = bool(int(os.environ.get("GATED_ATTN_QUANT_GATE", "0"))) + # Sparse Attention Gate (modded-nanogpt-style). Keeps dense SDPA and only + # swaps the output-gate input to the first GATE_WINDOW residual dims. + # W_g: (num_heads, gate_window) = (8, 12) = 96 params/layer (~44K total), + # vs dense GatedAttn's (8, 512) = 4K/layer (~44K diff). Name "attn_gate_w" + # is shared so quant routing and int8 gate passthrough Just Work. Gate + # passthrough int8 still applies via GATED_ATTN_QUANT_GATE=1. + # Mutually exclusive with ATTN_OUT_GATE_ENABLED and GATED_ATTN_ENABLED. + sparse_attn_gate_enabled = bool(int(os.environ.get("SPARSE_ATTN_GATE_ENABLED", "0"))) + sparse_attn_gate_init_std = float(os.environ.get("SPARSE_ATTN_GATE_INIT_STD", 0.0)) + sparse_attn_gate_scale = float(os.environ.get("SPARSE_ATTN_GATE_SCALE", 1.0)) + # LQER asymmetric rank-k correction on top-K quant-error tensors (PR #1530 v2 port). + # Computes SVD of E = W_fp - W_quant, packs top-r A,B as INT2/INT4 (asym) or INTk (sym). 
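+    # Reference sketch of the LQER factor construction (illustrative only; the
+    # torch.linalg.svd usage and variable names here are assumptions -- the
+    # actual packing code runs in the quantization step):
+    #   E = (W_fp - W_deq).float()                 # quant-error matrix
+    #   U, S, Vh = torch.linalg.svd(E, full_matrices=False)
+    #   A = U[:, :r] * S[:r]                       # (out, r), carries the scale
+    #   B = Vh[:r, :]                              # (r, in)
+    #   W_hat = W_deq + A @ B                      # rank-r corrected weight
+    # A and B are then stored at LQER_FACTOR_BITS (per-group asymmetric when
+    # LQER_ASYM_ENABLED, group size LQER_ASYM_GROUP).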
+ lqer_enabled = bool(int(os.environ.get("LQER_ENABLED", "1"))) + lqer_rank = int(os.environ.get("LQER_RANK", 4)) + lqer_top_k = int(os.environ.get("LQER_TOP_K", 3)) + lqer_factor_bits = int(os.environ.get("LQER_FACTOR_BITS", 4)) + lqer_asym_enabled = bool(int(os.environ.get("LQER_ASYM_ENABLED", "1"))) + lqer_asym_group = int(os.environ.get("LQER_ASYM_GROUP", "64")) + distributed = "RANK" in os.environ and "WORLD_SIZE" in os.environ + rank = int(os.environ.get("RANK", "0")) + world_size = int(os.environ.get("WORLD_SIZE", "1")) + local_rank = int(os.environ.get("LOCAL_RANK", "0")) + is_main_process = rank == 0 + grad_accum_steps = 8 // world_size + # CaseOps integration: optional override of dataset root + tokenizer path. + # When CASEOPS_ENABLED=1, the wrapper loads a per-token byte sidecar + # (fineweb_val_bytes_*.bin, identical shard layout to val_*.bin) and uses + # it as the canonical raw-byte budget for BPB accounting. The sidecar + # REPLACES the build_sentencepiece_luts byte-counting path entirely. + caseops_enabled = bool(int(os.environ.get("CASEOPS_ENABLED", "0"))) + _default_caseops_data = os.path.join( + data_dir, + "datasets", + "fineweb10B_sp8192_caseops", + "datasets", + "datasets", + "fineweb10B_sp8192_lossless_caps_caseops_v1_reserved", + ) + _default_caseops_tok = os.path.join( + data_dir, + "datasets", + "fineweb10B_sp8192_caseops", + "datasets", + "tokenizers", + "fineweb_8192_bpe_lossless_caps_caseops_v1_reserved.model", + ) + if caseops_enabled: + datasets_dir = os.environ.get("DATA_PATH", _default_caseops_data) + tokenizer_path = os.environ.get("TOKENIZER_PATH", _default_caseops_tok) + else: + datasets_dir = os.environ.get( + "DATA_PATH", + os.path.join(data_dir, "datasets", f"fineweb10B_sp{vocab_size}"), + ) + tokenizer_path = os.environ.get( + "TOKENIZER_PATH", + os.path.join(data_dir, "tokenizers", f"fineweb_{vocab_size}_bpe.model"), + ) + train_files = os.path.join(datasets_dir, "fineweb_train_*.bin") + val_files = os.path.join(datasets_dir, "fineweb_val_*.bin") + val_bytes_files = os.path.join(datasets_dir, "fineweb_val_bytes_*.bin") + artifact_dir = os.environ.get("ARTIFACT_DIR", "") + logfile = ( + os.path.join(artifact_dir, f"{run_id}.txt") + if artifact_dir + else f"logs/{run_id}.txt" + ) + model_path = ( + os.path.join(artifact_dir, "final_model.pt") + if artifact_dir + else "final_model.pt" + ) + quantized_model_path = ( + os.path.join(artifact_dir, "final_model.int6.ptz") + if artifact_dir + else "final_model.int6.ptz" + ) + + +_logger_hparams = None + + +def set_logging_hparams(h): + global _logger_hparams + _logger_hparams = h + + +def log(msg, console=True): + if _logger_hparams is None: + print(msg) + return + if _logger_hparams.is_main_process: + if console: + print(msg) + if _logger_hparams.logfile is not None: + with open(_logger_hparams.logfile, "a", encoding="utf-8") as f: + print(msg, file=f) + + +class ValidationData: + def __init__(self, h, device): + self.sp = spm.SentencePieceProcessor(model_file=h.tokenizer_path) + if int(self.sp.vocab_size()) != h.vocab_size: + raise ValueError( + f"VOCAB_SIZE={h.vocab_size} does not match tokenizer vocab_size={int(self.sp.vocab_size())}" + ) + self.val_tokens = load_validation_tokens(h.val_files, h.eval_seq_len) + ( + self.base_bytes_lut, + self.has_leading_space_lut, + self.is_boundary_token_lut, + ) = build_sentencepiece_luts(self.sp, h.vocab_size, device) + # CaseOps: when enabled, load per-token byte sidecar and stash it as a + # CPU tensor aligned 1:1 with self.val_tokens. 
eval_val/eval_val_ttt + # branches use this as the canonical raw-byte budget per token. + self.caseops_enabled = bool(getattr(h, "caseops_enabled", False)) + self.val_bytes = None + if self.caseops_enabled: + self.val_bytes = load_validation_byte_sidecar( + h.val_bytes_files, h.eval_seq_len, self.val_tokens.numel() + ) + + +def build_sentencepiece_luts(sp, vocab_size, device): + sp_vocab_size = int(sp.vocab_size()) + assert ( + sp.piece_to_id("▁") != sp.unk_id() + ), "Tokenizer must have '▁' (space) as its own token for correct BPB byte counting" + table_size = max(sp_vocab_size, vocab_size) + base_bytes_np = np.zeros((table_size,), dtype=np.int16) + has_leading_space_np = np.zeros((table_size,), dtype=np.bool_) + is_boundary_token_np = np.ones((table_size,), dtype=np.bool_) + for token_id in range(sp_vocab_size): + if sp.is_control(token_id) or sp.is_unknown(token_id) or sp.is_unused(token_id): + continue + is_boundary_token_np[token_id] = False + if sp.is_byte(token_id): + base_bytes_np[token_id] = 1 + continue + piece = sp.id_to_piece(token_id) + if piece.startswith("▁"): + has_leading_space_np[token_id] = True + piece = piece[1:] + base_bytes_np[token_id] = len(piece.encode("utf-8")) + return ( + torch.tensor(base_bytes_np, dtype=torch.int16, device=device), + torch.tensor(has_leading_space_np, dtype=torch.bool, device=device), + torch.tensor(is_boundary_token_np, dtype=torch.bool, device=device), + ) + + +def load_validation_tokens(pattern, seq_len): + # Filter out CaseOps byte sidecar shards which share the val_*.bin glob. + files = [ + Path(p) + for p in sorted(glob.glob(pattern)) + if "_bytes_" not in Path(p).name + ] + if not files: + raise FileNotFoundError(f"No files found for pattern: {pattern}") + tokens = torch.cat([load_data_shard(file) for file in files]).contiguous() + usable = (tokens.numel() - 1) // seq_len * seq_len + if usable <= 0: + raise ValueError(f"Validation split is too short for TRAIN_SEQ_LEN={seq_len}") + return tokens[: usable + 1] + + +def load_validation_byte_sidecar(pattern, seq_len, expected_len): + """Load CaseOps per-token byte sidecar(s). Same shard layout as token shards + (256 int32 header + uint16 array). Each entry = canonical raw-text byte + budget for that token in the corresponding val shard. Returns a CPU + int16 tensor sliced to match expected_len (i.e. val_tokens length).""" + files = [Path(p) for p in sorted(glob.glob(pattern))] + if not files: + raise FileNotFoundError(f"No byte sidecar files for pattern: {pattern}") + shards = [load_data_shard(file) for file in files] + # load_data_shard returns uint16 — that's exactly what the sidecar stores. 
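+    # Hedged sketch of how the sidecar feeds BPB accounting (the actual
+    # reduction lives in the eval loop, not here): with per-token byte budgets
+    # b_t from this tensor and per-token NLL l_t in nats,
+    #   bpb = sum(l_t) / (math.log(2) * sum(b_t))
+    # i.e. the sidecar supplies the denominator that build_sentencepiece_luts
+    # would otherwise provide.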
+ bytes_full = torch.cat(shards).contiguous() + if bytes_full.numel() < expected_len: + raise ValueError( + f"Byte sidecar too short: {bytes_full.numel()} < val_tokens {expected_len}" + ) + return bytes_full[:expected_len].to(torch.int32) + + +def load_data_shard(file): + header_bytes = 256 * np.dtype(" 0: + pos = start + while pos < end: + seg_starts.append(pos) + pos += max_doc_len + else: + seg_starts.append(start) + boundaries = seg_starts + [total_len] + padded_len = get_next_multiple_of_n(len(boundaries), bucket_size) + cu = torch.full((padded_len,), total_len, dtype=torch.int32, device=device) + cu[: len(boundaries)] = torch.tensor(boundaries, dtype=torch.int32, device=device) + seg_ends = seg_starts[1:] + [total_len] + max_seqlen = max(end - start for start, end in zip(seg_starts, seg_ends)) + return cu, max_seqlen + +class DocumentPackingLoader: + _shard_pool = ThreadPoolExecutor(1) + + def __init__(self, h, device, cu_bucket_size=64): + self.rank = h.rank + self.world_size = h.world_size + self.device = device + self.cu_bucket_size = cu_bucket_size + self.max_seq_len = h.train_seq_len + all_files = [Path(p) for p in sorted(glob.glob(h.train_files))] + if not all_files: + raise FileNotFoundError(f"No files found for pattern: {h.train_files}") + self.files = all_files + self.file_iter = iter(self.files) + self._init_shard(load_data_shard(next(self.file_iter))) + self._next_shard = self._submit_next_shard() + self._batch_pool = ThreadPoolExecutor(1) + self._next_batch = None + + def _init_shard(self, tokens): + global BOS_ID + self.tokens = tokens + self.shard_size = tokens.numel() + if BOS_ID is None: + BOS_ID = 1 + self.bos_idx = ( + (tokens == BOS_ID).nonzero(as_tuple=True)[0].to(torch.int64).cpu().numpy() + ) + self.cursor = int(self.bos_idx[0]) + + def _submit_next_shard(self): + try: + path = next(self.file_iter) + return self._shard_pool.submit(load_data_shard, path) + except StopIteration: + return None + + def _advance_shard(self): + if self._next_shard is None: + self.file_iter = iter(self.files) + self._next_shard = self._shard_pool.submit( + load_data_shard, next(self.file_iter) + ) + self._init_shard(self._next_shard.result()) + self._next_shard = self._submit_next_shard() + + def _local_doc_starts(self, local_start, total_len): + lo = np.searchsorted(self.bos_idx, local_start, side="left") + hi = np.searchsorted(self.bos_idx, local_start + total_len, side="left") + return (self.bos_idx[lo:hi] - local_start).tolist() + + def _prepare_batch(self, num_tokens_local, max_seq_len): + per_rank_span = num_tokens_local + 1 + global_span = per_rank_span * self.world_size + while self.cursor + global_span > self.shard_size: + self._advance_shard() + local_start = self.cursor + self.rank * per_rank_span + buf = self.tokens[local_start : local_start + per_rank_span] + inputs = buf[:-1].to(dtype=torch.int64).pin_memory() + targets = buf[1:].to(dtype=torch.int64).pin_memory() + starts = self._local_doc_starts(local_start, inputs.numel()) + cu_seqlens, max_seqlen = _build_cu_seqlens( + starts, inputs.numel(), inputs.device, max_seq_len, self.cu_bucket_size + ) + cu_seqlens = cu_seqlens.pin_memory() + self.cursor += global_span + return inputs, targets, cu_seqlens, max_seqlen + + def next_batch(self, global_tokens, grad_accum_steps): + num_tokens_local = global_tokens // (self.world_size * grad_accum_steps) + if self._next_batch is not None: + inputs, targets, cu_seqlens, max_seqlen = self._next_batch.result() + else: + inputs, targets, cu_seqlens, max_seqlen = self._prepare_batch( + 
num_tokens_local, self.max_seq_len + ) + self._next_batch = self._batch_pool.submit( + self._prepare_batch, num_tokens_local, self.max_seq_len + ) + return ( + inputs[None].to(self.device, non_blocking=True), + targets[None].to(self.device, non_blocking=True), + cu_seqlens.to(self.device, non_blocking=True), + max_seqlen, + ) + + +class ShuffledSequenceLoader: + def __init__(self, h, device): + self.world_size = h.world_size + self.seq_len = h.train_seq_len + self.device = device + all_files = [Path(p) for p in sorted(glob.glob(h.train_files))] + if not all_files: + raise FileNotFoundError(f"No files found for pattern: {h.train_files}") + self.files = all_files[h.rank :: h.world_size] + self.rng = np.random.Generator(np.random.PCG64(h.rank)) + self.num_tokens = [_read_num_tokens(f) for f in self.files] + self.start_inds = [[] for _ in self.files] + for si in range(len(self.files)): + self._reset_shard(si) + + def _reset_shard(self, si): + max_phase = min( + self.seq_len - 1, max(0, self.num_tokens[si] - self.seq_len - 1) + ) + phase = int(self.rng.integers(max_phase + 1)) if max_phase > 0 else 0 + num_sequences = (self.num_tokens[si] - 1 - phase) // self.seq_len + sequence_order = self.rng.permutation(num_sequences) + self.start_inds[si] = (phase + sequence_order * self.seq_len).tolist() + + def next_batch(self, global_tokens, grad_accum_steps): + device_tokens = global_tokens // (self.world_size * grad_accum_steps) + device_batch_size = device_tokens // self.seq_len + remaining = np.array([len(s) for s in self.start_inds], dtype=np.float64) + x = torch.empty((device_batch_size, self.seq_len), dtype=torch.int64) + y = torch.empty((device_batch_size, self.seq_len), dtype=torch.int64) + for bi in range(device_batch_size): + total = remaining.sum() + if total <= 0: + for si in range(len(self.files)): + self._reset_shard(si) + remaining = np.array( + [len(s) for s in self.start_inds], dtype=np.float64 + ) + total = remaining.sum() + probs = remaining / total + si = int(self.rng.choice(len(self.files), p=probs)) + start_ind = self.start_inds[si].pop() + remaining[si] -= 1 + mm = _get_shard_memmap(self.files[si]) + window = torch.as_tensor( + np.array(mm[start_ind : start_ind + self.seq_len + 1], dtype=np.int64) + ) + x[bi] = window[:-1] + y[bi] = window[1:] + return x.to(self.device, non_blocking=True), y.to( + self.device, non_blocking=True + ) + + +class RMSNorm(nn.Module): + def __init__(self, eps=None): + super().__init__() + self.eps = eps + + def forward(self, x): + return F.rms_norm(x, (x.size(-1),), eps=self.eps) + + +class CastedLinear(nn.Linear): + def forward(self, x): + w = self.weight.to(x.dtype) + bias = self.bias.to(x.dtype) if self.bias is not None else None + return F.linear(x, w, bias) + + +@triton.jit +def linear_leaky_relu_square_kernel( + a_desc, + b_desc, + c_desc, + aux_desc, + M, + N, + K, + BLOCK_SIZE_M: tl.constexpr, + BLOCK_SIZE_N: tl.constexpr, + BLOCK_SIZE_K: tl.constexpr, + NUM_SMS: tl.constexpr, + FORWARD: tl.constexpr, +): + dtype = tl.bfloat16 + start_pid = tl.program_id(axis=0) + num_pid_m = tl.cdiv(M, BLOCK_SIZE_M) + num_pid_n = tl.cdiv(N, BLOCK_SIZE_N) + k_tiles = tl.cdiv(K, BLOCK_SIZE_K) + num_tiles = num_pid_m * num_pid_n + tile_id_c = start_pid - NUM_SMS + for tile_id in tl.range(start_pid, num_tiles, NUM_SMS, flatten=True): + pid_m = tile_id // num_pid_n + pid_n = tile_id % num_pid_n + offs_am = pid_m * BLOCK_SIZE_M + offs_bn = pid_n * BLOCK_SIZE_N + accumulator = tl.zeros((BLOCK_SIZE_M, BLOCK_SIZE_N), dtype=tl.float32) + for ki in range(k_tiles): + 
offs_k = ki * BLOCK_SIZE_K + a = a_desc.load([offs_am, offs_k]) + b = b_desc.load([offs_bn, offs_k]) + accumulator = tl.dot(a, b.T, accumulator) + tile_id_c += NUM_SMS + offs_am_c = offs_am + offs_bn_c = offs_bn + acc = tl.reshape(accumulator, (BLOCK_SIZE_M, 2, BLOCK_SIZE_N // 2)) + acc = tl.permute(acc, (0, 2, 1)) + acc0, acc1 = tl.split(acc) + c0 = acc0.to(dtype) + c1 = acc1.to(dtype) + if not FORWARD: + pre0 = aux_desc.load([offs_am_c, offs_bn_c]) + pre1 = aux_desc.load([offs_am_c, offs_bn_c + BLOCK_SIZE_N // 2]) + c0 = c0 * tl.where(pre0 > 0, 2.0 * pre0, 0.5 * pre0) + c1 = c1 * tl.where(pre1 > 0, 2.0 * pre1, 0.5 * pre1) + c_desc.store([offs_am_c, offs_bn_c], c0) + c_desc.store([offs_am_c, offs_bn_c + BLOCK_SIZE_N // 2], c1) + if FORWARD: + aux0 = tl.where(c0 > 0, c0, 0.5 * c0) + aux1 = tl.where(c1 > 0, c1, 0.5 * c1) + aux_desc.store([offs_am_c, offs_bn_c], aux0 * aux0) + aux_desc.store([offs_am_c, offs_bn_c + BLOCK_SIZE_N // 2], aux1 * aux1) + + +def linear_leaky_relu_square(a, b, aux=None): + M, K = a.shape + N, K2 = b.shape + assert K == K2 + c = torch.empty((M, N), device=a.device, dtype=a.dtype) + forward = aux is None + if aux is None: + aux = torch.empty((M, N), device=a.device, dtype=a.dtype) + num_sms = torch.cuda.get_device_properties(a.device).multi_processor_count + BLOCK_SIZE_M, BLOCK_SIZE_N, BLOCK_SIZE_K = 128, 256, 64 + num_stages = 4 if forward else 3 + a_desc = TensorDescriptor.from_tensor(a, [BLOCK_SIZE_M, BLOCK_SIZE_K]) + b_desc = TensorDescriptor.from_tensor(b, [BLOCK_SIZE_N, BLOCK_SIZE_K]) + c_desc = TensorDescriptor.from_tensor(c, [BLOCK_SIZE_M, BLOCK_SIZE_N // 2]) + aux_desc = TensorDescriptor.from_tensor(aux, [BLOCK_SIZE_M, BLOCK_SIZE_N // 2]) + grid = lambda _meta: ( + min(num_sms, triton.cdiv(M, BLOCK_SIZE_M) * triton.cdiv(N, BLOCK_SIZE_N)), + ) + linear_leaky_relu_square_kernel[grid]( + a_desc, + b_desc, + c_desc, + aux_desc, + M, + N, + K, + BLOCK_SIZE_M=BLOCK_SIZE_M, + BLOCK_SIZE_N=BLOCK_SIZE_N, + BLOCK_SIZE_K=BLOCK_SIZE_K, + NUM_SMS=num_sms, + FORWARD=forward, + num_stages=num_stages, + num_warps=8, + ) + if forward: + return c, aux + return c + + +class FusedLinearLeakyReLUSquareFunction(torch.autograd.Function): + @staticmethod + def forward(ctx, x, w1, w2): + x_flat = x.reshape(-1, x.shape[-1]) + pre, post = linear_leaky_relu_square(x_flat, w1) + out = F.linear(post, w2) + ctx.save_for_backward(x, w1, w2, pre, post) + return out.view(*x.shape[:-1], out.shape[-1]) + + @staticmethod + def backward(ctx, grad_output): + x, w1, w2, pre, post = ctx.saved_tensors + x_flat = x.reshape(-1, x.shape[-1]) + grad_output_flat = grad_output.reshape(-1, grad_output.shape[-1]) + dw2 = grad_output_flat.T @ post + dpre = linear_leaky_relu_square(grad_output_flat, w2.T.contiguous(), aux=pre) + dw1 = dpre.T @ x_flat + dx = dpre @ w1 + return dx.view_as(x), dw1, dw2 + + +FusedLeakyReLUSquareMLP = FusedLinearLeakyReLUSquareFunction.apply + + +class Rotary(nn.Module): + def __init__(self, dim, base=1e4, train_seq_len=1024, rope_dims=0, yarn=True): + super().__init__() + self.dim = dim + self.base = base + self.train_seq_len = train_seq_len + self.yarn = yarn + self.rope_dims = rope_dims if rope_dims > 0 else dim + inv_freq = 1.0 / base ** ( + torch.arange(0, self.rope_dims, 2, dtype=torch.float32) / self.rope_dims + ) + self.register_buffer("inv_freq", inv_freq, persistent=False) + self._seq_len_cached = 0 + self._cos_cached = None + self._sin_cached = None + + def forward(self, seq_len, device, dtype): + if ( + self._cos_cached is None + or self._sin_cached is None + or 
self._seq_len_cached < seq_len + or self._cos_cached.device != device + ): + rd = self.rope_dims + if self.yarn and seq_len > self.train_seq_len: + scale = seq_len / self.train_seq_len + new_base = self.base * scale ** (rd / (rd - 2)) + inv_freq = 1.0 / new_base ** ( + torch.arange(0, rd, 2, dtype=torch.float32, device=device) / rd + ) + else: + inv_freq = self.inv_freq.float().to(device) + t = torch.arange(seq_len, device=device, dtype=torch.float32) + freqs = torch.outer(t, inv_freq) + self._cos_cached = freqs.cos()[None, :, None, :] + self._sin_cached = freqs.sin()[None, :, None, :] + self._seq_len_cached = seq_len + return self._cos_cached[:, :seq_len].to(dtype=dtype), self._sin_cached[:, :seq_len].to(dtype=dtype) + + +def apply_rotary_emb(x, cos, sin, rope_dims=0): + if rope_dims > 0 and rope_dims < x.size(-1): + x_rope, x_pass = x[..., :rope_dims], x[..., rope_dims:] + half = rope_dims // 2 + x1, x2 = x_rope[..., :half], x_rope[..., half:] + x_rope = torch.cat((x1 * cos + x2 * sin, x1 * -sin + x2 * cos), dim=-1) + return torch.cat((x_rope, x_pass), dim=-1) + half = x.size(-1) // 2 + x1, x2 = x[..., :half], x[..., half:] + return torch.cat((x1 * cos + x2 * sin, x1 * -sin + x2 * cos), dim=-1) + + +class CausalSelfAttention(nn.Module): + def __init__( + self, dim, num_heads, num_kv_heads, rope_base, qk_gain_init, train_seq_len, yarn=True, + attn_out_gate=False, attn_out_gate_src="proj", gate_window=12, + gated_attn=False, gated_attn_init_std=0.01, + sparse_attn_gate=False, sparse_attn_gate_init_std=0.0, sparse_attn_gate_scale=1.0, + ): + super().__init__() + if dim % num_heads != 0: + raise ValueError("model_dim must be divisible by num_heads") + if num_heads % num_kv_heads != 0: + raise ValueError("num_heads must be divisible by num_kv_heads") + if int(attn_out_gate) + int(gated_attn) + int(sparse_attn_gate) > 1: + raise ValueError( + "attn_out_gate, gated_attn, and sparse_attn_gate are mutually exclusive" + ) + self.num_heads = num_heads + self.num_kv_heads = num_kv_heads + self.head_dim = dim // num_heads + if self.head_dim % 2 != 0: + raise ValueError("head_dim must be even for RoPE") + self.q_gain = nn.Parameter( + torch.full((num_heads,), qk_gain_init, dtype=torch.float32) + ) + self.rope_dims = 0 + self.rotary = Rotary(self.head_dim, base=rope_base, train_seq_len=train_seq_len, yarn=yarn) + self.use_xsa = False + # AttnOutGate (PR #1667 MarioPaerle): per-head multiplicative gate on attention + # output. CastedLinear so restore_fp32_params casts back to fp32 for GPTQ. + # _zero_init -> 2*sigmoid(0)=1 -> transparent at init. + self.attn_out_gate = attn_out_gate + self.attn_out_gate_src = attn_out_gate_src + self.gate_window = gate_window + if attn_out_gate: + self.attn_gate_proj = CastedLinear(gate_window, num_heads, bias=False) + self.attn_gate_proj._zero_init = True + # Gated Attention (arXiv:2505.06708, Qwen, NeurIPS 2025). Per-head sigmoid + # gate on SDPA output, BEFORE out_proj. Gate projection W_g: (num_heads, dim). + # Name "attn_gate_w" contains "attn_gate" substring so it matches + # CONTROL_TENSOR_NAME_PATTERNS and routes to the scalar AdamW group. + # fp32 Parameter -> restore_fp32_params path covers it via the ndim<2 OR + # name-pattern check (name matches "attn_gate"). Cast to x.dtype on use. + self.gated_attn = gated_attn + if gated_attn: + W = torch.empty(num_heads, dim, dtype=torch.float32) + nn.init.normal_(W, mean=0.0, std=gated_attn_init_std) + self.attn_gate_w = nn.Parameter(W) + # Sparse attention head-output gate (modded-nanogpt style). 
Keeps dense SDPA + # and only narrows the gate input to the first gate_window residual dims. + # W_g: (num_heads, gate_window). y_{t,h} <- sigmoid(scale * W_g_h @ x_t[:gate_window]) * y_{t,h}. + # Shares attn_gate_w name with dense GatedAttn so the quant routing + # (CONTROL_TENSOR_NAME_PATTERNS / attn_gate_w int8 passthrough) is unchanged. + self.sparse_attn_gate = sparse_attn_gate + self.sparse_attn_gate_scale = sparse_attn_gate_scale + if sparse_attn_gate: + W = torch.empty(num_heads, gate_window, dtype=torch.float32) + if sparse_attn_gate_init_std > 0: + nn.init.normal_(W, mean=0.0, std=sparse_attn_gate_init_std) + else: + nn.init.zeros_(W) + self.attn_gate_w = nn.Parameter(W) + + def _xsa_efficient(self, y, v): + B, T, H, D = y.shape + Hkv = v.size(-2) + group = H // Hkv + y_g = y.reshape(B, T, Hkv, group, D) + vn = F.normalize(v, dim=-1).unsqueeze(-2) + proj = (y_g * vn).sum(dim=-1, keepdim=True) * vn + return (y_g - proj).reshape(B, T, H, D) + + def forward(self, x, q_w, k_w, v_w, out_w, cu_seqlens=None, max_seqlen=0): + bsz, seqlen, dim = x.shape + # q_raw kept around as a tap point for attn_out_gate_src='q' (post-projection, + # pre-reshape, pre-RoPE). + q_raw = F.linear(x, q_w.to(x.dtype)) + q = q_raw.reshape(bsz, seqlen, self.num_heads, self.head_dim) + k = F.linear(x, k_w.to(x.dtype)).reshape(bsz, seqlen, self.num_kv_heads, self.head_dim) + v = F.linear(x, v_w.to(x.dtype)).reshape(bsz, seqlen, self.num_kv_heads, self.head_dim) + q = F.rms_norm(q, (q.size(-1),)) + k = F.rms_norm(k, (k.size(-1),)) + cos, sin = self.rotary(seqlen, x.device, q.dtype) + q = apply_rotary_emb(q, cos, sin, self.rope_dims) + k = apply_rotary_emb(k, cos, sin, self.rope_dims) + q = q * self.q_gain.to(dtype=q.dtype)[None, None, :, None] + if cu_seqlens is not None: + y = flash_attn_varlen_func( + q[0], + k[0], + v[0], + cu_seqlens_q=cu_seqlens, + cu_seqlens_k=cu_seqlens, + max_seqlen_q=max_seqlen, + max_seqlen_k=max_seqlen, + causal=True, + window_size=(-1, -1), + )[None] + else: + y = flash_attn_3_func(q, k, v, causal=True) + if self.use_xsa: + y = self._xsa_efficient(y, v) + # AttnOutGate inlined (PR #1667). Inline + .contiguous() barrier so torch.compile + # fullgraph=True is happy (this avoids the @torch.compiler.disable trap that + # crashed gates v3). Per-head gate on (B,T,H,D) tensor: g shape [B,T,H], broadcast + # over D via [..., None]. zero-init weight -> 2*sigmoid(0)=1 -> transparent. + if self.attn_out_gate: + gate_src = q_raw if self.attn_out_gate_src == "q" else x + gate_in = gate_src[..., : self.gate_window].contiguous() + g = 2.0 * torch.sigmoid(self.attn_gate_proj(gate_in)) + y = y * g[..., None] + # Gated Attention (arXiv:2505.06708 G1). Inline + .contiguous() barrier so + # torch.compile fullgraph=True is happy. Per-head gate on (B,T,H,D): g shape + # [B,T,H], broadcast over D via [..., None]. Paper: g = sigmoid(x @ W_g.T) + # where W_g: (H, dim). .to(x.dtype) on fp32 param before broadcast with bf16. + if self.gated_attn: + x_c = x.contiguous() + g = torch.sigmoid(F.linear(x_c, self.attn_gate_w.to(x.dtype))) + y = y * g[..., None] + # Sparse head-output gate: narrower (gate_window) input, same shape g as GatedAttn. 
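+        # Shape walk-through (illustrative numbers from this record's config):
+        # with (B, T, H, D) = (1, 2048, 8, 64) and gate_window=12, gate_in is
+        # (1, 2048, 12); F.linear against W_g of shape (8, 12) gives g of shape
+        # (1, 2048, 8); g[..., None] broadcasts over D, so each head's 64-dim
+        # output is scaled by a single per-token scalar.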
+ if self.sparse_attn_gate: + gate_in = x[..., : self.gate_window].contiguous() + g = torch.sigmoid( + self.sparse_attn_gate_scale + * F.linear(gate_in, self.attn_gate_w.to(x.dtype)) + ) + y = y * g[..., None] + y = y.reshape(bsz, seqlen, dim) + self._last_proj_input = y.detach() if getattr(self, "_calib", False) else None + return F.linear(y, out_w.to(x.dtype)) + + +class MLP(nn.Module): + def __init__(self, dim, mlp_mult): + super().__init__() + self.use_fused = True + + def forward(self, x, up_w, down_w): + if self.training and self.use_fused: + return FusedLeakyReLUSquareMLP(x, up_w.to(x.dtype), down_w.to(x.dtype)) + hidden = F.leaky_relu(F.linear(x, up_w.to(x.dtype)), negative_slope=0.5).square() + self._last_down_input = hidden.detach() if getattr(self, "_calib", False) else None + return F.linear(hidden, down_w.to(x.dtype)) + + +class Block(nn.Module): + def __init__( + self, + dim, + num_heads, + num_kv_heads, + mlp_mult, + rope_base, + qk_gain_init, + train_seq_len, + layer_idx=0, + ln_scale=False, + yarn=True, + attn_out_gate=False, + attn_out_gate_src="proj", + gate_window=12, + gated_attn=False, + gated_attn_init_std=0.01, + sparse_attn_gate=False, + sparse_attn_gate_init_std=0.0, + sparse_attn_gate_scale=1.0, + ): + super().__init__() + self.attn_norm = RMSNorm() + self.mlp_norm = RMSNorm() + self.attn = CausalSelfAttention( + dim, num_heads, num_kv_heads, rope_base, qk_gain_init, train_seq_len, yarn=yarn, + attn_out_gate=attn_out_gate, attn_out_gate_src=attn_out_gate_src, gate_window=gate_window, + gated_attn=gated_attn, gated_attn_init_std=gated_attn_init_std, + sparse_attn_gate=sparse_attn_gate, + sparse_attn_gate_init_std=sparse_attn_gate_init_std, + sparse_attn_gate_scale=sparse_attn_gate_scale, + ) + self.mlp = MLP(dim, mlp_mult) + self.attn_scale = nn.Parameter(torch.ones(dim, dtype=torch.float32)) + self.mlp_scale = nn.Parameter(torch.ones(dim, dtype=torch.float32)) + self.resid_mix = nn.Parameter( + torch.stack((torch.ones(dim), torch.zeros(dim))).float() + ) + self.ln_scale_factor = 1.0 / math.sqrt(layer_idx + 1) if ln_scale else 1.0 + + def forward(self, x, x0, q_w, k_w, v_w, out_w, up_w, down_w, cu_seqlens=None, max_seqlen=0): + mix = self.resid_mix.to(dtype=x.dtype) + x_in = mix[0][None, None, :] * x + mix[1][None, None, :] * x0 + attn_out = self.attn( + self.attn_norm(x_in) * self.ln_scale_factor, + q_w, k_w, v_w, out_w, + cu_seqlens=cu_seqlens, + max_seqlen=max_seqlen, + ) + x_out = x_in + self.attn_scale.to(dtype=x_in.dtype)[None, None, :] * attn_out + x_out = x_out + self.mlp_scale.to(dtype=x_out.dtype)[ + None, None, : + ] * self.mlp(self.mlp_norm(x_out) * self.ln_scale_factor, up_w, down_w) + return x_out + +class GPT(nn.Module): + def __init__(self, h): + super().__init__() + if h.logit_softcap <= 0.0: + raise ValueError(f"logit_softcap must be positive, got {h.logit_softcap}") + self.tie_embeddings = h.tie_embeddings + self.tied_embed_init_std = h.tied_embed_init_std + self.logit_softcap = h.logit_softcap + self.fused_ce_enabled = bool(h.fused_ce_enabled) + self.tok_emb = nn.Embedding(h.vocab_size, h.model_dim) + self.num_layers = h.num_layers + head_dim = h.model_dim // h.num_heads + kv_dim = h.num_kv_heads * head_dim + hidden_dim = int(h.mlp_mult * h.model_dim) + self.qo_bank = nn.Parameter(torch.empty(2 * h.num_layers, h.model_dim, h.model_dim)) + self.kv_bank = nn.Parameter(torch.empty(2 * h.num_layers, kv_dim, h.model_dim)) + self.mlp_up_bank = nn.Parameter(torch.empty(h.num_layers, hidden_dim, h.model_dim)) + self.mlp_down_bank = 
nn.Parameter(torch.empty(h.num_layers, h.model_dim, hidden_dim)) + self.num_encoder_layers = h.num_layers // 2 + self.num_decoder_layers = h.num_layers - self.num_encoder_layers + self.blocks = nn.ModuleList( + [ + Block( + h.model_dim, + h.num_heads, + h.num_kv_heads, + h.mlp_mult, + h.rope_base, + h.qk_gain_init, + h.train_seq_len, + layer_idx=i, + ln_scale=h.ln_scale, + yarn=h.rope_yarn, + attn_out_gate=h.attn_out_gate_enabled, + attn_out_gate_src=h.attn_out_gate_src, + gate_window=h.gate_window, + gated_attn=h.gated_attn_enabled, + gated_attn_init_std=h.gated_attn_init_std, + sparse_attn_gate=h.sparse_attn_gate_enabled, + sparse_attn_gate_init_std=h.sparse_attn_gate_init_std, + sparse_attn_gate_scale=h.sparse_attn_gate_scale, + ) + for i in range(h.num_layers) + ] + ) + if h.rope_dims > 0: + head_dim = h.model_dim // h.num_heads + for block in self.blocks: + block.attn.rope_dims = h.rope_dims + block.attn.rotary = Rotary( + head_dim, + base=h.rope_base, + train_seq_len=h.train_seq_len, + rope_dims=h.rope_dims, + yarn=h.rope_yarn, + ) + self.final_norm = RMSNorm() + self.lm_head = ( + None + if h.tie_embeddings + else CastedLinear(h.model_dim, h.vocab_size, bias=False) + ) + if self.lm_head is not None: + self.lm_head._zero_init = True + if h.xsa_last_n > 0: + for i in range(max(0, h.num_layers - h.xsa_last_n), h.num_layers): + self.blocks[i].attn.use_xsa = True + self.looping_active = False + if h.num_loops > 0: + loop_seg = list(range(h.loop_start, h.loop_end + 1)) + all_indices = list(range(h.loop_start)) + for _ in range(h.num_loops + 1): + all_indices.extend(loop_seg) + all_indices.extend(range(h.loop_end + 1, h.num_layers)) + num_enc = len(all_indices) // 2 + self.encoder_indices = all_indices[:num_enc] + self.decoder_indices = all_indices[num_enc:] + else: + self.encoder_indices = list(range(self.num_encoder_layers)) + self.decoder_indices = list(range(self.num_encoder_layers, h.num_layers)) + self.num_skip_weights = min( + len(self.encoder_indices), len(self.decoder_indices) + ) + self.skip_weights = nn.Parameter( + torch.ones(self.num_skip_weights, h.model_dim, dtype=torch.float32) + ) + self.skip_gates = ( + nn.Parameter( + torch.zeros(self.num_skip_weights, h.model_dim, dtype=torch.float32) + ) + if h.skip_gates_enabled + else None + ) + self.parallel_start_layer = h.parallel_start_layer + self.parallel_final_lane = h.parallel_final_lane.lower() + self.parallel_post_lambdas = nn.Parameter( + torch.ones(h.num_layers, 2, 2, dtype=torch.float32) + ) + self.parallel_resid_lambdas = nn.Parameter( + torch.full((h.num_layers, 2), 1.1, dtype=torch.float32) + ) + # SmearGate (PR #1667 / modded-nanogpt @classiclarryd): + # x_t <- x_t + lam * sigmoid(W * x_t[:gate_window]) * x_{t-1}. + # Per-token forward-1 smear of the embedding lane. W zero-init + lam=0 -> + # transparent at init. Uses CastedLinear so restore_fp32_params handles dtype. 
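+        # The BOS document-boundary fix (this record's key change) is applied at
+        # the two call sites, _forward_hidden and forward_ttt: g is masked to 0
+        # wherever input_ids is BOS, so the last token of document N cannot
+        # smear into document N+1's BOS embedding.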
+ self.smear_gate_enabled = h.smear_gate_enabled + if self.smear_gate_enabled: + self.smear_window = h.gate_window + self.smear_gate = CastedLinear(self.smear_window, 1, bias=False) + self.smear_gate._zero_init = True + self.smear_lambda = nn.Parameter(torch.zeros(1, dtype=torch.float32)) + self._init_weights() + + def _init_weights(self): + if self.tie_embeddings: + nn.init.normal_(self.tok_emb.weight, mean=0.0, std=self.tied_embed_init_std) + n = self.num_layers + proj_scale = 1.0 / math.sqrt(2 * n) + for i in range(n): + nn.init.orthogonal_(self.qo_bank.data[i], gain=1.0) + nn.init.zeros_(self.qo_bank.data[n + i]) + self.qo_bank.data[n + i].mul_(proj_scale) + nn.init.orthogonal_(self.kv_bank.data[i], gain=1.0) + nn.init.orthogonal_(self.kv_bank.data[n + i], gain=1.0) + for i in range(n): + nn.init.orthogonal_(self.mlp_up_bank.data[i], gain=1.0) + nn.init.zeros_(self.mlp_down_bank.data[i]) + self.mlp_down_bank.data[i].mul_(proj_scale) + for name, module in self.named_modules(): + if isinstance(module, nn.Linear): + if getattr(module, "_zero_init", False): + nn.init.zeros_(module.weight) + elif ( + module.weight.ndim == 2 + and module.weight.shape[0] >= 64 + and module.weight.shape[1] >= 64 + ): + nn.init.orthogonal_(module.weight, gain=1.0) + + def _bank_weights(self, i): + n = self.num_layers + return ( + self.qo_bank[i], + self.kv_bank[i], + self.kv_bank[n + i], + self.qo_bank[n + i], + self.mlp_up_bank[i], + self.mlp_down_bank[i], + ) + + def _parallel_block( + self, block_idx, lane0, lane1, x0, + q_w, k_w, v_w, out_w, up_w, down_w, + cu_seqlens=None, max_seqlen=0, + ): + block = self.blocks[block_idx] + mix = block.resid_mix.to(dtype=lane0.dtype) + attn_read = mix[0][None, None, :] * lane0 + mix[1][None, None, :] * x0 + attn_out = block.attn( + block.attn_norm(attn_read) * block.ln_scale_factor, + q_w, k_w, v_w, out_w, + cu_seqlens=cu_seqlens, max_seqlen=max_seqlen, + ) + attn_out = block.attn_scale.to(dtype=attn_out.dtype)[None, None, :] * attn_out + mlp_read = lane1 + mlp_out = block.mlp_scale.to(dtype=lane1.dtype)[None, None, :] * block.mlp( + block.mlp_norm(mlp_read) * block.ln_scale_factor, up_w, down_w + ) + attn_resid = self.parallel_resid_lambdas[block_idx, 0].to(dtype=lane0.dtype) + attn_post = self.parallel_post_lambdas[block_idx, 0].to(dtype=lane0.dtype) + mlp_resid = self.parallel_resid_lambdas[block_idx, 1].to(dtype=lane0.dtype) + mlp_post = self.parallel_post_lambdas[block_idx, 1].to(dtype=lane0.dtype) + lane0 = attn_resid * lane0 + attn_post[0] * attn_out + mlp_post[0] * mlp_out + lane1 = mlp_resid * lane1 + attn_post[1] * attn_out + mlp_post[1] * mlp_out + return lane0, lane1 + + def _final_parallel_hidden(self, lane0, lane1): + if self.parallel_final_lane == "mlp": + return lane1 + if self.parallel_final_lane == "attn": + return lane0 + return 0.5 * (lane0 + lane1) + + def _forward_hidden(self, input_ids, cu_seqlens=None, max_seqlen=0): + """Run the encoder/decoder stack to the final RMSNorm; returns pre-projection hidden. + Shared by eval (softcap+projection via forward_logits) and train (fused CE path).""" + x = self.tok_emb(input_ids) + # SmearGate (PR #1667). Inline gate compute with .contiguous() on the slice fed + # to the projection so torch.compile fullgraph is happy. lam=0 + W=0 -> identity + # at init. This block runs unconditionally on the smear path; the cat keeps + # position 0 untouched so causality holds. 
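+        # Worked example of the BOS mask (sketch; BOS id is 1 here): for
+        # input_ids = [1, a, b, 1, c], input_ids[:, 1:] == 1 yields
+        # [F, F, T, F], so the smear x_t + g_t * x_{t-1} is zeroed exactly at
+        # the second BOS -- b, the last token of document N, never leaks into
+        # document N+1 -- while every other position smears as usual.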
+ if self.smear_gate_enabled: + sl = self.smear_lambda.to(dtype=x.dtype) + gate_in = x[:, 1:, : self.smear_window].contiguous() + g = sl * torch.sigmoid(self.smear_gate(gate_in)) + bos_mask = (input_ids[:, 1:] == 1).unsqueeze(-1) + g = g.masked_fill(bos_mask, 0.0) + x = torch.cat([x[:, :1], x[:, 1:] + g * x[:, :-1]], dim=1) + x = F.rms_norm(x, (x.size(-1),)) + x0 = x + skips = [] + enc_iter = ( + self.encoder_indices + if self.looping_active + else range(self.num_encoder_layers) + ) + dec_iter = ( + self.decoder_indices + if self.looping_active + else range( + self.num_encoder_layers, + self.num_encoder_layers + self.num_decoder_layers, + ) + ) + for i in enc_iter: + q_w, k_w, v_w, out_w, up_w, down_w = self._bank_weights(i) + x = self.blocks[i](x, x0, q_w, k_w, v_w, out_w, up_w, down_w, cu_seqlens=cu_seqlens, max_seqlen=max_seqlen) + skips.append(x) + psl = self.parallel_start_layer + lane0 = None + lane1 = None + for skip_idx, i in enumerate(dec_iter): + q_w, k_w, v_w, out_w, up_w, down_w = self._bank_weights(i) + if i >= psl and psl > 0: + if lane0 is None: + lane0 = x + lane1 = x + if skip_idx < self.num_skip_weights and skips: + skip = skips.pop() + w = self.skip_weights[skip_idx].to(dtype=lane0.dtype)[None, None, :] + if self.skip_gates is not None: + g = torch.sigmoid(self.skip_gates[skip_idx].to(dtype=lane0.dtype))[None, None, :] + lane0 = torch.lerp(w * skip, lane0, g) + else: + lane0 = lane0 + w * skip + lane0, lane1 = self._parallel_block( + i, lane0, lane1, x0, q_w, k_w, v_w, out_w, up_w, down_w, + cu_seqlens=cu_seqlens, max_seqlen=max_seqlen, + ) + else: + if skip_idx < self.num_skip_weights and skips: + scaled_skip = ( + self.skip_weights[skip_idx].to(dtype=x.dtype)[None, None, :] + * skips.pop() + ) + if self.skip_gates is not None: + g = torch.sigmoid(self.skip_gates[skip_idx].to(dtype=x.dtype))[None, None, :] + x = torch.lerp(scaled_skip, x, g) + else: + x = x + scaled_skip + x = self.blocks[i](x, x0, q_w, k_w, v_w, out_w, up_w, down_w, cu_seqlens=cu_seqlens, max_seqlen=max_seqlen) + if lane0 is not None: + x = self._final_parallel_hidden(lane0, lane1) + x = self.final_norm(x) + return x + + def _project_logits(self, hidden): + if self.tie_embeddings: + return F.linear(hidden, self.tok_emb.weight) + return self.lm_head(hidden) + + def forward_logits(self, input_ids, cu_seqlens=None, max_seqlen=0): + hidden = self._forward_hidden(input_ids, cu_seqlens=cu_seqlens, max_seqlen=max_seqlen) + logits_proj = self._project_logits(hidden) + return self.logit_softcap * torch.tanh(logits_proj / self.logit_softcap) + + def forward(self, input_ids, target_ids, cu_seqlens=None, max_seqlen=0): + hidden = self._forward_hidden(input_ids, cu_seqlens=cu_seqlens, max_seqlen=max_seqlen) + logits_proj = self._project_logits(hidden) + flat_targets = target_ids.reshape(-1) + # Fused softcapped-CE kernel (training path only). Applies softcap inside the + # Triton kernel; takes pre-softcap logits_proj. Non-fused path matches stock + # PR-1736 numerics exactly (softcap in fp32, then F.cross_entropy on fp32). 
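+        # Softcap math the fused kernel folds in: softcap(z) = cap * tanh(z / cap),
+        # with d softcap / dz = 1 - tanh(z / cap)^2, so the backward needs only the
+        # capped logits. A further (unmeasured) assumption for why fusion helps:
+        # it avoids materializing the full fp32 [N, vocab] logits tensor that the
+        # eager fallback below creates via .float().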
+ if self.fused_ce_enabled: + return softcapped_cross_entropy( + logits_proj.reshape(-1, logits_proj.size(-1)), + flat_targets, + self.logit_softcap, + reduction="mean", + ) + logits = self.logit_softcap * torch.tanh(logits_proj / self.logit_softcap) + return F.cross_entropy( + logits.reshape(-1, logits.size(-1)).float(), + flat_targets, + reduction="mean", + ) + + def forward_ttt(self, input_ids, target_ids, lora): + x = self.tok_emb(input_ids) + # SmearGate on the TTT path — same inline compute as forward_logits. + if self.smear_gate_enabled: + sl = self.smear_lambda.to(dtype=x.dtype) + gate_in = x[:, 1:, : self.smear_window].contiguous() + g = sl * torch.sigmoid(self.smear_gate(gate_in)) + bos_mask = (input_ids[:, 1:] == 1).unsqueeze(-1) + g = g.masked_fill(bos_mask, 0.0) + x = torch.cat([x[:, :1], x[:, 1:] + g * x[:, :-1]], dim=1) + x = F.rms_norm(x, (x.size(-1),)) + x0 = x + skips = [] + enc_iter = ( + self.encoder_indices + if self.looping_active + else list(range(self.num_encoder_layers)) + ) + dec_iter = ( + self.decoder_indices + if self.looping_active + else list( + range( + self.num_encoder_layers, + self.num_encoder_layers + self.num_decoder_layers, + ) + ) + ) + slot = 0 + for i in enc_iter: + q_w, k_w, v_w, out_w, up_w, down_w = self._bank_weights(i) + x = self._block_with_lora(self.blocks[i], x, x0, lora, slot, q_w, k_w, v_w, out_w, up_w, down_w) + slot += 1 + skips.append(x) + psl = self.parallel_start_layer + lane0 = None + lane1 = None + for skip_idx, i in enumerate(dec_iter): + q_w, k_w, v_w, out_w, up_w, down_w = self._bank_weights(i) + if i >= psl and psl > 0: + if lane0 is None: + lane0 = x + lane1 = x + if skip_idx < self.num_skip_weights and skips: + skip = skips.pop() + w = self.skip_weights[skip_idx].to(dtype=lane0.dtype)[None, None, :] + if self.skip_gates is not None: + g = torch.sigmoid(self.skip_gates[skip_idx].to(dtype=lane0.dtype))[None, None, :] + lane0 = torch.lerp(w * skip, lane0, g) + else: + lane0 = lane0 + w * skip + lane0, lane1 = self._parallel_block_with_lora( + i, lane0, lane1, x0, lora, slot, + q_w, k_w, v_w, out_w, up_w, down_w, + ) + else: + if skip_idx < self.num_skip_weights and skips: + scaled_skip = ( + self.skip_weights[skip_idx].to(dtype=x.dtype)[None, None, :] + * skips.pop() + ) + if self.skip_gates is not None: + g = torch.sigmoid(self.skip_gates[skip_idx].to(dtype=x.dtype))[None, None, :] + x = torch.lerp(scaled_skip, x, g) + else: + x = x + scaled_skip + x = self._block_with_lora(self.blocks[i], x, x0, lora, slot, q_w, k_w, v_w, out_w, up_w, down_w) + slot += 1 + if lane0 is not None: + x = self._final_parallel_hidden(lane0, lane1) + x = self.final_norm(x) + if self.tie_embeddings: + logits = F.linear(x, self.tok_emb.weight) + else: + logits = self.lm_head(x) + logits = logits + lora.lm_head_lora(x) + logits = self.logit_softcap * torch.tanh(logits / self.logit_softcap) + bsz, sl, V = logits.shape + return F.cross_entropy( + logits.float().reshape(-1, V), target_ids.reshape(-1), reduction="none" + ).reshape(bsz, sl) + + def _block_with_lora(self, block, x, x0, lora, slot, q_w, k_w, v_w, out_w, up_w, down_w): + mix = block.resid_mix.to(dtype=x.dtype) + x_in = mix[0][None, None, :] * x + mix[1][None, None, :] * x0 + n = block.attn_norm(x_in) * block.ln_scale_factor + attn = block.attn + bsz, seqlen, dim = n.shape + # Keep raw Q for AttnOutGate src='q' (matches forward path semantics). 
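+        # LoRA sketch: every adapted projection below computes
+        #   W x + (alpha / rank) * B_b (A_b x)
+        # with per-document factors A_b: [rank, in], B_b: [out, rank]
+        # (see BatchedLinearLoRA.forward); q/v adapters are always present,
+        # while k/o/mlp adapters apply only when their *_loras list is not None.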
+ q_raw = F.linear(n, q_w.to(n.dtype)) + lora.q_loras[slot](n) + q = q_raw.reshape(bsz, seqlen, attn.num_heads, attn.head_dim) + k = F.linear(n, k_w.to(n.dtype)) + if lora.k_loras is not None: + k = k + lora.k_loras[slot](n) + k = k.reshape(bsz, seqlen, attn.num_kv_heads, attn.head_dim) + v = (F.linear(n, v_w.to(n.dtype)) + lora.v_loras[slot](n)).reshape( + bsz, seqlen, attn.num_kv_heads, attn.head_dim + ) + q = F.rms_norm(q, (q.size(-1),)) + k = F.rms_norm(k, (k.size(-1),)) + cos, sin = attn.rotary(seqlen, n.device, q.dtype) + q = apply_rotary_emb(q, cos, sin, attn.rope_dims) + k = apply_rotary_emb(k, cos, sin, attn.rope_dims) + q = q * attn.q_gain.to(dtype=q.dtype)[None, None, :, None] + y = flash_attn_3_func(q, k, v, causal=True) + if attn.use_xsa: + y = attn._xsa_efficient(y, v) + # AttnOutGate (TTT path) — inline + .contiguous() barrier, same as the eval path. + if attn.attn_out_gate: + gate_src = q_raw if attn.attn_out_gate_src == "q" else n + gate_in = gate_src[..., : attn.gate_window].contiguous() + g = 2.0 * torch.sigmoid(attn.attn_gate_proj(gate_in)) + y = y * g[..., None] + # Gated Attention (TTT path). Gate input is n (post-norm block input), same + # as eval path. .to(n.dtype) on fp32 param before bf16 broadcast. + if attn.gated_attn: + n_c = n.contiguous() + g = torch.sigmoid(F.linear(n_c, attn.attn_gate_w.to(n.dtype))) + y = y * g[..., None] + # Sparse attention head-output gate (TTT path) — must match the eval path in + # forward() exactly, else training (which applied the gate) and TTT eval (which + # skipped it) produce mismatched representations and catastrophic BPB regression. + if attn.sparse_attn_gate: + gate_in = n[..., : attn.gate_window].contiguous() + g = torch.sigmoid( + attn.sparse_attn_gate_scale + * F.linear(gate_in, attn.attn_gate_w.to(n.dtype)) + ) + y = y * g[..., None] + y = y.reshape(bsz, seqlen, dim) + attn_out = F.linear(y, out_w.to(n.dtype)) + if lora.o_loras is not None: + attn_out = attn_out + lora.o_loras[slot](n) + x_out = x_in + block.attn_scale.to(dtype=x_in.dtype)[None, None, :] * attn_out + mlp_n = block.mlp_norm(x_out) * block.ln_scale_factor + mlp_out = block.mlp(mlp_n, up_w, down_w) + if lora.mlp_loras is not None: + mlp_out = mlp_out + lora.mlp_loras[slot](mlp_n) + x_out = x_out + block.mlp_scale.to(dtype=x_out.dtype)[None, None, :] * mlp_out + return x_out + + def _parallel_block_with_lora( + self, block_idx, lane0, lane1, x0, lora, slot, + q_w, k_w, v_w, out_w, up_w, down_w, + ): + block = self.blocks[block_idx] + mix = block.resid_mix.to(dtype=lane0.dtype) + attn_read = mix[0][None, None, :] * lane0 + mix[1][None, None, :] * x0 + n = block.attn_norm(attn_read) * block.ln_scale_factor + attn = block.attn + bsz, seqlen, dim = n.shape + q_raw = F.linear(n, q_w.to(n.dtype)) + lora.q_loras[slot](n) + q = q_raw.reshape(bsz, seqlen, attn.num_heads, attn.head_dim) + k = F.linear(n, k_w.to(n.dtype)) + if lora.k_loras is not None: + k = k + lora.k_loras[slot](n) + k = k.reshape(bsz, seqlen, attn.num_kv_heads, attn.head_dim) + v = (F.linear(n, v_w.to(n.dtype)) + lora.v_loras[slot](n)).reshape( + bsz, seqlen, attn.num_kv_heads, attn.head_dim + ) + q = F.rms_norm(q, (q.size(-1),)) + k = F.rms_norm(k, (k.size(-1),)) + cos, sin = attn.rotary(seqlen, n.device, q.dtype) + q = apply_rotary_emb(q, cos, sin, attn.rope_dims) + k = apply_rotary_emb(k, cos, sin, attn.rope_dims) + q = q * attn.q_gain.to(dtype=q.dtype)[None, None, :, None] + y = flash_attn_3_func(q, k, v, causal=True) + if attn.use_xsa: + y = attn._xsa_efficient(y, v) + # AttnOutGate (TTT 
parallel path) — inline + .contiguous() barrier. + if attn.attn_out_gate: + gate_src = q_raw if attn.attn_out_gate_src == "q" else n + gate_in = gate_src[..., : attn.gate_window].contiguous() + g = 2.0 * torch.sigmoid(attn.attn_gate_proj(gate_in)) + y = y * g[..., None] + # Gated Attention (TTT parallel path). Gate input is n (post-norm block input). + if attn.gated_attn: + n_c = n.contiguous() + g = torch.sigmoid(F.linear(n_c, attn.attn_gate_w.to(n.dtype))) + y = y * g[..., None] + # Sparse attention head-output gate (TTT parallel path) — must match the + # eval path in forward() to keep train/eval semantics in sync. + if attn.sparse_attn_gate: + gate_in = n[..., : attn.gate_window].contiguous() + g = torch.sigmoid( + attn.sparse_attn_gate_scale + * F.linear(gate_in, attn.attn_gate_w.to(n.dtype)) + ) + y = y * g[..., None] + y = y.reshape(bsz, seqlen, dim) + attn_out = F.linear(y, out_w.to(n.dtype)) + if lora.o_loras is not None: + attn_out = attn_out + lora.o_loras[slot](n) + attn_out = block.attn_scale.to(dtype=attn_out.dtype)[None, None, :] * attn_out + mlp_read = lane1 + mlp_n = block.mlp_norm(mlp_read) * block.ln_scale_factor + mlp_out = block.mlp(mlp_n, up_w, down_w) + if lora.mlp_loras is not None: + mlp_out = mlp_out + lora.mlp_loras[slot](mlp_n) + mlp_out = block.mlp_scale.to(dtype=lane1.dtype)[None, None, :] * mlp_out + attn_resid = self.parallel_resid_lambdas[block_idx, 0].to(dtype=lane0.dtype) + attn_post = self.parallel_post_lambdas[block_idx, 0].to(dtype=lane0.dtype) + mlp_resid = self.parallel_resid_lambdas[block_idx, 1].to(dtype=lane0.dtype) + mlp_post = self.parallel_post_lambdas[block_idx, 1].to(dtype=lane0.dtype) + lane0 = attn_resid * lane0 + attn_post[0] * attn_out + mlp_post[0] * mlp_out + lane1 = mlp_resid * lane1 + attn_post[1] * attn_out + mlp_post[1] * mlp_out + return lane0, lane1 + + +class BatchedLinearLoRA(nn.Module): + # PR-1767: rank-scaled output (alpha/rank), like standard LoRA. Decouples + # effective magnitude from rank so changing rank does not change LR scale. + _ALPHA = float(os.environ.get("TTT_LORA_ALPHA", "144")) + # PR-1767: optionally keep A warm across per-doc resets (only B is zeroed). + # Accumulates useful feature directions across documents within a TTT phase. 
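+    # Example (sketch): with the default TTT_LORA_ALPHA=144, rank 4 yields an
+    # output scale of 144 / 4 = 36 and rank 8 yields 18, so the B(Ax) update
+    # keeps a comparable magnitude across ranks and the TTT LR can stay fixed.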
+ _WARM_START_A = bool(int(os.environ.get("TTT_WARM_START_A", "1"))) + + def __init__(self, bsz, in_features, out_features, rank): + super().__init__() + self._bound = 1.0 / math.sqrt(in_features) + self._scale = self._ALPHA / rank + self.A = nn.Parameter( + torch.empty(bsz, rank, in_features).uniform_(-self._bound, self._bound) + ) + self.B = nn.Parameter(torch.zeros(bsz, out_features, rank)) + + def reset(self): + with torch.no_grad(): + if not self._WARM_START_A: + self.A.uniform_(-self._bound, self._bound) + self.B.zero_() + + def forward(self, x): + return ((x @ self.A.transpose(1, 2)) @ self.B.transpose(1, 2)) * self._scale + + +class BatchedTTTLoRA(nn.Module): + def __init__(self, bsz, model, rank, k_lora=True, mlp_lora=True, o_lora=True): + super().__init__() + self.bsz = bsz + dim = model.qo_bank.shape[-1] + vocab = model.tok_emb.num_embeddings + if getattr(model, "looping_active", False): + num_slots = len(model.encoder_indices) + len(model.decoder_indices) + else: + num_slots = len(model.blocks) + kv_dim = model.blocks[0].attn.num_kv_heads * ( + dim // model.blocks[0].attn.num_heads + ) + embed_dim = model.tok_emb.embedding_dim + self.lm_head_lora = BatchedLinearLoRA(bsz, embed_dim, vocab, rank) + self.q_loras = nn.ModuleList( + [BatchedLinearLoRA(bsz, dim, dim, rank) for _ in range(num_slots)] + ) + self.v_loras = nn.ModuleList( + [BatchedLinearLoRA(bsz, dim, kv_dim, rank) for _ in range(num_slots)] + ) + self.k_loras = ( + nn.ModuleList( + [BatchedLinearLoRA(bsz, dim, kv_dim, rank) for _ in range(num_slots)] + ) + if k_lora + else None + ) + self.mlp_loras = ( + nn.ModuleList( + [BatchedLinearLoRA(bsz, dim, dim, rank) for _ in range(num_slots)] + ) + if mlp_lora + else None + ) + self.o_loras = ( + nn.ModuleList( + [BatchedLinearLoRA(bsz, dim, dim, rank) for _ in range(num_slots)] + ) + if o_lora + else None + ) + + def reset(self): + with torch.no_grad(): + self.lm_head_lora.reset() + for loras in [self.q_loras, self.v_loras, self.k_loras, + self.mlp_loras, self.o_loras]: + if loras is not None: + for lora in loras: + lora.reset() + + +# Polar Express per-iteration minimax Newton-Schulz coefficients (PR #1344). +# Replaces the fixed (3.4445, -4.775, 2.0315) coefficients of stock Muon. +# Applied at backend_steps=5 — taking more than 5 iterations from this list +# falls back to the final (converged) tuple via the slice guard below. 
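+# More precisely: each (a, b, c) tuple drives one quintic Newton-Schulz step
+#   X <- a*X + b*(X @ X.T) @ X + c*(X @ X.T)^2 @ X
+# on the norm-scaled momentum, and the guard `_PE_COEFFS[:steps]` clamps the
+# iteration count at the five tuples below, so steps > 5 runs no extra passes.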
+_PE_COEFFS = ( + (8.156554524902461, -22.48329292557795, 15.878769915207462), + (4.042929935166739, -2.808917465908714, 0.5000178451051316), + (3.8916678022926607, -2.772484153217685, 0.5060648178503393), + (3.285753657755655, -2.3681294933425376, 0.46449024233003106), + (2.3465413258596377, -1.7097828382687081, 0.42323551169305323), +) + + +@torch.compile +def zeropower_via_newtonschulz5(G, steps=10, eps=1e-07): + was_2d = G.ndim == 2 + if was_2d: + G = G.unsqueeze(0) + X = G.bfloat16() + transposed = X.size(-2) > X.size(-1) + if transposed: + X = X.mT + X = X / (X.norm(dim=(-2, -1), keepdim=True) + eps) + coeffs = _PE_COEFFS[:steps] if steps <= len(_PE_COEFFS) else _PE_COEFFS + for a, b, c in coeffs: + A = X @ X.mT + B = b * A + c * (A @ A) + X = a * X + B @ X + if transposed: + X = X.mT + if was_2d: + X = X.squeeze(0) + return X + + +class Muon(torch.optim.Optimizer): + def __init__( + self, + params, + lr, + momentum, + backend_steps, + nesterov=True, + weight_decay=0.0, + row_normalize=False, + ): + super().__init__( + params, + dict( + lr=lr, + momentum=momentum, + backend_steps=backend_steps, + nesterov=nesterov, + weight_decay=weight_decay, + row_normalize=row_normalize, + ), + ) + self._built = False + + def _build(self): + self._distributed = dist.is_available() and dist.is_initialized() + self._world_size = dist.get_world_size() if self._distributed else 1 + self._rank = dist.get_rank() if self._distributed else 0 + ws = self._world_size + self._bank_meta = [] + for group in self.param_groups: + for p in group["params"]: + B = p.shape[0] + padded_B = ((B + ws - 1) // ws) * ws + shard_B = padded_B // ws + tail = p.shape[1:] + dev = p.device + self._bank_meta.append({ + "p": p, + "B": B, + "padded_grad": torch.zeros(padded_B, *tail, device=dev, dtype=torch.bfloat16), + "shard": torch.zeros(shard_B, *tail, device=dev, dtype=torch.bfloat16), + "shard_mom": torch.zeros(shard_B, *tail, device=dev, dtype=torch.bfloat16), + "full_update": torch.zeros(padded_B, *tail, device=dev, dtype=torch.bfloat16), + "scale": max(1, p.shape[-2] / p.shape[-1]) ** 0.5, + }) + self._bank_meta.sort(key=lambda m: -m["p"].numel()) + self._built = True + + def launch_reduce_scatters(self): + if not self._built: + self._build() + if not self._distributed: + return + self._rs_futures = [] + for m in self._bank_meta: + p = m["p"] + if p.grad is None: + self._rs_futures.append(None) + continue + pg = m["padded_grad"] + pg[: m["B"]].copy_(p.grad.bfloat16()) + if pg.shape[0] > m["B"]: + pg[m["B"] :].zero_() + fut = dist.reduce_scatter_tensor( + m["shard"], pg, op=dist.ReduceOp.AVG, async_op=True + ) + self._rs_futures.append(fut) + + @torch.no_grad() + def step(self, closure=None): + loss = None + if closure is not None: + with torch.enable_grad(): + loss = closure() + if not self._built: + self._build() + for group in self.param_groups: + lr = group["lr"] + momentum = group["momentum"] + backend_steps = group["backend_steps"] + nesterov = group["nesterov"] + wd = group.get("weight_decay", 0.0) + row_normalize = group.get("row_normalize", False) + prev_ag_handle = None + prev_m = None + sharded = self._distributed and hasattr(self, "_rs_futures") + for idx, m in enumerate(self._bank_meta): + p = m["p"] + if p.grad is None: + continue + if prev_ag_handle is not None: + prev_ag_handle.wait() + pp = prev_m["p"] + upd = prev_m["full_update"][: prev_m["B"]] + if wd > 0.0: + pp.data.mul_(1.0 - lr * wd) + pp.add_(upd.to(dtype=pp.dtype), alpha=-lr * prev_m["scale"]) + if sharded and self._rs_futures[idx] is not None: + 
self._rs_futures[idx].wait() + g = m["shard"] + buf = m["shard_mom"] + else: + g = p.grad.bfloat16() + state = self.state[p] + if "momentum_buffer" not in state: + state["momentum_buffer"] = torch.zeros_like(g) + buf = state["momentum_buffer"] + buf.mul_(momentum).add_(g) + if nesterov: + update = g.add(buf, alpha=momentum) + else: + update = buf + if row_normalize: + rn = update.float().norm(dim=-1, keepdim=True).clamp_min(1e-07) + update = update / rn.to(update.dtype) + update = zeropower_via_newtonschulz5(update, steps=backend_steps) + if sharded: + prev_ag_handle = dist.all_gather_into_tensor( + m["full_update"], update, async_op=True + ) + prev_m = m + else: + if wd > 0.0: + p.data.mul_(1.0 - lr * wd) + p.add_(update.to(dtype=p.dtype), alpha=-lr * m["scale"]) + if prev_ag_handle is not None: + prev_ag_handle.wait() + pp = prev_m["p"] + upd = prev_m["full_update"][: prev_m["B"]] + if wd > 0.0: + pp.data.mul_(1.0 - lr * wd) + pp.add_(upd.to(dtype=pp.dtype), alpha=-lr * prev_m["scale"]) + if hasattr(self, "_rs_futures"): + del self._rs_futures + return loss + + +CONTROL_TENSOR_NAME_PATTERNS = tuple( + pattern + for pattern in os.environ.get( + "CONTROL_TENSOR_NAME_PATTERNS", + "attn_scale,attn_scales,mlp_scale,mlp_scales,resid_mix,resid_mixes,q_gain,skip_weight,skip_weights,skip_gates,parallel_post_lambdas,parallel_resid_lambdas,attn_gate_proj,attn_gate_w,smear_gate,smear_lambda", + ).split(",") + if pattern +) + + +PACKED_REPLICATED_GRAD_MAX_NUMEL = 1 << 15 + + +class Optimizers: + def __init__(self, h, base_model): + matrix_params = [ + base_model.qo_bank, + base_model.kv_bank, + base_model.mlp_up_bank, + base_model.mlp_down_bank, + ] + block_named_params = list(base_model.blocks.named_parameters()) + scalar_params = [ + p + for (name, p) in block_named_params + if p.ndim < 2 + or any(pattern in name for pattern in CONTROL_TENSOR_NAME_PATTERNS) + ] + if base_model.skip_weights.numel() > 0: + scalar_params.append(base_model.skip_weights) + if base_model.skip_gates is not None and base_model.skip_gates.numel() > 0: + scalar_params.append(base_model.skip_gates) + if base_model.parallel_post_lambdas is not None: + scalar_params.append(base_model.parallel_post_lambdas) + if base_model.parallel_resid_lambdas is not None: + scalar_params.append(base_model.parallel_resid_lambdas) + # SmearGate params live on GPT root (not in .blocks), so add them by hand. + # Both are tiny (gate_window scalars + 1 lambda). Optimized via scalar Adam. 
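+        # Their names also match CONTROL_TENSOR_NAME_PATTERNS ("smear_gate",
+        # "smear_lambda"), so restore_fp32_params re-floats them after the bf16
+        # cast; the manual append is needed only because the pattern scan above
+        # walks base_model.blocks, which excludes root-level parameters.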
+ if getattr(base_model, "smear_gate_enabled", False): + scalar_params.append(base_model.smear_gate.weight) + scalar_params.append(base_model.smear_lambda) + token_lr = h.tied_embed_lr if h.tie_embeddings else h.embed_lr + tok_params = [ + {"params": [base_model.tok_emb.weight], "lr": token_lr, "base_lr": token_lr} + ] + self.optimizer_tok = torch.optim.AdamW( + tok_params, + betas=(h.beta1, h.beta2), + eps=h.adam_eps, + weight_decay=h.embed_wd, + fused=True, + ) + self.optimizer_muon = Muon( + matrix_params, + lr=h.matrix_lr, + momentum=h.muon_momentum, + backend_steps=h.muon_backend_steps, + weight_decay=h.muon_wd, + row_normalize=h.muon_row_normalize, + ) + for group in self.optimizer_muon.param_groups: + group["base_lr"] = h.matrix_lr + self.optimizer_scalar = torch.optim.AdamW( + [{"params": scalar_params, "lr": h.scalar_lr, "base_lr": h.scalar_lr}], + betas=(h.beta1, h.beta2), + eps=h.adam_eps, + weight_decay=h.adam_wd, + fused=True, + ) + self.optimizers = [ + self.optimizer_tok, + self.optimizer_muon, + self.optimizer_scalar, + ] + self.replicated_params = list(tok_params[0]["params"]) + self.replicated_params.extend(scalar_params) + self.replicated_large_params = [] + self.replicated_packed_params = [] + for p in self.replicated_params: + if p.numel() <= PACKED_REPLICATED_GRAD_MAX_NUMEL: + self.replicated_packed_params.append(p) + else: + self.replicated_large_params.append(p) + + def __iter__(self): + return iter(self.optimizers) + + def zero_grad_all(self): + for opt in self.optimizers: + opt.zero_grad(set_to_none=True) + + def _all_reduce_packed_grads(self): + grads_by_key = collections.defaultdict(list) + for p in self.replicated_packed_params: + if p.grad is not None: + grads_by_key[(p.grad.device, p.grad.dtype)].append(p.grad) + for grads in grads_by_key.values(): + flat = torch.empty( + sum(g.numel() for g in grads), + device=grads[0].device, + dtype=grads[0].dtype, + ) + offset = 0 + for g in grads: + n = g.numel() + flat[offset : offset + n].copy_(g.contiguous().view(-1)) + offset += n + dist.all_reduce(flat, op=dist.ReduceOp.AVG) + offset = 0 + for g in grads: + n = g.numel() + g.copy_(flat[offset : offset + n].view_as(g)) + offset += n + + def step(self, distributed=False): + self.optimizer_muon.launch_reduce_scatters() + if distributed: + reduce_handles = [ + dist.all_reduce(p.grad, op=dist.ReduceOp.AVG, async_op=True) + for p in self.replicated_large_params + if p.grad is not None + ] + self._all_reduce_packed_grads() + for handle in reduce_handles: + handle.wait() + self.optimizer_tok.step() + self.optimizer_scalar.step() + self.optimizer_muon.step() + self.zero_grad_all() + + +def restore_fp32_params(model): + for module in model.modules(): + if isinstance(module, CastedLinear): + module.float() + for name, param in model.named_parameters(): + if ( + param.ndim < 2 + or any(pattern in name for pattern in CONTROL_TENSOR_NAME_PATTERNS) + ) and param.dtype != torch.float32: + param.data = param.data.float() + if hasattr(model, "qo_bank") and model.qo_bank is not None: + model.qo_bank.data = model.qo_bank.data.float() + model.kv_bank.data = model.kv_bank.data.float() + model.mlp_up_bank.data = model.mlp_up_bank.data.float() + model.mlp_down_bank.data = model.mlp_down_bank.data.float() + + +def collect_hessians(model, train_loader, h, device, n_calibration_batches=64): + hessians = {} + hooks = [] + for i, block in enumerate(model.blocks): + block.attn._calib = True + block.mlp._calib = True + block.mlp.use_fused = False + + def make_attn_hook(layer_idx): + def 
hook_fn(module, inp, out): + x = inp[0].detach().float() + if x.ndim == 3: + x = x.reshape(-1, x.shape[-1]) + for suffix in ["c_q", "c_k", "c_v"]: + name = f"blocks.{layer_idx}.attn.{suffix}.weight" + if name not in hessians: + hessians[name] = torch.zeros( + x.shape[1], x.shape[1], dtype=torch.float32, device=device + ) + hessians[name].addmm_(x.T, x) + y = module._last_proj_input + if y is not None: + y = y.float() + if y.ndim == 3: + y = y.reshape(-1, y.shape[-1]) + name = f"blocks.{layer_idx}.attn.proj.weight" + if name not in hessians: + hessians[name] = torch.zeros( + y.shape[1], y.shape[1], dtype=torch.float32, device=device + ) + hessians[name].addmm_(y.T, y) + return hook_fn + + def make_mlp_hook(layer_idx): + def hook_fn(module, inp, out): + x = inp[0].detach().float() + if x.ndim == 3: + x = x.reshape(-1, x.shape[-1]) + name = f"blocks.{layer_idx}.mlp.fc.weight" + if name not in hessians: + hessians[name] = torch.zeros( + x.shape[1], x.shape[1], dtype=torch.float32, device=device + ) + hessians[name].addmm_(x.T, x) + h_act = module._last_down_input + if h_act is not None: + h_act = h_act.float() + if h_act.ndim == 3: + h_act = h_act.reshape(-1, h_act.shape[-1]) + name = f"blocks.{layer_idx}.mlp.proj.weight" + if name not in hessians: + hessians[name] = torch.zeros( + h_act.shape[1], h_act.shape[1], dtype=torch.float32, device=device + ) + hessians[name].addmm_(h_act.T, h_act) + return hook_fn + + for i, block in enumerate(model.blocks): + hooks.append(block.attn.register_forward_hook(make_attn_hook(i))) + hooks.append(block.mlp.register_forward_hook(make_mlp_hook(i))) + + # Hessian hooks for embedding factorization projection layers + def make_linear_input_hook(weight_name): + def hook_fn(module, inp, out): + x = inp[0].detach().float() + if x.ndim == 3: + x = x.reshape(-1, x.shape[-1]) + if weight_name not in hessians: + hessians[weight_name] = torch.zeros( + x.shape[1], x.shape[1], dtype=torch.float32, device=device + ) + hessians[weight_name].addmm_(x.T, x) + return hook_fn + + if model.tie_embeddings: + hook_module = model.final_norm + + def make_output_hook(name): + def hook_fn(module, inp, out): + x = out.detach().float() + if x.ndim == 3: + x = x.reshape(-1, x.shape[-1]) + if name not in hessians: + hessians[name] = torch.zeros( + x.shape[1], x.shape[1], dtype=torch.float32, device=device + ) + hessians[name].addmm_(x.T, x) + return hook_fn + + hooks.append( + hook_module.register_forward_hook(make_output_hook("tok_emb.weight")) + ) + model.eval() + with torch.no_grad(): + for _ in range(n_calibration_batches): + x, _ = train_loader.next_batch(h.train_batch_tokens, h.grad_accum_steps) + model.forward_logits(x) + for hook in hooks: + hook.remove() + for i, block in enumerate(model.blocks): + block.attn._calib = False + block.mlp._calib = False + block.mlp.use_fused = True + for name in hessians: + hessians[name] = hessians[name].cpu() / n_calibration_batches + return hessians + + +def gptq_quantize_weight(w, H, clip_sigmas=3.0, clip_range=63, block_size=128): + W_orig = w.float().clone() + rows, cols = W_orig.shape + H = H.float().clone() + dead = torch.diag(H) == 0 + H[dead, dead] = 1 + damp = 0.01 * H.diag().mean() + H.diagonal().add_(damp) + perm = torch.argsort(H.diag(), descending=True) + invperm = torch.argsort(perm) + W_perm = W_orig[:, perm].clone() + W_perm[:, dead[perm]] = 0 + H = H[perm][:, perm] + Hinv = torch.cholesky_inverse(torch.linalg.cholesky(H)) + Hinv = torch.linalg.cholesky(Hinv, upper=True) + row_std = W_orig.std(dim=1) + s = (clip_sigmas * row_std / 
clip_range).clamp_min(1e-10).to(torch.float16) + sf = s.float() + Q = torch.zeros(rows, cols, dtype=torch.int8) + W_work = W_perm.clone() + for i1 in range(0, cols, block_size): + i2 = min(i1 + block_size, cols) + W_block = W_work[:, i1:i2].clone() + Hinv_block = Hinv[i1:i2, i1:i2] + Err = torch.zeros(rows, i2 - i1) + for j in range(i2 - i1): + w_col = W_block[:, j] + d = Hinv_block[j, j] + q_col = torch.clamp(torch.round(w_col / sf), -clip_range, clip_range) + Q[:, i1 + j] = q_col.to(torch.int8) + err = (w_col - q_col.float() * sf) / d + Err[:, j] = err + W_block[:, j:] -= err.unsqueeze(1) * Hinv_block[j, j:].unsqueeze(0) + if i2 < cols: + W_work[:, i2:] -= Err @ Hinv[i1:i2, i2:] + return Q[:, invperm], s + + +def _quantize_gate_int8_row(w): + # Symmetric int8-per-row quantization for small gate tensors. w shape + # (R, C) -> (R,) scales in fp16, int8 values in [-127, 127]. Single scale + # per row keeps accuracy high while halving storage vs fp16. + W = w.float().contiguous() + row_max = W.abs().amax(dim=1).clamp_min(1e-10) + s = (row_max / 127.0).to(torch.float16) + sf = s.float().view(-1, 1) + q = torch.clamp(torch.round(W / sf), -127, 127).to(torch.int8) + return q, s + + +def _lqer_pack(A, B, bits): + rng = 2 ** (bits - 1) - 1 + sA = (A.abs().amax(dim=1).clamp_min(1e-10) / rng).to(torch.float16) + sB = (B.abs().amax(dim=1).clamp_min(1e-10) / rng).to(torch.float16) + qA = torch.clamp(torch.round(A / sA.float().view(-1, 1)), -rng, rng).to(torch.int8) + qB = torch.clamp(torch.round(B / sB.float().view(-1, 1)), -rng, rng).to(torch.int8) + return qA, sA, qB, sB + + +def _lqer_pack_asym(A, B, g=64): + # A: INT2 per-matrix scalar (signed [-2,1], scale = |A|max/1.5). + sA = (A.abs().amax().clamp_min(1e-10) / 1.5).to(torch.float16) + qA = torch.clamp(torch.round(A / sA.float()), -2, 1).to(torch.int8) + # B: INT4 groupwise g over flattened B (signed [-8,7], per-group scale). + Bf = B.reshape(-1, g) + Bmax = Bf.abs().amax(dim=-1, keepdim=True).clamp_min(1e-10) + sB = (Bmax / 7.5).to(torch.float16).reshape(-1) + qB = torch.clamp(torch.round(Bf / sB.float().reshape(-1, 1)), -8, 7).to( + torch.int8 + ).reshape(B.shape) + return qA, sA, qB, sB + + +def gptq_mixed_quantize(state_dict, hessians, h): + result = {} + meta = {} + quant_gate = bool(getattr(h, "gated_attn_quant_gate", False)) + lqer_on = bool(getattr(h, "lqer_enabled", False)) + lqer_cands = {} + for (name, tensor) in state_dict.items(): + t = tensor.detach().cpu().contiguous() + # Dedicated int8-per-row path for attn_gate_w (bypasses both GPTQ and + # fp16 passthrough). Applied BEFORE the numel<=65536 passthrough check + # so the gate tensor is routed here instead of to fp16. + if ( + quant_gate + and t.is_floating_point() + and t.ndim == 2 + and name.endswith(".attn_gate_w") + # Dense GatedAttn: (num_heads, dim) = (8, 512) = 4096. + # Sparse gate: (num_heads, gate_window) = (8, 12) = 96. + # Both need int8-per-row routing; the 1024 lower bound in stock + # PR-1736 presumed dense-only. Widen to catch both. + and 32 <= t.numel() <= 8192 + ): + gq, gs = _quantize_gate_int8_row(t) + result[name + ".gq"] = gq + result[name + ".gs"] = gs + meta[name] = "gate_int8_row" + continue + if not t.is_floating_point() or t.numel() <= 65536: + result[name] = t.to(torch.float16) if t.is_floating_point() else t + meta[name] = "passthrough (float16)" + continue + if "tok_emb" in name: + cs = h.embed_clip_sigmas + elif ".mlp." in name: + cs = h.mlp_clip_sigmas + elif ".attn." 
in name: + cs = h.attn_clip_sigmas + else: + cs = h.matrix_clip_sigmas + bits = h.embed_bits if "tok_emb" in name else h.matrix_bits + clip_range = 2 ** (bits - 1) - 1 + ret = gptq_quantize_weight( + t, hessians[name], clip_sigmas=cs, clip_range=clip_range + ) + q, s = ret + result[name + ".q"] = q + result[name + ".scale"] = s + meta[name] = f"gptq (int{bits})" + if lqer_on: + W_q = q.float() * s.float().view(-1, 1) + E = t.float() - W_q + lqer_cands[name] = (E, float(E.norm())) + if lqer_on and lqer_cands: + top = sorted(lqer_cands.items(), key=lambda kv: -kv[1][1])[: h.lqer_top_k] + asym_on = bool(getattr(h, "lqer_asym_enabled", False)) + asym_g = int(getattr(h, "lqer_asym_group", 64)) + for (name, (E, _)) in top: + U, S, Vh = torch.linalg.svd(E, full_matrices=False) + r = min(h.lqer_rank, S.numel()) + A = (U[:, :r] * S[:r]).contiguous() + B = Vh[:r, :].contiguous() + if asym_on and B.numel() % asym_g == 0: + qA, sA, qB, sB = _lqer_pack_asym(A, B, asym_g) + result[name + ".lqA_a"] = qA + result[name + ".lqAs_a"] = sA + result[name + ".lqB_a"] = qB + result[name + ".lqBs_a"] = sB + meta[name] = meta[name] + "+lqer_asym" + else: + qA, sA, qB, sB = _lqer_pack(A, B, h.lqer_factor_bits) + result[name + ".lqA"] = qA + result[name + ".lqAs"] = sA + result[name + ".lqB"] = qB + result[name + ".lqBs"] = sB + meta[name] = meta[name] + "+lqer" + categories = collections.defaultdict(set) + for (name, cat) in meta.items(): + short = re.sub("\\.\\d+$", "", re.sub("blocks\\.\\d+", "blocks", name)) + categories[cat].add(short) + log("Quantized weights:") + for cat in sorted(categories): + log(f" {cat}: {', '.join(sorted(categories[cat]))}") + return result, meta + +def dequantize_mixed(result, meta, template_sd): + out = {} + for (name, orig) in template_sd.items(): + info = meta.get(name) + if info is None: + continue + orig_dtype = orig.dtype + if "passthrough" in info: + t = result[name] + if t.dtype == torch.float16 and orig_dtype in ( + torch.float32, + torch.bfloat16, + ): + t = t.to(orig_dtype) + out[name] = t + continue + if info == "gate_int8_row": + gq = result[name + ".gq"] + gs = result[name + ".gs"] + out[name] = (gq.float() * gs.float().view(-1, 1)).to(orig_dtype) + continue + q, s = result[name + ".q"], result[name + ".scale"] + if s.ndim > 0: + W = q.float() * s.float().view(q.shape[0], *[1] * (q.ndim - 1)) + else: + W = q.float() * float(s.item()) + if "lqer_asym" in info: + qA_t = result[name + ".lqA_a"] + sA_t = result[name + ".lqAs_a"] + qB_t = result[name + ".lqB_a"] + sB_t = result[name + ".lqBs_a"] + qA = qA_t.float() * float(sA_t) + g_sz = qB_t.numel() // sB_t.numel() + qB = (qB_t.reshape(-1, g_sz).float() * sB_t.float().view(-1, 1)).reshape( + qB_t.shape + ) + W = W + qA @ qB + elif "lqer" in info: + qA = result[name + ".lqA"].float() * result[name + ".lqAs"].float().view(-1, 1) + qB = result[name + ".lqB"].float() * result[name + ".lqBs"].float().view(-1, 1) + W = W + qA @ qB + out[name] = W.to(orig_dtype) + return out + + +_BSHF_MAGIC = b"BSHF" + + +def _byte_shuffle(data, stride=2): + if stride <= 1 or len(data) < stride: + return data + src = np.frombuffer(data, dtype=np.uint8) + n = len(src) + out = np.empty(n, dtype=np.uint8) + dest_off = 0 + for pos in range(stride): + chunk = src[pos::stride] + out[dest_off : dest_off + len(chunk)] = chunk + dest_off += len(chunk) + return _BSHF_MAGIC + bytes([stride]) + out.tobytes() + + +def _byte_unshuffle(data): + if len(data) < 5 or data[:4] != _BSHF_MAGIC: + return data + stride = data[4] + if stride < 2: + return data[5:] + 
payload = np.frombuffer(data, dtype=np.uint8, offset=5) + n = len(payload) + out = np.empty(n, dtype=np.uint8) + src_off = 0 + for pos in range(stride): + chunk_len = n // stride + (1 if pos < n % stride else 0) + out[pos::stride][:chunk_len] = payload[src_off : src_off + chunk_len] + src_off += chunk_len + return out.tobytes() + + +def _compress(data, compressor): + data = _byte_shuffle(data) + if compressor == "lzma": + return lzma.compress(data, preset=6) + elif compressor == "brotli": + import brotli + + return brotli.compress(data, quality=11) + raise ValueError(f"Unknown compressor: {compressor!r}") + + +def _decompress(data, compressor): + if compressor == "lzma": + raw = lzma.decompress(data) + elif compressor == "brotli": + import brotli + + raw = brotli.decompress(data) + else: + raise ValueError(f"Unknown compressor: {compressor!r}") + raw = _byte_unshuffle(raw) + return raw + + +def _unbank_state_dict(state_dict, num_layers): + sd = {} + n = num_layers + for k, v in state_dict.items(): + t = v.detach().cpu() if v is not None else None + if k == "qo_bank": + for i in range(n): + sd[f"blocks.{i}.attn.c_q.weight"] = t[i] + sd[f"blocks.{i}.attn.proj.weight"] = t[n + i] + elif k == "kv_bank": + for i in range(n): + sd[f"blocks.{i}.attn.c_k.weight"] = t[i] + sd[f"blocks.{i}.attn.c_v.weight"] = t[n + i] + elif k == "mlp_up_bank": + for i in range(n): + sd[f"blocks.{i}.mlp.fc.weight"] = t[i] + elif k == "mlp_down_bank": + for i in range(n): + sd[f"blocks.{i}.mlp.proj.weight"] = t[i] + else: + if t is not None: + sd[k] = t + return sd + + +def _rebank_state_dict(flat_sd, num_layers, model_dim, kv_dim, hidden_dim): + sd = {} + n = num_layers + sd["qo_bank"] = torch.zeros(2 * n, model_dim, model_dim) + sd["kv_bank"] = torch.zeros(2 * n, kv_dim, model_dim) + for i in range(n): + sd["qo_bank"][i] = flat_sd[f"blocks.{i}.attn.c_q.weight"] + sd["qo_bank"][n + i] = flat_sd[f"blocks.{i}.attn.proj.weight"] + sd["kv_bank"][i] = flat_sd[f"blocks.{i}.attn.c_k.weight"] + sd["kv_bank"][n + i] = flat_sd[f"blocks.{i}.attn.c_v.weight"] + sd["mlp_up_bank"] = torch.zeros(n, hidden_dim, model_dim) + sd["mlp_down_bank"] = torch.zeros(n, model_dim, hidden_dim) + for i in range(n): + sd["mlp_up_bank"][i] = flat_sd[f"blocks.{i}.mlp.fc.weight"] + sd["mlp_down_bank"][i] = flat_sd[f"blocks.{i}.mlp.proj.weight"] + for k, v in flat_sd.items(): + if not ( + k.startswith("blocks.") + and any( + p in k + for p in [ + ".attn.c_q.", ".attn.c_k.", ".attn.c_v.", + ".attn.proj.", ".mlp.fc.", ".mlp.proj.", + ] + ) + ): + sd[k] = v + return sd + + + +def _compressed_code_size(code): + code_raw = code.encode("utf-8") + minified = subprocess.run( + ["pyminify", "--no-rename-locals", "--no-hoist-literals", "--remove-literal-statements", "-"], + input=code_raw, capture_output=True, check=True, + ).stdout + compressed = lzma.compress(minified) + encoded = base64.b85encode(compressed) + wrapper = b'import lzma as L,base64 as B\nexec(L.decompress(B.b85decode("' + encoded + b'")))\n' + return len(code_raw), len(wrapper) + + +def serialize(h, base_model, code): + code_bytes_uncompressed, code_bytes = _compressed_code_size(code) + if h.is_main_process: + torch.save(base_model.state_dict(), h.model_path) + model_bytes = os.path.getsize(h.model_path) + log(f"Serialized model: {model_bytes} bytes") + log(f"Code size (uncompressed): {code_bytes_uncompressed} bytes") + log(f"Code size (compressed): {code_bytes} bytes") + sd_cpu = _unbank_state_dict(base_model.state_dict(), h.num_layers) + device = torch.device("cuda", h.local_rank) + t0 = 
time.perf_counter() + calib_loader = ShuffledSequenceLoader(h, device) + log("GPTQ:collecting Hessians from calibration data...") + hessians = collect_hessians( + base_model, + calib_loader, + h, + device, + n_calibration_batches=h.gptq_calibration_batches, + ) + log(f"GPTQ:collected {len(hessians)} Hessians in {time.perf_counter()-t0:.1f}s") + quant_result, quant_meta = gptq_mixed_quantize(sd_cpu, hessians, h) + quant_buf = io.BytesIO() + torch.save({"w": quant_result, "m": quant_meta}, quant_buf) + quant_raw = quant_buf.getvalue() + quant_blob = _compress(quant_raw, h.compressor) + quant_file_bytes = len(quant_blob) + bytes_total = quant_file_bytes + code_bytes + if h.is_main_process: + with open(h.quantized_model_path, "wb") as f: + f.write(quant_blob) + log(f"Serialized model quantized+{h.compressor}: {quant_file_bytes} bytes") + log(f"Total submission size quantized+{h.compressor}: {bytes_total} bytes") + return bytes_total, quant_file_bytes + + +def deserialize(h, device): + eval_model = GPT(h).to(device).bfloat16() + restore_fp32_params(eval_model) + flat_template = _unbank_state_dict(eval_model.state_dict(), h.num_layers) + with open(h.quantized_model_path, "rb") as f: + quant_blob_disk = f.read() + quant_state = torch.load( + io.BytesIO(_decompress(quant_blob_disk, h.compressor)), map_location="cpu" + ) + deq_flat = dequantize_mixed(quant_state["w"], quant_state["m"], flat_template) + head_dim = h.model_dim // h.num_heads + kv_dim = h.num_kv_heads * head_dim + hidden_dim = int(h.mlp_mult * h.model_dim) + deq_state = _rebank_state_dict(deq_flat, h.num_layers, h.model_dim, kv_dim, hidden_dim) + eval_model.load_state_dict(deq_state, strict=True) + return eval_model + + +def _loss_bpb(loss_sum, token_count, byte_count): + val_loss = (loss_sum / token_count).item() + val_bpb = val_loss / math.log(2.0) * (token_count.item() / byte_count.item()) + return val_loss, val_bpb + + +def eval_val(h, device, val_data, model, forward_logits_fn=None): + seq_len = h.eval_seq_len + local_batch_tokens = h.val_batch_tokens // (h.world_size * h.grad_accum_steps) + if local_batch_tokens < seq_len: + raise ValueError( + f"VAL_BATCH_SIZE must provide at least one sequence per rank; got VAL_BATCH_SIZE={h.val_batch_tokens}, WORLD_SIZE={h.world_size}, GRAD_ACCUM_STEPS={h.grad_accum_steps}, seq_len={seq_len}" + ) + local_batch_seqs = local_batch_tokens // seq_len + total_seqs = (val_data.val_tokens.numel() - 1) // seq_len + seq_start = total_seqs * h.rank // h.world_size + seq_end = total_seqs * (h.rank + 1) // h.world_size + + # TODO: Don't truncate this. 
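+    # Effect of the truncation below: each rank's range is rounded down to a
+    # whole number of local batches, so up to local_batch_seqs - 1 trailing
+    # sequences per rank are skipped during eval.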
+ seq_end = seq_start + ((seq_end - seq_start) // local_batch_seqs) * local_batch_seqs + + val_loss_sum = torch.zeros((), device=device, dtype=torch.float64) + val_token_count = torch.zeros((), device=device, dtype=torch.float64) + val_byte_count = torch.zeros((), device=device, dtype=torch.float64) + run_forward_logits = ( + (model.module.forward_logits if hasattr(model, "module") else model.forward_logits) + if forward_logits_fn is None + else forward_logits_fn + ) + model.eval() + global BOS_ID + if BOS_ID is None: + BOS_ID = 1 + with torch.no_grad(): + for batch_seq_start in range(seq_start, seq_end, local_batch_seqs): + batch_seq_end = min(batch_seq_start + local_batch_seqs, seq_end) + raw_start = batch_seq_start * seq_len + raw_end = batch_seq_end * seq_len + 1 + local = val_data.val_tokens[raw_start:raw_end].to( + device=device, dtype=torch.int64, non_blocking=True + ) + x = local[:-1] + y = local[1:] + bos_pos = (x == BOS_ID).nonzero(as_tuple=True)[0].tolist() + cu_seqlens, max_seqlen = _build_cu_seqlens( + bos_pos, x.numel(), x.device, h.eval_seq_len, 64 + ) + with torch.autocast(device_type="cuda", dtype=torch.bfloat16, enabled=True): + logits = run_forward_logits( + x[None], cu_seqlens=cu_seqlens, max_seqlen=max_seqlen + ).detach() + per_token_loss = F.cross_entropy( + logits.reshape(-1, logits.size(-1)).float(), + y.reshape(-1), + reduction="none", + ) + val_loss_sum += per_token_loss.to(torch.float64).sum() + val_token_count += float(y.numel()) + prev_ids = x + tgt_ids = y + if val_data.caseops_enabled and val_data.val_bytes is not None: + # CaseOps: read per-token byte budget from sidecar at the same + # global positions as the target tokens y. raw_start/raw_end + # span [raw_start, raw_end), x = local[:-1], y = local[1:], + # so y is at sidecar positions [raw_start + 1, raw_end). 
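+                # Index sketch: with raw_start=0 and raw_end=9, x covers token
+                # positions 0..7, y covers 1..8, and val_bytes[1:9] lines up
+                # with y element-for-element.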
+ sidecar_slice = val_data.val_bytes[raw_start + 1 : raw_end].to( + device=device, dtype=torch.int32, non_blocking=True + ) + val_byte_count += sidecar_slice.to(torch.float64).sum() + else: + token_bytes = val_data.base_bytes_lut[tgt_ids].to(dtype=torch.int16) + token_bytes += ( + val_data.has_leading_space_lut[tgt_ids] + & ~val_data.is_boundary_token_lut[prev_ids] + ).to(dtype=torch.int16) + val_byte_count += token_bytes.to(torch.float64).sum() + if dist.is_available() and dist.is_initialized(): + dist.all_reduce(val_loss_sum, op=dist.ReduceOp.SUM) + dist.all_reduce(val_token_count, op=dist.ReduceOp.SUM) + dist.all_reduce(val_byte_count, op=dist.ReduceOp.SUM) + model.train() + return _loss_bpb(val_loss_sum, val_token_count, val_byte_count) + + +def _find_docs(all_tokens): + bos_positions = (all_tokens == BOS_ID).nonzero(as_tuple=True)[0].numpy() + docs = [] + for i in range(len(bos_positions)): + start = int(bos_positions[i]) + end = ( + int(bos_positions[i + 1]) + if i + 1 < len(bos_positions) + else all_tokens.numel() + ) + if i + 1 < len(bos_positions): + end += 1 + assert end - start >= 2 + docs.append((start, end - start)) + return docs + + +def _build_ttt_global_batches(doc_entries, h, ascending=False): + batch_size = h.ttt_batch_size + global_doc_entries = sorted(doc_entries, key=lambda x: x[1][1]) + global_batches = [ + global_doc_entries[i : i + batch_size] + for i in range(0, len(global_doc_entries), batch_size) + ] + indexed = list(enumerate(global_batches)) + if not ascending: + indexed.sort(key=lambda ib: -max(dl for _, (_, dl) in ib[1])) + return indexed + + +def _init_batch_counter(path): + with open(path, "wb") as f: + f.write((0).to_bytes(4, "little")) + + +def _claim_next_batch(counter_path, queue_len): + try: + with open(counter_path, "r+b") as f: + fcntl.flock(f, fcntl.LOCK_EX) + idx = int.from_bytes(f.read(4), "little") + f.seek(0) + f.write((idx + 1).to_bytes(4, "little")) + f.flush() + except FileNotFoundError: + return queue_len + return idx + + +def _compute_chunk_window(ci, pred_len, num_chunks, chunk_size, eval_seq_len): + chunk_end = pred_len if ci == num_chunks - 1 else (ci + 1) * chunk_size + win_start = max(0, chunk_end - eval_seq_len) + win_len = chunk_end - win_start + chunk_start = ci * chunk_size + chunk_offset = chunk_start - win_start + chunk_len = chunk_end - chunk_start + return win_start, win_len, chunk_offset, chunk_len + + +def _accumulate_bpb( + ptl, + x, + y, + chunk_offsets, + chunk_lens, + pos_idx, + base_bytes_lut, + has_leading_space_lut, + is_boundary_token_lut, + loss_sum, + byte_sum, + token_count, + y_bytes=None, +): + pos = pos_idx[: x.size(1)].unsqueeze(0) + mask = ( + (chunk_lens.unsqueeze(1) > 0) + & (pos >= chunk_offsets.unsqueeze(1)) + & (pos < (chunk_offsets + chunk_lens).unsqueeze(1)) + ) + mask_f64 = mask.to(torch.float64) + if y_bytes is not None: + tok_bytes = y_bytes.to(torch.float64) + else: + tok_bytes = base_bytes_lut[y].to(torch.float64) + tok_bytes += (has_leading_space_lut[y] & ~is_boundary_token_lut[x]).to( + torch.float64 + ) + loss_sum += (ptl.to(torch.float64) * mask_f64).sum() + byte_sum += (tok_bytes * mask_f64).sum() + token_count += chunk_lens.to(torch.float64).sum() + + +def _loss_bpb_from_sums(loss_sum, token_count, byte_sum): + val_loss = (loss_sum / token_count).item() + val_bpb = val_loss / math.log(2.0) * (token_count.item() / byte_sum.item()) + return val_loss, val_bpb + + +def _add_to_counter(path, delta): + try: + with open(path, "r+b") as f: + fcntl.flock(f, fcntl.LOCK_EX) + cur = 
int.from_bytes(f.read(8), "little", signed=True) + cur += int(delta) + f.seek(0) + f.write(int(cur).to_bytes(8, "little", signed=True)) + f.flush() + return cur + except FileNotFoundError: + return int(delta) + + +def _init_int64_counter(path): + with open(path, "wb") as f: + f.write((0).to_bytes(8, "little", signed=True)) + + +def _select_ttt_doc_entries(docs, h): + doc_entries = list(enumerate(docs)) + if h.val_doc_fraction < 1.0: + sample_n = max(1, int(round(len(docs) * h.val_doc_fraction))) + sampled_indices = sorted( + random.Random(h.seed).sample(range(len(docs)), sample_n) + ) + return [(i, docs[i]) for i in sampled_indices] + return doc_entries + + +def train_val_ttt_global_sgd_distributed(h, device, val_data, base_model, val_tokens, batch_seqs=None): + global BOS_ID + if BOS_ID is None: + BOS_ID = 1 + base_model.eval() + seq_len = h.eval_seq_len + total_tokens = val_tokens.numel() - 1 + ttt_chunk = h.global_ttt_chunk_tokens + batch_seqs = h.global_ttt_batch_seqs if batch_seqs is None else batch_seqs + num_chunks = (total_tokens + ttt_chunk - 1) // ttt_chunk + ttt_params = [p for p in base_model.parameters()] + for p in ttt_params: + p.requires_grad_(True) + optimizer = torch.optim.SGD( + ttt_params, lr=h.global_ttt_lr, momentum=h.global_ttt_momentum + ) + t_start = time.perf_counter() + for ci in range(num_chunks): + chunk_start = ci * ttt_chunk + chunk_end = min((ci + 1) * ttt_chunk, total_tokens) + is_last_chunk = ci == num_chunks - 1 + if is_last_chunk or h.global_ttt_epochs <= 0: + continue + base_model.train() + chunk_seqs = (chunk_end - chunk_start) // seq_len + if chunk_seqs <= 0: + continue + warmup_chunks = max(0, min(h.global_ttt_warmup_chunks, num_chunks - 1)) + if warmup_chunks > 0 and ci < warmup_chunks: + warmup_denom = max(warmup_chunks - 1, 1) + warmup_t = ci / warmup_denom + lr_now = ( + h.global_ttt_warmup_start_lr + + (h.global_ttt_lr - h.global_ttt_warmup_start_lr) * warmup_t + ) + else: + decay_steps = max(num_chunks - 1 - warmup_chunks, 1) + decay_ci = max(ci - warmup_chunks, 0) + lr_now = h.global_ttt_lr * 0.5 * ( + 1.0 + math.cos(math.pi * decay_ci / decay_steps) + ) + for pg in optimizer.param_groups: + pg["lr"] = lr_now + my_seq_s = chunk_seqs * h.rank // h.world_size + my_seq_e = chunk_seqs * (h.rank + 1) // h.world_size + my_chunk_seqs = my_seq_e - my_seq_s + for _ in range(h.global_ttt_epochs): + for bs in range(0, my_chunk_seqs, batch_seqs): + be = min(bs + batch_seqs, my_chunk_seqs) + actual_bs = my_seq_s + bs + start_tok = chunk_start + actual_bs * seq_len + end_tok = chunk_start + (my_seq_s + be) * seq_len + 1 + if end_tok > val_tokens.numel(): + continue + local = val_tokens[start_tok:end_tok].to(device=device, dtype=torch.int64) + x_flat = local[:-1] + y_flat = local[1:] + optimizer.zero_grad(set_to_none=True) + with torch.enable_grad(): + with torch.autocast(device_type="cuda", dtype=torch.bfloat16): + if h.global_ttt_respect_doc_boundaries: + bos_pos = (x_flat == BOS_ID).nonzero(as_tuple=True)[0].tolist() + cu_seqlens, max_seqlen = _build_cu_seqlens( + bos_pos, x_flat.numel(), x_flat.device, h.eval_seq_len, 64 + ) + loss = base_model( + x_flat[None], + y_flat[None], + cu_seqlens=cu_seqlens, + max_seqlen=max_seqlen, + ) + else: + x = x_flat.reshape(-1, seq_len) + y = y_flat.reshape(-1, seq_len) + loss = base_model(x, y) + loss.backward() + if dist.is_available() and dist.is_initialized(): + for p in ttt_params: + if p.grad is not None: + dist.all_reduce(p.grad, op=dist.ReduceOp.SUM) + p.grad.mul_(1.0 / h.world_size) + if h.global_ttt_grad_clip 
> 0: + torch.nn.utils.clip_grad_norm_(ttt_params, h.global_ttt_grad_clip) + optimizer.step() + base_model.eval() + if h.rank == 0: + elapsed = time.perf_counter() - t_start + log( + f"tttg: c{ci+1}/{num_chunks} lr:{lr_now:.6f} t:{elapsed:.1f}s" + ) + for p in base_model.parameters(): + p.requires_grad_(True) + base_model.eval() + + +def eval_val_ttt_phased(h, base_model, device, val_data, forward_ttt_train): + global BOS_ID + if BOS_ID is None: + BOS_ID = 1 + base_model.eval() + for p in base_model.parameters(): + p.requires_grad_(False) + all_tokens = val_data.val_tokens + all_tokens_idx = all_tokens.to(torch.int32) + docs = _find_docs(all_tokens) + doc_entries = _select_ttt_doc_entries(docs, h) + prefix_doc_limit = max(0, min(len(doc_entries), int(h.phased_ttt_prefix_docs))) + num_phases = max(1, int(h.phased_ttt_num_phases)) + phase_boundaries = [] + for pi in range(num_phases): + boundary = prefix_doc_limit * (pi + 1) // num_phases + phase_boundaries.append(boundary) + current_phase = 0 + current_phase_boundary = phase_boundaries[0] + log( + "ttt_phased:" + f" total_docs:{len(doc_entries)} prefix_docs:{prefix_doc_limit} " + f"suffix_docs:{len(doc_entries) - prefix_doc_limit}" + f" num_phases:{num_phases} boundaries:{phase_boundaries}" + ) + chunk_size, eval_seq_len = h.ttt_chunk_size, h.ttt_eval_seq_len + eval_batch_set = None + if h.ttt_eval_batches: + eval_batch_set = set(int(x) for x in h.ttt_eval_batches.split(",") if x.strip()) + use_ascending = eval_batch_set is not None + global_batches_sorted = _build_ttt_global_batches( + doc_entries, h, ascending=use_ascending + ) + queue_len = len(global_batches_sorted) + counter_path = f"/tmp/ttt_counter_{h.run_id}" + prefix_counter_path = f"/tmp/ttt_prefix_counter_{h.run_id}" + pause_flag_path = f"/tmp/ttt_pause_flag_{h.run_id}" + if h.rank == 0: + _init_batch_counter(counter_path) + _init_int64_counter(prefix_counter_path) + try: + os.remove(pause_flag_path) + except FileNotFoundError: + pass + if dist.is_available() and dist.is_initialized(): + path_list = [counter_path, prefix_counter_path, pause_flag_path] + dist.broadcast_object_list(path_list, src=0) + counter_path, prefix_counter_path, pause_flag_path = path_list + dist.barrier() + loss_sum = torch.zeros((), device=device, dtype=torch.float64) + byte_sum = torch.zeros((), device=device, dtype=torch.float64) + token_count = torch.zeros((), device=device, dtype=torch.float64) + t_start = time.perf_counter() + reusable_lora = BatchedTTTLoRA( + h.ttt_batch_size, base_model, h.ttt_lora_rank, + k_lora=h.ttt_k_lora, mlp_lora=h.ttt_mlp_lora, o_lora=h.ttt_o_lora, + ).to(device) + + def _build_opt(lora): + if h.ttt_optimizer == "sgd": + return torch.optim.SGD( + lora.parameters(), lr=h.ttt_lora_lr, + momentum=h.ttt_beta1, weight_decay=h.ttt_weight_decay, + ) + return torch.optim.AdamW( + lora.parameters(), lr=h.ttt_lora_lr, + betas=(h.ttt_beta1, h.ttt_beta2), + eps=1e-10, weight_decay=h.ttt_weight_decay, fused=True, + ) + + reusable_opt = _build_opt(reusable_lora) + local_scored_docs = [] + global_ttt_done = prefix_doc_limit == 0 + try: + while True: + queue_idx = _claim_next_batch(counter_path, queue_len) + if queue_idx >= queue_len: + break + orig_batch_idx, batch_entries = global_batches_sorted[queue_idx] + batch = [doc for _, doc in batch_entries] + bsz = len(batch) + prev_loss = loss_sum.item() + prev_bytes = byte_sum.item() + prev_tokens = token_count.item() + if bsz == reusable_lora.bsz: + reusable_lora.reset() + for s in reusable_opt.state.values(): + for k, v in s.items(): + if 
isinstance(v, torch.Tensor): + v.zero_() + elif k == "step": + s[k] = 0 + cur_lora = reusable_lora + cur_opt = reusable_opt + else: + cur_lora = BatchedTTTLoRA( + bsz, base_model, h.ttt_lora_rank, + k_lora=h.ttt_k_lora, mlp_lora=h.ttt_mlp_lora, o_lora=h.ttt_o_lora, + ).to(device) + cur_opt = _build_opt(cur_lora) + pred_lens = [doc_len - 1 for _, doc_len in batch] + num_chunks = [(pl + chunk_size - 1) // chunk_size for pl in pred_lens] + max_nc = max(num_chunks) + num_chunks_t = torch.tensor(num_chunks, dtype=torch.int64, device=device) + for ci in range(max_nc): + active = [ci < nc for nc in num_chunks] + needs_train = any(ci < nc - 1 for nc in num_chunks) + tok_starts = torch.zeros(bsz, dtype=torch.int64) + tok_wls = torch.zeros(bsz, dtype=torch.int64) + chunk_offsets_cpu = torch.zeros(bsz, dtype=torch.int64) + chunk_lens_cpu = torch.zeros(bsz, dtype=torch.int64) + for b in range(bsz): + if not active[b]: + continue + doc_start, doc_len = batch[b] + win_start, win_len, chunk_offset, chunk_len = _compute_chunk_window( + ci, pred_lens[b], num_chunks[b], chunk_size, eval_seq_len + ) + tok_starts[b] = doc_start + win_start + tok_wls[b] = win_len + chunk_offsets_cpu[b] = chunk_offset + chunk_lens_cpu[b] = chunk_len + _, context_size, chunk_offset, _ = _compute_chunk_window( + ci, (ci + 1) * chunk_size, ci + 1, chunk_size, eval_seq_len + ) + col_idx = torch.arange(context_size + 1) + idx = tok_starts.unsqueeze(1) + col_idx.unsqueeze(0) + idx.clamp_(max=all_tokens.numel() - 1) + gathered_gpu = all_tokens_idx[idx].to( + device=device, dtype=torch.int64, non_blocking=True + ) + valid = (col_idx[:context_size].unsqueeze(0) < tok_wls.unsqueeze(1)).to( + device, non_blocking=True + ) + chunk_offsets = chunk_offsets_cpu.to(device, non_blocking=True) + chunk_lens = chunk_lens_cpu.to(device, non_blocking=True) + x = torch.where(valid, gathered_gpu[:, :context_size], 0) + y = torch.where(valid, gathered_gpu[:, 1 : context_size + 1], 0) + ctx_pos = torch.arange(context_size, device=device, dtype=torch.int64) + with torch.autocast(device_type="cuda", dtype=torch.bfloat16): + per_tok_loss = forward_ttt_train(x, y, lora=cur_lora) + # CaseOps sidecar-driven byte budget. Mirror the index pattern + # used to build y from all_tokens: y[b, j] corresponds to the + # token at global position tok_starts[b] + 1 + j (when valid). + y_bytes_arg = None + if val_data.caseops_enabled and val_data.val_bytes is not None: + y_idx = ( + tok_starts.unsqueeze(1) + + 1 + + col_idx[:context_size].unsqueeze(0) + ) + y_idx = y_idx.clamp_(max=val_data.val_bytes.numel() - 1) + y_bytes_arg = val_data.val_bytes[y_idx].to( + device=device, dtype=torch.int32, non_blocking=True + ) + # Mirror the `valid` masking used for y so out-of-range tokens + # contribute zero bytes (matches y=0 substitution above). 
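+            #
+            # Accounting note (illustrative): _accumulate_bpb just below adds
+            # per-token nats into loss_sum and the matching byte counts into
+            # byte_sum, so the reported metric reduces to
+            #     bpb = loss_sum / (math.log(2.0) * byte_sum)
+            # e.g. ~2.32248 nats/token at ~3.157 bytes/token works out to
+            # 2.32248 / ln(2) / 3.157 ~= 1.0613 bits/byte, the scale of the
+            # final quantized_ttt_phased figure in train_seed42.log.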
+ y_bytes_arg = torch.where( + valid, y_bytes_arg, torch.zeros_like(y_bytes_arg) + ) + with torch.no_grad(): + _accumulate_bpb( + per_tok_loss, + x, + y, + chunk_offsets, + chunk_lens, + ctx_pos, + val_data.base_bytes_lut, + val_data.has_leading_space_lut, + val_data.is_boundary_token_lut, + loss_sum, + byte_sum, + token_count, + y_bytes=y_bytes_arg, + ) + if needs_train: + activate_chunk_mask = (num_chunks_t - 1 > ci).float() + for gi in range(h.ttt_grad_steps): + if gi > 0: + with torch.autocast(device_type="cuda", dtype=torch.bfloat16): + per_tok_loss = forward_ttt_train(x, y, lora=cur_lora) + per_doc = per_tok_loss[ + :, chunk_offset : chunk_offset + chunk_size + ].mean(dim=-1) + cur_opt.zero_grad(set_to_none=True) + (per_doc * activate_chunk_mask).sum().backward() + cur_opt.step() + else: + del per_tok_loss + batch_num = orig_batch_idx + 1 + doc_lens = [dl for _, dl in batch] + should_report = batch_num in eval_batch_set if eval_batch_set is not None else True + if should_report: + cur_tokens = token_count.item() + cur_loss_val = loss_sum.item() + cur_bytes_val = byte_sum.item() + dt = cur_tokens - prev_tokens + db = cur_bytes_val - prev_bytes + if dt > 0 and db > 0: + b_loss = (cur_loss_val - prev_loss) / dt + b_bpb = b_loss / math.log(2.0) * (dt / db) + else: + b_loss = b_bpb = 0.0 + r_loss = cur_loss_val / max(cur_tokens, 1) + r_bpb = r_loss / math.log(2.0) * (cur_tokens / max(cur_bytes_val, 1)) + elapsed = time.perf_counter() - t_start + log( + f"ttp: b{batch_num}/{queue_len} bl:{b_loss:.4f} bb:{b_bpb:.4f} " + f"rl:{r_loss:.4f} rb:{r_bpb:.4f} dl:{min(doc_lens)}-{max(doc_lens)} " + f"gd:{int(global_ttt_done)}" + ) + if not global_ttt_done: + local_scored_docs.extend( + (orig_batch_idx, pos, doc_start, doc_len) + for pos, (doc_start, doc_len) in enumerate(batch) + ) + prefix_done = _add_to_counter(prefix_counter_path, len(batch_entries)) + if prefix_done >= current_phase_boundary: + try: + with open(pause_flag_path, "x"): + pass + except FileExistsError: + pass + should_pause = os.path.exists(pause_flag_path) + if should_pause: + if dist.is_available() and dist.is_initialized(): + dist.barrier() + gathered_scored_docs = [None] * h.world_size + if dist.is_available() and dist.is_initialized(): + dist.all_gather_object(gathered_scored_docs, local_scored_docs) + else: + gathered_scored_docs = [local_scored_docs] + scored_docs_for_global = [] + for rank_docs in gathered_scored_docs: + if rank_docs: + scored_docs_for_global.extend(rank_docs) + scored_docs_for_global.sort(key=lambda x: (x[0], x[1])) + scored_docs_for_global = scored_docs_for_global[:current_phase_boundary] + scored_token_chunks = [ + val_data.val_tokens[doc_start : doc_start + doc_len] + for _, _, doc_start, doc_len in scored_docs_for_global + ] + if scored_token_chunks: + global_ttt_tokens = torch.cat(scored_token_chunks) + else: + global_ttt_tokens = val_data.val_tokens[:0] + if h.rank == 0: + prefix_done = 0 + try: + with open(prefix_counter_path, "rb") as f: + prefix_done = int.from_bytes( + f.read(8), "little", signed=True + ) + except FileNotFoundError: + pass + log( + f"ttpp: phase:{current_phase + 1}/{num_phases} pd:{prefix_done} " + f"gd:{len(scored_docs_for_global)} " + f"t:{time.perf_counter() - t_start:.1f}s" + ) + train_val_ttt_global_sgd_distributed( + h, device, val_data, base_model, global_ttt_tokens + ) + for p in base_model.parameters(): + p.requires_grad_(False) + reusable_lora = BatchedTTTLoRA( + h.ttt_batch_size, base_model, h.ttt_lora_rank, + k_lora=h.ttt_k_lora, mlp_lora=h.ttt_mlp_lora, 
o_lora=h.ttt_o_lora, + ).to(device) + reusable_opt = _build_opt(reusable_lora) + current_phase += 1 + if current_phase >= num_phases: + global_ttt_done = True + else: + current_phase_boundary = phase_boundaries[current_phase] + if h.rank == 0: + try: + os.remove(pause_flag_path) + except FileNotFoundError: + pass + if dist.is_available() and dist.is_initialized(): + dist.barrier() + if h.rank == 0: + log(f"ttpr: phase:{current_phase}/{num_phases} t:{time.perf_counter() - t_start:.1f}s") + del cur_lora, cur_opt + finally: + pass + if dist.is_available() and dist.is_initialized(): + dist.all_reduce(loss_sum, op=dist.ReduceOp.SUM) + dist.all_reduce(byte_sum, op=dist.ReduceOp.SUM) + dist.all_reduce(token_count, op=dist.ReduceOp.SUM) + for p in base_model.parameters(): + p.requires_grad_(True) + base_model.train() + return _loss_bpb_from_sums(loss_sum, token_count, byte_sum) + + +def timed_eval(label, fn, *args, **kwargs): + torch.cuda.synchronize() + t0 = time.perf_counter() + val_loss, val_bpb = fn(*args, **kwargs) + torch.cuda.synchronize() + elapsed_ms = 1e3 * (time.perf_counter() - t0) + log( + f"{label} val_loss:{val_loss:.8f} val_bpb:{val_bpb:.8f} eval_time:{elapsed_ms:.0f}ms" + ) + return val_loss, val_bpb + + +def train_model(h, device, val_data): + base_model = GPT(h).to(device).bfloat16() + restore_fp32_params(base_model) + compiled_model = torch.compile(base_model, dynamic=False, fullgraph=True) + compiled_forward_logits = torch.compile( + base_model.forward_logits, dynamic=False, fullgraph=True + ) + model = compiled_model + log(f"model_params:{sum(p.numel()for p in base_model.parameters())}") + optimizers = Optimizers(h, base_model) + train_loader = DocumentPackingLoader(h, device) + max_wallclock_ms = ( + 1e3 * h.max_wallclock_seconds if h.max_wallclock_seconds > 0 else None + ) + if max_wallclock_ms is not None: + max_wallclock_ms -= h.gptq_reserve_seconds * 1e3 + log( + f"gptq:reserving {h.gptq_reserve_seconds:.0f}s, effective={max_wallclock_ms:.0f}ms" + ) + + def training_frac(step, elapsed_ms): + if max_wallclock_ms is None: + return step / max(h.iterations, 1) + return elapsed_ms / max(max_wallclock_ms, 1e-09) + + def lr_mul(frac): + if h.warmdown_frac <= 0: + return 1.0 + if frac >= 1.0 - h.warmdown_frac: + return max((1.0 - frac) / h.warmdown_frac, h.min_lr) + return 1.0 + + def step_fn(step, lr_scale): + optimizers.zero_grad_all() + train_loss = torch.zeros((), device=device) + for micro_step in range(h.grad_accum_steps): + x, y, cu_seqlens, _max_seqlen = train_loader.next_batch( + h.train_batch_tokens, h.grad_accum_steps + ) + with torch.autocast(device_type="cuda", dtype=torch.bfloat16, enabled=True): + loss = model(x, y, cu_seqlens=cu_seqlens, max_seqlen=h.train_seq_len) + train_loss += loss.detach() + (loss / h.grad_accum_steps).backward() + train_loss /= h.grad_accum_steps + frac = ( + min(step / h.muon_momentum_warmup_steps, 1.0) + if h.muon_momentum_warmup_steps > 0 + else 1.0 + ) + muon_momentum = ( + 1 - frac + ) * h.muon_momentum_warmup_start + frac * h.muon_momentum + for group in optimizers.optimizer_muon.param_groups: + group["momentum"] = muon_momentum + for opt in optimizers: + for group in opt.param_groups: + group["lr"] = group["base_lr"] * lr_scale + if h.grad_clip_norm > 0: + torch.nn.utils.clip_grad_norm_(base_model.parameters(), h.grad_clip_norm) + optimizers.step(distributed=h.distributed) + return train_loss + + if h.warmup_steps > 0: + initial_model_state = { + name: tensor.detach().cpu().clone() + for (name, tensor) in 
base_model.state_dict().items() + } + initial_optimizer_states = [ + copy.deepcopy(opt.state_dict()) for opt in optimizers + ] + model.train() + num_tokens_local = h.train_batch_tokens // h.world_size + for blk in base_model.blocks: + blk.attn.rotary(num_tokens_local, device, torch.bfloat16) + cu_bucket_size = train_loader.cu_bucket_size + warmup_cu_buckets = tuple(cu_bucket_size * i for i in range(1, 5)) + warmup_cu_iters = 3 + x, y, cu_seqlens, _ = train_loader.next_batch( + h.train_batch_tokens, h.grad_accum_steps + ) + log(f"warmup_cu_buckets:{','.join(str(b) for b in warmup_cu_buckets)} iters_each:{warmup_cu_iters}") + def _run_cu_bucket_warmup(): + for bucket_len in warmup_cu_buckets: + boundaries = list(range(0, x.size(1), max(h.train_seq_len, 1))) + if boundaries[-1] != x.size(1): + boundaries.append(x.size(1)) + cu = torch.full((bucket_len,), x.size(1), dtype=torch.int32, device=device) + cu[: len(boundaries)] = torch.tensor(boundaries, dtype=torch.int32, device=device) + for _ in range(warmup_cu_iters): + optimizers.zero_grad_all() + with torch.autocast(device_type="cuda", dtype=torch.bfloat16, enabled=True): + wloss = model(x, y, cu_seqlens=cu, max_seqlen=h.train_seq_len) + (wloss / h.grad_accum_steps).backward() + optimizers.zero_grad_all() + _run_cu_bucket_warmup() + if h.num_loops > 0: + base_model.looping_active = True + _run_cu_bucket_warmup() + base_model.looping_active = False + for warmup_step in range(h.warmup_steps): + step_fn(warmup_step, 1.0) + if ( + warmup_step <= 5 + or (warmup_step + 1) % 10 == 0 + or warmup_step + 1 == h.warmup_steps + ): + log(f"warmup_step: {warmup_step+1}/{h.warmup_steps}") + if h.num_loops > 0: + base_model.looping_active = True + log( + f"loop_warmup:enabled encoder:{base_model.encoder_indices} decoder:{base_model.decoder_indices}" + ) + for warmup_step in range(h.warmup_steps): + step_fn(warmup_step, 1.0) + if ( + warmup_step <= 5 + or (warmup_step + 1) % 10 == 0 + or warmup_step + 1 == h.warmup_steps + ): + log(f"loop_warmup_step: {warmup_step+1}/{h.warmup_steps}") + base_model.looping_active = False + base_model.load_state_dict(initial_model_state, strict=True) + for (opt, state) in zip(optimizers, initial_optimizer_states, strict=True): + opt.load_state_dict(state) + optimizers.zero_grad_all() + train_loader = DocumentPackingLoader(h, device) + ema_state = { + name: t.detach().float().clone() + for (name, t) in base_model.state_dict().items() + } + ema_decay = h.ema_decay + training_time_ms = 0.0 + stop_after_step = None + torch.cuda.synchronize() + t0 = time.perf_counter() + step = 0 + while True: + last_step = ( + step == h.iterations + or stop_after_step is not None + and step >= stop_after_step + ) + should_validate = ( + last_step or h.val_loss_every > 0 and step % h.val_loss_every == 0 + ) + if should_validate: + torch.cuda.synchronize() + training_time_ms += 1e3 * (time.perf_counter() - t0) + val_loss, val_bpb = eval_val( + h, device, val_data, model, compiled_forward_logits + ) + log( + f"{step}/{h.iterations} val_loss: {val_loss:.4f} val_bpb: {val_bpb:.4f}" + ) + torch.cuda.synchronize() + t0 = time.perf_counter() + if last_step: + if stop_after_step is not None and step < h.iterations: + log( + f"stopping_early: wallclock_cap train_time: {training_time_ms:.0f}ms step: {step}/{h.iterations}" + ) + break + elapsed_ms = training_time_ms + 1e3 * (time.perf_counter() - t0) + frac = training_frac(step, elapsed_ms) + scale = lr_mul(frac) + if ( + h.num_loops > 0 + and not base_model.looping_active + and frac >= h.enable_looping_at + 
): + base_model.looping_active = True + log( + f"layer_loop:enabled step:{step} frac:{frac:.3f} encoder:{base_model.encoder_indices} decoder:{base_model.decoder_indices}" + ) + train_loss = step_fn(step, scale) + with torch.no_grad(): + for (name, t) in base_model.state_dict().items(): + ema_state[name].mul_(ema_decay).add_( + t.detach().float(), alpha=1.0 - ema_decay + ) + step += 1 + approx_training_time_ms = training_time_ms + 1e3 * (time.perf_counter() - t0) + should_log_train = h.train_log_every > 0 and ( + step <= 5 or step % h.train_log_every == 0 or stop_after_step is not None + ) + if should_log_train: + tok_per_sec = step * h.train_batch_tokens / (approx_training_time_ms / 1e3) + log( + f"{step}/{h.iterations} train_loss: {train_loss.item():.4f} train_time: {approx_training_time_ms/60000:.1f}m tok/s: {tok_per_sec:.0f}" + ) + reached_cap = ( + max_wallclock_ms is not None and approx_training_time_ms >= max_wallclock_ms + ) + if h.distributed and max_wallclock_ms is not None: + reached_cap_tensor = torch.tensor(int(reached_cap), device=device) + dist.all_reduce(reached_cap_tensor, op=dist.ReduceOp.MAX) + reached_cap = bool(reached_cap_tensor.item()) + if stop_after_step is None and reached_cap: + stop_after_step = step + log( + f"peak memory allocated: {torch.cuda.max_memory_allocated()//1024//1024} MiB reserved: {torch.cuda.max_memory_reserved()//1024//1024} MiB" + ) + log("ema:applying EMA weights") + current_state = base_model.state_dict() + avg_state = { + name: t.to(dtype=current_state[name].dtype) for (name, t) in ema_state.items() + } + base_model.load_state_dict(avg_state, strict=True) + return base_model, compiled_model, compiled_forward_logits + + +def train_and_eval(h, device): + random.seed(h.seed) + np.random.seed(h.seed) + torch.manual_seed(h.seed) + torch.cuda.manual_seed_all(h.seed) + if h.artifact_dir and h.is_main_process: + os.makedirs(h.artifact_dir, exist_ok=True) + val_data = ValidationData(h, device) + log( + f"train_shards: {len(list(Path(h.datasets_dir).resolve().glob('fineweb_train_*.bin')))}" + ) + log(f"val_tokens: {val_data.val_tokens.numel()-1}") + # TTT_EVAL_ONLY: skip training + GPTQ, jump straight to TTT eval on a + # pre-existing quantized artifact. Used to test TTT-only improvements + # (e.g., PR-1767's alpha/warm-start/WD) without retraining. 
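+    # Example invocation (hypothetical, mirroring this record's 8-GPU run):
+    #   TTT_EVAL_ONLY=1 torchrun --standalone --nproc_per_node=8 train_gpt.py
+    # The PREQUANT_ONLY flag checked further below is the complementary
+    # switch: it returns right after the pre-quantization diagnostic eval.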
+ ttt_eval_only = os.environ.get("TTT_EVAL_ONLY", "0") == "1" + if ttt_eval_only: + log("TTT_EVAL_ONLY=1 — skipping training + GPTQ, loading saved artifact for TTT eval") + log(f"ttt_lora_alpha: {BatchedLinearLoRA._ALPHA}") + log(f"ttt_warm_start_a: {BatchedLinearLoRA._WARM_START_A}") + log(f"ttt_weight_decay: {h.ttt_weight_decay}") + else: + base_model, compiled_model, compiled_forward_logits = train_model( + h, device, val_data + ) + torch._dynamo.reset() + timed_eval( + "diagnostic pre-quantization post-ema", + eval_val, + h, + device, + val_data, + compiled_model, + compiled_forward_logits, + ) + if os.environ.get("PREQUANT_ONLY", "0") == "1": + log("PREQUANT_ONLY=1 — skipping serialize/GPTQ/post-quant eval/TTT") + return + serialize(h, base_model, Path(__file__).read_text(encoding="utf-8")) + if h.distributed: + dist.barrier() + eval_model = deserialize(h, device) + if h.num_loops > 0: + eval_model.looping_active = True + if not ttt_eval_only: + compiled_model = torch.compile(eval_model, dynamic=False, fullgraph=True) + compiled_forward_logits = torch.compile( + eval_model.forward_logits, dynamic=False, fullgraph=True + ) + timed_eval( + "diagnostic quantized", + eval_val, + h, + device, + val_data, + compiled_model, + compiled_forward_logits, + ) + del eval_model + if h.ttt_enabled: + if not ttt_eval_only: + del compiled_model + if ttt_eval_only: + del eval_model + torch._dynamo.reset() + torch.cuda.empty_cache() + ttt_model = deserialize(h, device) + if h.num_loops > 0: + ttt_model.looping_active = True + for p in ttt_model.parameters(): + p.requires_grad_(False) + + if h.rope_yarn: + _yarn_seqlen = h.train_batch_tokens // h.grad_accum_steps + for block in ttt_model.blocks: + block.attn.rotary(_yarn_seqlen, device, torch.bfloat16) + else: + for block in ttt_model.blocks: + block.attn.rotary._cos_cached = None + block.attn.rotary._sin_cached = None + block.attn.rotary._seq_len_cached = 0 + block.attn.rotary(h.ttt_eval_seq_len, device, torch.bfloat16) + + def _fwd_ttt_inner(input_ids, target_ids, lora): + return ttt_model.forward_ttt(input_ids, target_ids, lora=lora) + + _fwd_ttt_compiled_inner = None + + def _fwd_ttt(input_ids, target_ids, lora): + nonlocal _fwd_ttt_compiled_inner + if _fwd_ttt_compiled_inner is None: + _fwd_ttt_compiled_inner = torch.compile(_fwd_ttt_inner, dynamic=True) + return _fwd_ttt_compiled_inner(input_ids, target_ids, lora=lora) + + fwd_ttt_compiled = _fwd_ttt + log(f"ttt_lora:warming up compile (random tokens, no val data)") + global BOS_ID + if BOS_ID is None: + BOS_ID = 1 + t_warmup = time.perf_counter() + warmup_bszes = [h.ttt_batch_size] + for bsz in warmup_bszes: + wl = BatchedTTTLoRA( + bsz, ttt_model, h.ttt_lora_rank, + k_lora=h.ttt_k_lora, mlp_lora=h.ttt_mlp_lora, o_lora=h.ttt_o_lora, + ).to(device) + wo = torch.optim.AdamW( + wl.parameters(), + lr=h.ttt_lora_lr, + betas=(h.ttt_beta1, h.ttt_beta2), + eps=1e-10, + weight_decay=h.ttt_weight_decay, + fused=True, + ) + for ctx_len in (h.ttt_chunk_size, h.ttt_eval_seq_len): + xw = torch.randint(0, h.vocab_size, (bsz, ctx_len), device=device, dtype=torch.int64) + yw = torch.randint(0, h.vocab_size, (bsz, ctx_len), device=device, dtype=torch.int64) + with torch.autocast(device_type="cuda", dtype=torch.bfloat16): + ptl = fwd_ttt_compiled(xw, yw, lora=wl) + ptl[:, : min(h.ttt_chunk_size, ctx_len)].mean(dim=-1).sum().backward() + wo.step() + wo.zero_grad(set_to_none=True) + del wl, wo + torch.cuda.empty_cache() + compile_elapsed = time.perf_counter() - t_warmup + log(f"ttt_lora:compile warmup done 
({compile_elapsed:.1f}s)") + log("\nbeginning TTT eval timer") + torch.cuda.synchronize() + t_ttt = time.perf_counter() + ttt_val_loss, ttt_val_bpb = eval_val_ttt_phased( + h, ttt_model, device, val_data, forward_ttt_train=fwd_ttt_compiled + ) + torch.cuda.synchronize() + ttt_eval_elapsed = time.perf_counter() - t_ttt + log( + "quantized_ttt_phased " + f"val_loss:{ttt_val_loss:.8f} val_bpb:{ttt_val_bpb:.8f} " + f"eval_time:{1e3*ttt_eval_elapsed:.0f}ms" + ) + log(f"total_eval_time:{ttt_eval_elapsed:.1f}s") + del ttt_model + + +def main(): + world_size = int(os.environ.get("WORLD_SIZE", "1")) + local_rank = int(os.environ.get("LOCAL_RANK", "0")) + distributed = "RANK" in os.environ and "WORLD_SIZE" in os.environ + if not torch.cuda.is_available(): + raise RuntimeError("CUDA is required") + if world_size <= 0: + raise ValueError(f"WORLD_SIZE must be positive, got {world_size}") + if 8 % world_size != 0: + raise ValueError( + f"WORLD_SIZE={world_size} must divide 8 so grad_accum_steps stays integral" + ) + device = torch.device("cuda", local_rank) + torch.cuda.set_device(device) + if distributed: + dist.init_process_group(backend="nccl", device_id=device) + dist.barrier() + torch.backends.cuda.matmul.allow_tf32 = True + torch.backends.cudnn.allow_tf32 = True + torch.set_float32_matmul_precision("high") + from torch.backends.cuda import ( + enable_cudnn_sdp, + enable_flash_sdp, + enable_math_sdp, + enable_mem_efficient_sdp, + ) + + enable_cudnn_sdp(False) + enable_flash_sdp(True) + enable_mem_efficient_sdp(False) + enable_math_sdp(False) + torch._dynamo.config.optimize_ddp = False + torch._dynamo.config.cache_size_limit = 16 + h = Hyperparameters() + set_logging_hparams(h) + if h.is_main_process: + os.makedirs(h.artifact_dir if h.artifact_dir else "logs", exist_ok=True) + log(100 * "=", console=False) + log("Hyperparameters:", console=True) + for (k, v) in sorted(vars(type(h)).items()): + if not k.startswith("_"): + log(f" {k}: {v}", console=True) + log("=" * 100, console=False) + log("Source code:", console=False) + log("=" * 100, console=False) + with open(__file__, "r", encoding="utf-8") as _src: + log(_src.read(), console=False) + log("=" * 100, console=False) + log(f"Running Python {sys.version}", console=False) + log(f"Running PyTorch {torch.__version__}", console=False) + log("=" * 100, console=False) + train_and_eval(h, device) + if distributed: + dist.destroy_process_group() + + +if __name__ == "__main__": + main() diff --git a/records/track_10min_16mb/2026-04-27_SmearGateBOSFix_PR1787Base_LQERAsym_PhasedTTT/train_seed42.log b/records/track_10min_16mb/2026-04-27_SmearGateBOSFix_PR1787Base_LQERAsym_PhasedTTT/train_seed42.log new file mode 100644 index 0000000000..e4e34a2534 --- /dev/null +++ b/records/track_10min_16mb/2026-04-27_SmearGateBOSFix_PR1787Base_LQERAsym_PhasedTTT/train_seed42.log @@ -0,0 +1,833 @@ +nohup: ignoring input +W0427 04:43:16.977000 185990 torch/distributed/run.py:803] +W0427 04:43:16.977000 185990 torch/distributed/run.py:803] ***************************************** +W0427 04:43:16.977000 185990 torch/distributed/run.py:803] Setting OMP_NUM_THREADS environment variable for each process to be 1 in default, to avoid your system being overloaded, please further tune the variable for optimal performance in your application as needed. 
+W0427 04:43:16.977000 185990 torch/distributed/run.py:803] ***************************************** +Hyperparameters: + adam_eps: 1e-08 + adam_wd: 0.02 + artifact_dir: + attn_clip_sigmas: 13.0 + attn_out_gate_enabled: False + attn_out_gate_src: proj + beta1: 0.9 + beta2: 0.95 + caseops_enabled: True + compressor: brotli + data_dir: ./data/ + datasets_dir: /dev/shm/pgolf_data + distributed: True + ema_decay: 0.9965 + embed_bits: 7 + embed_clip_sigmas: 15.0 + embed_lr: 0.6 + embed_wd: 0.085 + enable_looping_at: 0.35 + eval_seq_len: 2048 + eval_stride: 64 + fused_ce_enabled: True + gate_window: 12 + gated_attn_enabled: False + gated_attn_init_std: 0.01 + gated_attn_quant_gate: False + global_ttt_batch_seqs: 32 + global_ttt_chunk_tokens: 32768 + global_ttt_epochs: 1 + global_ttt_grad_clip: 1.0 + global_ttt_lr: 0.001 + global_ttt_momentum: 0.9 + global_ttt_respect_doc_boundaries: True + global_ttt_warmup_chunks: 0 + global_ttt_warmup_start_lr: 0.0 + gptq_calibration_batches: 16 + gptq_reserve_seconds: 0.5 + grad_accum_steps: 1 + grad_clip_norm: 0.3 + is_main_process: True + iterations: 20000 + ln_scale: True + local_rank: 0 + logfile: logs/340ad5c9-cc29-4e8b-918a-9786cbc866be.txt + logit_softcap: 30.0 + loop_end: 5 + loop_start: 3 + lqer_asym_enabled: True + lqer_asym_group: 64 + lqer_enabled: True + lqer_factor_bits: 4 + lqer_rank: 4 + lqer_top_k: 3 + matrix_bits: 6 + matrix_clip_sigmas: 12.85 + matrix_lr: 0.026 + max_wallclock_seconds: 600.0 + min_lr: 0.1 + mlp_clip_sigmas: 12.0 + mlp_mult: 4.0 + model_dim: 512 + model_path: final_model.pt + muon_backend_steps: 5 + muon_momentum: 0.97 + muon_momentum_warmup_start: 0.92 + muon_momentum_warmup_steps: 1500 + muon_row_normalize: True + muon_wd: 0.095 + num_heads: 8 + num_kv_heads: 4 + num_layers: 11 + num_loops: 2 + parallel_final_lane: mean + parallel_start_layer: 8 + phased_ttt_num_phases: 3 + phased_ttt_prefix_docs: 2000 + qk_gain_init: 5.0 + quantized_model_path: final_model.int6.ptz + rank: 0 + rope_base: 10000.0 + rope_dims: 16 + rope_train_seq_len: 2048 + rope_yarn: False + run_id: 340ad5c9-cc29-4e8b-918a-9786cbc866be + scalar_lr: 0.02 + seed: 42 + skip_gates_enabled: True + smear_gate_enabled: True + sparse_attn_gate_enabled: True + sparse_attn_gate_init_std: 0.0 + sparse_attn_gate_scale: 1.0 + tie_embeddings: True + tied_embed_init_std: 0.005 + tied_embed_lr: 0.03 + tokenizer_path: /dev/shm/pgolf_tokenizers/fineweb_8192_bpe_lossless_caps_caseops_v1_reserved.model + train_batch_tokens: 786432 + train_files: /dev/shm/pgolf_data/fineweb_train_*.bin + train_log_every: 500 + train_seq_len: 2048 + ttt_batch_size: 64 + ttt_beta1: 0.0 + ttt_beta2: 0.999 + ttt_chunk_size: 48 + ttt_enabled: True + ttt_eval_batches: + ttt_eval_seq_len: 2048 + ttt_grad_steps: 1 + ttt_k_lora: True + ttt_lora_lr: 0.0001 + ttt_lora_rank: 96 + ttt_mlp_lora: True + ttt_o_lora: True + ttt_optimizer: adam + ttt_weight_decay: 1.0 + val_batch_tokens: 524288 + val_bytes_files: /dev/shm/pgolf_data/fineweb_val_bytes_*.bin + val_doc_fraction: 1.0 + val_files: /dev/shm/pgolf_data/fineweb_val_*.bin + val_loss_every: 4000 + vocab_size: 8192 + warmdown_frac: 0.75 + warmup_steps: 20 + world_size: 8 + xsa_last_n: 11 +train_shards: 39 +val_tokens: 47851520 +model_params:35945671 +gptq:reserving 0s, effective=599500ms +warmup_cu_buckets:64,128,192,256 iters_each:3 +warmup_step: 1/20 +warmup_step: 2/20 +warmup_step: 3/20 +warmup_step: 4/20 +warmup_step: 5/20 +warmup_step: 6/20 +warmup_step: 10/20 +warmup_step: 20/20 +loop_warmup:enabled encoder:[0, 1, 2, 3, 4, 5, 3, 4] decoder:[5, 3, 
4, 5, 6, 7, 8, 9, 10] +loop_warmup_step: 1/20 +loop_warmup_step: 2/20 +loop_warmup_step: 3/20 +loop_warmup_step: 4/20 +loop_warmup_step: 5/20 +loop_warmup_step: 6/20 +loop_warmup_step: 10/20 +loop_warmup_step: 20/20 +0/20000 val_loss: 9.0076 val_bpb: 4.1159 +1/20000 train_loss: 9.0087 train_time: 0.0m tok/s: 12535509 +2/20000 train_loss: 12.8485 train_time: 0.0m tok/s: 10830895 +3/20000 train_loss: 10.2683 train_time: 0.0m tok/s: 9514405 +4/20000 train_loss: 8.7315 train_time: 0.0m tok/s: 8959951 +5/20000 train_loss: 7.9453 train_time: 0.0m tok/s: 8662131 +500/20000 train_loss: 2.5698 train_time: 0.8m tok/s: 8216992 +1000/20000 train_loss: 2.8006 train_time: 1.6m tok/s: 8178350 +1500/20000 train_loss: 2.6344 train_time: 2.4m tok/s: 8163805 +2000/20000 train_loss: 2.6634 train_time: 3.2m tok/s: 8157405 +layer_loop:enabled step:2173 frac:0.350 encoder:[0, 1, 2, 3, 4, 5, 3, 4] decoder:[5, 3, 4, 5, 6, 7, 8, 9, 10] +2500/20000 train_loss: 2.5534 train_time: 4.3m tok/s: 7653758 +3000/20000 train_loss: 2.5637 train_time: 5.5m tok/s: 7164309 +3500/20000 train_loss: 2.5657 train_time: 6.7m tok/s: 6864774 +4000/20000 train_loss: 2.4112 train_time: 7.9m tok/s: 6658543 +4000/20000 val_loss: 2.4294 val_bpb: 1.1101 +4500/20000 train_loss: 2.2778 train_time: 9.1m tok/s: 6516065 +4900/20000 val_loss: 2.3541 val_bpb: 1.0757 +stopping_early: wallclock_cap train_time: 599623ms step: 4900/20000 +peak memory allocated: 41709 MiB reserved: 47026 MiB +ema:applying EMA weights +diagnostic pre-quantization post-ema val_loss:2.33055251 val_bpb:1.06490240 eval_time:7779ms +Serialized model: 135417533 bytes +Code size (uncompressed): 151766 bytes +Code size (compressed): 31250 bytes +GPTQ:collecting Hessians from calibration data... +GPTQ:collected 67 Hessians in 3.5s +Quantized weights: + gptq (int6): blocks.attn.c_k.weight, blocks.attn.c_q.weight, blocks.attn.c_v.weight, blocks.attn.proj.weight, blocks.mlp.fc.weight, blocks.mlp.proj.weight + gptq (int6)+lqer_asym: blocks.mlp.fc.weight + gptq (int7)+lqer_asym: tok_emb.weight + passthrough (float16): blocks.attn.attn_gate_w, blocks.attn.q_gain, blocks.attn_scale, blocks.mlp_scale, blocks.resid_mix, parallel_post_lambdas, parallel_resid_lambdas, skip_gates, skip_weights, smear_gate.weight, smear_lambda +Serialized model quantized+brotli: 15920836 bytes +Total submission size quantized+brotli: 15952086 bytes +diagnostic quantized val_loss:2.35058660 val_bpb:1.07405660 eval_time:63234ms +ttt_lora:warming up compile (random tokens, no val data) +ttt_lora:compile warmup done (156.2s) + +beginning TTT eval timer +ttt_phased: total_docs:50000 prefix_docs:2000 suffix_docs:48000 num_phases:3 boundaries:[666, 1333, 2000] +ttp: b775/782 bl:2.2782 bb:1.0591 rl:2.2782 rb:1.0591 dl:6892-7524 gd:0 +ttp: b771/782 bl:2.3090 bb:1.0606 rl:2.2918 rb:1.0598 dl:5523-5749 gd:0 +ttp: b765/782 bl:2.3185 bb:1.0843 rl:2.2987 rb:1.0660 dl:4393-4510 gd:0 +ttpp: phase:1/3 pd:1104 gd:666 t:218.0s +tttg: c1/111 lr:0.001000 t:2.4s +tttg: c2/111 lr:0.001000 t:2.5s +tttg: c3/111 lr:0.000999 t:2.6s +tttg: c4/111 lr:0.000998 t:2.7s +tttg: c5/111 lr:0.000997 t:2.8s +tttg: c6/111 lr:0.000995 t:3.0s +tttg: c7/111 lr:0.000993 t:3.1s +tttg: c8/111 lr:0.000990 t:3.2s +tttg: c9/111 lr:0.000987 t:3.3s +tttg: c10/111 lr:0.000984 t:3.5s +tttg: c11/111 lr:0.000980 t:3.6s +tttg: c12/111 lr:0.000976 t:3.7s +tttg: c13/111 lr:0.000971 t:3.9s +tttg: c14/111 lr:0.000966 t:4.0s +tttg: c15/111 lr:0.000961 t:4.1s +tttg: c16/111 lr:0.000955 t:4.3s +tttg: c17/111 lr:0.000949 t:4.4s +tttg: c18/111 lr:0.000942 t:4.5s +tttg: 
c19/111 lr:0.000935 t:4.6s +tttg: c20/111 lr:0.000928 t:4.8s +tttg: c21/111 lr:0.000921 t:4.9s +tttg: c22/111 lr:0.000913 t:5.0s +tttg: c23/111 lr:0.000905 t:5.1s +tttg: c24/111 lr:0.000896 t:5.3s +tttg: c25/111 lr:0.000887 t:5.4s +tttg: c26/111 lr:0.000878 t:5.5s +tttg: c27/111 lr:0.000868 t:5.8s +tttg: c28/111 lr:0.000859 t:5.9s +tttg: c29/111 lr:0.000848 t:6.1s +tttg: c30/111 lr:0.000838 t:6.2s +tttg: c31/111 lr:0.000827 t:6.3s +tttg: c32/111 lr:0.000817 t:6.4s +tttg: c33/111 lr:0.000805 t:6.6s +tttg: c34/111 lr:0.000794 t:7.0s +tttg: c35/111 lr:0.000782 t:7.2s +tttg: c36/111 lr:0.000770 t:7.3s +tttg: c37/111 lr:0.000758 t:7.4s +tttg: c38/111 lr:0.000746 t:7.5s +tttg: c39/111 lr:0.000733 t:7.7s +tttg: c40/111 lr:0.000721 t:7.8s +tttg: c41/111 lr:0.000708 t:7.9s +tttg: c42/111 lr:0.000695 t:8.0s +tttg: c43/111 lr:0.000681 t:8.2s +tttg: c44/111 lr:0.000668 t:8.3s +tttg: c45/111 lr:0.000655 t:8.4s +tttg: c46/111 lr:0.000641 t:8.5s +tttg: c47/111 lr:0.000627 t:8.6s +tttg: c48/111 lr:0.000613 t:8.8s +tttg: c49/111 lr:0.000599 t:8.9s +tttg: c50/111 lr:0.000585 t:9.0s +tttg: c51/111 lr:0.000571 t:9.2s +tttg: c52/111 lr:0.000557 t:9.3s +tttg: c53/111 lr:0.000543 t:9.4s +tttg: c54/111 lr:0.000529 t:9.6s +tttg: c55/111 lr:0.000514 t:9.7s +tttg: c56/111 lr:0.000500 t:9.8s +tttg: c57/111 lr:0.000486 t:9.9s +tttg: c58/111 lr:0.000471 t:10.1s +tttg: c59/111 lr:0.000457 t:10.2s +tttg: c60/111 lr:0.000443 t:10.3s +tttg: c61/111 lr:0.000429 t:10.4s +tttg: c62/111 lr:0.000415 t:10.5s +tttg: c63/111 lr:0.000401 t:10.7s +tttg: c64/111 lr:0.000387 t:10.8s +tttg: c65/111 lr:0.000373 t:10.9s +tttg: c66/111 lr:0.000359 t:11.0s +tttg: c67/111 lr:0.000345 t:11.1s +tttg: c68/111 lr:0.000332 t:11.2s +tttg: c69/111 lr:0.000319 t:11.3s +tttg: c70/111 lr:0.000305 t:11.4s +tttg: c71/111 lr:0.000292 t:11.6s +tttg: c72/111 lr:0.000279 t:11.7s +tttg: c73/111 lr:0.000267 t:11.8s +tttg: c74/111 lr:0.000254 t:11.9s +tttg: c75/111 lr:0.000242 t:12.0s +tttg: c76/111 lr:0.000230 t:12.1s +tttg: c77/111 lr:0.000218 t:12.2s +tttg: c78/111 lr:0.000206 t:12.3s +tttg: c79/111 lr:0.000195 t:12.4s +tttg: c80/111 lr:0.000183 t:12.5s +tttg: c81/111 lr:0.000173 t:12.6s +tttg: c82/111 lr:0.000162 t:12.7s +tttg: c83/111 lr:0.000152 t:12.8s +tttg: c84/111 lr:0.000141 t:12.9s +tttg: c85/111 lr:0.000132 t:13.0s +tttg: c86/111 lr:0.000122 t:13.1s +tttg: c87/111 lr:0.000113 t:13.2s +tttg: c88/111 lr:0.000104 t:13.3s +tttg: c89/111 lr:0.000095 t:13.4s +tttg: c90/111 lr:0.000087 t:13.5s +tttg: c91/111 lr:0.000079 t:13.6s +tttg: c92/111 lr:0.000072 t:13.7s +tttg: c93/111 lr:0.000065 t:13.9s +tttg: c94/111 lr:0.000058 t:14.0s +tttg: c95/111 lr:0.000051 t:14.1s +tttg: c96/111 lr:0.000045 t:14.2s +tttg: c97/111 lr:0.000039 t:14.3s +tttg: c98/111 lr:0.000034 t:14.4s +tttg: c99/111 lr:0.000029 t:14.5s +tttg: c100/111 lr:0.000024 t:14.6s +tttg: c101/111 lr:0.000020 t:14.7s +tttg: c102/111 lr:0.000016 t:14.8s +tttg: c103/111 lr:0.000013 t:14.9s +tttg: c104/111 lr:0.000010 t:15.0s +tttg: c105/111 lr:0.000007 t:15.1s +tttg: c106/111 lr:0.000005 t:15.2s +tttg: c107/111 lr:0.000003 t:15.3s +tttg: c108/111 lr:0.000002 t:15.4s +tttg: c109/111 lr:0.000001 t:15.5s +tttg: c110/111 lr:0.000000 t:15.6s +ttpr: phase:1/3 t:236.2s +ttp: b757/782 bl:2.2825 bb:1.0625 rl:2.2959 rb:1.0654 dl:3550-3633 gd:0 +ttpp: phase:2/3 pd:1808 gd:1333 t:340.9s +tttg: c1/185 lr:0.001000 t:0.1s +tttg: c2/185 lr:0.001000 t:0.1s +tttg: c3/185 lr:0.001000 t:0.2s +tttg: c4/185 lr:0.000999 t:0.4s +tttg: c5/185 lr:0.000999 t:0.6s +tttg: c6/185 lr:0.000998 t:0.7s +tttg: c7/185 lr:0.000997 
t:0.8s +tttg: c8/185 lr:0.000996 t:0.9s +tttg: c9/185 lr:0.000995 t:1.6s +tttg: c10/185 lr:0.000994 t:1.7s +tttg: c11/185 lr:0.000993 t:1.8s +tttg: c12/185 lr:0.000991 t:2.0s +tttg: c13/185 lr:0.000990 t:2.1s +tttg: c14/185 lr:0.000988 t:2.2s +tttg: c15/185 lr:0.000986 t:2.3s +tttg: c16/185 lr:0.000984 t:2.4s +tttg: c17/185 lr:0.000981 t:2.5s +tttg: c18/185 lr:0.000979 t:2.6s +tttg: c19/185 lr:0.000977 t:2.7s +tttg: c20/185 lr:0.000974 t:2.8s +tttg: c21/185 lr:0.000971 t:2.9s +tttg: c22/185 lr:0.000968 t:3.0s +tttg: c23/185 lr:0.000965 t:3.2s +tttg: c24/185 lr:0.000962 t:3.3s +tttg: c25/185 lr:0.000959 t:3.4s +tttg: c26/185 lr:0.000955 t:3.5s +tttg: c27/185 lr:0.000952 t:3.6s +tttg: c28/185 lr:0.000948 t:4.6s +tttg: c29/185 lr:0.000944 t:4.7s +tttg: c30/185 lr:0.000940 t:4.8s +tttg: c31/185 lr:0.000936 t:4.9s +tttg: c32/185 lr:0.000932 t:5.0s +tttg: c33/185 lr:0.000927 t:5.1s +tttg: c34/185 lr:0.000923 t:5.2s +tttg: c35/185 lr:0.000918 t:5.3s +tttg: c36/185 lr:0.000913 t:5.4s +tttg: c37/185 lr:0.000908 t:5.5s +tttg: c38/185 lr:0.000904 t:5.6s +tttg: c39/185 lr:0.000898 t:5.7s +tttg: c40/185 lr:0.000893 t:5.8s +tttg: c41/185 lr:0.000888 t:5.9s +tttg: c42/185 lr:0.000882 t:6.1s +tttg: c43/185 lr:0.000877 t:6.2s +tttg: c44/185 lr:0.000871 t:6.3s +tttg: c45/185 lr:0.000865 t:6.4s +tttg: c46/185 lr:0.000860 t:6.5s +tttg: c47/185 lr:0.000854 t:6.6s +tttg: c48/185 lr:0.000847 t:6.7s +tttg: c49/185 lr:0.000841 t:6.8s +tttg: c50/185 lr:0.000835 t:6.9s +tttg: c51/185 lr:0.000829 t:7.0s +tttg: c52/185 lr:0.000822 t:7.1s +tttg: c53/185 lr:0.000816 t:7.2s +tttg: c54/185 lr:0.000809 t:7.3s +tttg: c55/185 lr:0.000802 t:7.4s +tttg: c56/185 lr:0.000795 t:7.5s +tttg: c57/185 lr:0.000788 t:7.7s +tttg: c58/185 lr:0.000781 t:7.8s +tttg: c59/185 lr:0.000774 t:7.9s +tttg: c60/185 lr:0.000767 t:8.0s +tttg: c61/185 lr:0.000760 t:8.1s +tttg: c62/185 lr:0.000752 t:8.2s +tttg: c63/185 lr:0.000745 t:8.3s +tttg: c64/185 lr:0.000738 t:8.4s +tttg: c65/185 lr:0.000730 t:8.5s +tttg: c66/185 lr:0.000722 t:8.6s +tttg: c67/185 lr:0.000715 t:8.9s +tttg: c68/185 lr:0.000707 t:9.0s +tttg: c69/185 lr:0.000699 t:9.1s +tttg: c70/185 lr:0.000691 t:9.2s +tttg: c71/185 lr:0.000683 t:9.3s +tttg: c72/185 lr:0.000675 t:9.4s +tttg: c73/185 lr:0.000667 t:9.5s +tttg: c74/185 lr:0.000659 t:9.6s +tttg: c75/185 lr:0.000651 t:9.7s +tttg: c76/185 lr:0.000643 t:9.8s +tttg: c77/185 lr:0.000635 t:9.9s +tttg: c78/185 lr:0.000627 t:10.0s +tttg: c79/185 lr:0.000618 t:10.1s +tttg: c80/185 lr:0.000610 t:10.2s +tttg: c81/185 lr:0.000602 t:10.3s +tttg: c82/185 lr:0.000593 t:10.5s +tttg: c83/185 lr:0.000585 t:10.6s +tttg: c84/185 lr:0.000577 t:10.7s +tttg: c85/185 lr:0.000568 t:10.8s +tttg: c86/185 lr:0.000560 t:10.9s +tttg: c87/185 lr:0.000551 t:11.0s +tttg: c88/185 lr:0.000543 t:11.1s +tttg: c89/185 lr:0.000534 t:11.2s +tttg: c90/185 lr:0.000526 t:11.3s +tttg: c91/185 lr:0.000517 t:11.4s +tttg: c92/185 lr:0.000509 t:11.5s +tttg: c93/185 lr:0.000500 t:11.6s +tttg: c94/185 lr:0.000491 t:11.7s +tttg: c95/185 lr:0.000483 t:11.8s +tttg: c96/185 lr:0.000474 t:11.9s +tttg: c97/185 lr:0.000466 t:12.0s +tttg: c98/185 lr:0.000457 t:12.1s +tttg: c99/185 lr:0.000449 t:12.2s +tttg: c100/185 lr:0.000440 t:12.3s +tttg: c101/185 lr:0.000432 t:12.4s +tttg: c102/185 lr:0.000423 t:12.6s +tttg: c103/185 lr:0.000415 t:12.7s +tttg: c104/185 lr:0.000407 t:12.8s +tttg: c105/185 lr:0.000398 t:12.9s +tttg: c106/185 lr:0.000390 t:13.0s +tttg: c107/185 lr:0.000382 t:13.1s +tttg: c108/185 lr:0.000373 t:13.2s +tttg: c109/185 lr:0.000365 t:13.3s +tttg: c110/185 lr:0.000357 t:13.4s 
+tttg: c111/185 lr:0.000349 t:13.5s +tttg: c112/185 lr:0.000341 t:13.6s +tttg: c113/185 lr:0.000333 t:15.5s +tttg: c114/185 lr:0.000325 t:15.6s +tttg: c115/185 lr:0.000317 t:15.7s +tttg: c116/185 lr:0.000309 t:15.8s +tttg: c117/185 lr:0.000301 t:15.9s +tttg: c118/185 lr:0.000293 t:16.0s +tttg: c119/185 lr:0.000285 t:16.1s +tttg: c120/185 lr:0.000278 t:16.2s +tttg: c121/185 lr:0.000270 t:16.3s +tttg: c122/185 lr:0.000262 t:16.4s +tttg: c123/185 lr:0.000255 t:16.5s +tttg: c124/185 lr:0.000248 t:16.6s +tttg: c125/185 lr:0.000240 t:16.7s +tttg: c126/185 lr:0.000233 t:16.8s +tttg: c127/185 lr:0.000226 t:16.9s +tttg: c128/185 lr:0.000219 t:17.1s +tttg: c129/185 lr:0.000212 t:17.2s +tttg: c130/185 lr:0.000205 t:17.3s +tttg: c131/185 lr:0.000198 t:17.4s +tttg: c132/185 lr:0.000191 t:17.5s +tttg: c133/185 lr:0.000184 t:18.3s +tttg: c134/185 lr:0.000178 t:18.4s +tttg: c135/185 lr:0.000171 t:18.5s +tttg: c136/185 lr:0.000165 t:18.7s +tttg: c137/185 lr:0.000159 t:18.8s +tttg: c138/185 lr:0.000153 t:18.9s +tttg: c139/185 lr:0.000146 t:19.0s +tttg: c140/185 lr:0.000140 t:19.1s +tttg: c141/185 lr:0.000135 t:19.2s +tttg: c142/185 lr:0.000129 t:19.3s +tttg: c143/185 lr:0.000123 t:19.4s +tttg: c144/185 lr:0.000118 t:19.5s +tttg: c145/185 lr:0.000112 t:19.6s +tttg: c146/185 lr:0.000107 t:19.7s +tttg: c147/185 lr:0.000102 t:19.8s +tttg: c148/185 lr:0.000096 t:19.9s +tttg: c149/185 lr:0.000092 t:20.0s +tttg: c150/185 lr:0.000087 t:20.1s +tttg: c151/185 lr:0.000082 t:20.2s +tttg: c152/185 lr:0.000077 t:20.3s +tttg: c153/185 lr:0.000073 t:20.4s +tttg: c154/185 lr:0.000068 t:20.5s +tttg: c155/185 lr:0.000064 t:20.6s +tttg: c156/185 lr:0.000060 t:20.7s +tttg: c157/185 lr:0.000056 t:20.9s +tttg: c158/185 lr:0.000052 t:21.0s +tttg: c159/185 lr:0.000048 t:21.1s +tttg: c160/185 lr:0.000045 t:21.2s +tttg: c161/185 lr:0.000041 t:21.3s +tttg: c162/185 lr:0.000038 t:21.4s +tttg: c163/185 lr:0.000035 t:21.5s +tttg: c164/185 lr:0.000032 t:21.6s +tttg: c165/185 lr:0.000029 t:21.7s +tttg: c166/185 lr:0.000026 t:21.8s +tttg: c167/185 lr:0.000023 t:21.9s +tttg: c168/185 lr:0.000021 t:22.0s +tttg: c169/185 lr:0.000019 t:22.1s +tttg: c170/185 lr:0.000016 t:22.2s +tttg: c171/185 lr:0.000014 t:22.3s +tttg: c172/185 lr:0.000012 t:22.4s +tttg: c173/185 lr:0.000010 t:22.5s +tttg: c174/185 lr:0.000009 t:22.6s +tttg: c175/185 lr:0.000007 t:22.7s +tttg: c176/185 lr:0.000006 t:22.8s +tttg: c177/185 lr:0.000005 t:22.9s +tttg: c178/185 lr:0.000004 t:23.0s +tttg: c179/185 lr:0.000003 t:23.1s +tttg: c180/185 lr:0.000002 t:23.3s +tttg: c181/185 lr:0.000001 t:23.4s +tttg: c182/185 lr:0.000001 t:23.5s +tttg: c183/185 lr:0.000000 t:23.6s +tttg: c184/185 lr:0.000000 t:23.7s +ttpr: phase:2/3 t:367.2s +ttp: b746/782 bl:2.4164 bb:1.0648 rl:2.3107 rb:1.0653 dl:2884-2943 gd:0 +ttp: b745/782 bl:2.2362 bb:1.0238 rl:2.3027 rb:1.0609 dl:2842-2883 gd:0 +ttpp: phase:3/3 pd:2448 gd:2000 t:384.3s +tttg: c1/250 lr:0.001000 t:0.1s +tttg: c2/250 lr:0.001000 t:0.1s +tttg: c3/250 lr:0.001000 t:0.2s +tttg: c4/250 lr:0.001000 t:0.3s +tttg: c5/250 lr:0.000999 t:0.4s +tttg: c6/250 lr:0.000999 t:0.5s +tttg: c7/250 lr:0.000999 t:0.8s +tttg: c8/250 lr:0.000998 t:0.9s +tttg: c9/250 lr:0.000997 t:1.1s +tttg: c10/250 lr:0.000997 t:1.2s +tttg: c11/250 lr:0.000996 t:1.3s +tttg: c12/250 lr:0.000995 t:1.4s +tttg: c13/250 lr:0.000994 t:1.5s +tttg: c14/250 lr:0.000993 t:1.6s +tttg: c15/250 lr:0.000992 t:1.7s +tttg: c16/250 lr:0.000991 t:1.8s +tttg: c17/250 lr:0.000990 t:1.9s +tttg: c18/250 lr:0.000989 t:2.0s +tttg: c19/250 lr:0.000987 t:2.1s +tttg: c20/250 lr:0.000986 t:2.2s 
+tttg: c21/250 lr:0.000984 t:2.3s +tttg: c22/250 lr:0.000983 t:2.4s +tttg: c23/250 lr:0.000981 t:2.5s +tttg: c24/250 lr:0.000979 t:2.6s +tttg: c25/250 lr:0.000977 t:2.7s +tttg: c26/250 lr:0.000975 t:2.8s +tttg: c27/250 lr:0.000973 t:2.9s +tttg: c28/250 lr:0.000971 t:3.1s +tttg: c29/250 lr:0.000969 t:3.2s +tttg: c30/250 lr:0.000967 t:3.3s +tttg: c31/250 lr:0.000965 t:3.4s +tttg: c32/250 lr:0.000962 t:3.5s +tttg: c33/250 lr:0.000960 t:3.6s +tttg: c34/250 lr:0.000957 t:3.7s +tttg: c35/250 lr:0.000955 t:3.8s +tttg: c36/250 lr:0.000952 t:3.9s +tttg: c37/250 lr:0.000949 t:4.0s +tttg: c38/250 lr:0.000947 t:4.1s +tttg: c39/250 lr:0.000944 t:4.2s +tttg: c40/250 lr:0.000941 t:4.3s +tttg: c41/250 lr:0.000938 t:4.4s +tttg: c42/250 lr:0.000935 t:4.6s +tttg: c43/250 lr:0.000931 t:4.7s +tttg: c44/250 lr:0.000928 t:4.8s +tttg: c45/250 lr:0.000925 t:4.9s +tttg: c46/250 lr:0.000922 t:5.0s +tttg: c47/250 lr:0.000918 t:5.1s +tttg: c48/250 lr:0.000915 t:5.2s +tttg: c49/250 lr:0.000911 t:5.3s +tttg: c50/250 lr:0.000907 t:5.4s +tttg: c51/250 lr:0.000904 t:5.5s +tttg: c52/250 lr:0.000900 t:5.6s +tttg: c53/250 lr:0.000896 t:5.7s +tttg: c54/250 lr:0.000892 t:5.8s +tttg: c55/250 lr:0.000888 t:5.9s +tttg: c56/250 lr:0.000884 t:6.0s +tttg: c57/250 lr:0.000880 t:6.1s +tttg: c58/250 lr:0.000876 t:6.2s +tttg: c59/250 lr:0.000872 t:6.3s +tttg: c60/250 lr:0.000868 t:8.1s +tttg: c61/250 lr:0.000863 t:8.2s +tttg: c62/250 lr:0.000859 t:8.3s +tttg: c63/250 lr:0.000855 t:8.4s +tttg: c64/250 lr:0.000850 t:8.5s +tttg: c65/250 lr:0.000846 t:8.7s +tttg: c66/250 lr:0.000841 t:8.8s +tttg: c67/250 lr:0.000836 t:8.9s +tttg: c68/250 lr:0.000832 t:9.0s +tttg: c69/250 lr:0.000827 t:9.1s +tttg: c70/250 lr:0.000822 t:9.2s +tttg: c71/250 lr:0.000817 t:9.3s +tttg: c72/250 lr:0.000812 t:9.4s +tttg: c73/250 lr:0.000807 t:9.5s +tttg: c74/250 lr:0.000803 t:9.6s +tttg: c75/250 lr:0.000797 t:9.7s +tttg: c76/250 lr:0.000792 t:9.8s +tttg: c77/250 lr:0.000787 t:9.9s +tttg: c78/250 lr:0.000782 t:10.0s +tttg: c79/250 lr:0.000777 t:10.1s +tttg: c80/250 lr:0.000772 t:10.2s +tttg: c81/250 lr:0.000766 t:10.4s +tttg: c82/250 lr:0.000761 t:11.3s +tttg: c83/250 lr:0.000755 t:11.4s +tttg: c84/250 lr:0.000750 t:11.5s +tttg: c85/250 lr:0.000745 t:11.6s +tttg: c86/250 lr:0.000739 t:11.8s +tttg: c87/250 lr:0.000733 t:11.9s +tttg: c88/250 lr:0.000728 t:12.0s +tttg: c89/250 lr:0.000722 t:12.1s +tttg: c90/250 lr:0.000717 t:12.2s +tttg: c91/250 lr:0.000711 t:12.3s +tttg: c92/250 lr:0.000705 t:12.4s +tttg: c93/250 lr:0.000699 t:12.5s +tttg: c94/250 lr:0.000694 t:12.6s +tttg: c95/250 lr:0.000688 t:12.7s +tttg: c96/250 lr:0.000682 t:12.8s +tttg: c97/250 lr:0.000676 t:12.9s +tttg: c98/250 lr:0.000670 t:13.0s +tttg: c99/250 lr:0.000664 t:13.1s +tttg: c100/250 lr:0.000658 t:13.2s +tttg: c101/250 lr:0.000652 t:13.3s +tttg: c102/250 lr:0.000646 t:13.4s +tttg: c103/250 lr:0.000640 t:13.5s +tttg: c104/250 lr:0.000634 t:13.6s +tttg: c105/250 lr:0.000628 t:13.8s +tttg: c106/250 lr:0.000622 t:13.9s +tttg: c107/250 lr:0.000616 t:14.0s +tttg: c108/250 lr:0.000610 t:14.1s +tttg: c109/250 lr:0.000603 t:14.2s +tttg: c110/250 lr:0.000597 t:14.9s +tttg: c111/250 lr:0.000591 t:15.0s +tttg: c112/250 lr:0.000585 t:15.1s +tttg: c113/250 lr:0.000579 t:15.2s +tttg: c114/250 lr:0.000572 t:15.3s +tttg: c115/250 lr:0.000566 t:15.5s +tttg: c116/250 lr:0.000560 t:15.6s +tttg: c117/250 lr:0.000554 t:15.7s +tttg: c118/250 lr:0.000547 t:15.8s +tttg: c119/250 lr:0.000541 t:15.9s +tttg: c120/250 lr:0.000535 t:16.0s +tttg: c121/250 lr:0.000528 t:16.1s +tttg: c122/250 lr:0.000522 t:16.2s +tttg: c123/250 
lr:0.000516 t:16.3s +tttg: c124/250 lr:0.000509 t:16.4s +tttg: c125/250 lr:0.000503 t:16.5s +tttg: c126/250 lr:0.000497 t:16.6s +tttg: c127/250 lr:0.000491 t:16.7s +tttg: c128/250 lr:0.000484 t:16.8s +tttg: c129/250 lr:0.000478 t:16.9s +tttg: c130/250 lr:0.000472 t:17.0s +tttg: c131/250 lr:0.000465 t:17.1s +tttg: c132/250 lr:0.000459 t:17.3s +tttg: c133/250 lr:0.000453 t:17.4s +tttg: c134/250 lr:0.000446 t:17.5s +tttg: c135/250 lr:0.000440 t:17.6s +tttg: c136/250 lr:0.000434 t:17.7s +tttg: c137/250 lr:0.000428 t:17.8s +tttg: c138/250 lr:0.000421 t:17.9s +tttg: c139/250 lr:0.000415 t:18.0s +tttg: c140/250 lr:0.000409 t:18.1s +tttg: c141/250 lr:0.000403 t:18.2s +tttg: c142/250 lr:0.000397 t:18.3s +tttg: c143/250 lr:0.000390 t:18.4s +tttg: c144/250 lr:0.000384 t:18.5s +tttg: c145/250 lr:0.000378 t:18.6s +tttg: c146/250 lr:0.000372 t:18.7s +tttg: c147/250 lr:0.000366 t:18.8s +tttg: c148/250 lr:0.000360 t:18.9s +tttg: c149/250 lr:0.000354 t:19.0s +tttg: c150/250 lr:0.000348 t:19.1s +tttg: c151/250 lr:0.000342 t:19.2s +tttg: c152/250 lr:0.000336 t:19.3s +tttg: c153/250 lr:0.000330 t:19.5s +tttg: c154/250 lr:0.000324 t:19.6s +tttg: c155/250 lr:0.000318 t:19.7s +tttg: c156/250 lr:0.000312 t:19.8s +tttg: c157/250 lr:0.000306 t:19.9s +tttg: c158/250 lr:0.000301 t:20.0s +tttg: c159/250 lr:0.000295 t:20.1s +tttg: c160/250 lr:0.000289 t:20.2s +tttg: c161/250 lr:0.000283 t:20.3s +tttg: c162/250 lr:0.000278 t:20.4s +tttg: c163/250 lr:0.000272 t:20.5s +tttg: c164/250 lr:0.000267 t:20.6s +tttg: c165/250 lr:0.000261 t:20.7s +tttg: c166/250 lr:0.000255 t:20.8s +tttg: c167/250 lr:0.000250 t:20.9s +tttg: c168/250 lr:0.000245 t:21.0s +tttg: c169/250 lr:0.000239 t:21.2s +tttg: c170/250 lr:0.000234 t:21.3s +tttg: c171/250 lr:0.000228 t:21.4s +tttg: c172/250 lr:0.000223 t:21.5s +tttg: c173/250 lr:0.000218 t:21.6s +tttg: c174/250 lr:0.000213 t:21.7s +tttg: c175/250 lr:0.000208 t:21.8s +tttg: c176/250 lr:0.000203 t:21.9s +tttg: c177/250 lr:0.000197 t:22.0s +tttg: c178/250 lr:0.000193 t:22.1s +tttg: c179/250 lr:0.000188 t:22.2s +tttg: c180/250 lr:0.000183 t:22.3s +tttg: c181/250 lr:0.000178 t:22.4s +tttg: c182/250 lr:0.000173 t:22.5s +tttg: c183/250 lr:0.000168 t:22.6s +tttg: c184/250 lr:0.000164 t:22.7s +tttg: c185/250 lr:0.000159 t:22.8s +tttg: c186/250 lr:0.000154 t:23.0s +tttg: c187/250 lr:0.000150 t:23.1s +tttg: c188/250 lr:0.000145 t:23.2s +tttg: c189/250 lr:0.000141 t:23.3s +tttg: c190/250 lr:0.000137 t:23.4s +tttg: c191/250 lr:0.000132 t:23.5s +tttg: c192/250 lr:0.000128 t:23.6s +tttg: c193/250 lr:0.000124 t:23.7s +tttg: c194/250 lr:0.000120 t:23.8s +tttg: c195/250 lr:0.000116 t:23.9s +tttg: c196/250 lr:0.000112 t:24.0s +tttg: c197/250 lr:0.000108 t:24.1s +tttg: c198/250 lr:0.000104 t:24.2s +tttg: c199/250 lr:0.000100 t:24.3s +tttg: c200/250 lr:0.000096 t:24.4s +tttg: c201/250 lr:0.000093 t:24.5s +tttg: c202/250 lr:0.000089 t:24.6s +tttg: c203/250 lr:0.000085 t:24.7s +tttg: c204/250 lr:0.000082 t:24.8s +tttg: c205/250 lr:0.000078 t:24.9s +tttg: c206/250 lr:0.000075 t:25.0s +tttg: c207/250 lr:0.000072 t:25.1s +tttg: c208/250 lr:0.000069 t:25.2s +tttg: c209/250 lr:0.000065 t:25.3s +tttg: c210/250 lr:0.000062 t:25.4s +tttg: c211/250 lr:0.000059 t:25.5s +tttg: c212/250 lr:0.000056 t:25.6s +tttg: c213/250 lr:0.000053 t:25.7s +tttg: c214/250 lr:0.000051 t:25.8s +tttg: c215/250 lr:0.000048 t:25.9s +tttg: c216/250 lr:0.000045 t:26.0s +tttg: c217/250 lr:0.000043 t:26.1s +tttg: c218/250 lr:0.000040 t:26.3s +tttg: c219/250 lr:0.000038 t:26.4s +tttg: c220/250 lr:0.000035 t:26.5s +tttg: c221/250 lr:0.000033 t:26.6s +tttg: 
c222/250 lr:0.000031 t:26.7s +tttg: c223/250 lr:0.000029 t:26.8s +tttg: c224/250 lr:0.000027 t:26.9s +tttg: c225/250 lr:0.000025 t:27.0s +tttg: c226/250 lr:0.000023 t:27.1s +tttg: c227/250 lr:0.000021 t:27.2s +tttg: c228/250 lr:0.000019 t:27.3s +tttg: c229/250 lr:0.000017 t:27.4s +tttg: c230/250 lr:0.000016 t:27.5s +tttg: c231/250 lr:0.000014 t:27.6s +tttg: c232/250 lr:0.000013 t:27.7s +tttg: c233/250 lr:0.000011 t:27.8s +tttg: c234/250 lr:0.000010 t:27.9s +tttg: c235/250 lr:0.000009 t:28.0s +tttg: c236/250 lr:0.000008 t:28.1s +tttg: c237/250 lr:0.000007 t:28.2s +tttg: c238/250 lr:0.000006 t:28.3s +tttg: c239/250 lr:0.000005 t:28.4s +tttg: c240/250 lr:0.000004 t:28.5s +tttg: c241/250 lr:0.000003 t:28.6s +tttg: c242/250 lr:0.000003 t:28.7s +tttg: c243/250 lr:0.000002 t:28.9s +tttg: c244/250 lr:0.000001 t:29.0s +tttg: c245/250 lr:0.000001 t:29.1s +tttg: c246/250 lr:0.000001 t:29.2s +tttg: c247/250 lr:0.000000 t:29.3s +tttg: c248/250 lr:0.000000 t:29.4s +tttg: c249/250 lr:0.000000 t:29.5s +ttpr: phase:3/3 t:416.3s +ttp: b736/782 bl:2.2480 bb:1.0591 rl:2.2979 rb:1.0607 dl:2526-2550 gd:1 +ttp: b735/782 bl:2.3872 bb:1.0983 rl:2.3050 rb:1.0637 dl:2495-2526 gd:1 +ttp: b721/782 bl:2.3122 bb:1.0268 rl:2.3054 rb:1.0613 dl:2144-2163 gd:1 +ttp: b717/782 bl:2.2557 bb:1.0328 rl:2.3026 rb:1.0596 dl:2070-2088 gd:1 +ttp: b708/782 bl:2.3067 bb:1.0318 rl:2.3028 rb:1.0581 dl:1924-1937 gd:1 +ttp: b698/782 bl:2.2529 bb:1.0312 rl:2.3005 rb:1.0569 dl:1803-1814 gd:1 +ttp: b691/782 bl:2.4523 bb:1.0676 rl:2.3068 rb:1.0574 dl:1725-1737 gd:1 +ttp: b682/782 bl:2.3446 bb:1.0580 rl:2.3083 rb:1.0574 dl:1638-1646 gd:1 +ttp: b675/782 bl:2.3667 bb:1.0584 rl:2.3104 rb:1.0574 dl:1578-1586 gd:1 +ttp: b665/782 bl:2.3308 bb:1.0471 rl:2.3110 rb:1.0571 dl:1500-1507 gd:1 +ttp: b658/782 bl:2.2574 bb:1.0219 rl:2.3094 rb:1.0560 dl:1452-1459 gd:1 +ttp: b650/782 bl:2.3121 bb:1.0500 rl:2.3095 rb:1.0558 dl:1398-1406 gd:1 +ttp: b642/782 bl:2.3215 bb:1.0394 rl:2.3098 rb:1.0554 dl:1349-1356 gd:1 +ttp: b633/782 bl:2.2780 bb:1.0235 rl:2.3090 rb:1.0546 dl:1297-1302 gd:1 +ttp: b624/782 bl:2.3592 bb:1.0679 rl:2.3102 rb:1.0549 dl:1249-1255 gd:1 +ttp: b616/782 bl:2.4002 bb:1.0411 rl:2.3122 rb:1.0546 dl:1205-1211 gd:1 +ttp: b608/782 bl:2.3460 bb:1.0779 rl:2.3129 rb:1.0551 dl:1168-1172 gd:1 +ttp: b600/782 bl:2.2620 bb:1.0135 rl:2.3119 rb:1.0542 dl:1133-1137 gd:1 +ttp: b595/782 bl:2.3530 bb:1.0621 rl:2.3127 rb:1.0544 dl:1110-1115 gd:1 +ttp: b586/782 bl:2.2570 bb:1.0320 rl:2.3116 rb:1.0540 dl:1073-1076 gd:1 +ttp: b577/782 bl:2.2922 bb:1.0317 rl:2.3113 rb:1.0536 dl:1037-1041 gd:1 +ttp: b569/782 bl:2.3087 bb:1.0439 rl:2.3113 rb:1.0534 dl:1007-1010 gd:1 +ttp: b561/782 bl:2.2442 bb:1.0123 rl:2.3102 rb:1.0527 dl:979-983 gd:1 +ttp: b553/782 bl:2.2848 bb:1.0301 rl:2.3098 rb:1.0524 dl:952-955 gd:1 +ttp: b545/782 bl:2.3329 bb:1.0316 rl:2.3101 rb:1.0521 dl:927-930 gd:1 +ttp: b537/782 bl:2.3809 bb:1.0739 rl:2.3111 rb:1.0524 dl:902-905 gd:1 +ttp: b529/782 bl:2.3119 bb:1.0156 rl:2.3111 rb:1.0519 dl:878-882 gd:1 +ttp: b521/782 bl:2.3568 bb:1.0682 rl:2.3117 rb:1.0521 dl:854-858 gd:1 +ttp: b513/782 bl:2.3634 bb:1.0375 rl:2.3124 rb:1.0519 dl:832-835 gd:1 +ttp: b500/782 bl:2.3225 bb:1.0629 rl:2.3125 rb:1.0520 dl:796-799 gd:1 +ttp: b490/782 bl:2.3885 bb:1.0548 rl:2.3134 rb:1.0521 dl:771-773 gd:1 +ttp: b482/782 bl:2.3332 bb:1.0489 rl:2.3136 rb:1.0520 dl:752-754 gd:1 +ttp: b474/782 bl:2.3407 bb:1.0717 rl:2.3139 rb:1.0522 dl:733-735 gd:1 +ttp: b466/782 bl:2.3903 bb:1.0305 rl:2.3146 rb:1.0520 dl:714-717 gd:1 +ttp: b458/782 bl:2.2071 bb:1.0236 rl:2.3136 rb:1.0517 dl:697-700 
gd:1 +ttp: b450/782 bl:2.3669 bb:1.0375 rl:2.3141 rb:1.0516 dl:680-682 gd:1 +ttp: b442/782 bl:2.2549 bb:1.0290 rl:2.3135 rb:1.0514 dl:664-666 gd:1 +ttp: b434/782 bl:2.3689 bb:1.0517 rl:2.3140 rb:1.0514 dl:647-648 gd:1 +ttp: b426/782 bl:2.2544 bb:1.0432 rl:2.3135 rb:1.0513 dl:632-634 gd:1 +ttp: b418/782 bl:2.2852 bb:1.0376 rl:2.3133 rb:1.0512 dl:617-618 gd:1 +ttp: b410/782 bl:2.3228 bb:1.0199 rl:2.3134 rb:1.0509 dl:601-603 gd:1 +ttp: b402/782 bl:2.2406 bb:0.9971 rl:2.3128 rb:1.0505 dl:586-588 gd:1 +ttp: b394/782 bl:2.2488 bb:0.9901 rl:2.3123 rb:1.0501 dl:571-573 gd:1 +ttp: b386/782 bl:2.3407 bb:1.0992 rl:2.3125 rb:1.0504 dl:557-559 gd:1 +ttp: b377/782 bl:2.2269 bb:1.0202 rl:2.3119 rb:1.0502 dl:542-544 gd:1 +ttp: b368/782 bl:2.3656 bb:1.1017 rl:2.3123 rb:1.0505 dl:527-528 gd:1 +ttp: b360/782 bl:2.3111 bb:1.0812 rl:2.3123 rb:1.0507 dl:513-515 gd:1 +ttp: b352/782 bl:2.4255 bb:1.0976 rl:2.3130 rb:1.0510 dl:499-501 gd:1 +ttp: b344/782 bl:2.3746 bb:1.0583 rl:2.3134 rb:1.0511 dl:488-489 gd:1 +ttp: b313/782 bl:2.2866 bb:1.0774 rl:2.3132 rb:1.0512 dl:440-442 gd:1 +ttp: b305/782 bl:2.3406 bb:1.0879 rl:2.3134 rb:1.0514 dl:429-430 gd:1 +ttp: b296/782 bl:2.3843 bb:1.0978 rl:2.3137 rb:1.0516 dl:415-417 gd:1 +ttp: b286/782 bl:2.3777 bb:1.1091 rl:2.3141 rb:1.0519 dl:400-402 gd:1 +ttp: b279/782 bl:2.3160 bb:1.0943 rl:2.3141 rb:1.0521 dl:391-392 gd:1 +ttp: b272/782 bl:2.3667 bb:1.0932 rl:2.3143 rb:1.0523 dl:382-383 gd:1 +ttp: b265/782 bl:2.3699 bb:1.1027 rl:2.3146 rb:1.0525 dl:372-374 gd:1 +ttp: b243/782 bl:2.3602 bb:1.0830 rl:2.3147 rb:1.0526 dl:345-346 gd:1 +ttp: b234/782 bl:2.4219 bb:1.1476 rl:2.3152 rb:1.0530 dl:334-335 gd:1 +ttp: b226/782 bl:2.3706 bb:1.0995 rl:2.3154 rb:1.0532 dl:324-325 gd:1 +ttp: b215/782 bl:2.3956 bb:1.0981 rl:2.3157 rb:1.0533 dl:312-313 gd:1 +ttp: b205/782 bl:2.3235 bb:1.1125 rl:2.3157 rb:1.0536 dl:301-302 gd:1 +ttp: b197/782 bl:2.3620 bb:1.1165 rl:2.3159 rb:1.0538 dl:292-294 gd:1 +ttp: b188/782 bl:2.3497 bb:1.1034 rl:2.3160 rb:1.0539 dl:282-283 gd:1 +ttp: b179/782 bl:2.3625 bb:1.1263 rl:2.3161 rb:1.0541 dl:273-274 gd:1 +ttp: b171/782 bl:2.4757 bb:1.1416 rl:2.3166 rb:1.0544 dl:266-266 gd:1 +ttp: b160/782 bl:2.3845 bb:1.1135 rl:2.3168 rb:1.0546 dl:255-255 gd:1 +ttp: b150/782 bl:2.3287 bb:1.1057 rl:2.3169 rb:1.0547 dl:245-246 gd:1 +ttp: b142/782 bl:2.3897 bb:1.1123 rl:2.3171 rb:1.0549 dl:237-238 gd:1 +ttp: b134/782 bl:2.4242 bb:1.1368 rl:2.3174 rb:1.0551 dl:230-231 gd:1 +ttp: b126/782 bl:2.3995 bb:1.1431 rl:2.3176 rb:1.0553 dl:222-223 gd:1 +ttp: b118/782 bl:2.4664 bb:1.1258 rl:2.3179 rb:1.0555 dl:215-216 gd:1 +ttp: b109/782 bl:2.4918 bb:1.1875 rl:2.3184 rb:1.0558 dl:207-208 gd:1 +ttp: b103/782 bl:2.4594 bb:1.1838 rl:2.3187 rb:1.0561 dl:202-202 gd:1 +ttp: b93/782 bl:2.4711 bb:1.1852 rl:2.3190 rb:1.0563 dl:192-193 gd:1 +ttp: b85/782 bl:2.4965 bb:1.1956 rl:2.3194 rb:1.0566 dl:185-186 gd:1 +ttp: b77/782 bl:2.5144 bb:1.2350 rl:2.3198 rb:1.0569 dl:178-179 gd:1 +ttp: b68/782 bl:2.5108 bb:1.1718 rl:2.3202 rb:1.0572 dl:170-171 gd:1 +ttp: b57/782 bl:2.4643 bb:1.1604 rl:2.3204 rb:1.0573 dl:160-161 gd:1 +ttp: b49/782 bl:2.4542 bb:1.1670 rl:2.3207 rb:1.0575 dl:152-153 gd:1 +ttp: b39/782 bl:2.4482 bb:1.1851 rl:2.3209 rb:1.0577 dl:142-143 gd:1 +ttp: b30/782 bl:2.5960 bb:1.2657 rl:2.3213 rb:1.0580 dl:133-134 gd:1 +ttp: b20/782 bl:2.5784 bb:1.2347 rl:2.3216 rb:1.0582 dl:122-123 gd:1 +ttp: b10/782 bl:2.6238 bb:1.1755 rl:2.3220 rb:1.0584 dl:107-109 gd:1 +ttp: b1/782 bl:2.8421 bb:1.1831 rl:2.3224 rb:1.0585 dl:27-83 gd:1 +quantized_ttt_phased val_loss:2.32247803 val_bpb:1.06128183 eval_time:519546ms 
+total_eval_time:519.5s From b34643195229234effa4c4006db24bb12afd1c12 Mon Sep 17 00:00:00 2001 From: aquariouseworkman Date: Wed, 29 Apr 2026 02:43:10 -0400 Subject: [PATCH 2/2] =?UTF-8?q?Record:=20base=20+=20AWQ-lite=20mixed-preci?= =?UTF-8?q?sion=20GPTQ=20=E2=80=94=20val=5Fbpb=201.06086=20(3-seed=20mean)?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Applies activation-aware mixed-precision GPTQ (from PR #1908 / romeerp) on top of codemath3000 PR #1855 stack. ## Results | Seed | val_bpb (post-TTT) | artifact bytes | steps | eval time | |------|--------------------|----------------|-------|-----------| | 42 | 1.06118 | 15,978,503 | 4989 | 392.8s | | 314 | 1.06005 | 15,976,469 | 4986 | 395.8s | | 1234 | 1.06135 | 15,976,673 | 4977 | 395.5s | | **mean** | **1.06086** | — | — | — | 3-seed std: 0.00069. Beats codemath3000 PR #1855 (1.06108) by 0.00022 BPB. ## Technique Training is identical to PR #1855. The only change is post-training quantization: **AWQ-lite (activation-aware GPTQ):** 1. Collect per-input-channel activation RMS during GPTQ calibration 2. Score column groups: `saliency = act_rms * mean(abs(weight))` 3. Select top-1 most salient 64-column group per matrix 4. Quantize that group at int8 inside the same full-tensor GPTQ solve (rest stays int6) Env vars: `AWQ_LITE_ENABLED=1 AWQ_LITE_BITS=8 AWQ_LITE_GROUP_TOP_K=1 AWQ_LITE_GROUP_SIZE=64` ## Setup 1. `pip install -r requirements.txt` 2. `apt-get install -y lrzip` 3. Install FA3: `pip install --no-deps flash_attn_3 --find-links https://windreamer.github.io/flash-attention3-wheels/cu128_torch291/` 4. Run `prepare_caseops_data.py` to build the dataset 5. `AWQ_LITE_ENABLED=1 AWQ_LITE_BITS=8 AWQ_LITE_GROUP_TOP_K=1 AWQ_LITE_GROUP_SIZE=64 torchrun --standalone --nproc_per_node=8 train_gpt.py` ## Environment - 8xH100 80GB SXM (RunPod) - PyTorch 2.9.1+cu128 - FlashAttention 3.0.0 - Triton 3.5.1 --- .../README.md | 39 + .../lossless_caps.py | 833 ++++ .../prepare_caseops_data.py | 177 + .../requirements.txt | 13 + .../submission.json | 25 + ...pe_lossless_caps_caseops_v1_reserved.model | Bin 0 -> 366510 bytes .../train_gpt.py | 3998 +++++++++++++++++ .../train_seed1234.log | 947 ++++ .../train_seed314.log | 951 ++++ .../train_seed42.log | 950 ++++ 10 files changed, 7933 insertions(+) create mode 100644 records/track_10min_16mb/2020-04-29_AWQ_lite_mixedprecision_GPTQ/README.md create mode 100644 records/track_10min_16mb/2020-04-29_AWQ_lite_mixedprecision_GPTQ/lossless_caps.py create mode 100644 records/track_10min_16mb/2020-04-29_AWQ_lite_mixedprecision_GPTQ/prepare_caseops_data.py create mode 100644 records/track_10min_16mb/2020-04-29_AWQ_lite_mixedprecision_GPTQ/requirements.txt create mode 100644 records/track_10min_16mb/2020-04-29_AWQ_lite_mixedprecision_GPTQ/submission.json create mode 100644 records/track_10min_16mb/2020-04-29_AWQ_lite_mixedprecision_GPTQ/tokenizers/fineweb_8192_bpe_lossless_caps_caseops_v1_reserved.model create mode 100644 records/track_10min_16mb/2020-04-29_AWQ_lite_mixedprecision_GPTQ/train_gpt.py create mode 100644 records/track_10min_16mb/2020-04-29_AWQ_lite_mixedprecision_GPTQ/train_seed1234.log create mode 100644 records/track_10min_16mb/2020-04-29_AWQ_lite_mixedprecision_GPTQ/train_seed314.log create mode 100644 records/track_10min_16mb/2020-04-29_AWQ_lite_mixedprecision_GPTQ/train_seed42.log diff --git a/records/track_10min_16mb/2020-04-29_AWQ_lite_mixedprecision_GPTQ/README.md b/records/track_10min_16mb/2020-04-29_AWQ_lite_mixedprecision_GPTQ/README.md new file mode 
100644 index 0000000000..f8c89c20f6 --- /dev/null +++ b/records/track_10min_16mb/2020-04-29_AWQ_lite_mixedprecision_GPTQ/README.md @@ -0,0 +1,39 @@ +# PR #1855 base + AWQ-lite mixed-precision GPTQ — val_bpb 1.06086 (3-seed mean) + +Applies activation-aware mixed-precision GPTQ (from PR #1908 / romeerp) on top of codemath3000 PR #1855 stack. + +## Results + +| Seed | val_bpb (post-TTT) | artifact bytes | steps | eval time | +|------|--------------------|----------------|-------|-----------| +| 42 | 1.06118 | 15,978,503 | 4989 | 392.8s | +| 314 | 1.06005 | 15,976,469 | 4986 | 395.8s | +| 1234 | 1.06135 | 15,976,673 | 4977 | 395.5s | +| **mean** | **1.06086** | — | — | — | + +3-seed std: 0.00069. Beats codemath3000 PR #1855 (1.06108) by 0.00022 BPB. + +## Technique + +Training is identical to PR #1855. The only change is post-training quantization: + +**AWQ-lite (activation-aware GPTQ):** +1. Collect per-input-channel activation RMS during GPTQ calibration +2. Score column groups: `saliency = act_rms * mean(abs(weight))` +3. Select top-1 most salient 64-column group per matrix +4. Quantize that group at int8 inside the same full-tensor GPTQ solve (rest stays int6) + +Env vars: `AWQ_LITE_ENABLED=1 AWQ_LITE_BITS=8 AWQ_LITE_GROUP_TOP_K=1 AWQ_LITE_GROUP_SIZE=64` + +## Setup +1. `pip install -r requirements.txt` +2. `apt-get install -y lrzip` +3. Install FA3: `pip install --no-deps flash_attn_3 --find-links https://windreamer.github.io/flash-attention3-wheels/cu128_torch291/` +4. Run `prepare_caseops_data.py` to build the dataset +5. `AWQ_LITE_ENABLED=1 AWQ_LITE_BITS=8 AWQ_LITE_GROUP_TOP_K=1 AWQ_LITE_GROUP_SIZE=64 torchrun --standalone --nproc_per_node=8 train_gpt.py` + +## Environment +- 8xH100 80GB SXM (RunPod) +- PyTorch 2.9.1+cu128 +- FlashAttention 3.0.0 +- Triton 3.5.1 diff --git a/records/track_10min_16mb/2020-04-29_AWQ_lite_mixedprecision_GPTQ/lossless_caps.py b/records/track_10min_16mb/2020-04-29_AWQ_lite_mixedprecision_GPTQ/lossless_caps.py new file mode 100644 index 0000000000..98e472f824 --- /dev/null +++ b/records/track_10min_16mb/2020-04-29_AWQ_lite_mixedprecision_GPTQ/lossless_caps.py @@ -0,0 +1,833 @@ +"""Lossless capitalization pre-encoding helpers. + +This module provides a narrow, reversible transform that only touches +ASCII capital letters `A-Z`. Each uppercase ASCII letter is rewritten as +``, where `sentinel` is a private-use Unicode +character that is escaped by doubling if it appears literally in the +input text. + +Example with the default sentinel `\\uE000`: + + "The NASA Launch" -> "\\uE000the \\uE000n\\uE000a\\uE000s\\uE000a \\uE000launch" + +The transform is intentionally simple for v1: + +- lowercase ASCII letters are unchanged +- uppercase ASCII letters become sentinel + lowercase letter +- non-ASCII characters are left untouched +- literal sentinel characters are escaped as sentinel + sentinel + +This makes the transform exactly invertible while allowing a downstream +tokenizer to reuse lowercase subwords across case variants. 
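+
+The word-level CaseOps variant used by this record (`lossless_caps_v2`,
+aliased as `lossless_caps_caseops_v1` below) marks whole words rather than
+single letters, using TITLE (`\\uE001`), ALLCAPS (`\\uE002`), CAPNEXT
+(`\\uE003`) and ESC (`\\uE004`):
+
+    "The NASA Launch" -> "\\uE001the \\uE002nasa \\uE001launch"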
+""" + +from __future__ import annotations + +import json +from pathlib import Path +from typing import Callable, Iterable + +LOSSLESS_CAPS_V1 = "lossless_caps_v1" +LOSSLESS_CAPS_V2 = "lossless_caps_v2" +LOSSLESS_CAPS_V3 = "lossless_caps_v3" +LOSSLESS_CAPS_V4 = "lossless_caps_v4" +LOSSLESS_CAPS_V5 = "lossless_caps_v5" +LOSSLESS_CAPS_V6 = "lossless_caps_v6" +LOSSLESS_CAPS_V7 = "lossless_caps_v7" +LOSSLESS_CAPS_CASEOPS_V1 = "lossless_caps_caseops_v1" +IDENTITY = "identity" +DEFAULT_SENTINEL = "\uE000" +DEFAULT_V2_TITLE = "\uE001" +DEFAULT_V2_ALLCAPS = "\uE002" +DEFAULT_V2_CAPNEXT = "\uE003" +DEFAULT_V2_ESC = "\uE004" +DEFAULT_V5_TITLE_MIN_LEN = 7 +DEFAULT_V6_ALLCAPS_MIN_LEN = 3 +DEFAULT_V7_ALLCAPS_MIN_LEN = 4 + + +class LosslessCapsError(ValueError): + """Raised when a transformed string is malformed.""" + + +def _is_ascii_upper(ch: str) -> bool: + return "A" <= ch <= "Z" + + +def _is_ascii_lower(ch: str) -> bool: + return "a" <= ch <= "z" + + +def _is_ascii_alpha(ch: str) -> bool: + return _is_ascii_lower(ch) or _is_ascii_upper(ch) + + +def _validate_distinct_single_chars(*chars: str) -> None: + if any(len(ch) != 1 for ch in chars): + raise ValueError("all control characters must be exactly one character") + if len(set(chars)) != len(chars): + raise ValueError("control characters must be distinct") + + +def encode_lossless_caps_v1(text: str, *, sentinel: str = DEFAULT_SENTINEL) -> str: + """Encode ASCII capitals reversibly using a one-character sentinel.""" + if len(sentinel) != 1: + raise ValueError("sentinel must be exactly one character") + out: list[str] = [] + for ch in text: + if ch == sentinel: + out.append(sentinel) + out.append(sentinel) + elif _is_ascii_upper(ch): + out.append(sentinel) + out.append(ch.lower()) + else: + out.append(ch) + return "".join(out) + + +def decode_lossless_caps_v1(text: str, *, sentinel: str = DEFAULT_SENTINEL) -> str: + """Decode the `lossless_caps_v1` transform back to the original text.""" + if len(sentinel) != 1: + raise ValueError("sentinel must be exactly one character") + out: list[str] = [] + i = 0 + n = len(text) + while i < n: + ch = text[i] + if ch != sentinel: + out.append(ch) + i += 1 + continue + if i + 1 >= n: + raise LosslessCapsError("dangling capitalization sentinel at end of string") + nxt = text[i + 1] + if nxt == sentinel: + out.append(sentinel) + elif _is_ascii_lower(nxt): + out.append(nxt.upper()) + else: + raise LosslessCapsError( + f"invalid sentinel escape sequence {sentinel + nxt!r}; " + "expected doubled sentinel or sentinel + lowercase ASCII letter" + ) + i += 2 + return "".join(out) + + +def encode_lossless_caps_v2( + text: str, + *, + title: str = DEFAULT_V2_TITLE, + allcaps: str = DEFAULT_V2_ALLCAPS, + capnext: str = DEFAULT_V2_CAPNEXT, + esc: str = DEFAULT_V2_ESC, +) -> str: + """Encode ASCII word capitalization with cheap word-level markers. 
+ + Rules over maximal ASCII alphabetic runs: + - lowercase words stay unchanged + - TitleCase words become `title + lowercase(word)` + - ALLCAPS words become `allcaps + lowercase(word)` + - mixed-case words use: + - optional `title` when the first letter is uppercase + - `capnext + lowercase(letter)` for subsequent uppercase letters + - literal control characters are escaped as `esc + literal` + """ + _validate_distinct_single_chars(title, allcaps, capnext, esc) + controls = {title, allcaps, capnext, esc} + out: list[str] = [] + i = 0 + n = len(text) + while i < n: + ch = text[i] + if ch in controls: + out.append(esc) + out.append(ch) + i += 1 + continue + if not _is_ascii_alpha(ch): + out.append(ch) + i += 1 + continue + + j = i + 1 + while j < n and _is_ascii_alpha(text[j]): + j += 1 + word = text[i:j] + lower_word = word.lower() + + if word.islower(): + out.append(word) + elif len(word) >= 2 and word.isupper(): + out.append(allcaps) + out.append(lower_word) + elif _is_ascii_upper(word[0]) and word[1:].islower(): + out.append(title) + out.append(lower_word) + else: + if _is_ascii_upper(word[0]): + out.append(title) + out.append(lower_word[0]) + for orig_ch, lower_ch in zip(word[1:], lower_word[1:], strict=True): + if _is_ascii_upper(orig_ch): + out.append(capnext) + out.append(lower_ch) + i = j + return "".join(out) + + +def decode_lossless_caps_v2( + text: str, + *, + title: str = DEFAULT_V2_TITLE, + allcaps: str = DEFAULT_V2_ALLCAPS, + capnext: str = DEFAULT_V2_CAPNEXT, + esc: str = DEFAULT_V2_ESC, +) -> str: + """Decode the `lossless_caps_v2` transform back to the original text.""" + _validate_distinct_single_chars(title, allcaps, capnext, esc) + out: list[str] = [] + pending_escape = False + pending_word_mode: str | None = None + active_allcaps = False + pending_capnext = False + in_ascii_word = False + + for ch in text: + if pending_escape: + if pending_word_mode is not None and not _is_ascii_alpha(ch): + raise LosslessCapsError("escaped control char cannot satisfy pending word capitalization mode") + out.append(ch) + pending_escape = False + if _is_ascii_alpha(ch): + in_ascii_word = True + else: + in_ascii_word = False + active_allcaps = False + continue + + if ch == esc: + pending_escape = True + continue + if ch == title: + if pending_word_mode is not None or in_ascii_word or pending_capnext: + raise LosslessCapsError("invalid title marker placement") + pending_word_mode = "title" + continue + if ch == allcaps: + if pending_word_mode is not None or in_ascii_word or pending_capnext: + raise LosslessCapsError("invalid allcaps marker placement") + pending_word_mode = "allcaps" + continue + if ch == capnext: + if pending_capnext: + raise LosslessCapsError("duplicate capnext marker") + pending_capnext = True + continue + + if _is_ascii_alpha(ch): + at_word_start = not in_ascii_word + if at_word_start: + if pending_word_mode == "allcaps": + out.append(ch.upper()) + active_allcaps = True + elif pending_word_mode == "title": + out.append(ch.upper()) + elif pending_capnext: + out.append(ch.upper()) + else: + out.append(ch) + pending_word_mode = None + pending_capnext = False + in_ascii_word = True + continue + + if pending_word_mode is not None: + raise LosslessCapsError("word capitalization marker leaked into the middle of a word") + if active_allcaps: + out.append(ch.upper()) + elif pending_capnext: + out.append(ch.upper()) + else: + out.append(ch) + pending_capnext = False + continue + + if pending_word_mode is not None or pending_capnext: + raise LosslessCapsError("capitalization 
marker not followed by an ASCII letter") + out.append(ch) + in_ascii_word = False + active_allcaps = False + + if pending_escape: + raise LosslessCapsError("dangling escape marker at end of string") + if pending_word_mode is not None or pending_capnext: + raise LosslessCapsError("dangling capitalization marker at end of string") + return "".join(out) + + +def encode_lossless_caps_v3( + text: str, + *, + title: str = DEFAULT_V2_TITLE, + allcaps: str = DEFAULT_V2_ALLCAPS, + esc: str = DEFAULT_V2_ESC, +) -> str: + """Encode only common word-level capitalization patterns. + + Rules over maximal ASCII alphabetic runs: + - lowercase words stay unchanged + - TitleCase words become `title + lowercase(word)` + - ALLCAPS words become `allcaps + lowercase(word)` + - all other mixed-case words are left unchanged + - literal control characters are escaped as `esc + literal` + """ + _validate_distinct_single_chars(title, allcaps, esc) + controls = {title, allcaps, esc} + out: list[str] = [] + i = 0 + n = len(text) + while i < n: + ch = text[i] + if ch in controls: + out.append(esc) + out.append(ch) + i += 1 + continue + if not _is_ascii_alpha(ch): + out.append(ch) + i += 1 + continue + + j = i + 1 + while j < n and _is_ascii_alpha(text[j]): + j += 1 + word = text[i:j] + + if word.islower(): + out.append(word) + elif len(word) >= 2 and word.isupper(): + out.append(allcaps) + out.append(word.lower()) + elif _is_ascii_upper(word[0]) and word[1:].islower(): + out.append(title) + out.append(word.lower()) + else: + out.append(word) + i = j + return "".join(out) + + +def decode_lossless_caps_v3( + text: str, + *, + title: str = DEFAULT_V2_TITLE, + allcaps: str = DEFAULT_V2_ALLCAPS, + esc: str = DEFAULT_V2_ESC, +) -> str: + """Decode the `lossless_caps_v3` transform back to the original text.""" + _validate_distinct_single_chars(title, allcaps, esc) + out: list[str] = [] + pending_escape = False + pending_word_mode: str | None = None + active_allcaps = False + in_ascii_word = False + + for ch in text: + if pending_escape: + if pending_word_mode is not None and not _is_ascii_alpha(ch): + raise LosslessCapsError("escaped control char cannot satisfy pending word capitalization mode") + out.append(ch) + pending_escape = False + if _is_ascii_alpha(ch): + in_ascii_word = True + else: + in_ascii_word = False + active_allcaps = False + continue + + if ch == esc: + pending_escape = True + continue + if ch == title: + if pending_word_mode is not None or in_ascii_word: + raise LosslessCapsError("invalid title marker placement") + pending_word_mode = "title" + continue + if ch == allcaps: + if pending_word_mode is not None or in_ascii_word: + raise LosslessCapsError("invalid allcaps marker placement") + pending_word_mode = "allcaps" + continue + + if _is_ascii_alpha(ch): + at_word_start = not in_ascii_word + if at_word_start: + if pending_word_mode == "allcaps": + out.append(ch.upper()) + active_allcaps = True + elif pending_word_mode == "title": + out.append(ch.upper()) + else: + out.append(ch) + pending_word_mode = None + in_ascii_word = True + continue + + if pending_word_mode is not None: + raise LosslessCapsError("word capitalization marker leaked into the middle of a word") + out.append(ch.upper() if active_allcaps else ch) + continue + + if pending_word_mode is not None: + raise LosslessCapsError("capitalization marker not followed by an ASCII letter") + out.append(ch) + in_ascii_word = False + active_allcaps = False + + if pending_escape: + raise LosslessCapsError("dangling escape marker at end of string") + if 
pending_word_mode is not None: + raise LosslessCapsError("dangling capitalization marker at end of string") + return "".join(out) + + +def encode_lossless_caps_v4( + text: str, + *, + allcaps: str = DEFAULT_V2_ALLCAPS, + esc: str = DEFAULT_V2_ESC, +) -> str: + """Encode only ALLCAPS ASCII words, leaving all other case untouched.""" + _validate_distinct_single_chars(allcaps, esc) + controls = {allcaps, esc} + out: list[str] = [] + i = 0 + n = len(text) + while i < n: + ch = text[i] + if ch in controls: + out.append(esc) + out.append(ch) + i += 1 + continue + if not _is_ascii_alpha(ch): + out.append(ch) + i += 1 + continue + j = i + 1 + while j < n and _is_ascii_alpha(text[j]): + j += 1 + word = text[i:j] + if len(word) >= 2 and word.isupper(): + out.append(allcaps) + out.append(word.lower()) + else: + out.append(word) + i = j + return "".join(out) + + +def decode_lossless_caps_v4( + text: str, + *, + allcaps: str = DEFAULT_V2_ALLCAPS, + esc: str = DEFAULT_V2_ESC, +) -> str: + """Decode the `lossless_caps_v4` transform back to the original text.""" + _validate_distinct_single_chars(allcaps, esc) + out: list[str] = [] + pending_escape = False + pending_allcaps = False + in_ascii_word = False + active_allcaps = False + + for ch in text: + if pending_escape: + if pending_allcaps and not _is_ascii_alpha(ch): + raise LosslessCapsError("escaped control char cannot satisfy pending allcaps mode") + out.append(ch) + pending_escape = False + if _is_ascii_alpha(ch): + in_ascii_word = True + else: + in_ascii_word = False + active_allcaps = False + continue + + if ch == esc: + pending_escape = True + continue + if ch == allcaps: + if pending_allcaps or in_ascii_word: + raise LosslessCapsError("invalid allcaps marker placement") + pending_allcaps = True + continue + + if _is_ascii_alpha(ch): + if not in_ascii_word: + active_allcaps = pending_allcaps + pending_allcaps = False + in_ascii_word = True + out.append(ch.upper() if active_allcaps else ch) + continue + + if pending_allcaps: + raise LosslessCapsError("allcaps marker not followed by an ASCII letter") + out.append(ch) + in_ascii_word = False + active_allcaps = False + + if pending_escape: + raise LosslessCapsError("dangling escape marker at end of string") + if pending_allcaps: + raise LosslessCapsError("dangling allcaps marker at end of string") + return "".join(out) + + +def encode_lossless_caps_v5( + text: str, + *, + title: str = DEFAULT_V2_TITLE, + allcaps: str = DEFAULT_V2_ALLCAPS, + esc: str = DEFAULT_V2_ESC, + title_min_len: int = DEFAULT_V5_TITLE_MIN_LEN, +) -> str: + """Encode ALLCAPS words and only sufficiently long TitleCase words.""" + _validate_distinct_single_chars(title, allcaps, esc) + controls = {title, allcaps, esc} + out: list[str] = [] + i = 0 + n = len(text) + while i < n: + ch = text[i] + if ch in controls: + out.append(esc) + out.append(ch) + i += 1 + continue + if not _is_ascii_alpha(ch): + out.append(ch) + i += 1 + continue + j = i + 1 + while j < n and _is_ascii_alpha(text[j]): + j += 1 + word = text[i:j] + if len(word) >= 2 and word.isupper(): + out.append(allcaps) + out.append(word.lower()) + elif len(word) >= title_min_len and _is_ascii_upper(word[0]) and word[1:].islower(): + out.append(title) + out.append(word.lower()) + else: + out.append(word) + i = j + return "".join(out) + + +def decode_lossless_caps_v5( + text: str, + *, + title: str = DEFAULT_V2_TITLE, + allcaps: str = DEFAULT_V2_ALLCAPS, + esc: str = DEFAULT_V2_ESC, +) -> str: + """Decode the `lossless_caps_v5` transform back to the original text.""" + return 
decode_lossless_caps_v3(text, title=title, allcaps=allcaps, esc=esc) + + +def encode_lossless_caps_v6( + text: str, + *, + allcaps: str = DEFAULT_V2_ALLCAPS, + esc: str = DEFAULT_V2_ESC, + allcaps_min_len: int = DEFAULT_V6_ALLCAPS_MIN_LEN, +) -> str: + """Encode only ALLCAPS words with length >= allcaps_min_len.""" + _validate_distinct_single_chars(allcaps, esc) + controls = {allcaps, esc} + out: list[str] = [] + i = 0 + n = len(text) + while i < n: + ch = text[i] + if ch in controls: + out.append(esc) + out.append(ch) + i += 1 + continue + if not _is_ascii_alpha(ch): + out.append(ch) + i += 1 + continue + j = i + 1 + while j < n and _is_ascii_alpha(text[j]): + j += 1 + word = text[i:j] + if len(word) >= allcaps_min_len and word.isupper(): + out.append(allcaps) + out.append(word.lower()) + else: + out.append(word) + i = j + return "".join(out) + + +def decode_lossless_caps_v6( + text: str, + *, + allcaps: str = DEFAULT_V2_ALLCAPS, + esc: str = DEFAULT_V2_ESC, +) -> str: + """Decode the `lossless_caps_v6` transform back to the original text.""" + return decode_lossless_caps_v4(text, allcaps=allcaps, esc=esc) + + +def encode_lossless_caps_v7( + text: str, + *, + allcaps: str = DEFAULT_V2_ALLCAPS, + esc: str = DEFAULT_V2_ESC, + allcaps_min_len: int = DEFAULT_V7_ALLCAPS_MIN_LEN, +) -> str: + """Encode only ALLCAPS words with length >= 4.""" + return encode_lossless_caps_v6( + text, + allcaps=allcaps, + esc=esc, + allcaps_min_len=allcaps_min_len, + ) + + +def decode_lossless_caps_v7( + text: str, + *, + allcaps: str = DEFAULT_V2_ALLCAPS, + esc: str = DEFAULT_V2_ESC, +) -> str: + """Decode the `lossless_caps_v7` transform back to the original text.""" + return decode_lossless_caps_v6(text, allcaps=allcaps, esc=esc) + + +def get_text_transform(name: str | None) -> Callable[[str], str]: + """Return the forward text transform for the given config name.""" + normalized = IDENTITY if name in {None, "", IDENTITY} else str(name) + if normalized == IDENTITY: + return lambda text: text + if normalized == LOSSLESS_CAPS_V1: + return encode_lossless_caps_v1 + if normalized == LOSSLESS_CAPS_V2: + return encode_lossless_caps_v2 + if normalized == LOSSLESS_CAPS_V3: + return encode_lossless_caps_v3 + if normalized == LOSSLESS_CAPS_V4: + return encode_lossless_caps_v4 + if normalized == LOSSLESS_CAPS_V5: + return encode_lossless_caps_v5 + if normalized == LOSSLESS_CAPS_V6: + return encode_lossless_caps_v6 + if normalized == LOSSLESS_CAPS_V7: + return encode_lossless_caps_v7 + if normalized == LOSSLESS_CAPS_CASEOPS_V1: + return encode_lossless_caps_v2 + raise ValueError(f"unsupported text_transform={name!r}") + + +def get_text_inverse_transform(name: str | None) -> Callable[[str], str]: + """Return the inverse transform for the given config name.""" + normalized = IDENTITY if name in {None, "", IDENTITY} else str(name) + if normalized == IDENTITY: + return lambda text: text + if normalized == LOSSLESS_CAPS_V1: + return decode_lossless_caps_v1 + if normalized == LOSSLESS_CAPS_V2: + return decode_lossless_caps_v2 + if normalized == LOSSLESS_CAPS_V3: + return decode_lossless_caps_v3 + if normalized == LOSSLESS_CAPS_V4: + return decode_lossless_caps_v4 + if normalized == LOSSLESS_CAPS_V5: + return decode_lossless_caps_v5 + if normalized == LOSSLESS_CAPS_V6: + return decode_lossless_caps_v6 + if normalized == LOSSLESS_CAPS_V7: + return decode_lossless_caps_v7 + if normalized == LOSSLESS_CAPS_CASEOPS_V1: + return decode_lossless_caps_v2 + raise ValueError(f"unsupported text_transform={name!r}") + + +def 
normalize_text_transform_name(name: str | None) -> str: + """Normalize empty/None transform names to the identity transform.""" + return IDENTITY if name in {None, "", IDENTITY} else str(name) + + +def get_text_transform_control_symbols(name: str | None) -> list[str]: + """Return reserved control symbols used by a transform, if any.""" + normalized = normalize_text_transform_name(name) + if normalized == IDENTITY: + return [] + if normalized == LOSSLESS_CAPS_V1: + return [DEFAULT_SENTINEL] + if normalized == LOSSLESS_CAPS_V2: + return [DEFAULT_V2_TITLE, DEFAULT_V2_ALLCAPS, DEFAULT_V2_CAPNEXT, DEFAULT_V2_ESC] + if normalized == LOSSLESS_CAPS_CASEOPS_V1: + return [DEFAULT_V2_TITLE, DEFAULT_V2_ALLCAPS, DEFAULT_V2_CAPNEXT, DEFAULT_V2_ESC] + if normalized in {LOSSLESS_CAPS_V3, LOSSLESS_CAPS_V5}: + return [DEFAULT_V2_TITLE, DEFAULT_V2_ALLCAPS, DEFAULT_V2_ESC] + if normalized in {LOSSLESS_CAPS_V4, LOSSLESS_CAPS_V6, LOSSLESS_CAPS_V7}: + return [DEFAULT_V2_ALLCAPS, DEFAULT_V2_ESC] + raise ValueError(f"unsupported text_transform={name!r}") + + +def infer_text_transform_from_manifest(tokenizer_path: str | Path) -> str: + """Best-effort lookup of a tokenizer's text transform from a local manifest.""" + tokenizer_path = Path(tokenizer_path).expanduser().resolve() + manifest_candidates = [ + tokenizer_path.parent.parent / "manifest.json", + tokenizer_path.parent / "manifest.json", + ] + for manifest_path in manifest_candidates: + if not manifest_path.is_file(): + continue + try: + payload = json.loads(manifest_path.read_text(encoding="utf-8")) + except (OSError, json.JSONDecodeError): + continue + tokenizers = payload.get("tokenizers") + if not isinstance(tokenizers, list): + continue + for tokenizer_meta in tokenizers: + if not isinstance(tokenizer_meta, dict): + continue + model_path = tokenizer_meta.get("model_path") or tokenizer_meta.get("path") + if not model_path: + continue + candidate = (manifest_path.parent / str(model_path)).resolve() + if candidate == tokenizer_path: + return normalize_text_transform_name(tokenizer_meta.get("text_transform")) + return IDENTITY + + +def surface_piece_original_byte_counts( + surfaces: Iterable[str], + *, + text_transform_name: str | None = None, + sentinel: str = DEFAULT_SENTINEL, +) -> list[int]: + """Return exact original UTF-8 byte counts contributed by each surface piece. + + `surfaces` must be the exact decoded text fragments emitted by SentencePiece + in order, e.g. `piece.surface` from `encode_as_immutable_proto`. 
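+
+    Sentinel/marker characters contribute 0 original bytes (an escaped
+    literal sentinel counts its own UTF-8 length); each ASCII letter counts
+    as 1 byte of the pre-transform text. E.g. under `lossless_caps_v1`, the
+    pieces `["\\uE000the", " \\uE000nasa"]` (originally "The Nasa") yield
+    `[3, 5]`.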
+ """ + normalized = normalize_text_transform_name(text_transform_name) + if normalized == IDENTITY: + return [len(surface.encode("utf-8")) for surface in surfaces] + if normalized == LOSSLESS_CAPS_V1: + if len(sentinel) != 1: + raise ValueError("sentinel must be exactly one character") + sentinel_bytes = len(sentinel.encode("utf-8")) + pending_sentinel = False + counts: list[int] = [] + for surface in surfaces: + piece_bytes = 0 + for ch in surface: + if pending_sentinel: + if ch == sentinel: + piece_bytes += sentinel_bytes + elif _is_ascii_lower(ch): + piece_bytes += 1 + else: + raise LosslessCapsError( + f"invalid continuation {ch!r} after capitalization sentinel" + ) + pending_sentinel = False + continue + if ch == sentinel: + pending_sentinel = True + else: + piece_bytes += len(ch.encode("utf-8")) + counts.append(piece_bytes) + if pending_sentinel: + raise LosslessCapsError("dangling capitalization sentinel across piece boundary") + return counts + if normalized not in {LOSSLESS_CAPS_V2, LOSSLESS_CAPS_V3, LOSSLESS_CAPS_V4, LOSSLESS_CAPS_V5, LOSSLESS_CAPS_V6, LOSSLESS_CAPS_V7, LOSSLESS_CAPS_CASEOPS_V1}: + raise ValueError(f"unsupported text_transform={text_transform_name!r}") + + title = DEFAULT_V2_TITLE + allcaps = DEFAULT_V2_ALLCAPS + capnext = DEFAULT_V2_CAPNEXT + esc = DEFAULT_V2_ESC + if normalized in {LOSSLESS_CAPS_V2, LOSSLESS_CAPS_CASEOPS_V1}: + _validate_distinct_single_chars(title, allcaps, capnext, esc) + elif normalized in {LOSSLESS_CAPS_V4, LOSSLESS_CAPS_V6, LOSSLESS_CAPS_V7}: + _validate_distinct_single_chars(allcaps, esc) + else: + _validate_distinct_single_chars(title, allcaps, esc) + pending_escape = False + pending_word_mode: str | None = None + active_allcaps = False + pending_capnext = False + in_ascii_word = False + counts: list[int] = [] + for surface in surfaces: + piece_bytes = 0 + for ch in surface: + if pending_escape: + if pending_word_mode is not None and not _is_ascii_alpha(ch): + raise LosslessCapsError("escaped control char cannot satisfy pending word capitalization mode") + piece_bytes += len(ch.encode("utf-8")) + pending_escape = False + if _is_ascii_alpha(ch): + in_ascii_word = True + else: + in_ascii_word = False + active_allcaps = False + continue + if ch == esc: + pending_escape = True + continue + if normalized in {LOSSLESS_CAPS_V2, LOSSLESS_CAPS_V3, LOSSLESS_CAPS_V5, LOSSLESS_CAPS_CASEOPS_V1} and ch == title: + if pending_word_mode is not None or in_ascii_word or pending_capnext: + raise LosslessCapsError("invalid title marker placement") + pending_word_mode = "title" + continue + if ch == allcaps: + if pending_word_mode is not None or in_ascii_word or pending_capnext: + raise LosslessCapsError("invalid allcaps marker placement") + pending_word_mode = "allcaps" + continue + if normalized in {LOSSLESS_CAPS_V2, LOSSLESS_CAPS_CASEOPS_V1} and ch == capnext: + if pending_capnext: + raise LosslessCapsError("duplicate capnext marker") + pending_capnext = True + continue + + if _is_ascii_alpha(ch): + at_word_start = not in_ascii_word + if at_word_start: + piece_bytes += 1 + active_allcaps = pending_word_mode == "allcaps" + pending_word_mode = None + pending_capnext = False + in_ascii_word = True + continue + if pending_word_mode is not None: + raise LosslessCapsError("word capitalization marker leaked into the middle of a word") + piece_bytes += 1 + pending_capnext = False + continue + + if pending_word_mode is not None or pending_capnext: + raise LosslessCapsError("capitalization marker not followed by an ASCII letter") + piece_bytes += 
len(ch.encode("utf-8")) + in_ascii_word = False + active_allcaps = False + counts.append(piece_bytes) + if pending_escape: + raise LosslessCapsError("dangling escape marker across piece boundary") + if pending_word_mode is not None or pending_capnext: + raise LosslessCapsError("dangling capitalization marker across piece boundary") + return counts diff --git a/records/track_10min_16mb/2020-04-29_AWQ_lite_mixedprecision_GPTQ/prepare_caseops_data.py b/records/track_10min_16mb/2020-04-29_AWQ_lite_mixedprecision_GPTQ/prepare_caseops_data.py new file mode 100644 index 0000000000..5c3f13e69c --- /dev/null +++ b/records/track_10min_16mb/2020-04-29_AWQ_lite_mixedprecision_GPTQ/prepare_caseops_data.py @@ -0,0 +1,177 @@ +"""Prepare CaseOps-tokenized FineWeb shards + per-token byte sidecar. + +CaseOps (``lossless_caps_caseops_v1``) is a bijective, character-level text +transform that introduces four operator tokens in place of explicit +capitalization: TITLE, ALLCAPS, CAPNEXT, ESC. The transform is fully +reversible — no information is lost relative to the untransformed UTF-8 +text, so BPB stays computable on TRUE byte counts. + +Forward pipeline: + 1. Read the canonical FineWeb-10B doc stream (``docs_selected.jsonl`` + produced by ``data/download_hf_docs_and_tokenize.py`` in the root repo). + 2. Apply ``encode_lossless_caps_v2`` (the caseops_v1 alias) to each doc. + 3. Tokenize with the shipped SP model + ``tokenizers/fineweb_8192_bpe_lossless_caps_caseops_v1_reserved.model`` + (reserves TITLE/ALLCAPS/CAPNEXT/ESC + sentinel as user_defined_symbols). + 4. Write uint16 train/val shards (``fineweb_{train,val}_XXXXXX.bin``). + 5. For the VAL stream only, emit per-token byte sidecar shards + (``fineweb_val_bytes_XXXXXX.bin``, uint16 parallel arrays) that record + each token's ORIGINAL pre-transform UTF-8 byte count. BPB is computed + from these canonical bytes so the score is on the untransformed text + (not the transformed representation). + +Output layout — matches what ``train_gpt.py`` expects under +``DATA_DIR=./data`` with ``CASEOPS_ENABLED=1``: + + data/datasets/fineweb10B_sp8192_caseops/datasets/ + tokenizers/fineweb_8192_bpe_lossless_caps_caseops_v1_reserved.model + datasets/fineweb10B_sp8192_lossless_caps_caseops_v1_reserved/ + fineweb_train_000000.bin + fineweb_train_000001.bin + ... + fineweb_val_000000.bin + fineweb_val_bytes_000000.bin + +Usage: + + python3 prepare_caseops_data.py \\ + --docs ./fineweb10B_raw/docs_selected.jsonl \\ + --out ./data/datasets/fineweb10B_sp8192_caseops/datasets \\ + --sp ./tokenizers/fineweb_8192_bpe_lossless_caps_caseops_v1_reserved.model + +Requirements: sentencepiece, numpy. CPU-only. Runs once; reused across seeds. +""" +from __future__ import annotations + +import argparse +import json +import pathlib +import struct +import sys + +import numpy as np +import sentencepiece as spm + +# Local import — lossless_caps.py ships next to this script. 
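+# The sys.path tweak below makes that import work from any working directory.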
+sys.path.insert(0, str(pathlib.Path(__file__).resolve().parent)) +from lossless_caps import ( # noqa: E402 + LOSSLESS_CAPS_CASEOPS_V1, + encode_lossless_caps_v2, + surface_piece_original_byte_counts, +) + + +SHARD_MAGIC = 20240520 +SHARD_VERSION = 1 +SHARD_TOKENS = 10_000_000 # tokens per shard — matches the main pipeline +BOS_ID = 1 # SP model's control token; train_gpt.py:_find_docs requires BOS per doc + + +def _write_shard(out_path: pathlib.Path, arr: np.ndarray) -> None: + """Write a uint16 shard in the standard header-prefixed format.""" + assert arr.dtype == np.uint16 + header = np.zeros(256, dtype=np.int32) + header[0] = SHARD_MAGIC + header[1] = SHARD_VERSION + header[2] = int(arr.size) + with out_path.open("wb") as fh: + fh.write(header.tobytes()) + fh.write(arr.tobytes()) + + +def _iter_docs(docs_path: pathlib.Path): + """Yield doc strings from a jsonl file (one json object per line).""" + with docs_path.open("r", encoding="utf-8") as fh: + for line in fh: + line = line.strip() + if not line: + continue + obj = json.loads(line) + # Support both {"text": ...} and raw strings. + yield obj["text"] if isinstance(obj, dict) else obj + + +def _token_original_byte_counts( + sp: spm.SentencePieceProcessor, + original_text: str, + transformed_text: str, +) -> np.ndarray: + """Per-token canonical (pre-transform) UTF-8 byte counts. + + Delegates to ``surface_piece_original_byte_counts`` in ``lossless_caps.py`` + — the canonical exporter used by the PR #1729 / HF-hosted CaseOps dataset. + Operator pieces (U+E001..U+E004) contribute 0 original bytes; letter pieces + contribute their pre-transform UTF-8 byte count. + """ + proto = sp.encode_as_immutable_proto(transformed_text) + byte_counts = surface_piece_original_byte_counts( + (piece.surface for piece in proto.pieces), + text_transform_name=LOSSLESS_CAPS_CASEOPS_V1, + ) + return np.asarray(list(byte_counts), dtype=np.uint16) + + +def main() -> None: + ap = argparse.ArgumentParser(description=__doc__, formatter_class=argparse.RawDescriptionHelpFormatter) + ap.add_argument("--docs", required=True, type=pathlib.Path, help="Path to docs_selected.jsonl") + ap.add_argument("--out", required=True, type=pathlib.Path, help="Output datasets dir") + ap.add_argument("--sp", required=True, type=pathlib.Path, help="Path to CaseOps SP model") + ap.add_argument("--val-docs", type=int, default=10_000, help="Validation docs count") + args = ap.parse_args() + + sp = spm.SentencePieceProcessor(model_file=str(args.sp)) + print(f"loaded sp: vocab={sp.vocab_size()}", flush=True) + + train_out = args.out / "datasets" / "fineweb10B_sp8192_lossless_caps_caseops_v1_reserved" + train_out.mkdir(parents=True, exist_ok=True) + + val_buf_tokens: list[int] = [] + val_buf_bytes: list[int] = [] + train_buf: list[int] = [] + val_written = 0 + train_written = 0 + n_docs = 0 + + for text in _iter_docs(args.docs): + transformed = encode_lossless_caps_v2(text) + token_ids = [BOS_ID] + sp.encode(transformed, out_type=int) + if n_docs < args.val_docs: + # Validation doc — also compute byte sidecar + byte_counts = _token_original_byte_counts(sp, text, transformed) + val_buf_tokens.extend(token_ids) + val_buf_bytes.append(0) # BOS contributes 0 original bytes + val_buf_bytes.extend(int(b) for b in byte_counts) + if len(val_buf_tokens) >= SHARD_TOKENS: + _write_shard(train_out / f"fineweb_val_{val_written:06d}.bin", + np.array(val_buf_tokens[:SHARD_TOKENS], dtype=np.uint16)) + _write_shard(train_out / f"fineweb_val_bytes_{val_written:06d}.bin", + 
np.array(val_buf_bytes[:SHARD_TOKENS], dtype=np.uint16)) + val_buf_tokens = val_buf_tokens[SHARD_TOKENS:] + val_buf_bytes = val_buf_bytes[SHARD_TOKENS:] + val_written += 1 + else: + train_buf.extend(token_ids) + if len(train_buf) >= SHARD_TOKENS: + _write_shard(train_out / f"fineweb_train_{train_written:06d}.bin", + np.array(train_buf[:SHARD_TOKENS], dtype=np.uint16)) + train_buf = train_buf[SHARD_TOKENS:] + train_written += 1 + n_docs += 1 + if n_docs % 10_000 == 0: + print(f" processed {n_docs} docs train_shards={train_written} val_shards={val_written}", flush=True) + + # Flush tail buffers into final (possibly short) shards. + if val_buf_tokens: + _write_shard(train_out / f"fineweb_val_{val_written:06d}.bin", + np.array(val_buf_tokens, dtype=np.uint16)) + _write_shard(train_out / f"fineweb_val_bytes_{val_written:06d}.bin", + np.array(val_buf_bytes, dtype=np.uint16)) + if train_buf: + _write_shard(train_out / f"fineweb_train_{train_written:06d}.bin", + np.array(train_buf, dtype=np.uint16)) + + print(f"done. docs={n_docs} train_shards={train_written + (1 if train_buf else 0)} val_shards={val_written + (1 if val_buf_tokens else 0)}") + + +if __name__ == "__main__": + main() diff --git a/records/track_10min_16mb/2020-04-29_AWQ_lite_mixedprecision_GPTQ/requirements.txt b/records/track_10min_16mb/2020-04-29_AWQ_lite_mixedprecision_GPTQ/requirements.txt new file mode 100644 index 0000000000..b6c55e13aa --- /dev/null +++ b/records/track_10min_16mb/2020-04-29_AWQ_lite_mixedprecision_GPTQ/requirements.txt @@ -0,0 +1,13 @@ +# Python deps. Install with: pip install -r requirements.txt +torch==2.9.1+cu128 +sentencepiece +brotli +huggingface_hub +numpy +python-minifier + +# FlashAttention 3 must be installed separately (not on PyPI): +# pip install --no-deps flash_attn_3 --find-links https://windreamer.github.io/flash-attention3-wheels/cu128_torch291/ + +# System dep (apt): lrzip (used by per-group compressor) +# apt-get install -y lrzip diff --git a/records/track_10min_16mb/2020-04-29_AWQ_lite_mixedprecision_GPTQ/submission.json b/records/track_10min_16mb/2020-04-29_AWQ_lite_mixedprecision_GPTQ/submission.json new file mode 100644 index 0000000000..4a7c9e6970 --- /dev/null +++ b/records/track_10min_16mb/2020-04-29_AWQ_lite_mixedprecision_GPTQ/submission.json @@ -0,0 +1,25 @@ +{ + "author": "aquarious dante workman", + "github_id": "aquariouseworkman", + "name": "PR #1855 base + activation-aware GPTQ mixed precision (AWQ-lite)", + "date": "2026-04-29", + "track": "10min_16mb", + "val_bpb": 1.06086, + "val_bpb_std": 0.00069, + "seeds": [42, 314, 1234], + "seed_results": { + "42": {"val_loss": 2.32225638, "val_bpb": 1.06118055, "artifact_bytes": 15978503, "steps": 4989, "eval_time_s": 392.8}, + "314": {"val_loss": 2.31977393, "val_bpb": 1.06004616, "artifact_bytes": 15976469, "steps": 4986, "eval_time_s": 395.8}, + "1234": {"val_loss": 2.32262534, "val_bpb": 1.06134915, "artifact_bytes": 15976673, "steps": 4977, "eval_time_s": 395.5} + }, + "hardware": "8xH100 80GB SXM", + "pytorch_version": "2.9.1+cu128", + "cuda_version": "12.8", + "flash_attn_version": "FA3 3.0.0 (cu128_torch291 wheel)", + "compressor": "pergroup", + "technique_summary": "PR #1855 9-knob hyperparameter stack + PR #1908 activation-aware GPTQ mixed precision (AWQ-lite). Training identical to PR #1855. Post-training: AWQ_LITE_ENABLED=1 selects 1 salient 64-column group per weight matrix via activation RMS * weight magnitude saliency, quantizes at int8 (vs int6 baseline) inside the same GPTQ solve. 
LQER asymmetric rank-4 correction on top. Per-group lrzip+brotli compression. Phased TTT eval (3 phases, prefix=2500 docs).", + "compliance": { + "artifact_bytes_max": 15978503, + "eval_time_max_s": 395.8 + } +} diff --git a/records/track_10min_16mb/2020-04-29_AWQ_lite_mixedprecision_GPTQ/tokenizers/fineweb_8192_bpe_lossless_caps_caseops_v1_reserved.model b/records/track_10min_16mb/2020-04-29_AWQ_lite_mixedprecision_GPTQ/tokenizers/fineweb_8192_bpe_lossless_caps_caseops_v1_reserved.model new file mode 100644 index 0000000000000000000000000000000000000000..fffc8bb3062a77df55030b36cb0d85f2c6a9c211 GIT binary patch literal 366510 zcmZ6Ud4O!&Rn`wAOqURvAf6Pl7e(&*uTQ9oxf>)jm=pAmo@RhH49xC$1$us5iTPpHzbYGUFFAVzf z{||k||A)S^qIbOY*00=t_4anj=&f%R!o6d{ePhD?W5NSt!h>VNLu10jW5Oe2!lPrt zV`IYOW5N?-!jogdQ)9x@Bf`C7ecU_N$Gu~H+&k9Cy<>gcJJ!d&V}0B^*2leLecU_N z$Gu~H+&k9Cy<>gcJJ!d&V}0B^*2jHgecU(J$9-dc+&9+8ePeyxH`d2}V}0B=*2jHg zecU(J$9-dc+&9+8ePeyxH`d2}V}0B=*2n#0ecV6R$Ngh{+&|XG{bPOHKi0?nV}0B| z*2n#0ecV6R$Ngh{+&|XG{bPOHKi0?nV}0B|*2e>5eLOJM#{*-1JTTVB17m$WFxJNd zV|_d@*2e>5eLOJM#{*-1JTTVB17m$WFxJNdV|_d@*2jZmeLOhU$Ae>iJUG_JgJXR> zIM&C5V|_e0*2jZmeLOhU$Ae>iJUG_JgJXR>IM&C5V|_e0*2hC*eLOVQ$3tU%JT%tF zLt}kBG}gyMV|_d{*2hC*eLOVQ$3tU%JT%tFLt}kBG}gyMV|_d{*2lwReLOtY$HQZN zJUrIN!()9sJl4m+m$amBdx_pHCdgw5iz~1Ag^&MJY3jb5j#p zyT4P>Qmi+xms*ab;1|BWs7nRCt?4s~^dbirQovS?avSijrY}|Cd%WJFx^mv<^_4|M znk2Er_#g+*q}30r`H229(@!PPw)2^vD9!^WKP%22snMSUys`q@M=i`I@?`xa1%7-5 z(*GwrvzcVrad568KI`>GPtPr70iWkDM=Iot%t&l%YVtx_*-ti#al58HuuUCRMix@p zsv>LTyrNyP{OaT(wY}ECrOdN)@JtcDJXA6Gm5HmOzbaHI?5iDINUpDOa8Gf+HdN*N zb)icCmsJZA@arwrvu-%wm?}G0q;GQk3+3|7l~>o(xA?Dym_>&3U; z20OgFa1QGfE=&B5`sw_(o7i{ylVg2X%2BNEc5qMfeormZE%LX;fEc3Mg{(eGg@AVj~_L8CSCR>&rel>udlVcvR%Hh z=6YA^`tgd~Jbt1^o!JI|vVOd)qJFBP+v?+f+J!A;>DPq^>HKHHr5OHM2UqGCelApT z^YaeQ)OY-X)6LG6`7egyGe!BOT99?VsXXnuw%+tSlzIGejcd#O6=!}b{(jZLx#Ij< z__IjAo)`XJL zOH{S|z9YU$M%%o>J(SwOTf9NtOggohHu^44!C z9n<{PM0=Z(mn3;x7qJqDZWl%BiBfeJ$cyz>5;eebYPC4`TbpNnvw+$2XKF za(m{8TmH6mZtvvqg_(cnHOnS-JNi$ zF77?*hcng9dxl#pF}7W3XiBx(75!nsF?_pRmJU09MoP&wT5$n<_S;hu7zSbnTP zT6+KbX;=FD02k)Y`~xHWu^9cJ6m_Z^{otDQ$x`V*#3^jtAL`(RB7IoR)x*?Fo&0pJ zFCXqmZfPG;bGmD*epw2%hF|Vj3zdD3gNF*fqUO3%_pn`&TiLx9itb;PdS8;O;_o;8 znT+WVxQK<7|6pWtaSu71Vc}tiSI*~=lr5z_>fk;15d28TIQ0PZQ6?XVyH}dLaG&C-h6^{a&ez%@`XE#PcnH=I{jpmmyYr&4$o&&=%-d)@2e;LG$%My zaHp~@c|JX5In&RmDK2EWpIOmNJbhN;xo7|Egk8U%<9we{g3ql9Y=58U4-0idpI=2? 
zdhq*#3U>{^&=F5PA8>G{G+%5csS44*q~?5~PhVQo-}O*7bxa0K@h~%Md+s53otvj{ zd&zCrroCU&rxTnNP2INwPm;#^TGoiJqgB4R?Z3+5w35>r%T<5&2DUZdHq-Kj>ipbMmo~hM zggr5SHu9glIp6h%UALB3*Zj^jGCgO$SL!2P<47~J_6JQ~yaw`Xs|skf66gKdkttR+oVN)j85so~w-I&UkJBJyD+7a}qq+0%pgf*sh zCG7U~m4&Xn2xsoZ;4FduXoNt_F2Pgr^;J&c@w+1&z7`r5{+PckRPkL2AId1cy2erm z(t)xtx_etq>yJmzC*r09!13vvaPu`a-tvIiR0otWuHHXUKOL*z?|>{7c4duU>)cM~ znrw6clAL%N`zI?8hZNd@!o$g2bKI|s&XBGnn5EwbK>t%I^EoMpaFS%myG#=L-i{!} zEZo=q>8OkeQAaS^d@h~K*VhbYnniU$n3ibGH`Fhl8+1TepqrLXzp?VIDMtsQsQIb7 z{6FLHQbTx`@XW31o1A>*@w@|2L3?F#{AZoy;z-INOetjm|7LUGIkaRAq>*HN_;V>~ zDVs`o;a=fes)Vy6wZIM>6&-pJ^XHvxe}>U_grkh}h1q= z(sKcO9!$Th#-vIqy8|PDCrW?C8O}~s0SRaJ|J{|n3pE2!)W7dZ*GjB8cX;9IAROI!!uoyrqQSi* zTpEwN(7#b5YSitXN9z5rDjN({k_zm?G@|~^noet<4upc1Y75_AtwM!W;Q9aGa`Khz zwIiImxRVL)4@ARiSOoLwZ07a!-wyNAewXm_uI%LpQ+=nJPjvuTn|ZkSI}V?kWx~nq zWbV=9hg^>OhmN46@zUb&hJW=H1f!obtL}&WMRT5xV7{z8p8ma>^BtAUz`ca4HnhbP=NnlZb%w#(tI>M2NIV@r{y`yP@*%8bq zR@Ktb>%zO*3qjfo)~q)D`pW6uO!ztoN%%~9=r=g_iKnm~Ky|C}@{Ojmu#`Xn2L{bQ zsJgkW`Bn#v`OVaE{w=>*GH% zGkZsx{&xVBxH!ianxCxkZmYp`zzD~@N8J2lM_sCe?g-}_twtl@PdU$JY!pDA_jm^W zPaMt|M>u@Esrv@f{B+IZ_WVey2BYeeeK)Uv>U0MevLC`Rkb{Mdwz;mtx{-sD?ZO@J zKeO8Js9p)mLY5j;f2PKo?MrSTm2)7m|9R!(c;<-;xOATySAW)dc+T1pj?{ErYO?>L z#?-jofuzb48Q0G_JUc<(9pSB8Z|9LH3TpnPzs}`9>Dj z38JXQ(rbiYGQgbG+yby>wVZ!j3!7(t+(C%SmM5OmZ>n&+vc(Qac{3^c-wnzXlb}@U zvBuu!P4(l6dQDI=-OcLk-@oD3JKehF330V~NK_)T0rzab>|(Tx=m;YHg@>#E5TO_i z`2c_w4U)~TIF9F49pPAmhRNnX){^heJX{R;cPF@hQH?GclC)NMXoaaY5ubj4U-OX`(1~<{Oky$(Or#7|GTPc z{%X%PI|zJxwHoE`RpHLg-Q9xWZZEEf|Kt2F)Nne&`jDpl`~GmIIblb57#%(2{$GDw zx>XXUq;ql6ywO4IrzMyqnzhTIAV}~d!aZRMdNu{U#T$$6Sqdp7I4LjE4R;Wt%?~0e zKy9jcXx`GFW=C>9w}6=Xu2*!xG#YiNJt#F?*32(3s|Rr15`?o0_n(B5$C3POJus;b zv#@<(&GU@Gs0ERb886}yPC^gDeLn5k)ZVIQ!U%*#wZUE_s0Zl4$TvIF9H$2)pSy0a zZ(ZT(oJm~^B*(0v^&?^&ptZ@bfk`)w#UB?Evq34FMz*oH?or0A0TpN!9cJ zwl(p-W;iX7QkZ+l(cFQ=>xmpNJwR0|p_;d=wRV_AV+(236kHUOrdW% z*3$Fd7Lb}hJNG2NgFpduP567lQOto|m~UU9*F`OmT3cG_H=c14Ut{@p$(T(Cb1w3h!1^#vIxK@6NrB0aFZq zI5lKInBQ}$?_Nupq3RYy%;k~%bRB>aUduSrC+L;^d(`v?nufOnoOv)E;F(6yet_rR zdwNgjsIj9Zh|&)2-${63_6Uc)V>hSwN-b#^XbG23Zt_5Qeby7X9+28S_dKL*o zP1X`7;@z42{v9B-xj#Q!$a~d8DGKo%@_nLL#>AE&30LVV05E)^MO&XZUQN8O;~bt^ z83AYKZm)oMzV-I1zl$?WQ0nQSh0Ws&JG%bB8*jbj){BX=IC-6YpT{0 zrbcG+I=)|mRDVk_nI6qt-C$BLG-&n1H=xGciId6U)e?Cq zdw+kRTPA}xfR+d70GKEpw479ZKF!=AHy==QKlT!>1!o${+%}V=4iaV9Thsz8 z`{V3fRnY<{<;Zdoj*;xnGcD_(AnEcb?vf92T<;vUgp=v{TsF|5$Ow+?V$3%NcqPnQ(lY`aAKQ`(PdL9nu`2{=K7oyn2a6HZZQ=_c+Cn;ErRkl4hz zL9%?hIPrkqgOU($l1jU;;-jU!yDgZc&sv}k5DCw3+hNt`D{F0ucfa}ByRDv=aKt-S z`|5yjt<`2vFx7IVmHh)wb124I!pR*Eu)T8-!rOuU|NVfE?Bi}89Bc^(f_i}#3M#SB zqa#e}+2UyCfwl)zFnbZ+5`3r{&)G#;8n%#R^oPSkIQ5KPs{{Cbes zHGi+Hd>nat*@8>;YZt8pl5nOzii9Bayg1K>acX6GBGi6#tQ&qif~3K-p!I4GilBS9 zJqh`kDC9NRT}zn!Sl`Kw2`D;uS-_GmM_xYG>FoPy36t*B{Za>@IMm|`2PK!e_mx|J zWYBfJfnuk7*r~*WfF@GsiOLephtM9Y`Ouo+bb$LFG=Q;~gECC?1BH26c5NT8Ih?w~ z0cKFd{gf$02PwInJ(qbz4@C+-wz)nrWT3&L9pH)1ZFIn*AMfM#U__bQ_4lMJLmS^M zLDjVkq6_%~Cv{J-^ap5By~k52nYjyz0Qq{5_Rv8{VMjg-(+?5UKazFxbd7K@*K(%? 
zm;7;psEt8*ms>BQB4*D%->@OV+>z?0`M6Z$`MElf7L@ddtjjcGfq-Vp)N1(3^2t8G zk9S53E1EDdnNX>@0FsY(lGI=lGwZics0!XU&p^1DooJ*^c;Thn5bZ$q(0pRek{Od% zVGyKwtvtGfZ)@MbC!9v-V^5z{3p?g{KgBU$mI)RBX3xDqA)F#sM>CBBeLfz$oqlR9ZqF40lvaHZ zuLFczMiaZ%`yuv$eXXBn13aYST7qFMQ}7NPt;p}uC*mC}wb`i=ckRt+!Qt-wwg-_8 zg6c^HCDO`tB0b>_A<8lT!UYW!dN}mqjTWqLcK;3E&it(h zr%Tp>mbj3<(D~v9M!pun0D8{{AS%Haqt;72PcR*L_GglOTC{oXflyG@B8;Q+sRo2N|&gK?e z*PrLb;2^;OK0QzGKxp#}udr9OsRgtbYb$Euei;(E*^$~DIPBS9OE}!2 zekBFRN{;1y?tyA@nLiyGFu_$107%5UUTFiv15+1mEc6g4Fx%9(><WFQr=uNql}v zui6c8U$6(tWDvYr)T+}NBo)*`Nm3u9>3|fo=lLFCm}Pn)d$>~tlc$7q&tp4fx&gD0 z*1!dhk<5-V@HfxY9EvXkOi^wlUBV|i!r2p+j~rr z+w{WHYYTxo$|Zmf01sy};LvBM`zr3d7I$b?fkhOLjvYAq-NUIy{2l@g7O#cZn-|fH zSZfKV6ZCX4eP{`Sgn3SvNXKPu<_PDg*l*RPK4m=1^mIhe$;5`(Xou`S- zU5m8R07|g*3wSBLan?af$vz+414+qbNfq?!TJp+5wP4hE;NFQa0%GU(<$=P}`O+I1 z&2yC}+KvyHwNR4aOoIwYO zzlIcDu`{R}D*cdTPaWiDNWvPjoUr&Zb7KeFgmB6~o_qBL#x`EYoM}ohg=y8`1Fan- zB#gsz4UkYeCDjv7HgWlE2E1!$R`cbxODcn+M2Dv4u5OPY1_gWS6sAF#_g(d7plHM^ zyETwfeVU?A_(=NR5Do{a;N~l8O|jheVc`jsWR#skpjz8Z=OLzm!aoxQ3LWC^YJ6G` z@XTAf1N5P-4G=-@>hQ-`7S+P|B;kct`z_(*zh`&Rbij=Cm|X{kw}qVD2(wPoxi>or zqZ{TH`m#wxbn~X}8!7LPy1a#IdXg~HM9L!^J7pivli?|pNbfGa8zL-Da7@9%8cLZU zdaCJjAw5L2ZFliz$RTU?ud>wpDmlSjI+T`RF&c(sPco;FP&8-S&}0Y5*rbC5X#;C$ zQO@1D&W-fo(x(=0&L&xuvnhYfJRNF6G9gUf476B&3rJHq$t-RPAu^w-F?GZDqwLGA z^PRn6ckVq9VNVrzLpVGgaHn zN^(B*A&@@lHN~P>04&_L|LBj`ylAP~M41c(22Yt~3t`CIM>eKIl;eolL4Z3ZwP+NuUBn__`{OV8B;V$gES$Ds3FOD2p8K;zduWla#tFQl!W zaIqhs`q+SBc5!x=A?r`N2uSWL{geDn6i6p(y4qz0=UST-u(?>Z6j1wvf z!v~_m7(LWO0wP(w0L^7htYOTZ1{BzpVYu4>7Qc7T$gw=O-$B=BMAdM_0kU* zW*N~o;Am}?-L2*uYF;aCo=t$1gEI}#2ITU^w0yA}vt{?oD9}2&q)f>C#dT1iE=mpw~ktEH03}2g5V9 zAO->!Ht*)o#*B1sa6*_^c)>Kw2T(4m(Z-G0oF=ZTzf0IB&enuek>|_WTldgV3=Knu zw*jHP6?WBpvkRdUl@S3V?92x&S}^hC{+$v`W!T-_0iy;4%rS#CBvr(@`nw)Ld7OHa zp>2X@?4dbdFGWDr!}07#z9HSEJ22t@aEBGGepxgWl+PTejy z5YRY-vrl_|YgHH9eUh^FZ_V2z$pl!-v9mSlAcc{9|SeAhEi%m z8Bua#E4U#@mS(T{3zdhscu6{el%)8bb>6atfWR!{y%9JCii<1^I}j4F4TpH^Au0Q0 zT4D7NDC#-6i8sT?*^y5qHESm=Lp&gAJhQ*31&0f!yz*a8A*il(ZJ|qexrcvdJw&7{ z(s{5R0vw@n4LJb|@8LIpvDVyyPQy)rDH69Vxwc?Ru)APNSVGd{qv|eSmKrP8gkvwb z(Y(LYLrFd--ecH+!#XQq8}fGy5$pq+|L2|tCu}s1#P7PDd6hiHEY3061koYsjUlT^}4Ba+b z5IivdrV=)Qw7cC4Oso0J{fLCWc1s06qX(K#eIooPvw7&2^Y&fVbtESp!kitTYZ#s@tQUx;iLm z_Xxc-f3406b8N(RJ%O0g?)s=>3r>x423f1i=@5b2%IfgIVaJ~L^$=gzwj?p-|3oyiu#%OVZhx$&?XVwtp~yx4r>bQA;8?? zHRblg226(KEL`(9sxY&?Y{yTal(Bwsl8I_NM4F%EM8yDxEn_7dVFX^sN zc;P`V;id0yUDam85{xChI)Mo^()mbcOBmBU#x$n{qtF9y0p-h{8Z*Jt&W`HL1yXop zOA?#f3>fvx#{Q-?9+~fcQEP z8wy>^ouxX(4Il#CRff&?*W`@5+KHJ!NEgET;;dYQfEGDWBRZaf$uQ2O4p3{UY!s}A z_*{uoO9Q7&Z?J8EsCMQfgMTXqsN=vB!c`QHdo2VEq<9gh!)NBF>VG;2Dfvuw?XDYA zO3JR~06kQ1fRSU)x*p+wpcaVNUk;E71S;BR?g%2IHs%o+0LMP5*yY2;i8|LcAUW>k z)&w}^b7B`|15;i2vvY^*Z(F1?$(eviu9coI7`9*vWnG}w4M<{+hiBh9pINmd{Th&> zI2nSDf#`~!PlNI%F_+#JZ+_69$|Z>j2wBpZJ9jk9tpO=- z*C)+^QN9Ni^#Gehr$3Wn{hcAY4hBwuWVPo7Yzs(5?`Bw=4upfu2092xL*Fb-tcTBZ zyvn?XKyG*yQN#ui#^8j4f2hJ#DNy3dlh6`QZQBC>5Ux4&MGZ{@7?H@M14IO_HBgspu-cXnMG!8A zk!iwah|HNna%z60M&LA;8sG#%s^QH0KY=
g^^VO~^xMs<4AXX-joWYr^4V*Oz;G zFg%~)u0Yg)az*6t*O)k#wL}1u%X5;aB^;jTxg9zkA|L0~kzjnV*>+ol>11X)1VWH} zPPFm6A&kk>3u`*j{HS^13_4Oz0_c!6?Es`(;JmC|Q&T8pn70fnGy6FYy5<`t~%ylzNocr5)h0oS;)+x`Vrnv|SQ16p?TZ$gs(@ZRkjQsHxhO zZ+vgS7Ke_kd0j2)L?hq?7*-jiy`j-UNsa361W~3#6zv;&=z&6hIE>+dFwB)pdp!sv z!cl-28$cPMbW2hxo7dOO&tQ!>69{N+MOV-gB-caDET)7@Gx3S44jeYMAEL$9I-~j1R$9$)RBi%t6j5;W(a;*r~ps@Fx zPbWYB5w&eV-)c1O@Z)u6Ku8K)OeS#zUx4YYHv3O&6pJ=?21HybWsz z#h4ay2lPV(ot>0zLqL+5wndsBuh|_VvMd_{taBzU-RKXe` zW02AGNrX4hv;0ls;S<&*<{vuVt~(0CGR0hZYJ(_Bo9R;!wI)AkN034}qpm4Tz+G>9 z^@((X$~Od~m*Y$TnxCk~%q%$<&nJ*lEzkG{22d4Y-|KXUjy0TnBdvpkNIuEEPF$G# zaOBH}mA7^XC(k2K5dTqSgRw-*yvab2H&q=XgjCO6yGEwqWRN?DKy)HswYm!sES7B@ zK-Ojq9+j0qR+hz-|#znIsqYxmp)(7CGK--G6ZGox+mTf zhM`sIgo9=$-n47}am|8uj1f%0G6LN(Y4c6KYR8>|W6V1H$_x(vyCFSuBb5tA++4iv z1Ii2$K+ZmP(fm}E5*4csCJ;zQ<>)RYplFFHq&!E{fkFppUOFh@MZ1=G|1r2Vw5X$8 zH5@R9DUoLbrk=Fv~NR9q?_Ri|N4WXmu&iud%Hm z$rw+x7VQ&Xn@n?;4Mut_#^+SskTWvwv{GNhHx9?}V2&YUIv37OfpLo{5(m?1nsfuZscMnLXH ztplFRG<8U*s|aF#1BEvFidP`>&(xxPj*w{&wKRc9#<>~X0wNjyZLM>r`MR{29YD>U zHnAocm9R7i_lcK_=^Fqxv%B=kyylj0kMtx+(o^CG6(z*8~E2%`bd}6@W~7jBQE~ zCO%TC>V`zpxCG9}nKp|0g#ARrhA@iIodWl_&A)IacRc_RjzOxXbjYKH66SX2hweNQ zeJMdY!tizKO`vrE?+mFxK(x(bml^jasnS#YT(xWZHL4Sc5DPsC&?dZG!T*%7`dRpt zIzh>hAsf6Na5?V}E(h|dvggXV|4s5>?WJz9`IohqqZ-P+y$Ljx?&H^lsTWV-DPkH( zFM2+sAMJo?b6Qk*#8@YWdT;e|J+QViH-Qa=Fr8DK&Ck0q?t3D3!n?joK^ReWonPgG z&?>eexf3ntk`i9XoO{Bh>!2ttr8fEMTX@aCazQh>ye5PblXnC>|7f95+*vu6l(;^y z-4QOWDd)l05Ksw+(mbB_Kx|L@4VwhTWAh7D_rBIqu0Kqmrd$pr;<%=Ep+tiida88_ zDJ%8)$(t|pAgAlwm)m>|gkffgnlSW3Hk_!B^KAnuWicH2tU&XxqX9jyF(DkOxJe;Z zwGc{GYNOvrJRngb4uEVx0o8ivvdVP;K6;f7z+h&2{bNHo^_#um<`?~K;mvBoDHLPT zhH{&a?&hb%M{XZ4v?hW;d1t;+w+19zE-|afAvQ-h++#op|7ej#s_Nk1RFbA^ZrBq@ zQKI+GS|CNlRHuLp`1DET%0>sR6zdJPb<%69s6zYUGY4oj80NE@$8!w3%`er`I2VV` zfoi?u>r4qpF`OaMc+x?LG+M9NL%D`DoGanVk|!yBGQq<{;e;dGxvvNO zTSFWjK1nbMv}X%bZGkEANM03KfPkc$x@7t@M4(M*ezAs-^5ujrla4;IvR1MNAfVB` zjFX$xy|;M?hdWOjT40pzn+ZU5+QhL*ApTLF%Gih zV5sIrkSK6fuQ306L$zDaeStP}H}n9Q;eC-QAbB%u_G82ON~eN$)_{m{EQ{)avftaT zjSVQO8yHB7c4xV}R(t@%1Gu_Xe4)HB$x|n=lc=Lg9SffU?KioqI zH(nTS0x?A!PxBw@*E2nqGXbhU*EmGEpp1l3be`gw4oNbcCJEOJZ2%w{a<+N$Ne9%C zY6BX&npIxk{*_v5yc*!!a1$t`&ryID5IwM5(p)qj56yW8s788Zw={$Z8Xw;CiMeh@ z@nHA1gq#29@X8Zl!pR4JmlW1QAfI|mbqa)uMLinSCGBl3#MXr2R_2MK0~R^D4G1&d z^J{*!3M#ju>llixBKL?}P@*5}M!|G|9CgCDfngD~`a}){WbrEiJ>jY(tvxqMs~MhO zQ~gh2(a+~j0EsJ;N7MnI`;fqNzz3d{bl|YHuldy)h`5Y$$le1GU?KN1r4A|C#pP@B zYyQefG5cmg5kPk?G=4*nMazXV&Bt8PQM>CPWF%*ZV)e>33wdMKNyEu=3MULFXYOJ* zfV6#S$9q}-xi%{9sFn%b1X^OzHTot<3nkLs)^WrskQs><=H2sgaZ2iD-Wm!8;@n~u z&<_ds)g(8-^hbF$$@2fY1>E%jMmWvL7e>m96)jY9VBLK=iC-aVAbhV3Z;GB4ZzS?gUjvGKcAM30Tl}YP8WW{{=YivET>K;Ac}Ek=E1y$fDV=%tw7~~ z=wRme6@g^qKl@lG9*?7te-9{jbAU0b#6W+b1z&&Y9j--2hWfI@aK=_U6A?=W_f`2ucrnc%~(6+Y$@Y zfHPn9?*LIfSNG&qTMqojoD-k>>5@s=xLt!{S~`+aj^pHksT`g}$q6ke z$#J+&jS3)ACpwra+XkU2J1WF@+N4l!JjPHkrV;$FKE}h|@Ku^ib5J=78b`ltNZ{T>pRgtLJsF0E%Wgww`gh zg``UNx?~zaox*Pi2!9OwUY)I>paMVt($N|Fd$i=WVnJ zm~Xye)DkZCtb@8}9s=y;xIveg3$@xcVf5((Q)>S`I81Obl>&h>ku&%3&F{E~eIIlr zoQmKPX*KC!UCZ(2*)(yFH86LCDV^=wdnge=mi3lk&_zzwY(U|gLoXUYoB!p=>|EI~ z213LGjs7iR@?<`b6jMMdh~>oGE9oxZxu*{UYkRSZrp z*k*3j{hk@mRCoeN^k)`QWeW}$RFLikP9dagDHl6ESs7C8dpWrVqZ&=VHEHRAwFf*d z!v`c-r$^g|$>#r5RkA*ybpg{{iu)Rkw$M;7#wR8cpYATNTLB{zbB{gkoUfro4BTfN zb?Ao}bH?30hp%Gn0JIz*4a#N$*sXr z83+C3*6tyMJ!;CWXEP)@_i2IV|JE4n!NvBWgaI~E07t}HU_b*EkI(Hb7~U^vfgsgZyOj1 zbH*N994ND+Tn^oV0>>$u!J(K=69@}92a!7AQ?<$sVWeJU8gK(j<*>q&H8(G;lQd-qZKcYCs2sZ?gJ_q zVjuYUNtKP9E+GGHvmVmwp)y_R zxvFu!!3Th=xlH()a7t!%U5q|)IU$9nZ-(!=ETC5ZwpHvq_F-*-v5b3OO2rc_-8V1C 
zN@xhlo)(w`_AMkzpY7Av>?G{1gel<|5zqg?@(Nsfn_L>`1_JXKy;pb%A(`uvxodMh zWWqTI$qkMccjwGS;JzPX?D?3-H8`zF+sV0+w1I|1yG-tG2&&(w3NY;E?TqM-HJ)sN zQoaWBOr;=Uww&%U+zuqZ_YGShqO*64=q0R4+mvv)Oh1b7P?EK7TsW@|64kxy=fN%k zQBLVm)k4F7-J2B5lHt*qahe8XEn z_RZUu?%EF%ZGj@=U9ajU;MAA43uFr0P$CN_qvQ>2lYxd;INJds8RwW-yiEaU&u+*S z;b@DAFikqC7JF_ym!M>dm(UX!2xOB%;tCM0@J>%T;nqV-c0Pc29jJp4;$j1&U@s_d z2q$mtG|&Y#FRJ>|0sXR;+=3W0^vh4G-w7m%;seFo0CGIlgPLtVuw#lob^ysJ1NT5rG8uq5oQ%!l+U&Q)1)*>|IwE)&$WND$l{`tE9QTVC?M) zml~Gu`Z^G$$9o8bpYz+kPjUmQ{gV#tQG3mctDHPlL7%2+)fO~ZT516_A)M@e=6xFo zCma|_qin%&%nX6Nc9QN?4)Q5sT7;^%n9eFmQB8R}864dmbGe*iFGDCtqSd4!q{V6! zE8gcTh^obNZA&0NF!hv2Xw@pN6GfGvE@lHvV?FSu{|y+Pb6K@{$NDkD<`y_A(q_JW z^AkwK-?J-x8-Q+LnaU;{-5hJ@ZU;y?9LUeQX$p>j=XoG%1tEDe5h?dMlLl6`;z0*P z6wd5$$WdAtNW%%@D@f^X&Yt0D9Fck`(#o0cwxEF_f{rZNi5rSs(BCmwylApSC#bg-vo-xLoXt16QtDf9>Z-%&@hTi~dL(Q?U zcd`(!(=EcXRg_R{bpnNwI7a1j?b|?dUU>VXO>Cs7lg}L}!*HZ$vZ;{i5Q~dSo=mR> zineBDryZ0CaPE!yOK@osIoh&L1a)loiClq856@qNh+k?xiF9TqkM~?dpn7~UXhS$N zV0A#xmJ<>;P!We78;fh+x#mfaP$qm^&|;{0MDjk51HoIHy z2O8Hj>~6+7ps5aR4x1sL;u2?m11ZL}=l>Xcm)a1RPS`QO1qD9cmBvjsL}3Hg!}0b& z!4c^F@-~Su7P)&6ZfA(XW{j6rQz(&1&)Zo=D@bt?=Zb!@%5KO^w}Hv*5<;4=#3&E0 ztRZFyk}vWKgxL9`oj%~glZk79nz4I_gl9V4cayM3g63VF*Rwi*%eVkajC z{p#(+E+_R{5c&JRp>~Ft9RI=Srr>m7Y(Jjs>6{f5RgceOc8RXD>pqKj2~OSk!0Z~B zDlc0*^b}V^6hAN311TU4*tK;%B*GO^4xn#_MC?!^IS|d&7kXdfLUZ2=;ejw%u66-p zvs9}_?rjJpooB(+2J!BSJPtcUP(w=v-I_m z>@6S+aU74XfG}lfL5wz!>nrpi8Oj?jB42i#RNFCCb3!wG13E_)Yu8spVF#Rn*vLhiuc&t+AB!X zsScxe!)L8r-3eSmLNPP{3Di0uXTPoh43-}E38qQ%wh5EQYcM?O-4oQg0a1eAOTHl- z1=dMI^B$GqXEaUP0@jYy&p$D^OcJHeUblfMjnxyD-GZY7o>5jU?FsRKbb^>ofiBSB<6$MVf)Z=26x%M_twH~u*K}BM0iH5%efxL5OC@IxC z_1x78NXCOJyGvAWdtDN)*5jT1HH27cJhe7~D~Pbde<WyUWtOBUrLyUWD0QLP(Kb?WxMwkdQNf9eI_shotQZ!YgBf z__zkudXCe2lXMK&#mMWg=Dq94s(*Iyj87i-Ey(YFYm1u8HDM39Vr9Yy ziY))lxDBGReR!o!*xl9+;Ud51B-4CbXmfQ%7-8_@(M@##DwHc*_FrEDox2|hT7$?% zZQQ(G1;Z4MDcW`qCa3waCgIn^NB-7}mbw9#seljryH~t{7#rcnZq>m1q%I^i!LU6$ z)m&i$h8-3?`UA`Dpz_jP3raOHO^j=62U4n%rIUBYAtb*8pS)RtOP8bZh&t<_AU`7Q z>cr0_kUARad<|6#UwF78h{C;h)f0w0`Wf96yatgIo9J#Sn?&^EIX8sE7boN7+iKp| zio^3snOlGe!d-FoO2`c?0$%_ zXUAak2WllS6Z74UEhwcD^nlO=D826Q+gmq8XZPIR+!jh2Qt`gnICq9j&gAr)0;wp! z!?sG8?G`GWaQNp%5M6A(3_*i>o~&3yh}QBbug1H8=GP4b-7=wu6ItZx+UZ8bzx*U?^;_n(e1N}UC*-;XNUqFbJzJaPGrW>? 
[GIT binary patch data omitted: base85-encoded binary file contents, not human-readable]
zKMvx}htjP7bo*oLU)SsZN9TO~_vm_6|NSfa?-%O7C!6n@|NqeZ|C8qWqqRf-KXNJk z|24)34AH-j6Ub+*4wDU7)LmoNl&c0&&LOjk& z8&1*Bptwi>{|^2C&gpXhXN6U79Uab-7g2l3ehTDO)FeDt=}+Ri<6bnK_J8kq&OM&9 zbL+f=`W@!Ez&fScH_Ux`%Q)p0Yn9@rYi^?t_wWFZ@C5z-S^i^VJJ{MzGR}qRBD)V6onVExi{EIGa2=jzh?=fD7T!grH z#u9QF;vO0e>hQ@s+8iCPMANPJ^Y4B26ndeeUsx@CE!JT@HeeIB;DhZ4+Z^x2Zd82Q zIC(NN+5SRgm2n4^U$D*%Jtr)S!}NUg?KUUEQF=9s-SRHFJLKiX{P%B;3CD%^{)_L6 zOryuR%S;UWsxpquqlmum^b4oRGf{6G6V8(7aS@ks71z;=o9O$y(cw1P^RLDN#PE-e z4)@4Mc!GXM%~yI&KJ>1uZ>Xbs|250~n((U+k3D*LM6VeNFq{c>ag)IWw03fh=-pLup)_`rsyUZ9(xg9~<55e;ntQInqCD z6TTBwh~xiq{k3mh>mPPIF0I?kw(q6yN7D}V29(y(*r(l4ST&#cFnJWUFKcrqkE7;L z|M2F%|6)w(e+fy)X;gOnZK%36-2PjC8+IM~+whOa{wC~3#&2!-C*|(mS4W5M9vT|< z3>h81|Hgj^`)~Y*koQ~PZ$A-!c-NQ>@9qb}4tVcBo@fmp=_&gC6waXMlOx01*VH>c zJ~$j5YYj*B+9T72O&%Ez+ZXG{566T-|YKS zhlhPF!^8I`t_yn?uM54#4^)&Hr~fhIQ3iQm)4i|R_5$)>=W!8*kJ}fIypFyv3<#B9 z((dtP`{FsiiQ6$875^|e+>5&QuD*)*$Nx36`@b>%?(C;VeIT(mS;46Gi z-!FYAhGQhgU>qi35~|gIeu`tt-k-=rYsGUJQ~Wj!Gw|jiY1kLc4P!s`-_;+n2HXL2 z7{>FDXY-Hwl}$*ac`N^xuYRIGUw*Rw?U{U6l-6-`{#ltPoKwAd5Idl*wB5X9 zKKl}3%TRNH|Lfdj$~yjzSE5N=X*6b~!+o6SKV84nr$h7WpOTL0VYPGCqBZeJ?KBg^ zI{JESz$R?LHta+de)>vWL(4u9^kWMXp;o)k@x*7s&&)@x*{WYH@!61kWp=2WKQq+d zn5oR28q!;*hQ<}ddUuYWSJ|~+eFW&t)h{HIFY8*!r?W456 zc+Q>9&68cTJsWvG_VEYa)aiSKTMw}Nq5U6S`{sPM=ky@!#%d>|*IIk@lsL|y=92FQNnvLl zpGQ;H|5;nG(KV@V`#-Ax*T2I5SfQT2RJpNPy&R?Xhqw3}uDOh>xQ@7YO)q&9x6$sL zjy38Suk%01zu14657L4Y$eiOd0GB*<}?geiz)8?r6xMfAM1b)vNX-p{lN(hGRHe{tLovgal1e}33H82b53 z;~_36KQP1fvrvw?n1=;egud@+6MM=0kzwjfj(7cHQdmYVN9|1GALQ?q^yF#J7OUxN zk)}5xrF@URUfm$}kxeSK6E9_d*sm0tI@l>paLqcb#|CV|7Hq>#v@c|zm$J``+2_~T zXFh%R8us~h_L*#chyBNiYkb{H{NHoz|3>zGEBj8yuqyHF#$N14?xAyHIZn^JzfP3e z|6h{-WBdPe>~DhqOU6AntM~E4URGwIw%eF%*BwP^{h#^lDykhHN7F*-y00EUre0D1 zXmNiyanAjnbbrXcqkiK5@xl3jy^hbahrnqjS54HX0XuFyd?$MKPxVP8lyqEv^Q_t2Pc-6hG znG+s3C$q0SJR+YU=fC?^s}~{LQLbIeeQhld104@W!9B(MD&6N&_jkbk9dUp4?$3X= z9&>-lp$+Xfr0o ze8tmr-}4n$wRomDZyIJ`7HUUHzjMmz$(gR5Iw#Df&qKPuYmpiv{mzSPYsNa~0>8yE z28+lgD9!(S!MM{n|1TJaDr_ZIV=dNUJ^o_;pE`dErFA;ki4D%#ge};HSpVNiR$({V zN3p-`=>Kv4-&@-MaAKnTk5;xRiyXa8yFhIJ+bf>^IEcf@y~@APws(}CU&wxM$K0YO^@IT8!rMCL2FIaQ*d49kz z)z#y*=Kq~1FXA$;;yNl`Qs*FV;x^*^zdrIF;`(0?$VYgBen0d64!yVj*8uv{^}hzv z2P3TvXo}?kV*C40VZ$*JV=xXAFbRF1^lgwmpPdt?k-eWugzf(?W%nQ7_cb^E|D|tMUJL{@>T#)AJmcZ-4XUc^&wker#cEb{J+fqd#7~XgZJSBXkCAL{@;h`XXgKXjQ&YHdj8)gaVdQVpC-3qaQd9^ z8S*Bi*>$6~#sAyfB77UNZ2la^_iCq*!|U~5V061ac8se_Gl)KR`T2|Uv=2(P57faa z`wggb(-@==tyO=nQomO|to;<*73$#bYV|DM==nXo{MraEgxh%G^wBIdH=tc zvH9t(Y2z0S&PVTbEgkk5qK~2Hq_mMlSG{W#7w7+dh5R~V-^I7c@8aR}|9&9+BmC3+ zzs&!A{@;H(=I5BiuMzYA@5m|q31{d3vH2e{|F4hDjsKVVe}9qA-|!F2Ot}Br)ARq% zp`V$5^BDRCc=Y_g`VI0o9%oyeLzjKa`Qb_QOE4AJ*4I9mc(OT$!jpeLCp?{e1{#0N z{|K|VqEc-qO&%+C_952R8@d`Y`|8L*J`uFL%J?yWv@-L(O^L$cx^!$%kOC$Cj zypDV$vQIugyqTP`zTVV55840!#y{YL+In9wH^#Pi;yrjDqW{(h$PeRV_$1Ew|81h5 z@&EfY{WJJy|G%Q}1o7K&6K+9%{dacSZS*tizvuh^jQ{i$vrUm?*L7jDHTLVRv$tip z_5Txd!yS%E?`B_`YkxPr1z*DGO8x=-k;lo5bpoEiDTAUqn}xS;OF#7^yr)DW~ZN7f8f{R9-hB{Cp?8e zAz>~pKRw&eY3uqq;IGDgt7oz@6ktZ z-^btrJPuF5lW+-!|F{0X75e}1Kl&fcn-QKSou}g&coqg%sekbt`dEYd7tf==06BUV znI`>%Xt?&`5dF)R+xB9#8f$wg`3fZH9eddIc(w5B&`s~bnRT{fo8^t-&$KJwOn)2R ziT5F{|Mvm%!}u6JiB0%4wqfcw{*y@N#~)s!PoI8Ej5n|DHgdu{xPzShcYXL#1~zky zzs^5W$hK0gRNcsqV3m%6oxQ{9KM3#4G*&qwFk%Nun)`Ghh_5r zA^Bfhst^74ORdxYTBmJJ#__?SO5+D_Hi?h9Q_7l2GF``ix<;Ft{yR)zv_koearQ#qKZVR4W8>KN zBXsn+PvrgLO4Tvyl%67M)7bVG{0%LY)ek)Xq@iDe{QrY@s$&!C+IID)bG0u}$KmY!dwSkZC7xpo8YD?k6xA_CF+XZ$vud z-dC~DDz(3b--hf`&mH3{y$7<*b+v0Vd=x_x(vT{u_RfKD1l=3ka7@ENpDtA9@v 
zhMVYd&6^JS_!jzYc!d90fxdNdccTSgLjJpDk95-Gcicm!kiAs@9GSyc(D-A11=``4s=#qECFIApD4&dNx0H^5>Yuukkxf;ZKPDZ+{{GhJRpYo9}PrbpP8q z^sx%I$tryp^a~JuzT|yuVVrxf;U5 z#sEjEJpZ+xKgQ>I{>UP(5s+X1`=I=PSp829>{qr{d;S>0#J@k}|HqLb`u{ymn$JMv z6Fr~TmxgE2pM&S&1<3yO$zeG;^+)|YzwrGw@$(UWDPDnBxW`8Wsrjr2F8 z=eW8>IR5)Q&cSZmVE%c{{Lo3kcg)-HPP_;2!w2wTdHnXs)BjgBKWw%BL+*RV-{>7H-Ph`e z=HGw8y$kmUcU8Ji@(yXIvq^d`-6y5v+Iz+K zHMu{e67DbK+8_3Q^WR5>?*nlyrbm~*sfF(6Tw{^Wm5S%Z-~wrsd7kvKYWKgMF9Q85 zh(3j9|9`O7{~5%!kl!M|i`I4X!w<+GA>r8kx(L$xPvM^<|Nnyn+8NFp|9>#?`isM) z_{q1N7k*9t4paCO{(`?D>mNA(AZNB~BQCijJcgY5>fgczBY~6RB_S|{KWX=t7UG#gW{X6Xl z-yhe%rQjLi(|7{?NoZX1ENjs}D{S5VjL@+1%5Z~w`Ap&C^gqSo*F+)^JX^wZfPoK40;q(6sd8aC@C^OnrQbw4a80WyR*of^cV* zensCy_8V*x@z21s@Ekl3FTm9QnID#u6PNl&1UY#f|041gcr{*!vGv+#Rq{9e&B&E1 zvydrK*HZ(ZBE0j!9SP-(RBNiT)Yfgi&?q7{-x77CCik zoBFbSMEzHz{%=sH?ozj^$Gi2>_mr^P9eW$@z};xUm(YnGJYs%9q4Z0o@7!7B&}&Sh z5B;^q8ZbbA`22(x=SoRu2xG{hwT}H=%%-MyCS4o8PX89-KLNUhqmN`4zANqr_!0gS zXX^)b-_M07@jK-IKWtoSivB131%E@^4*oyQ+W&{N|6{(K*8WFaVtT60-jKuds&tV)H^zZO|v~_yZo}>2@*HjwPz8aYJ9MPeT)miC1t}>4j-G{xm zy>WcSdpu}vqiehbPs7vk3_J_Z!Sj$hU>-5K9P$5mFD3`4eV-@T5%e?bkGz8ZYCL-V zk-Yz-I`FLjW7@nM`@I>7%;~x>u0N8Wf8XWbb@Yd?Kk_gCKisGNj~>MJ_ueM0cj7&G zA3lH&<6{_JqW;GyVqg0|&Hp=~{f~acdmWhP`C|}6m@u~gN$E_+zKR!K5H``{_~WNz z+?S1wk)Od$m@*FVd;KrBMEzaX&LHo=-Dp8-#M~<~-LCyFk9X30Vmx9QF@|yXpF!R~ zy;=LO#=HsJhcJd5TKkGZVx4hZdS}z=vH$EBwbg{yPq z{#|;bKErK~*9K!xkFod1mCYG;c$Ph$BipLjzxob3(1|W2(TyJbKpH>7f8ytu#INx? zOkucy{f|+Ml}f)z`eo8DkLmmVT}vN(zn`t2TFw5iWdE;mee|If?EgjP5J=}Q_#6I# z)&=Z;@iRN!KYPDJf5tiV#~}I+#I*sFWc-KR1>zouC*Vn#y0j=N&z~2ju@l^8X_FA8}2YB)aK6$SU(+V82G~;kf4aa{7z$QoI8H za{gbv{15*w*Z(H}qyL!lS@|#^->1nzwXWIu5=bn6+{1_6)<-bF|Lwr(r6COSPZ}_P43#03mUl^}deywx; z+I2ayw~zng2>(U*;=k?24_!Y7-OrGFXfx*DUc*nY&R75$=kzBJ@Sj8vKJA>_@EQEN zT|Odj!ELw$n@?KrZ;tm-62jfWEy(_xYbJXT=O3iVZGTmU=nEL4k0FPzAmtvvPJRpD z#SakIc8P18{fIus-XE>8t{eU5$i=<^WV8XsG4Vn6zxc^vb5y=;&cHYLd#Q!)Np z-zPbtJ^L4O>qpKBe~a6E7ylqum0cjP`|tQd_R1jV-n`MJI~Yc3_J^iNon9Y^s!@( z#q;PdKx{K)kvSqQ@yo@vsJC8Bz7(xT&k3&}UyTI4quq7mb;55%ceCq8{yzh|&k1i9 z_cpu}@4@@<0el!A!Ob6Id}{%z~k@)JPDUzGMEEHPW+-EJe?d(>sQE_i%5SK;@ZABj3?Om zLfw&!7RC%mMNB&$CLNUL$X>kjIg$WB(Vc|4Y^XWNM%K|B$j3gY+RxK85|S4G{lt zHu>1ukMJLSv-IDFcjA5c0ODFgA0|H*^;h~nK6^5JlHSrhBWxlQ+xZ`m+t8X(W_|hZ z{O10yZ`nQCCt2wp?GyjC)Qz^fQ=z^7lx?R%C%V{KNpz2#^6pNB_eke6xCyu5Hr#=` z(SqUhsW6f`rT^m}`ak{=#*sl5IrQ#375dPB_>}e!8%~;O4DLJ?hLDs8orS1z|_DilfWqzyjUmd&!DcAL%_DQ?W!Gov5&*_bCm(MXOe~!tQSa@Hn(B^8C?J?mEj{mwQQ8 zdH$}i$MrwKK2O3Wcp9FLXW&_Q4jwW8SiN($|F6XL75o17s{h?%KbbnL{ZHoqFG>#8 z7KGnc=m@C z*t2W+pO?%GZ*a_;&^k|^{_-r}pSrn79gSpxI=Yk{Np?G?cY=Qq`Vr^y4Y148e0=ij zgO~faPa*#T$BmL>jvFU4!dWtx3E{2I^A5Zl@5Nt~CD)T5!bj0It$lF9x!YX>x<*_B zy6HU$=Ug!(e8PSku?3Ad<9~1i{fz(Kjr5z5Tcu2JOl(7s%Rje@yB&AoUQB(~KTyaH zbYt@U?v0#yjW!E8itNV=!+qq|53*adVd{^nuTO;VHF1sVjBT$k4Bwh>LJB9K4!zy2TFz7PM? 
z`hP8lW{3IGc`7c$6^Ls9Cb0ER-yB=K1F`@3O51wW+1(@BAleRHxAAxW<0V@FKhft;Qx2`Z8Wl z?>yrC_IVZkwdgtK*@#OX_pC7S{KD`C@spR%3U4Ccig)1McrUKUhwxE+0voXfQ-7Wn z8p*a3`akA4$2`w}u{4)Td!cjCd&nE?b0coXtvEAI7VU@Ig@=!@KaY9GY7vN9jij1Bm}+j{g`OLfZlLpZX}~%}#kL=FKG8jh-_3>|OfjoTn8X=te&V zF^b`Z^5$XEva%(h?RSNJWazA-cWfxI6NU~*t)c#w?k?^9&-wW_c2 zz14*xQd*3=In4a8OKJV{eKO88D4~!pha8$<>adn=l;Exd;=2nj%w}7 z+Jf*V;kP1hSE?7fv@Mg1j6d-OZ|6JSLH^VFe@%14T*tfv@5Xy^J+|!f46qr)#_>nk z()scID)#p(_BZnX*<*9}PV+xM!T+4CoEkAdp#8LO|KKk3|LfTQ`T*Jwv;XI?|It;# z{x4+zv*UZn4>{jQ@d;%4W^Rn<$F`6ACir0u7dwBc^DkikV_cus+4(PQ#9lUjpX=*q z^QYMU1N3wm`+o`hAC1zQvKHbEd)?!Kx#64iv3<4+e~12kCqt7>w^N@YEH7^#)`z6W%3>V0s@?RDi#QA-3 z&Tm}%Kdu29%Yvb$^1k@=3T1)tT7j2OF}Ezfo@Fc^XVt!`u~IE$7L&ZLGXMpnjz| z2k>j+;~cJepmlz*Q?*r zE3H0#3H{m%DQ$)U=SX9)R{sQMOXEDu#f6wMjx+J)IpHGuRYu*#1;phAS|*P90k_J6uU0Q`e4qzE{z&My}YiK}Ovk=kLe% z$4$+D57*fCBD@6o_J?Qla(d@c?`DF3LHs|!o$Tp@r`u-9Biag{?Ohw@guC7EokiAu zJT@oXZf?#UON*=t`Nz<#483(?I^1@2I(&Zbabt3SGFJDOaLe@Z@VS#G!p>tS_{9Fh z_}7EsRnGNVG!{P)wk>@iY@PF9Xqf(UxM9y9!)J`Gy}`CO;l_#c!%f9=!)MFx4?7B^ zmz*1JsXsTo)wXxw)}yn+Z8hfK)t(ocrq2tvA21(7S@Uk&-i!LunPD^TtkO?#Tz)Qi zto(dYXy0*BXhToamB#a)X$6_$t@O|X_X{waR70P3BOu0Ks{?N9cU>UmcGyD?2!f){h z+>f@T^#Rh>aYyGuR0|-B(&4!2JFknJO&}4>-?*IE5K0_hbLG<7d;y_Brn?NM*{QjA0y^eaiP~Z9Me4 zmp*Cd{R5Q2XZ^p_4@1Hoi#^w}(r~r3u0gkOPt%iqhyKH`uvGt!F&h79SZ3Rc&^oU) zyo7u?5+s`WQL7bbfdT{oTk-xHk1r#?(H=^%)KRJle7{-108m4}FN%$riefYjZPCmIHe4qRweuAH2 ztj71eUYkRH`K549*^)&DoxLy9o+5W#O&sN=^Go| zmBt`_Xt#e3UC!6xx%{3!B7Z;8cP`DjXdFJ>53u$5=Y$J|FT%yhnrl3toccE3h;Q(p zI6fmhRroSofh*CvMt|&)8R05=oKqS7f3BuqgGbCiSgHSSE&CH^))DWbCvj#S@l)<~ znd4rBm*C}i6<&)sU|5|#f>Deizy3|1Iu1GXs;BzYQ~hM>r23ys3lAPr|EtTQ|KOXX z^H#hA@5UMb!S~Y7)SuVWKZKn6G_I|iA;*_IwEkaGzv)Wf;TrirF*|(JexE=>e8)O= z^#&_#MYQ z+`sty!au}M@JkG9OO7DFzKnc*wtcBw%2g_V7JB}RT_YLSR2rbCOFjQ$&)+xQxY=|0 zuNmQ2(rV%N@mn&{$G;PQpx=)N&~c1^=n+02(f5PwM$bX(*lEK*X#3W$%2O+6hEw!7 zmM|kNe<1qWX!$5hsio6=vU>RP7=tG+y-%~!lL^#_!GrXMK_I%~jPtOmprN04h!dvkU zOuc1xcsKc8WPf9B?&oKR>**7Gkv>Fz6raFGq$>0sla06mHzNA>rE3eq&Ga+n_pZ)`mN6M9s2k2L&UW|I?=UV`ybt|ug7)%%=>zk?LWcKki{?YEBqF1 zyUhQO{R2z&4=gd}%`VN)|L3dU!*{*6gnf^G+fo=f!1g`FUfIvSXZH@DG(TN59){LJ;44)KT;S-v;Q;fe{#riDfabT>0F45a53`h4~m~pAJf(y#Z&2*Ay@0# ztK9>ed>ncIlht$o+y7+M+;D~cvXA9|D88e@bHmn4_2r1~F7w>VJ;xQEBifege?dpp z>HWH-okaIe?+1RP7m*C}iHn7ES`lav3YpQdY8 z7^IJ4>U?4HzNo+5{D1!|_!|A2*!pj?LTc@d@Ev-Z9K`qOKg3TkxaAsnBRTd%V ztMFR90dK-v@eaHj@5S|)`i*DK*3FN1YbVBg|B(2&c0hWMb_jh;-9M_0@CkaB{SfCw zo~{4Y{Vi$7+m@gI_e%Ml-nzv2LxH}3dgoftf8OkHBmHLdRCxaDJ%7Zt8g3Pr%IHTT z@4}#R{a&&au?`xQ&&SrVGlaVl$9{50loKm`2PgT@RQvvspZ`KW`v10De{XKR;|4Jr zkH>xZ8or6)HTwRvv;W8SHx6q59yb0>rZ%vDYqb;XKS&=Uuk_t~M>^j}Y|H+TOf2Eg zf}hZThF_wiRQ@b>eMRzTTAQGceXbplw?F2n|4KsmmHmE;`;mR@wc!D>@ypkSZN9q) z>8CJbSlxm1Fc<&Z{J#tBa}h4ad^{DG;R;-dN0z_x*CEe!ul$8r*Z=Rk#D)mCKaB%R-m^ zl4uidHo0L?YMpnViZ~2hp%CH<#PQm=J}h;KeoZVe)Ih^=KE*K zoN(`+<)IJ#NMQhJ3}OhIN0je~&+ePjo7lW8e4m`WygdAnY}CiQZ5F@XbIZd|gnx!# z;#c@BQtivbcgR2BemsD|-OeHYLHgLrWnmPj=rcwCB^U*_(YcnL9JS#j^T*^DSjJyJa3zmf|$*YjIZosIzYwScpxLWuc z!t% zTj}q>yYXIJj}PIa_yjg$3*x+@MzT$r((WEQT>shspDcDR;piiCgMDtq&A1h}<1TFN zofYmShYz^_L+;=8jbVJJ``_jMo813)_b;tJ_u7wCq5D^krKjCLhA{E*l$`qUvM}{+-x2*jq&(;R{73g6|2^G1gzI#T%fxl#hxiG8hF{`W_$~75=c&UwkY67-sqU#)2eEJC zSa@#}`*Iiil1$k)pgxQ9-r_vCA>m>1BhBhS{q+B|{=c{`;}1!;W!|2g!{&6(`_aL+ z?A%`%9-t@8Gib$w^i!BIstiYqxRiFtdGxus5Eo%kota)aJ6ud3Tk72E)9kxKm@k}P zKUaO4DPeyvQ2%Z>cR`t5kInk-o@$@`{J(~C!e#W%6WS)z?5=b90ewoF{E)WE0c{kr zYvP=6g?&=T_4SiiVX(niEqM*no=bjR#0>>unedB{U8i0U9AV-&m&xDOrEA@z{dcG^TraM3k-V|5FnoyqQS=CRFHi@Q zT}$}Rh}(!QXvD+)6K)XRs+_o)%=`C!`}}Y#{dU}id(md>uf19S`#!e4GA93D?mfzU 
z^yt%WwND4S(T_olB8&TQ`v2n(`9Jda9oN3eAls~+gWetb-&dX|2!!6sFhntI^A9j{6 z4WGkJ>lX5XwLV^$6}H_|5bB(#9u1iKO+naBPVjNvLB?`^XOw(dnqvI9MPc{ug3wHF zQTE-dO`O<2D|~UDH3UnI1*3g;sXl@db@P+?u<3i>IWKe>GwUgMy!SUR^qnZy$6Bl( zbv;}Cx{xk-zc%*zuuGb|kw80==tK0=ySK!7PZouHuw~(8;nRiJgtToVXl&PBF4OKR zy(To2zaZSOeVOsUt3t-MJ-D&&x#6ZISBKA*Uln#NzAD_j@ak~Ofpf#we^Yj-e{L;) zOt{VS{`?x_f0f4nd|!8LpBwf%b|3cR0Pd_0;UGDzZW_tRW8}XaKdUJUeRV~le-&Q~ z45+iBkMbZnw8QtC(LbdRp`+3|gX_&nv2W7cmF^AJ8Jt%XYMf(>^3G66-ab1dd>A+S zuCx08Q@hSL#$wD@+)*6EVB&o3-}A#Xeat`cMyLII<8t%=o?`vJ68AYT`Zt^(#&O&} zEr-qzC&-g%H9lG}9yS*u;kXXzbY{*EbA*e~U8!7SGj%neACj0St{5dK#R4qEB20}J zgvI2*`^*`2X9~fuZrZ3?U{U-~o zt5m7)`=B{|d->8~jbmEtl>fDbVJ*G$g!zBF3&T44di1Clx(}$c$*zgQut8j^>YPwb z)*#x|wPYQlojuB)A49!x1G4JV9LABs5znkr{C4cXPBftzyU=FrKHoo()<1Aeo$Ht+ zy6HVfjPJLz@6o@CeUE`E_B{saLl~~*AF$GOtZ*G`oR^+i%|C$5E#?1LYA&erB+!l| zTC3RK>ft_m=W2B`()1DZXxnrni7q_+|HB#E_h9Q6)W7Vmz4XRly!NXFVIO@z4&WdT z;V|0PsQ;VT=jcr6pVc-`9#l?goA+SCK1XmA$1siKIDwNGu2TPFR6oerUiIH$^&go< zoa^43*8e2U{(AL425L|DO%JYD|0AxyRgm$$BD?v?VGcQ1>H8_05sK(zwen{5j4+R0 zjGS*Pi!v?{DW~i{O5>;4(wOEJA_sxjUIY8lIX&5ajApz!wK>v2928+ zWPMLa8$Tc2ZQLJogo}_(n9JU7?A};=24#*(V4iJTXX($0?;CM_vtn^=3yja>pZ-r! zV0<3E=tDnJ7$`LMJ%|5+vHhVk{s%?;4~kt!sp}xeg)>L^9~|RB$PN$~LnETDJVrvnS=i_qh^?g{@8IPPBpSS&8Jcz#$ymLd8*EG5ek{UAo& z;}}*5uSB*+ySz?2eYfjC+X44?$o)<5KR{On{{wWR2df;j8WpHS71m%a)?xUt{I7n? z`9uoz1)==eB(UPd1)PzKs(DZLyy;cN-}eIEOr zTp_MSTVy4<3bCJHHCchgw6;rHKSHK3R0>z2yTZ6N;@SmC#DC|;zT^D=jV21h8vC@a zv;I%rIbkimv!4BLpSb={wo9EX9On=wcWR4}>+F+iXNUbp9q>%~N?p8O+y4HHf}9Ippnhvh9H9>;Ce73d;Mg4W92V^?#Fd$9|y3Fy%5LWAmv$ ze~*87{=SE_?;+p*a@@pq+8=*p|Ht)z{_H!KemPcP6;`7Hm8e42*#8=G>iyaum#e?q zXN0xFjo0xDu#Vt5dTX6`hxPP0r?I1AM%X~FMqE>_ds-Wl>{{Xe8`%Fl*vVx6|9|u( z+rK-_PG+;ObW9CuQHOdoU^{kTIB9&}zN3yABgc&yXX@DKWR88`+s^(M-*0=09I$^} z+duFBPsacM?UYUvnz0Lm<@PC@8+Oyj*z=_wZzdj_4uXikRUuDMMtzDlM*S*+%6&rsilM9W%?{fbM`z7s1cDc@^euM7A_S^5j z1NscMy~{gxYzBL<7yGau!?m_ybie$!*ZV*0{ge6eA9=4=dVM?Ge}j8n<^5NA{}`kX zA?-R2Nar91m(C4`$iqmLTMv$mW3}V%FOJp5|Nb2jchEOJ<~fXd1}*F6hNI$+VH(HL zDn3y&H=Lm7=MPBtB)wpd_wV|;kwo<6C=@pbQ{THF6p`~#j1rV$0kYT442#Icn0P%~ zl$?Cm+)zfwy1yPFi*;C!4X8#9YEg$a zzCrEi&_~#bE_xE(%7-{VsaGA|hy4FV>(uYX-gBw?og6AMC&7Ma=MNrJX0VC!{ewrf z4^ZzM4cLxUxqccly;%Q({{Nlyta1Kka&$!h1jdu;39>X|2gRD9;hu%5t8uXWFA7Fr< zMqJ~62&we!P$Zp313S?_Zyr7V8!wKZ7t_b~OB*HhQsn4aWH62e;##J)A;?8&-7z~X zCYKh$QvqQOKR$wJoVKpjHi7E^q zcmLWdqvSuWA6)-`@qbkB54rv|(m3NUxRxG$B-fGau>rZI?stj%CF6L-nemK;-b=OZ zElK_ZwWq(q&G8$23*Rqs(f6^|_W19==-b)&vQ7N_%pVYrzMXmhf$G_z&OTE=)jvZv zU^{kTCz{ZVt@rqb$=s#2VK+H>o%Rtq@e%Lufclf3um3mKg(Q8DeILiT`si`&X;dAR zrjH;eub%BcHwM*S=o^%;;@Ey15GjB74Ze>BB837(VI%AvKYGG4||8_lQziGrW{y2F8 zW8z0ws~f~c-;3NL<@r+W72BqMIy)46MVke4P=tAydbT;R&)gU$jP(}_$Fi7q73jUF6veM=ozjulvmN6i0QyfKVSYz(6q!#Fa?B8T1;8$;jfjiJA4V@OqQ z3HtE0J6jnLkYE+;SjeoR0 z$#4i&^fef^&q&FpFuGuq`G1>Em+vRF5sxe1mG`~Xn?fJ@ky^Pa46N{uR&NS}<(tA# znK;+ARyymj9vd*&Za#!x4&rF3JcCmYauU}M-$ z?m%Lx=Q{1VVyAEuy7zdl`#opRCV9~F(T42Px7E*AFNFbmdcXO5)7H1aFq?V=qZq4G zf2~u0ZE$~N&bQhu&0W}y8t*DWwj+r?v}tR!qr$`;3$q^8pm-0CsFWK-}IL^ zhC*@NBB|GR)bCZCO}@3WUG+1+No<>aR}hLyq- zn+w7!ay2S2^K|!by zSBpB-qXFBo15;n!6n2smf8Q9I$jLu$^3ar1HQF1#g=TTPup2EWq#Mo+an5hMa1yP} z%A~f!&_|EHza1m;{Z9G5ooz{W+kUqGSHEz8!Q2x1*QwpSf>SY1VCk(*Zkm8t*B)&FFwNc~@|ZZB2;lS6aNecS#1 zkYY>c``@fDIJjGXz4K&{?$d8@M8AQ){yoBbk)_8qfX0vO$EV+Y;^X12BOkY(WNo-} z!KcC<)1L~rudOqFR~^1^=!4<5i4TTbYd;d2Hhd&}9=9ad`)AyBVQ1}i;d7> zgijy&K-eek{b*c$UD(!C6Sg+54-L2>Q4>BhQWFl?b`UpKe=yus|EciVrPW~vZg#F) z+Uvq0+YaN_^|j$P&*$?K9}i7+wbs7$?>qgq6Sf_}Q5?ga?IBE)E&ic(ulKPV$A!Pt zHz%}Or!wKcYhP?OrnR$}ZB`uGjrX-x&j~%Hj}6^N=dsaFj{~O6gX^Dua_HNmuSkE< z-ml1S(n(J|QD00+I7u(qt8T#@SxabcK8HvR|uQCqcy-n!8EPg!9o 
zrFX8^hqAUXETAt$PnA3*E{U${!mvo(Vr-ptepo`5VdDMlSaS0BpA6;X3arFb+w8E4 zT#X7;qFw)XTfIJ-r1LaOceiv$^wA`or`?*G?C&c3t-)HX!+LB$_RsnoR`~`!<1yDY zN@j$|$((D;*7^3O(JRfAH2cZ4a}0;tlQy><}J`adnz7Q)~>Z4flx z6OoP&`j zM=(gwkb5w8#5E#w*fka5iAMMZz?YE}6;kbQIAhFAJkGSpzcD!}fwy`m~ z>s>dxY)jhrr2SGm=Z1o>$s-s{%nftMBBYbfJ>j}BPq-M_qipem&VAf9&|4Dcg%a_l zSb&9S-EaLwETVUAFb0gp^d;yiw+1DWmFMeA_5X=Eo-aB|U1zcDTHw0SjUJRaW+}?C z0xPi!t5JbrZMczx>KOL?7&$JSsZbXnhhAy)p?{(G<2?>6@@$=F(D{b+PgF{y3Tx1! zyje>o5=CqX|M~kVpE~{b>&0~(RTiO(&p^^Y0K3t1m_N%(&waJ>#Pe_Qy={ocui&?X zYI=VCpP(LItR7w{f0fH$WH)+>tvOJ|257Lo-gZ1X|J6vN7Immc1GZxac4BxP`+kEq z0y$Q9dd^=)I4hhxto@f@10?kikSXn@0eX6udZtPLz#4ILW`ri`G-DTbW3WgbMS`AR zUzeTSPER7|db9F#2627eK5=P`AcG0xj(f=X@87*-v?=16kFj4M+R^*O?Z*K`|AV+L zMk3s3#k`W0{2KAWX{Yf{<lrSQrm(%> z>+XYINX|iAdwR6Ww{ct>a+Pm`jBVt+{aeXKL|p5(E3N*|sQ<|x;e*mDa{N3LqXeZ` zfQ1-dufE2aasKfV^)j+W>gfXZ|3US?HbFm`vfn_h@p;7cd4|>+f46OsbQWU?%FtTQ ze#KIHoD&!Q-^=MM(38>sjwHGYX8Uk`f7-)a)@eUiIPZ_W-)iY&rTrU!A>Zqd8eO9u zu~fTZk?Z4^kS%q6hunXY{*PVGLH?`zUnPyzs6ZvEum)?f4#U;*A4V~Tv+KVe^o-@j z{QdX2f24M3|2R*&LjNCzuwEK1?r{TIjaF@`b)I<*Jwfj%c3stMdEq+b+hseo;i3({ zW_G9-*MRNVft_eVGj?G&5@<&f53j%agtuy{WySwIE2HPz!9Y6xufJ6|Ndk2X&lFBw9${qpC{Ex$K+3K^Bhjtb`k~OQ2t^L ziZBnwXj3M%<81jrPof(==v^Uyua>`8%FC7Vcdhr2!4ml!!;6#;7+vc87^mmgC$cU2 z^5n;T<|(@toL;vuL6$ggDOwlJ2n)zX=-jP;Lf>Dqo$WuOf1*kM1hyJO$iA#JYgq*r_W+)?V*9-`Onl1sAsqh! zR87{P7IjEzkJpn8*p3~DzMbjge9h=%yM3F7)Cd3dR1_ihs2g+~7;+GZ~0@E+sq7{wUIkwF$Y^sX^} zDV=`l#kKAS78t*Do;XK+2ravF2;vLP7p^u>Fm}@&?jC0zx zo^UR4spd<<9&#@RGna&YzK8|Mk+Wl{UKR zJvii;!RQGye$+J~YwSPA7ofMqeV|`l3Ip~@V~{?Cmcysl zFv!;vr6HVq2J8kbkI_8mmvHjrM5Z5kgNza%W8PdtBq zSWGTK8J40PE6}o^9~zm^wp>N7Myt3AvJ&y%I`LmXN$uh;^vF-~A3N>Zply5*Ity%{ zV>{VZV!JkVH+}2-3qzIT*I+HyVaqPxJT_xQnKF$0I$>l6NV!_fMlNI<7qF4B-uX758nuZ34y+^V(STw5kCb}Oi#%sM z`u`63_17!;U#;P5MGkl$=`!Vix$@uhZ#1WH+cyfsc4a(iKZC-toQp&Yk>T5 zerOh+eDeITi=1E+?2ch$fGN)^L2pMATlE19?sbmC#(jmy4!E}c!t@d3`g{ZXy!$<_ zm25dw7;404um^k5TB!alogMbkJJr1%?DqZi1Bm~$>t?qn)xBNhL2(c7KRP6Q7!x>x z{QBQtRWHzwVH(GA0wrJ zeTkeY1|28(|DlVX#F_Q~H*2HSpVYqD6GEjl;+kjMKBCWwUg-J#kMkdQvX$_^t^Yq| z%z2Hp)?yvvyqNXm22^A67w3c;GWLJf#<20Ix)}D&)|1iyt%2N*TlvB5AkX-}?WCXa ze`}&Q4rM_sBf1dFh*&oC;NkuSBlgQ+53;X6 zKkOqLzs$bAlur-+01o014r2mw?4eyg??5NI_Aiw_d(pdi%g(LPmies3@ah$+O z3@?`d<+rorzw%Xn{TJ^t`g!EnU#^h<<^9wm`Cppph4Md!irMyj4hz2Jx$-3}Bk23lytVU~*@nclbqfHwBOXVCI`(f&lS~SClRYCBgj&bcp&kv`jvd&E zCJdWnK4K2SXr=S4moDFfjJXI|z6ZIr{15h-w~qcp`Wi4mPh-%2@&A9#(unie2eFI3 zSz9o_KEH9HRI&6*r6-*%a*o+;p9Ds28=K?*0ZHo@2 zK@UC7K}aIbL1?RX|L8y`x^_vU!S{!rw6gm5$}`99!CvgcejLC-ob^9htNy__GRPu_ zUS)q@!uPk^_lE&`I_dk{>H90@j48>yF%(WjAHue~cS zQ^Wqnad9nt@K2B@(Yk^^@wb&tNK~j-SIf8R-8sTV$j_@=EFUX3lBhq%KE~Ad%)!+D znP;C8G(MkQ`VX$V>@=t7~c|XhwWx$<$x2m-X(lbJ{?fLb8+*{+zJkM{>3TOR)+;qBQb9i=Wkj5_TMgr|fq7P~0*Z-|y|JSjv$?*;B#=Y$8{p@S9 zS2}&t?pN2v{{s!EPh;IW$Q~b3FQ*o=H>H!o9_+>70{fI{f6&L)tAE#Of6xygSE>F* zW}W(XgZ78G7TXVzhtVp2f;@smwfmBOTvzv~@G*4Hb6?1hb2v|1JNq9UuCKFC{C@Yp zME*k$Ht*BcM4TTy?fC4+wMlWDeggli|80f*hknGhVjk^(8~;goQaX*7tJi;85DNCY z|32Ty%GsfiKGx*h!W?=L;=cp4OMGJuzBLp(wxwovm}grtT8|qeCQA|55a>9dUc&<6 zh3NL(#dR(6{~6s=5EhABj3p?;Qj}u_R$>)aqXLzv!Wyha_Jh{n4c8jmFi!n%#>zf> zjt#y=#BXv+A6Ub`+5T8<3j0s~_!*(`HRWNOF}4lDQ(rW1B+3`g2sLCa>QIjcY{%AX zuMIoMooGTcCXGk#BIEdXs=~j+=n1qViNV^~2c+#!k2e445@GrX&h&d_^gWKp{>O6p zzh3@tmj8Fk6WRm$|9{G}J=p3$;#p)Iy9ax*5BqTd!xid(j3V~2jaPd&$RdY-n*T2@ zjWhrM+nn~iGM*QCNP3OE=H>t5+HjaY^?H4D&onRi^}cE0$^Qu92zeC8FpZW9-y)g& z@nzuzc@nL)@_)5*_&f6dQunrGMku7uLH_@LzL8|Hd&TD#UmA86ULI~Px;)%+=<@LS zhQ*<&?3v-#z0VA{rI&;+6g)dL7e71P-c%OuSoGX*=Z5EoUCm|TuJY%HyUU&*);Ztc z!DYrAmW6}#)PZH8$a&_W7`a2sLe}<-?c*4uj~-nXN^HAn&84CJ$Q*qT=Z4Ng{p3aZ 
z#m#N&^3S%O6~?(Tj}5yg_&V6PrNDe^dSaLPaZUcCF>-Exoa~mMM6 z0i;)3|G<7jWy?cL{k5S+I<=@nJz5*C4T+{}Lj%2&-Z64**iPSpp6%Can_O$`4h^cC0+?w5L@~^#>MOe_*lo2gshP@<+7q+Z~@oA4XzY!)$-~bwqn)w8Z!u#m-kfn?}nK`It@Q}9g^p=_ ztiIwe@UfsT#<2Q%B-YXDn&_)PUZ`#^I_fd_yfAd4@|BkT#(S3;hfAE~J#5u~a(se8)%dsMcu?oZ55F^?Wqs9})FmB8tgR}o1 zNV5O+EA%7P!2aK5{6RZqP*&FW^JG|X1H z#|rnVt+8HQi}uI{vKsNxApkU8?vcWMA?_b$2yaPLt{a`^DuIC@LL;pvE{*Pw;AKTdr>J#g@u~*ptYqUR< zoBc>(0O_Oriw^KFLcaenF(c&H!@K-+zG#;AF2sL;#eaW|E%5%|ubz$Pr04XZ$F;pP zXqiygh;K&{eTcSu{P$Oy-YLJt|NV^6Gw9hM|APeV$D(QY$=zRqmY} zsB-V*pzvX7OyCHP;uxmP!=EOPBWsLp(m%IO&?i1Mi*cg9o+Cdw=Sgt|-;@8>YoC!@ zzv}sw>VKurL%P^C&CxDE<8S;20NKN*uPyH1QH|0C`G(l)ly(l(`NqoplcY@?+%ZCT5#tTH1zW)#QF zlvPG?Wi+Eg1V~E?0a8dIKnh6{@|*l1zdnhKWkzPmXjD|jW;K)I6g8V>ETfD~ z`+eR}x}BNb?>-*i?;o$n`+d$m_uPBVJ->h5l~{$k0kqO8x0-{p&L4%IU-Rmg=9ENBwi@Q@kZGNFPFRwfYeG zo)xvR3&?dSL@_o+b5q)jrSvkCqXLx?;=iLxAD&)~8q}gY&pV)j-oMn~|GijWog86z8fP$y zx6ZGPF_##JIZJMSNg4>r;T(MeQ#g-$X{14#IYKsO`8GNKHveDXW*SK&>H(x84e7{0 z&q?_oeP`qW`T0Mbf5FdoWyw*pgWsL>=-)8io_FkmI)NXVn1_xEdw0o8=hM!oFTg_d z8Iz0FkPJ+#x5z<6d)exyVjH930e~Bs&Z}3T6GuwNSAgtxzx2~ zcyqs}Z@-**P##uNN(pMswu%O#PWx^wYm|RX85Gxl=ld&JS3m zk1*R>|ChYaBPrnpW1yG*UwV?eesL7F0fy-#7%EUMA#p-n;&kK}f6tKPAGa1JGUF`H zVFFV)j|-Sa()XkVOul$^s9R-hg`S4mHTG$fhnlVHA?KiFOkPKH=0`J*EEfOBbZs8$ ztJJ?Z(x9F~Q}4xb=@xn`vbcF`{WG6=0TyELYuaDU>i;fvq;Z3Oa)3EutbGteXsglx z$5C{k6Vbl7?ppP~=jIAeGzJizAA5q{onx$>--{590YrWOer&p2|5*QLkUoTge9uAD z_s`*WF_vH{>eiXRaBUgAk>Al6z;b#nqJQeNpqby%S-Ug)@7Hj%O8Cp!d8g)5Zs@J# z3isq;C01cI)*v4R=;5Z9+dlWb`Tv7vZG>9yS*ZUvE&L~he^~g*&QakfyP1!X9TVpE z`BR8ul%hL@oAk><8GUB|<0AXg=@t0*`yVUW9rB&3$ZFIrcAf!QBHh>K3Rj_UE%83e zp$7IXdBVlqL^iu#U#0(hT)IFbn$Rr&w#e75C^lDLgl6|{xL}RIcKh#r_jcwk^qeri zKCF*T_K#{8&=cnP2hVFKRKGDlZ;t*bI?##e-yhvLhK{A$2K-6jI8LCOT^CN$`%C$a zVfqM$=z~a9dKONzJCtV43^|HA`WSf@we;wpKaDuYJb{^Wg_xU0gb7idI%XUqI%j_z z(YW~(cjMm;;XHW()0n*1{@L%7uc$N0)JXrW^(X4GXzn)3BPAg-q_I!O;5C_H4!P-@ z;;Sh$WYXs$3-hr63(E#%;Iuc^5hn zoym0s(cb=gv=&OcXhsvVJ!27auoz3Q6w9z2^%KtbH#dJ|q4{sjoQq#=j@jIGYx&I~ zgPUBeKps|N6;@*ndK%1sqtAK%{r-DCkZ)ccg9YZgXfnY1Gn>qzG4^pwf}=#$s`qdeIwZ(>ls7(&}w zWrgzNC^_?gKYEvPqWiS`pHQ|m_-^_t8xq>+wegVj193ohH``-IFZr4JjqF>%|1AFJ z@ShySP=@};IqR5R-}D9bueM@3y>84oO6)V}jjM%irFG5pOth{LHg?Tu%8!S6?6NQ) z3$PGl_sqY43llz^oOkxeBtI$&OCmNH9ce{s!)w+Ou2@v zMFW~Kv;RQd(@u}(B>K=r@5RuxIuy|ydGxRNnf|x>CHlu*V}&xKOW84^3^}PRIc_bQ zdlEQ~6POuKX!pHvlDWqkzFzd9U!QtlmHxLmfI)I7Q~h72?7~rf^Z&5^kDEj4xncf{ zAX;LGZ+>9YQ`{yhfo&9r;oItns#}s*9I_+QV z9rgPYOS~h75S;;3zfReSOZx<8>VI)@X`kSE_g}y?l1^)PAr)yz$D98@s#O2iYL_IU zcF9}De=i8Xy8h@Hbv-)8YZos4|L9PXy zsHGn{pn7k{56XZz@ zV+5OiW_`gc+VS)=7{wSSPhA$yMtYm{NJjtmogk-h9v3i;q#sICNJSdbk%2jg&W|g- z-udAC%48nDNqb|yvL@5}>MzY>H`D((<@~uXI0w-D{@8cF7w?{u-!GWkalQYH{68WM zyl4$NbJQjsL;}&jYU-z)pN1o7MAJfjTeP4RUEVRveG8F|MaaQoEJ4pX>-*8CFWg^h zUA%sCLf?20LtXw4kZb*Up7rNs2X~zp_=#@%u`%oC*QJG}{8@(O$VFYd{&(+HVFkUh zO8>jUIs^JjL}%x>pcze`v5H-XF^ARU8g!qt)_^QP7roDzMZd9%CBm_e-Jr3FA!7yu z>=QN4t)FN99Y^pV{(tD53*A$UQj{So<7dWy64tsf_bd{A^qmxbM0*kvi2m_6gti63 zpDp~}cjo^H8NxqD_)~=+hZ>xJz@LljaD8uM1a-~Ak1F)5(XJ5@t$Amh7tVOi;vw>_zbk66@y!0CBIWl9AK9uL%M}1Iwy*2y|*7YBmC;y`f z&4|wUY(=ko5;%?%csAXbISzX78vWXyGHx(4PFSf=t8bqukVgyUf3oeY`R@ty-(<(M zwRiNcQR8IpJ;rTxe#N#EH-wY?9>xgjYK$MUKTV%Gf7E-Np^u`~73J#aGcgZYn2!ZmhnezeGX&+$b{Jku7Z@M}-P5r+# zC1iWfBIIB(>XzAmh9&gIQuQB}(wCuCUD$%??6M{-XP1i=$iqsk!fLER6j#NF%FBG_ zXwIkv>*$53KcW6V?HP#v_1iSf?@VK(8OBZ0?9b$;7@O?9-h7k%NiRb=D)8q2cgnQ? 
zF;FF6VURwAHuUlyt~hN^3iAof{P$&@@gU_yGg*H@ z{g0XbIqLhSEcN07ZDca~_vT6W3}XbR@z(a=8Rni!^?$WGx<>tvfqZRC3}Ogv=hXi= zdR|?x?(G~^|97eX+tvRVJQWY4{29YJOkfJglbMk>2B45jmPo%TV#cUgS#nI$j18c%mE19Q-=-tR&t{czH&@!G7{<8^0Vk3X06 z+xR?oH5txG&D6KZQO2bICf=O%a=c~1uj8#rzmC_Z|1RE8^}G0yq*vmNnZJp5CcPBz zTJ_8L;PbD>OVF0|t9X0vWc=u|$#_Tp|BPqxZ$1`aA+oUuIarLD^MCJ2Hm+!V|E0^q z5_U_m49iiJE&b+5zvK$$Jgh|2H<r%Y|*F^*`mW#TzPLiyx_eE#62rwZ9f`PP`UxX?QK(id^@sLm`S$iZYa= z0zKlccfsrNKC(aO_4ojpU>+oghF_1j@uM9_i(iX(pc7qXuf@ApzaBr9`FgyPA7i(? z8m}V9AAL1mP1c|m4QNI?y3mW6`d7NzfXUg@@s8Zr;tBT0G5C=78QERv8S7q)pQQIM zcrD(Cne};1(q!6e@xc`LWV{w1z@f_53=i+{2uU;HD*J*h}TIx;W^nV5&3rSdz!`tm%79N=f7;9?su8f$J-_O}l! zf5?teuJE}{>bA8%!j*4{Rg z(HWj^J^v=!|ChsEEm?=f^qKvC*Xe`L*ACB8z9?hf9AA*nmT~1GUiyJ@oDeTt9B)rjiOHDo@bJ%D}kNWc73z`PF8-oK$d?VJMb9C{(UVw9o`n-AudBTI$W$8ymPYIW(|qMtkJG-nSyhzV=$YmmGgt zx+Ra}1WsZYBRGwYillIcytKb;ls<;D=<}`nrH6qv(#N#&`MkVvPXB+t`Y%^Kftm5| zO7&m0I+l#i39WVSIZR**=WzkkNcsoqPhY<`Mc-Oo-cJtb^C!re`S%?C|MTi5b?Q;F zW0m>$Li6v&3c3-k{~Nz1C8Y3Y?B(Q;O1{~?R%fR%r{mK4gADqlJ&tq8&BkLlX%8Rr zu9?j9kcIiEQ>WHq0ljgZu;uF~)3Xts8QgNg8iOf&4YXJ4D}*0MFth)!SNLm%zghUn zMefPLVl2T@EW>i-qGy%-Cr)SD+2VNSTtDVP_CxC8wo&2d&r$w$kTdfi4cbRl7w12s zGm%#CCl4#J3f)=SPz$Xspht5becY^}=VNG%vHVpR=ROA3S!=-Vkg%>J3sJ{?F&Ujf zJhT45wP+4xP?{`b-!f0QeDkIp;mj8H6Y79=ZGZ9zqP~9<*?L|+N4a|{P>Cv3qXxBT zK-Bl|IWBLVk;llW??1pU>iZ8)$(!<7GzJizy&vt-i}t5SV*tkxjR6!bwFeIyqgnw~U&cZNQSF^n5GIUDI)lfpT20#mqvX%wwdKNqNv z3XLCF$GGWb;{lJRg`}Toryvz+NJj?dAQSVDh51;3g~-MtnHU8 z^r4REr#6yJQQw-JIVX=iFN_<^>ldHBT0wAiSjCTgY}#W!&Ysbk^Z%CUXD`#wCI^t9 z50XRctPjZ3|Gl99t3TUOtN)+S|0Sb+_Qxu(3I+U{6vlOAA&OCoGL&Ob{8x~bs6sWy zt)r+R$DBEF=&Zgny#dW=M_puh-gD^Do`NI#HK@6cSUHEYn9q63$Ug*X#ba=Nj{Fzz* z+haWnJ-QZ+k&My%PxBjR>E|$%&2J=5cortu9jey9B+sL+%ejB#G-}0lG{Tx!YJ(4qcDjxKv_`ICkDSb*+1+-O@Yr1yKTK4jAu zA^NBGpz8_OqknDZu#3*XTTCuR^zYkc>1k*+ z573gQ{%6i-G#|FrsF8)#;H#AL-LvV`r~5fVI}>)K~+EqJ+GZD*etep3A=q41UUb z$m{Iq-{=49U&;)pm8DhetD|cJH-#Fq77ggQpd2CF(S=?_XFEh^ybiu#PF&pgt&`u_ zAID7no5PF&ZpOYcE1Y0Ie!2ZFQm=BpT-&ZnR7#9hBNQeoPBS8zXh#mOP_0f+1zl{_-V(3gp6zqz63 z#N5!!zVEcSXs|w@*ZKeq(uZo?V~>8i=VV|GGSNN4k6L@s>HX2Y=bgV#pN}EW8C=AV z9DZ@PfZal5V-a#N_Q>3@m>loCDJ&tEVi_hwW>`+SWDEsEHl3az=Sc5wA>a|(c zCD0qqvmYriPF$}K$lO|MZ9uQN^MpBc?uwR)yIko99VhLP<)%x#cGC;#n+|FxB})V9 zg#KpVyvw&2_k(0~7DIiN{Es7P@_&kNlIh!!Gy6}orA>a6q73DzKqabBjh-y|e*ym& z@gD>8`H#U->HoC!zfAt;<|sEEWM_`FNp{bZ|2?aQAGK&e^be`#$Xq2~)7$A?=*1Cv zy;0t6I?v7Y#lAr+bHcUbIDx4Bdy*W+2u@>Wyiodz*2kP-9>o|ocSh%bC(n%konxNB z6eeG?4xYS#X^g*W4g60prvDT&6=_IE20B)n+b1(I4_Uah|7Siu>ht$u0evBc#*_`p zgiG_^ZN0*Yqp9KtopZ!biul1Xan(c4{9kR2_+2G_SBhU;+Fx1vnOPy5zl)HAEct0M zxdcm5ztH%_BIVvm@rR}};twtKRzzp^j1}D+mT@~i;9msJSXdtE(#_yC$zcUO50kT{ zQ*sqnV@+iDwVSPfv;QxugBL5yl$rVLFYW&+pf~1uHfH|+bH6eBF==aB8bcH6OZ5TE z^?%96D)pbbEIJprC83|#Zham<)}au^C`B2{QGp(HWN)+juh#r`s``D7`W-Xp|D@>u zkEs7}6dm~Y{|AiD1gzvw6{=B#?o|EXBz<^#e+IwNKySv-I(2KlIyOaJi*|Nn2a`h= zIsO^#ku-6UExqNa7uY3m93_wFGk!^)cv;(RyKzExlY5;1Up_08T&`{n`fXdSX<%Ph zru|SnD-6>cE42+ULO+dGV+So$+7G4L4>-fF6jV+TPpvrkUrHr1M8$u3`)l{ z|NlGj|1$o^T?RkqAQOkIEt^Nyl6AxBnGOVa(JW{$DT$lo+&D=AGFk8#y6s5;V6=bykFOQ7Q|6WP1!fLER z^zYAnvHHTH;PYHbldKqTUFI%qMUZE{lq5VHUEtIpbKqabBmuDZr zinLHoZ(M3_xKMwa-hkFBXTMcA1CDH3p7w9s{|l_8=BA76K51V7eWv|?xiXl!|Ag^@ z59?Ek6O{$)-u50XQPH`@Q|yrXBePt<#z6WYI{+P~Tc z$6Sy4j|=&ejYY^ocd7JKsZULh*4#yB-!Gvr#mxWj<{N`UvMWKrQp9UORr2UU3dNW$8wf`$aSmmD8Sc7~N zU_*mEBi@S8QziV(!e6WXzee~AguhVu$sy)8;cY)J{OC9#{KDQP+}&NmuRk#Ms5ZEM zb=1xtzh6I_|AiR5E;$sFB~7tVMs9x8`26gYP)@HvbXL#gNA$m^tUY3`LN#g-{e!n_ z%orp+IuovMwQo!>IhY)V7Rl!;l+jUHtsT)E*>6n_?U4-kx=%hp0>^Pg`is(FeXo4b zF6~VlzZsMEj1M)Re`8#2m2oCxJngl{m5g^CW9}ro3XC1)MdMOwp@;0vjmD+YjQgiq 
z`1U|Y_g9yGk&6{5%F!N)`j?p@CP0oz}J>;BddfgdqpAl!h(Hq6_kzwQf^mT~F?xQjMW-=P9C}h`>kRQlW zbgR?L$Z~Yi`*O7l^2|puSK`vRKy(Iu^dF4+eC<;lK_mXd|8JV^4^{4~Mh$AwfM&F# zXSMk?^rae~$3UjCAw{{6X8e!*H~+uk?#=W0=*N@`UHs@p0*CZ1j+3%po%|4_TOx1z3n|EW)I{3OQtPp>XGiu$UgLaal?(i>|9T z20!8Kh3!dUIrF$P1arw1$it=|i=S8QnXk?Xr-f??yOmgly3;wv|8l}=dgBFqv!`;x z8hSoj=V(`@KMB3?PB}+y_F# zst2r5cpx;AO@;hk?HRrHzoWuEm8e2BYEX*?G@~ci{`Wll-^u_t?^^yAI;KYa*o({E}2t1mlyg#+F6W7?h_XLG_i z{!CyB=g~c8kG#G67wFMHdHWW{!ZbbUMeko|><@``+K=hZm0)+MCMTqlX{f8s3F%}8 zYMZ4UZX1_5Q15C##o@c`}dv z*4|fgA+oUuIarJ(Sc>TUpJn8w^M97pbFl*dU(f%E&L7Nk|4OXFYOF!D_b;FP|9$?? zLl1^=ZHe*A?c@K_1EGMQ>rjYdl%fpfsKB7|D>@fsa^C}?ih1VzpVi`UrE-9*W_RiQ zpBnm`=l^^=+4&U7`bGNxQ`#L%d;`?i>VK-sX3oFqRj1IK(V`A&MI}FK(ST;OqYJ%A zpeNt_KW6^#H>&@CLjT{G`XD(pt^d!jc6HoQ{IBzG)xXQtzlhGiInJLGIEi6&&sYDd z<3{NH`Rd;_+8^{Y7}6IXR5vG9sDD>#f3SLR;wOdV95i2IP{-p%DiW8l%* z^!fZ9l>fTb!RzRS=vVjlA)4D8XgBuXpd28JU8`HIt!o^rME~_rx%MQxO}{ms67AQR zGFG5H+^jv^L`G#nbS79CH|3~6C8|)3adBNkjw$zQ$>?mL2C^CL=t3_NIF61fbNghL z?{<HPn5{F}fO&f@~6k@Rz6|A8~0$f*CHN{%0PHp1+TkVa2O2BNvoIbE^$)JZru^0_+ZX)}}4;sWU2~wexxGR-(36-+i@kVHNXg zwB!nxzEIN|;nc_PD0Ai-`+O8&9lDD>pM4>{ztZ!|t-Gd|V(5bB3rE5;2T;bY92Ka< z`0TV$MUGu>{DhornjLD$S~Q>;?dU=;>XY>A@aF#&=*?(BwAZ0%RNTCg{u10DN6G#2 zw{sm&(t9$+zxe6P694Dq{|iyO-gWvA+Lns{<>DV5=*$*2{&thc+;^xfEe!K#lezKe z{~sgtI{zf8Wq+F9SZVyH+!=xNQM6t#{$rm=bE)wkjIryemiIp*UpmTWj+CN7MjGxe# zqBUFDf@tkQ)CO9{ZaH$X0(n@8RhZP~8T?YR`2lBYGLK)Ue@Nz|0P9eQVw56U-&sa> zY4b(@cP^(_pc2s<^8QlkqgwjN=5LPlK{lkAgZ6KfM)l$m^f2W_SEB~C=v_Q3L~{T$`|FH%&g`!<_IYW4UG(q2=pVt+KXQ*Isn@kJI@`^C zH=6?|4^`Ovzt#N2ZSt@7UjskuxT$rmncjHPdrfOY(7P}*{*dq5I`27R?txuLmFJSj z5w$B$kS8(o{~&$+=v=~K<`K;NmvD)83j2gHfYa>GU=(9Gi*uO3WPM7QBFFAY3Fpai z|8%@SP9y0h{XiVb^#8pD7uyg~{G_t0JMI0oInwBjQ{MlaJ|4Z~Hg)=O@jB}L$GkuL z|Ly(*-=c-Tt>oaZjQzQHX8+?Y(k6W#vM?V#h2p?B>`U_v$N_PjNOj*L=MSJwd#JrY z+;i7KcCPRZmWX@b_ZV4UA%9d!qc!pennt9}QEBw_#rdfPp0N?Kx zdvBinsXyPpO8(TRPhb#3n3-QISO1l&|BUx_7RcX)@;7;mc_}}Zp+i1hPIk#VC0msB z^ltg%L{w(nc9Zm_jxA8PwtI%Ud59eBwf=8)EUe%*4=b?>b(!+t9A^&FXVx$0Skp_- zN2{_V%KOb^w0^l>9T=TKek39MWK*;7%NH&3M{AT%?z|}!xNjW_QH(Nd{(|)ZH)Vz% zd8)Tu`-|+Cw+85M&hM@>{&e{lrblLpN#Td4G28=)%w{VL;-vcf+OqK)tRd zaL7NzOUdInfs?2+Cth2d8HVYN8Row+LO+exuA4&(np4bw;|#k|jNvTaI{$W#c>+`D zQeOV$4cv8#;~#4SF@J-(-u!<<##`1GEVRDB+?tTnZ82`l}hS0{3c77g32Rc)&Ex@J!>&^J@KR1(~$PA()Y>u8W;F;g`j`PhA zWclA&g|n|RGQ+_%^HoJRhbOriAD}-kO`o<`U;P|~Y1|oE7b@1w58JW(LV9?5 zDm_$|-f8dJ0&4+`f$uVgHudhXvHbQ>Qh9qQ9lJAZ%6xCwyzafB$#eI5k0)~97oME| zHvc`dA8yXI;i;+X!nVw}&xGrM>pODa5q4HwA9m&65GpU+5T3qpQ`lYTe1@OJ!_!xq z|H}2QzSBPGbkl5U7CmKGgr=lqZ3*RF@nzwZb3#w)GxWN4h@U6OZ{zO}=SOkv`(s1& z4C(2aO7m&C?}`mnDDT>%@2$L3r}QXOdtK{eevTiV%G<7C#t_U4;%TbGtkz3Es4&{fF!l{>I;dxVCe}l5?xSMlDcye|Kw`iWN zzySAug`2LBcfXkw{+`~m=87;v{s^P^8OG7i-}mz89DRsBcKGJ7ka_$Q&Sl8@YB=Q# zwtseQ66c{CRrEWYDvE_&_7A*9-*KJv`iMP@!P*G-rlDu*if|42ta!SXESrC2cstqZ z83T8z>z`K+M0E+fQ}%K6oVX(NJ*<70>b=?h?Pa0pr}$^QllfoCEb@w3`m?wm7w|r` zvcHe)BL7Pym2+PtkLBMUQ`Hu=8;kf$vLkJNY&)4aCm)f91Bz^iF6y0gN9?c3{uS57 z-g&vQ+dDi+YLXWH3Asp7Xih#F)-3v4@(%J{+~kmdN`9DJNq)q&HCW0VC;yOK&wd4Y zn|J$DvOn#eu^-Y;UH5_5T;^y^*r0qC{X^4{okc~f_3sMIzvd@}jid8oC97wJQgi>C zu$kSK8f~iPr0|6K=qJl(g{{UP%4?ItAMk4%xqZyo`oyfT!;dC*3g9koDwkg-k0piO z`rUg*l0ubw_Zju?Ught;)ynd!>tg%4Ie>!&*TrgjFAL8uwHK#!LG186<@y5SknEr9 zQa{(E#~PMj7mNN+bZnk*3}nZK@6V2T{h}kf?oDXLw_N*MjNv7mbp2}d)W{3I)Wjr?Y95( z7`L70ok25UUucfD<8lAdBO9`v8#!!^iEB+{^NE!3Ec-(#DdC`J)GV^DATuRA$9$Nq z&2Zk0ci8Lx2gqFPA594d7ABh~kM{AWguh@{hQr?Z1Mdt)KM{t5^c}w46wYI(Z=T}& zr^0sJqTS3#<;A~n?I}EiS~N$vmSicrZS_T_kBd$v-yIunyDN6;7V>B0HTVKPbysY7 zYosrX4PUn~cIpR_{6Or~S8j<7fA>AHQ+LsCWk37=*zgw~jGemv!PxLUk<5t=fAjsZ 
zQxD%yUlcoa@B3rJ^~_ty-`*WNwcc|cB5xxvC%-)}Hhk;5W2c(%(7mx!d$MA~-@Y$) z>ZV&`!`IvzJ5}-__kYNB_5ah1c$o;+Vj{R*i^S$>;Km5C9QEd2;2V$p0=3tJwz==C!KXkocnL0}x_2#@Y z_8orsHFQxe`#OFfCBH1)XpZ>bvpxvDHQ|4DRqu^8*Uk=q#oTz}ow2y<&%6FE^1GyA zv7-6(JRC~8K6a4Z4gAf-fxOva|MJ<^hs+M|Vcxr9UhF8@iDT%;?J#6l^nL7H@b1_T z$uIlXe}Qc&v%^#Q%IQ_J!+Pc?m)sKjtoJX=eqZd*$S>k?_FpB}!cbSyW#XmSbBk6Y z$p7{cZ3xNX3ic(}$p6_0{PgsgaP`~x^(D33n!sqFY>yyKbJNM@^pHgoZknOv^5qh*idW@r-dR4o{ zyRVC`Ym*d`(KGs9@{i1t-xn)ea9`|+dH2Ph%(^eOb&)h!5(`f)yf3yb+co}IoVz-d zM$bDX4C13Q(qFJP=&bb@^S#$X?Ya=uLumoM3>s-4$ z=QpyeZ@4ek(0pGkYUedG_hg&TkKBG-|Gmq*+!+t2t~0*JuEY9{PU|~P1^tUu>pu|v zKfFg_*;C-T#wZ5nUmebHJ1FxF)+bFJ&ElbmvOAB2p z&uDBX`d{=EH|=w*jhSx@?!5eMolatfIsfe0=7Q5gA9~S)qQn)USX*F2MsnDA@(S%Q zfW1Dv}aO6RnFz%8Ew72?%(JBYWJU#uTI^fURZTSsL@t>mOM3} zUT9AiUh}_XkMV^1;^c7SyVP}6&Ir%bUi2Q#4as3HyVmT>tuMShv`;06@5lNzDD?-n_D?@MA<-w3}QNK3PKw4%jF)CdwyfO@-NLU-x@kiQ|LQJ`I zBTDC_hD}weVRL0_*fN$H%CfRz?`6LjpTOtwb(CX!^A(|DX^QnWVNZ4x{*>?xxi>8(?8~qgPTjYkJdi5<4XL4KT0MDQJ((o@>c(2`>d5EZS5G!D zA0Zp(s8`EUg+Dr<*}s6NQ$t%)YG^NXzH)(j+WnoYQ>~p;Pji3F{XJywSXQi$T|Wj! zvSNvx)G)X%)jrnLu<@>xQ1j@l@M~ciRkt3d*M4kPxHd_<5Kk&|ZXs{QmX9O{-Oi#r zp-EYE4|KVTevYHF&EDiJbOp1Xj(qFa7S$ByZiPfV^`=0 zN%!LWNo+wGo+?cZh3?%tCLffih8?|k#C9%zPi%YD9kGfNSB9sZ`?0&=J+WOU(wsYb zYwQ{3y=QKZ?IEky%?jJy`!CoxePuW(e1}mF(>z6Ouq3Rg2Lt#f_W4hJN56o%Hc z!q8SwDE$>me}(pMJ{CIj9t&N|9t+*{W9uH1haU^QxsNGx9t-{CfNP2D$Fw;f3quS1 z*IZrR$NesH8944o;EDb|MKoV8)Y(zRjZlC_~^@!C+jY;D+d;cH=Y;adG4 zb9Ky5tXS(m=wA<8xht>!dU&ecct_^f!}f|lidD>C8+H`04LgU|hFxdYhRSnm<%6|h z_sELao|?6xYSpJ>&rGcidzXDWwr^x@s9yQ?u)p|^Vh2vI)&D6BHRYd*rC+5 z;qbh*p>~1vSN7@Hb5);?)%Sin)=;q48uq^ozvp}X5&i`K4S$X&@D%EB1V`~#_>%?N zhd7GA#^2+6_(!C?SFn+Ts?}G7JaRSGARlEYM=v(K`t`8!rLTt*^kHoJ$~VGhoTi_} zD|i*Ry-&G_dvGrv!bh;VMJi_-O13dE#d5#dA1@Z{a)m5succ4;}ak{ihhm zKcVyC^`Yxi>kVqo3dN^cCiH*Ft@^m-=S$%3S}!$kYEN_pFKfze)PPNntO!kGY!te%B9>2lcIMx{R?U zqO&NH!eQpx)B3-oN#QxNe!?8ZdG+(O^|53Vb2IxE*IUUp?%TVJue!g3>}2lBHNKj6 zaejMde!Rf=Y@xAFa_0O4*9To6A`iS@eBjsk4St8$G3)M7bS38EL-;T@Vl%d3C-&k1 zzJnP5o!I5yEtR=f`Tp9*g zTU>7?+qiG|d8`y=b!aqRF9~JjP91W^!mxOAbdDrTc8c}<4`x5zWnfylX zWZs%3KQ53T$!$6EYl{4uCclzTGMBI~b$t`LnYn0*{OUdOC^r7OG-e}K>8^Y}9M;s9Flef$)Yn6*fKjBD`@+=%z!4&04Dz#rn1_%gnV zZ(tjCVmA)p5S~LLS}}wl;V1Yf{1TJ+HGYTJ5!Y6@8t=edybE{WeYhJRLXq;KxXbvT z@?#@e!d!aVy8BVFj9Fh)>@0;1FJ2ZlZb=R$Ke#cJ?z+L* zCdTpg1GhZy4B~K2c%t5Ub4}@CYq%|xcis`6>bX5^8<_9k6kiMD2mO1ihu`|234RZL z&AE7~)`;mNZupWlwd|_A)40ap{_0fd55-)@6#a+5O zD^r^Ij5f|g^!dK|m(&@L#KM#2VD>L^?Rsu6H^w0ztCsubby4Y$-$2svt=Lc;ixJ#R-upAOYbuW`!V`5+1I(}B)gBf2hW&m*?-2kHZ}~n z9=+3f`U}RDx46#jeseAdM%eYZCyJAW&Ru^+`hC^)MXqIW6ETmD!Rxuv!FwiyKAY~n zCU2E~f5tt1e-d}8{D|I#oAEpO8@)68F9`4C%j{F$c<;2!!dKXB_?r8hxZC9nMgxoE zFB>EIICtk}g#*LP_ZXl0Of2j>IVve(^Za+;hHJbxzXreLTCHoJ zV4s*ZqetIyd^U3hvvyTvj`HK=DelF^hVA4YWADkqe4RCv(n!^Ob6B5|Ctor49@&eJ zjvo2tHfzWpRYwcgvHQ6bwysy>^0+;lAMx(tjr=dZT5r1OuK6MM4|~2aN8fXBpES2n zdMnbls`noJ+Mts;ei%BC<1b3rBmB{MtUAkm6o>M}zA6Y;S>au+>! 
z4tsI8;ZyEcZf!_*&x_X1Mb8(9Gv%B9Vbv{RV zarz8B`gR+dtV>vOd)V-bw$HbP`M2WgF7s~uA6MR$a5H$XdCF@pzUvm(_U8rdl@Ok3 zSDv}P&3CU_>>CN^ffL?wYb@-z-d>;|$3y%*+F2*r;~4`Wd-%Wik$9*Q<_*fmEZ6Ts z5q4obdfqMgxO~g}d}Q~tSeSjAe9r&_sI>&?X&M&9}$t zV>gBU%RG0L`8Va>xH$g?`DT6avUGL-O*83XXn3yonHwH|%k?M^MQLRtyC~jg(oeQ` z=1){_ZHwG~MVu%@_ly7NTSW2y`t|DCIiA;+8Qyb)I-hut>}2jLF#cU=eGS>e+{-@N1K&>$FekF5clSr@@cvF5dZph7xnW%p6^tvVh4Akx|YLG*p_Nxb}# zSomY|Pf@Ww7CsxquoHddr^9Y?FAm_# z{Q4>m(KpC1&ykIo#qLU6jhFA$hI!$8v1{q$pZM$8JIEU`7w1o;ksf$aYPDQIc;>Mxpt)g3H3@HJ61gx&DKreey)J{!ESW z2bAL}C?bpA>plL8+~MDzJN@T#m-@TXr0~u#dS~oxHzX zT|fW2P?cqj0(K)sRNfKrrTGd7{ORvtw^`fzDo`Vf5(?Px;>jw1RuMkkJ;8x7is^~#_| z@`!e0^BQ9$%&nQmNKivRs9Zbb`m^RaYAdue**!d&$!9Qlu5bL*cX%u`TB%km#52> z9|!am>%Cu}btKPQ2RLjE-4CrHa&E$Z>i5hR^+T;POP+n&*za@XFY)`v!mh#}d*6T5 zPrFwB-mMSwW#-$N50H)Q=H45M{>*(l*v&FN^ua~0Gxwp<{U?R_hpzn@me3dBckX+4 zej89Uou+YPPM1({*%i5o-t)Q#b(!fV2|u^Ym6_7zK^|&3IP?W9>j1Aa`l7+_qunC*7CEfTRp1_mXns59MPhlIj zryKvn4(+9#8OHz8oO!Fvep;Emo7|%ee{=m)!DYt(tbfYZZ_CkdBM%Nc=lrzwPs-Lq z3RF1)diOaG`zpQsrv!`eS5l#PjtXN(@zu8GbV?bObR&KT`xj?NhE zDb)TU`xYx>3$2e{Wxb$xh|W+QTF&nbX-xWPls=E3Sz2vEYlD3b?asX@Fi*Gg%5do1 zmEqZC(&pHe;qV1%b5XQL;quU{q1;oE=4>nT>SUtVJiGfoC~gPI_A_arZ6wY5_q1@7 z>~g)6{W0cl@&WFCA1l#=zrt=jhaaN&Y?=ZfP5t4F5|p9@o9*M@wBUMqD=qBd=U(y| zR4+{n`^W?NY5EOmp~7B{?XK@6ca)ivBzIM%`6ohJc$)d=(#N|!XD7ag7jcy|dN)3T ze_;;f+b|a%44SdWk6OZXZ#VF#W;175_l=s`2Sg@3?HnDrjzG3Fr`doU5{()nlc zdHi?$3%B>Me;B`y`B;I~D8X9n!?*DrG~o=!(TAJmrE4%nzk~cO86y|r_waFi8lS_X zD8*JBMk9JKf}dalv%J%Fn2Y(i4~ww^-%=+0JeeOU=45g0WuAkNLy{{h!Cs7Wr~6)% z!R>AM5Plyk@ilD5GYI^+8aHDB?#G{9Zu}IQB}K2#Qf8&{@7`;}2gn8N@4`K})wOf# zpdXnt`nfvi+vI;$PuQbKgMjzSXzl<}CZ$aR7ge>)rpa>|dh41JB}@xB;h7i%Hy!@1Ozyir~H{ z`CCq3LB{bAyNB^HEW(X=jo-iF<}Uix_yzN6<_bLGp1aw-4gZt{QrjDL*03oqf-Yr4RI7A+1u4P~6`g3GG_YK+hk-NW4$LGwOZ8CpF{nk2SWpn&>PX@S~FVc zt!P{4yaF7hcc9a?E_Bn6p=Xiv5zt5P$AD`I4AO^CbV2yBVQEg-h>{gKp%k0wo3Uk` z?~fv|4ciy_{+Ox%mtE#RJNig@>VI-K^PU3rzk2r>axe2f_SLTMCl4?m z>{9$+v5+6`#%GldNZMSS7yKh;Q<+Z(X~@ z_gLn8kO!G-QhYDp`Vcu#Ek0|-r~8M<9_HRs@m(&y$<9&VgMGK_$H+G3_7lF>ub8py9Zg)=wc3>yIr40HBEN3eEHP$JgvXqzW$r5~$evtfo zd=t+r+g?yNeM4RJ_b>!q^gjF^K8vrT0zXo}8CEY!<@TM(#z*kS_;VCv7ycEN!4+MP z_uvCqg5Sp{@d#F<68j^wcl^v%#)$Ay(b1D?j1a9?@Z(DTLUw#Wb|5DtqBfQmwbu;b$G58i?+3=f<`*GYa;$F@*9dMLz zF5sP16ZdLUlzh|qeNufz9kb`F^G{z@ zm)=+RS5B$tYaht3lL{=ERB*xX71CeSyYh^}EC0&)_}|p8Kj}llZzBADp37y#r}hEb z@00{#7NSyYW zydizJlD_9*F=5{aJtiMWe-HX&Mjj>aHxpJ0X}pbi z^}`Ri{ywg`i))|7z6U?HQ}ln%(EnL}qY@_R|9~BmAbIje`oA|Sm2ua!U1tB2#r`LL z69KZ&XGdGg+09%Z&-O1P&7MUsv_LDg?Pd%e+MxqFH!b4($23z@-g@G@sfHlm;qpaCL=3>r4<~MwiH65(gILDfe?er&*RZx1KH63SJ%W;}D z9Z35O_U1z}*C(-OJbiT@1zW#gWOOvRjnOy~MEWS5UMJB&y5AiEjA z@`V@hPG>C-=61}cuikrls;}O6p1qcs{i&?wK?X6qSFvUU>c9gIr~wx^!F!JJFldDq zFl&kG!K@`Rzm4??WvzrVa}DLFh4N8G8JtBKd66=8ALVEm_kY0k__+?d`;j5a(k#kQ z7Q6*d;y#P(FTwp0uGvXhyq&QA1xqMrom`VnS^l0W>$s&;Ki`9o;a|9QaQ!D-_Yvjw z4gCKJUcvrVt{H}xxnDWg-o`y#@F9L*B5|orKd* z_#bfn3c|S>zyFQDF7B0K{Lo*HJA19-*}t0riF+ykkc2)NQnu3nhqNQ#< zr_Y^*%*LE^g#P~t`v1s$>QTWt`u`W0|3DUFF2TMO_cCNT{wune|KR#6WHsiREzEzE zGXFte+y%9}d4GTh>Y#ozeROEtGh0pgZQjKEM;FgB%rix{9i+@0p{$&s%pf~4cb%gw zU7$=M{g?yT2XPM}dohQ*82{n={_th`9(l48UL{}4;YITEA8-lJ<~H~i`ISxHJ%QbE zs3p%n=DqP>*l!^Z_dqRqJWjqJC!A!S%ToOP(L7h=S9vb6n${2#&{!2fp%x0En%!{6U{jz7e{8Sdix*Kz+8_dgP*9Y2S-$0F>Xhf7S@g!wu= zO_-nO+IjE+*F1y&$8i5M@+k6E{1g+Gqm>k2%=DGi5B>oPAYC7#_lMBWnrU zg>+(eAZxa9h9Gfh0WWcCh9>ll&=6R{*(avVE-gM4X?wk*d2v+g!Ni=54f5-&tRw8xG{y6m?nT$DQ^g5+d57Q1` zr}RD4|6SC7+&Sz

PfLe;M^3O6F7lq3ksEA1cmM|DkH~ zI=zhFnz_tDBF*pZeTaO5ym8N={?E8Xw~-%D!V}Oqlk-8LnXtUjg1!~n_A>ql?dUt8 z6SpqtM&ASeHH`nkf6ga7wOnED)psTHpWG*$c@1Y4@GY#oMtu;-HD}iItwaV(*(<(< zJ>$>=-Qa^Z=zw#tD_W*5@A z@fuZf&V2<`J6P}4z?{7O8kImfltCeO1-KQ@pihO&ZJ-U$+<1QA8rtw*qwm0)w1c-O zJ5I{l!F<%?tXbR4TD1+VS=+>1wwF0=>Pj+oDv5Vq+#DurAYq8LAdpO)o1h<#!`+nU z@4=Ih4#(j#%H+#%4drq*?1bl`1A5>x%HxM{BjxjM$f0a`{!BUN+J4@Z2PmUyrkui0 zp$cxGtlkS>r+nT3_kst`!UdR5nOz27A*==P0K5P-;D>tXg@40Cl*1pvPfU4)FH;`B z2|FoIPeMK{CT)K+lJfODq(dv1-*SlqbEa$pBnHScNJ5_sDY&H``I^$mzjR~y zf^5jiWsY$9*O(h*{t>?gsjLOF$Es-WS5zGHRh5wEW*t`9l{{12D$eoDE9p-VUUepO zb7#LM$8P54zy-CreE-06=4+}eL;d}5PJOQu5KVUIs;W2m>hT$6X-hy&iOS!lOo`DaRQFbUF z^WZO-=YPn%36|1tTMhTpr`Q6I!Y0@ZKcGMQ@fT?$!5R2B{2ex5#-12RhE?z**adsQ z1tZW01JDLNFdb~vk+==K|F-h{A#o$mACmU+{I~G_YheB5dg{`4=07%2*S7Kg!(Z0W zt;*)woK-ykwXFX{<{#ww^Bf9KFgJ`W#$0la=YN6oX%6JYGon|}-=3qA+=0Sey=X5#p8i)-|cE##dy(j#*;9+!NWjB9n>sjylL9) za;~|9v(Fi*a1bB3gRm3a%*}^Cr|+UqYG95YQc78WaEiHo;%(;mGcHWc@n>N+bNo4# zpJV?IYXk6`?`8bUM}BpYUxZT}VEoI@_*Wd`Ur>&|0>719Q&q_L7Xu|VyNJ&Y#=rJ5 z{?ii7)nb*w=3%J)0T-f+qCM8yWxF#Q4_+#=q7x{N4}IT#^nv%%_nko>m~#U1ArE~a=k65H_bkS*=>wM%MhRh+(`PQD z4_dj9J}~{}YV519cTCU+K0)92usQdizVD6)t=e4=T6KJb%~|3N7w7|@=WHI#jhM~Z zMCLa%S_#XXCF)!Dpw+hgLF*x&*>_+w`~ZGMIsSk;u=;YwRNz`z49g)F=EEcSORV^! z{(`@=Fax`*jh_^^t%J?*V|WSnLJcs4ZwmucH@4Go$CnsSgH5m%w!<^< zQ}`v6KqXB4@Bzz>Y=lVnt z>}6w5hkG{dklF0+cC&1o@pE$0GM<$%R zOo@zP*ujitBtr_MLK>t)24q4O{FlQrkxIUNfV;S{cCn%lN?D1>TgfDZVC z_XtCrwoBnvut6Gp2p>TdVXT35a1T5}otQ^>UxAyT4w_*Q{zX`qaZNTX!tV+0y#fEL zaN7Z+unGHTuy=zWuA*Kph8rQD_XN{}wxjSnxCFZ|!1MTf3BnN1T80F$otdRLWYX5@ zN`}Pc)0qdFt~9=>>0bKkO!B7gp3c0_bmo1gD`V4iW%?K^IKr60cJ=}8W6XeWbJ1Sb z-EE$wg8hsspfB6S{%5|^C9I(+Jg=g*!d!oD7VBVVsfB&itSR`TkGx{ol*?e?EH! zpcA_;=w8G87x>W!!2E`A2zt>+&IdeC`yXDT42)2QBEK8Hk$U2rsSx;A(Qa67_EFAM z&m8I%vTMsswO`A@2obrJeUgu2c>s+h(W~iZ-+F4GPwiuaJj6A9Bvq?#4X} znGG4!SeK2A?9)0$zm2wz*|(L1Odg_LkF;ZsM<(EIL&jnL=z)tmNcteK;u`o0e2n=z2` zGFQ?6TT8h^zE2*UfV>TS{~qW2hb-L0_YYaT@Bicbhg%hXs*yEe>H+&0oCi50EtBzY z`uy%n`v1r}%=H2K|InC9{~wx5>Hk9u`c`N=&-fR#qwj#uX`k}_|5V+KA@qQsetrOg z5Q5%@sWrxZgkgT8`^%Kc%b*aRf-K0L{&}sX?7a;Ia4%(R24!F!l=J+vAq@SPb17S& zNB<^%4xoPryAJpi{Q&GFycYNwcK?O>C*;+ggn0%t#|YY9#_k^MZi5``KEUnY`0vDx zpAxfug!yCmDR!44JG4i1b91ugA ze+V9U3;qfm-Du+oN83E|;>*xR`SDSX+9^*R$WF>&7qT0357JK=3?PG;L&#p-_j27Z z+)N(V6W_1|LJ#H)!)L|4Y z2+;0f&I0=cNaUND&b^<9t(4G4Dm~JnvJ)LDM_-Zq+5Q7$^+x*t0s8;oT-Y(S{v~&6jlO4n zyH*fh{VBfxv?UsmO&5qCd6Wz}!0|J-wnD!Dm81`uhS{90{CCQKHwCe$i|;?rCvqMp zenR-^#T?#4{_G=vkny=LD*awzE z?`ON&2ZeiP41NB?e2*b#f_BgezSpCCuMhIQhQcBCvD02Cr=3u8mhU-K;kWt-``%B@ zP$mAI(d>g?I72nd*cT6;HB;vv)vjgV{3^~pnq$tU<{Tux?+ws0gYyNkYo0knUTEJ( zdt@)?6Yi!x0^NkyLwKFrXQ+$tg7Z1|D2j8CAV4^MT-(oey@VBp#DDX9ql@@nlV=w4 zJY0a>0_Ng5P_z0M?5TT-GpoM+1x>tBW4-ZfJ7;g$t$*SF+@JW4mb|V1L6-lYw{;Qe zTX{m$PxDzRsIl5BKH;oG&apA(#2PF4dge;m(DOyi8MEm}bz1re;%h9;!_NFRQyzPw zirMEEV9#J8n}k044*jcNSc<~^E8xy}TZ@pHY{vL#3G@FuxW>;se<$zxcj&7VUJ+?( zUCX(24P)qS2+<^IhOV^_hua}CrrjGNkREowINakH6^o6VXw&eNO8d3v0y zmjMYevlY*|VR36_^WvMO)zUe5Dl zzP1AL*UwhohS{8vHJh~(j3;a%j++@z;QYM;XbQ|`Y@0rwZ#Lh$*=pVRsahIlvnFe{ zn!%MjTh2nx&@Y{>nz-4jv$M7Y>CT+3S_oku#804--(rCNi4^N2exIT1Ypiv^ZwFzu zduPk%n60)|pQ;Dh&3(GMW~&q8_RrEsxA5*?OCMk}>#HB94*M)cmZ zVKrxl)p|P2dWNuk3;Wep8CJ)}QPLS!mm{psaF}$4)xFWDz>ZP&0QwXf;=I>`K85M) z^wQ7hUr1kwc-SuVF&{F7vPsy2mO7Z4esi*pswk*#|@9N|H zw>&Geuuoab+m$_3W#ydiQ?9Rz{(qj8f415xIMSy={1;93vHma5D%qZAmF~;4$^w1l z0q4J6VEm(yGvmI*{QnuoKdxl_gY)5>d%hqShjZ3Of8n$7kGECFILrU5 zTPlPi^uzbyv5%4x9A7IrHaX;s@%o+&nm`$CQ1L{3C2H;GyZX0t`o;qJ9J#` zgU8wbd0cfDj?>mX&U(b-{07`{#z2p&89!e9wm|FNVWqp_W{x0?XU8SQ> zgHL(CwMBCt_KDvq3G9#v2@}6l{E^=&hWE(La2wnMTcCpL%XhuY``}&u81wT`yyaci 
zg1$@N?OhdYe3!o4yUN?~u6}`ADl|h&E`9k@`t))1^;4zBO8WYY1=M4ghr4I@Ovc11 z8*^vMwY|i0&f&~FsDV(JJ;V43@6WKRk^Q_o`{uGwm_}Id<1{XK|GtL(o0rng;1%{1 zMgAFh0eDT=>}*>66*Box%y}DL}=RABxxz}^%=R@=b9%X)(^=*|uxDx$KoS*T}pyIFWW<6<;@z0>_3%eQr z3@UkTP$@HmjDH4Mvk|0h1lj)+ROY@Q;~zm~Zx1TR;a6^Skn%sKd}P7GpbGbQs|Z&t8|2zn=QA>XY{D7E5w@ZVyna+ zQt`rKtLRjSb36M~5DqCnGo(CZZXl$bb0K9f4=Jm#+{)b1!``?h(2CO?ZnGh8B$weNUhknoCwK#p@(-vNKMc<+M|ZUA=N(~P#yR2Tqw5O zXNs*_q-$eH&J7_s_JvflzRa?f)+#PK#fopJRl@vwaomZ`9+O1Sa ztG|E;C}TdR_dwT%DP zs%&em%D2?2BBoZAYipVNsa17!t!lP;BVs+!MXS81a`H5D5 z_ywa{6*`q-^=`6T;oY^21JfB+qp#CB{wL zs6lB+hLvp`6=Tc9K5M31*)!bCeHnYV^5(nQ|K?`?uTJ;lr)c7^idVTA>u}TVZBQAq ze5ir-FNgI2?o|zTs~WqSjc(SHyX7PtSBzV=Nca3S%d^$3y2@0mevO;-r&^8NuPM;L z_@CYK62F#BEv$b_wc7S2TE6vee*f6bn1@@P`%|s1!>Lv`vWIy2x79KK=N7LdTWHd) zUI_1RP#^M9>}<WXnC*!&(`S>Y_cW_oZ14jx?(g*|g82W}dq@ z>`{xmQLR1?_1`1k*+%x(dDL;nqt5dlb)EI7oBQ;f^2qP-DB$%l{_jz!%ftHDT7?Te z%sIWKeq?c=PDNdHD$K0o*^!3%=~mukGi{nW<(x{ks+ZTPYE6?k{>N6~f2tYFm?0=;t-lsr z*Xb4X9>R6w_ZR3n-o@rTNL-L9(3)DOmhIz=kMgahJTx`bsnK0WdzSXJz_ol!d=+dyR57XIQ!G zGpsyher1gc4m)Yv*D(Io%(!ihN_N+%baS)HkmZ|d*#BFjO6;pP)-e86!~9Q;90zOU zoT!oONDb>hopPV6k!SvIRfnuUT%(4VW;G(4W;8Pv_*;1ko7J+zX0>A1HnWEDj~cbF zuVMXjjXIHCTeGe1`8BkEGA#ez8U=6<`oI~*e zhZ4FR0D z_c&BA*TMP64i(LGux87lk~o)2k!7ckFt_SZ#WaU1C%v3cz}f$`ltk}VWCrv_-ne-sb)u(<@IG*Eyz~l)yDJo9dxq(sfFK?bMpRk z@~v#4{p)0HsZ;(@rvm7M6HeCuI_dj36%ISqN4)yo#QDlr<+)wTU6yU-6uLM=B-<*Q ztXHAWrGirlR(YvQW!RT`T`I|qx2h*yv=Otd%C*^61v24utK!$X6nCPP@41UMlS@hV z1k1j+Rf$LFGn{a-c7!r?+NBiklf66J%Ib0{ld_kAeFygKGmgrKY~wlhtZ7v@_FZ#Y z`QE!|v$z!8lVAl>Tjh^F%9=$NC^KhW@?huY`(2A{ znsBM{h>N*1m+FUHlo=QAPM5r-uQ}|JpE?paMg2JIWlgs84fuS$1kSY=x>tnytMR>cmlDxqp_i>e`KAMck9=KbQO@8DIz znids8(fk&kRf|&g_gblI0#@4QE0qr0_(m7-UdvkURrWIKH}9>uJy$BSzHjf9N<80d z*=Gc-r1L>5d1k=ch5r)#zlf|r(j~Xu$M|D9lHp2QAM$m^Cf`I}hWr9;@?zvykqqzI zzJW~o5p(7?#*Xi2fg5%s@IClGJPtXGvrH1!l}i{Kjb{G#2Kqj)u(weBHt_X~voEI~ zwVF$>V;uP=`Y^GqlfeE*H}Y=Bt{J=kK`vdzI1~PY`2RcZFXHcV+`bK0v(D}&PY#IHZ51aAuyRLzP*CN;Duc^~gFq=T6ZWscfrwl}knTRu#@nunO>(ACq9^ooi*> z$7bbtTa|sGRapmHl{wnVxKJx+Zni4To^7S>rM@3IuH;eP2eduy7hFn2Cd}u3aqhU{ zkT&W>KMM@{8osQsN|V!>TAvWbOwjZw{fGpCebVyv}Q&NB&? 
z<3fT}gS5@`TXAdWDxNXZgbQIS@m#&^SN2&+(Q}o2mNp8LqN$L^IBNRgu$2+xw=&oE zSy{fh%7z@qUUPk6E05=xe_}4*KgNBT>@3>sw~D=Ct7KcBRhk>N%69mza%6>LI`h1j ztIF4;>eR~_(_`Fs-{o>5U4?yCEz-T?a(OD}GS)X&_2Id+f9G;OKjXmLn$%23-0SVL zTHJkBYw29IWd>d`mL0P%asagbEhkPqSwlp)1plG z<*a>U9R&6{8v|BuSFe?~vERxM^jZa*16E-}uT_-FIJkE%-@itc=FU~wj_E3AlC6U4 zDtGl;RhX;e=BkFga8#21!pj-|x?HuX)8*dU$oVH`th3*$5BFOQahI#{l;3K?Z!>kdqujQZ7punkqD|qB`h3Mz>?!H`M z=o_7`{%O6IZCa4u|LC>ik8$?H7GxDg$g#a?!$Wp&!2T58+xsh-OPWKUcs2qJkI}}$GVSs zwEyR+ig2ra4U9{0{!3-AtY#TT?%nGkPduEn>i-)dh@`{!V<)p=%~y4Ivw-CO6W2mFW4dQ#GVI%tJX^;*3w zs0%|M>r?t?@*Jjxt?ILVR>icCRY`d$+srymCg4k=n^nqWd=YCViVyc$1&%9J2zfCf zD}Qa+%9%;Jsjpf4uTVB*?BRFOXsf4n(N5kHwo-O8Z$vpw+7YsnAHPD0>zieV$T?S~ zl)KG!j5}UI-EUSu^foXrLtP2d9|>_EKY1D;AG^;4tsdgh*%h|BQm%w~amGk6W$R3#)^Hg&_ zWI3kwS@Dj=dYJd<4h>kd%3REt#}XCeHxv6j z?2_>FF7CNqi`9+3_`+hwz!$3z{Z&xDcZq(3EX`f4z=y1t+P_?X#ay}Ux5Q^L=WuaO z6yFC&)M8zVtczK!TE`N78GU2w64vQ54;EOWhEq#e1F%@F-X&_r-V5#6eHVXire;tgoy!+9&$n2?b4zIVFVXk$6PQb!)-Tu3(f4jyt|X*wPPF0*7tuagqy)zz z%73)%kc2)tu!#PDH0|GL72!91S~UHrXxjhL%CawF|7WyvwnZy9yhwTY$v?kH1@1*E zJh(_jLyH*yTBMT0izxq#IR7tN<-4O*ML16QJNf0}yWEZ5apfX8mo4JmyoleMWR9mH zS|0RuYZlS}jb{EYS|8)TDK45Z(r9_1WyT`4x}&N8(ekZbq;~vt>|LbJX!d_@TEsf2 zMe5nJNdC=>DF2HX|Ba@77p((?{}vnri?)C*aHHZNew6*MkT`Lp?2v>$8B)%({}s}( z`zzP{4}1!rld?EjBZ)sYxgkH#oZG8dl|03G|Wwif`X#W@S{x75bUqt)Ai1vRG?f)X$|3$R_ zi)jBB(f%)@-WAdQFVYa_t~P91s>X#&SyQr9&ESO=_X5Tjm+GtB!_l=s&eR23guZs> zQhft?Gt`wX(6^9*zyjTc3~gSjXOPdq_u&W7wtgvNbqkoEU#f1*o%RL%ZrTEMpIV@v zBMX$0yOegxQl&YTDjhN)6SATg(7#?Pn|G<=RxMEcO!m}HTcBSOhCOvDjjKeE>PKtrSz|ss$$;_s*F3TsvS2lzH$TOb4yv1vp{*=BY*1x)~7Du_h}ZWX!`=@ z;ubLBxt96wwTj=kP6?2>b)D>xggzNk_O4Sr_e_GHLl&fg6aGz^%!yjZ{+D&CzvoKz zVfPvwfOlaU?Odmr!+VNzzJ9_!&u1kMW8QTy(H3EExce8gH0{rnC76TXxo{=Sgea(^ z-Zw!s`sFYSe;bjTU@KgXc^h&&+(}!r6~BAY7eX$4757TyHOK~}8@v#P{m_Ab+pgOb zxA!*1J8q-?bN&a|y(3EEyoO{qZFJ6aNTqF*Rye}=2lL*{OJ^dp&W}+4nfHF2_Rn_K zrEOy#d=K;9%rO^omPq8hcIKK(@LxKEdGKk>gVWcqKvu?`qW{l4IBk@gnY4en#<~65 za?y6GT}Aun%n^AWA5qGc1Z z4=29%g)@|dKDqR5=7eV`bvx-N4(ar)0{~8NK`pq!19eaj4bTYx`Tg!0to@%u z+jrZq)yBPi&_0v>@6b7)w(`U&b+iAV2S5IEr&#mF{&xt$|9k&?wR@eapt5kCDxiGJ z8!C%ir_$7Q^!L}%hWV$8yz6MwtW&|{2dsZTt-LPE{HArv+3>y&Q;sI!1WZB?WoilK zErgs-nfwCGh7bAf-GKcEtLU?A`$Taw*HPft>D##V<99P|i}4fMzD_|1>{-Y7-a7Sc zdtcp-k9eoAQ|In=>e#VP?c3JLhir5IjXC^vYDxV_-pPNe+5Qo~5w(u-|9`4s*9ZJ| z%sR%9*2zPCc5hzC*wQ+=)~}QE;P2(wvW~si>vTW&e+UMM!%vW}z%L*X3gC6@hhYWr ztH9g_oiGURz?<+}_#J!%|Ag7}LB9k`;8EVMw;*rl`jyCQ;X7~-{1EQO{wd_6@Bkd* zUe6+5f?vQ3FpgaT@_zjO8fnGwenGy6{c$)7jkG6P;B&bB6}J%ie8)F=2VlMdz5(00 z{tx)Q9sSMl1lUqp2MY0}tO0;T^ma&M&MX;HP7EqFlXazb=FTcvXNt_koQ2GuHmn@x z(Q+XV@}U3cAQ3k^BtbIxPZK`~o+p0Li#`l} z^NAm{Z5@`c;SlXS(nmTwkzHpv-vZe)!TVv)F*Teef1qjYu$po6k_Rp1Q7f{BYaBzo zUk>tqnK7(du5p8Bl>7x7<@0&U#ZJn?z3?Ku46nkkARS)9Jsl8UFd_b8vWhy31Qubyoh`f z`82!;zk*A?$vu(%`0qdtBA3A-^rOfz7>Cb)lk~v}^zXp$;RCpgaj1E44P5n2?hj|N z|1a`=g|Wx{FU+5Sc>lSuTZ~;RvIAydcPYG%ekt~sp??qkAK*I7U&4G5{f)+rI{$gN z3cim0jmQ<)--^5g9)#7Huf=US+=|{d=RL)l`F_?#CLj}8Cu!e9{ohCZr(UMSyvO** zdrDhR{f7+5gsctJ|5GE%+4`Pxhp7JtssGbnWBi-?4@D<_t^e%*+;&1`XQ}^CahUoK zk#p*gQ2&=x|5s7}*D_C!ti|l!!n%3lW7f^r@8ZmoJ*=xIK26AG%wFtUaBoGn;orB4 z^`Bhdf$YTGb(s1AJ>aKa1R!|sghIR{dZ{1bBh;@G)GuTe;~~{h0hLe&lJC5o?>;iOkaoZc+5x9%7a%i_&`#J-JE7}cr5vW+07;XyBOsBsjUD2n zhG@sKP=R(&T+FX&hY%0wg)jsgcn^So_lN?>?o84_J?=zy*?G@-c@JQ2+w`t{Zt6K{ zY=vg(xOYGA0n*e&+UlcOFNpNewyKNz>~|HBPRuT3&3xKjbKi~B|5(bxPE$6JFC!D+ z+vwLrHTt`db;yVR%s4G&rXT$P+>L%1xsdPvefW6*9>dR%;6v;$VVw0o{9JW2bpWo0 zZ_szT2DuoP!i}(n>sBD|hWlVOd=H+4JKzy`44!}=;P)wb0=`Gszrrp7(%}{CvXMKG z590O+Jcs^8cnp0jZaol!VHkiz@H&jcQFsrY!0!jh58+AlpCI=U#w^C3FN3GCyBb-} 
z`yw9uWY~$_^Kduz8JMe)E(pOe6k+!#-cKya(KzNqa2(EH_W`mT_ZnD1p4|p_gB$aG z$d@4jlHpg755>@ky9@au_iaM9VtyI@AJF@FCZ8ZXvFn3ZvHuk;UPHPuC!ile+UE29 zhxldGe@MLWPuZzUNz|oe>Qc&;>?4Wd`;Sbg?q%$u{_msyBeOB*%p6cI_?7bKXMf7 zF@}o+)Lp;V-k2vo?^~m~+mtuJ!_JaFO|#3$QP& z{6s}8`Y*PBq7pB^Q_{unfP~rCgsi}=vVq@+q8uKi{5^(DrY!D6z6`%IW=O~GE!=z1 z4?s5Nd>BT56#HWIze7&KAE6xc=Q%TB7R-aI;UnycI%l}?`xy`fqQP%K^ZOal2U?&7 zZ^S_YB!V51AQ@5&Z`dNL@sn~e9yJZpA>R;j&5wkTA8{?fwjg3#5V0+Y*w$fN4;@-y z%m=y41Yt~M7$$O2GYt`47P_p6E*o8TM3;juC!#Zcb0^wR^R(a(@ufy7(?Tue|28es zVl7d$zM-Z3P}a3E%it!XM0Bx6x56kZjk3xp9I$51j;c55@?0{!rCp-(g;5m36UVz>3Q+N?xf<5pvco|-Sz3_9GOteMR_=uVi zQSA{mDWWDv)YOQY9#JzQYF0$ej;J{iH8-N>p}y}oVZQG-p}p@n*S;U9G-}XX`+l&% zs3Dh8d*dRix%QO3DWZCfn$~I5Oyhbg)A&D?lWEjke*5g&Q{{Yuu02&zZB(xZ^^e7F zqgJFuRCC8aR@57{rqQTvCbSRYO!yz928>#6?*2h-pHbZ=ZXdYKegBkf{QoJr!>H{J zqjs1aIGt=__~$m0t`Fl&jGA6+)JhYd59`c*KWsDc{7ZbZQ414|T2lL$n$I+6$|IU8 zlUILnnXCTNW@7l4fQj3g1QYU^43j@+3JHIizNzb21G-!{=mztDgKpGKy6Jk|bc1gC zeO%%S-Sk0xKrxEBUNJW)<|h7)ASaAkk%AiWfDhvy6LE-%IK)I8VigmMLu|w$HsTN) zaaf_46*#PjIIM_RtyIiPtX4*>Rz|E=DTWE{m{k$0RS~P36>~FIH%F{)j##Z$%xbJw zN32#ytZq@vEm++WvAQK(uv!zbS`)D%iHbon2}2OsNTQ8OWKg+3Dz`-CE~o^I zN?4I#A_+1<#VYoCkS1C4<_}H5j8%c8PYQ;tp8heXkBgukE^{w8{nyR6WP z>qUOAxQTz`v?7N8h$dDma9ybtt3*3-#cI(?TCqke2nNeYU`E9<;@t!piRntMyk0yn z23}UiXl1NcuFy&>S83(VTDe*)Z_&y%S{Vr-lA@L7(nzXSY1Q@OxiVb83iq2M>Me>S zbd~WHdD1uQ=Ih0yjwJhLV;y*NX=q$(UP=b>!Kv z(VFYU!;fUs8e<)K{@>EKzO`1jvQlEL)|&sdTBqA|+d|!To4%vf1gsgOSkLx z-MT|}+^IXBzd?6kxASS;sXOo0oj=f>Ki8c@Vd&Ea-KD$MKdQTR_wBm-o}cUP2e#?% zy}C#DJfM3X);&)P%Z=J-{^4G2+@g&SYvZH3SNG|@+jSr6Cf%?5@74Ve-KqPZ(fzOW zXp=VGu1$An)1BHx0GkMP)AzOMVQqRuo5mXSU48dnefJrC_j!F!59on=^uPm~;9))R zeLe8d264e=eP7>yQ4i|DJN4iuJ&5LKdPom#xL*$u=0lsbMGx!Yjr>PIiKP7zJ#vR0 z`GFq!u^!c<8~NX;t@?o;(+~By{!35jN7|-sH|)AW+irXkUevY&efqI}yzQrYQcvFa zJnYev+n&{v7@yLUyZDcV6?#ff-L9v$>8WS+)Gq#G0o$MJDKc}rwm+=xKYd#Nt^YRv zJK$Lvfx1gi>uG}giGD)bezHwFw8Q+vGurW-cKl2`UePn!sh#V!ll1Mx+s=oz^AYVd zQQ`URMBT-IEUeJZm$Z|dd{)msqi1*N+2{1^^H1y9SM)6Tzf1VS)2{W}^{{q5qFq1M zuBWwY$FthSKhDP8cz7P;i`qrbeWu3hIX$;t&)ucx9@cY@=(%U~+%7%$qMq0D59}V^jfoCYr6yP5@{N+X&_Dm@ft|bK%xfh8c5PW zvIbH#kg9<+4Ww%zLj#!_$kITz268o!r-6J86lkDO14SAr)j*jB$~91-fl3Y3XuzQX zrv_XasMUa519cjx*Fb{?8a2?Qfo2VOHPE7gRt>aiz^8$B4RmOrQv+QZ=+;0$13?Xh zG|;Poum<`yXwzVv2IDoDput29+BKM@!DJ1lXfRcSX&Ow|V1@=WHJGKrYz^jVFkgcO z8Z6Xckp_!3SfasF4VG!JT!R%FtkhtY2CFq#qd|uTof>p$uvUX^4SF zgH0N2)}U8|EgEdqV4DVg8tl|ymj=5v*rUOK27?+5X|Pv=VX=sRC_zJs8nSCBNkhpR zO3_fdhB7pisi7XvnXj zfY^yJ6w*+ycu7&Mev_r&wZ&`6?2>>5eZNU}y!G?J>3 zG>xQdBts*a8p+Z~wnlO^lBjKqG}3Dbh%>MoKhNs*y5{lxw6)Bh?ylYQ&?F zI*rt8q(LK%8fnre8rNqaLyTf3!iPjT&vzs8=il8g0|4PowP`?a*kaM!PiX*JwbaAsvX*fp{HA z(1BDPNYjCI9mvpuOdZJ4fm|KP(}6-AsMLXK9SG<^?{lJ)Ll1`z5=y?tk~Eg0u~dzv zX)HrySsKgMSdPYWHI}C_JQQfGP-8_JE7n+v#!59-rm=F3RcNeIV^tcf)>w_k92#?J ztX5-gjd?Uyr?GnR(i>~kSd+$@HRjb=i^f_t)}}F^#@aR3p|MVlb!n_yV?7%4Yb>C# zpvFQP>*aqv+yxKABk(jl3%lS&jfFMVr?Gw=vguHq4#n$Gf(|9>kX?t8bSPPeQgkR) zhthN?U57GsC{u^Bbf`jyD)pvKZ^r4(1ihK8H#78Rrrylbo27a)pf|%hoTkGWI-H}! 
zxjLM$!^JvWp~ICrTqSno9PZJ0oW|odo}=+xjpu1RU*km@FV=XO#>+Kcq47$MS82Rj z<4%pcG+wK5x5hmhuhV#=#+x+WtZ}c#TQuIP@ivY7G~TZ94vlweyi4PLjR$nZ{dT;L zbRKaVrO7B>qjc*?AIi}j{@?B~ir*-`C`T)FG?#y4$bGwCNAvhMhJ5~wEa2bB!nXrP zL04py5~Gy9-DwnbWkxCIKO#*y75p1RrH)n^Sz{E3QJgyJGO~6`!LP^Y>Wos)zmW}} zDM#ZX`lIoPh~DTL`8V$Eq~#rl-f`+3m)@z>J8r$>;s2dFqcj`EYm^qF_>9tyGGRAL zl2MY4l46upqoko6Hx9>*!*SzqJjW=7GC|f&B)n5^!l|1`z4Z%5&s?jV7Lr=6;P4#|eAnGK2{PVJ@7oN3KAa;7yboO_Um^Y_V(N-(1&Z zLTfUiHJQ+wjMpaPwaIvGGMD44*@TT^?2ItB6Nw9q63H_BN0NmS@i}#SOjAD1RYpf3 zku;)2jOLD{)`V(=@iC=GM=Dy(ty|1hk!*-$LL>`X^-dfAjGvg;``+o8NSR1P6R|j+ zVWK3Hqz?W~7$_zbgbBW5N*{?yhq({Rlzk-d4ijdFi5g1e>ZwFwnzAIZ9TEQ~!W58B z;|V2VG+}p|)JzHE!`!0N1lk#~oq9s(r%Z&_X+rBXmv`}RLO}UUL~Kn6T_%Js@;hLz zLt`$Ahy{^HK&f4TTjE3-nu!#mnE08;<9WtocHR7&$akBYp%@1vjFY)1 zin*@a+`2oWi}*0^RL>rBRgbYo`Aq+rDdNNU?J+t(|HdJ}zquVs#5fgM%u}XFe{*fn_(l0&fnQ@XK8)WWx(OR~7bPpAOynAo9Z{zAIYw`W;SdoePZM$c z6FiF7C@m3%drZWArkHT!xC08hc>ayg_=!Yv7lBWdDJv5IC=t(}DG|?8ku@*ANVzf(M#kN4?qn)~QH)O`2m>+Y9Jyg6-6&K3Oxec+;+@th zjR|Y&DpRA4e#(ofZc~XSoe<$CPkRi3YqiO=(-Fgb$leo%A?PdI-bgoD!`OQJ1chbs0KYUv#n_$NJ(a zQ86W&rbPRc@JB?m<76{Fo1GEiP0>j&VR#9nr9dZJ(*ScTW*=@HHb@4e$E^dmE@YRH z_~|Kw3aErCoeX52^tbC|u<2y5bV@jMGL(5Th-EL=gl#ALx)AXv`+5)=QzCat6itcp zDd9UA=Atly#3TBMN?%Uo^1i%?m%fTA;g}MhDbX|~+W&J+`_!c!|FP&W7LzuQCKD>e zs5itxJS0FO*dYm$Aq7$)4bmY4G9e4HAqR3H4+@|V%D{wO2~|)74se1C>YxFdpc%Z- z0izdo8$u^wD{iMZj?74@ZIQhAcpJ(Xj zUVfgTpZocFhJNnp=NbBWhJMoQ=l*`;>?hrRo}-_8`nji{y!Vp^KhM=)4#dGv9Q@>s zpEUV-CVukB&olA!O#D0(Kk@VPO#D0(e?9O_{N$mZXW}Ox{p6#cXX59X_{mQ{`9j^9 z^pl@{^3zX#`Y8wg9`Hi|f-uDX^an_Pfb<7Qe}MD{NPmFz2S|T_ z^an_Pfb<7Qe}MD{NPmFzb4=G{fb<7Qe}MD{NIyr)Oa@4Qkn{&he~|PCNq>;^2T6aB z^an|Qkn{&he~|PCNq>;^2T6aB^an|Qkn{&he~|PCNq>;^2T6aB^an|Qko5CHn+%f9 zAn6Q}&LHUwlFlIM43f?u=?s$2An6Q}&LHUwlFlIM43f?u=?s$2AnD|FI2k0JLDCr{ zok7wWB%MLh86=%S(itS3LDCr{ok7wWB%LAB86uq_(itM1A<`Klogva0BAp@986uq_ z(itM1A<`Klogva0BAp@986uq_(itM1A<`KlouU6r)qP;Mk!4Yv#T?HfZluW zZPHgI-2{3PRib_84x%@dS$}u0m2Zg#48#U#fQb7cIUhJ5I3GA4+|Lj0=Lh%mgZufx z{rup5e&B!Lf8c-Mf8c-Mf8c-Mf8c-Mf8c-Mf8c-Mf8c-Mf8c-Mf8c-Mf8c-Mf8c-M zf8c-Mf8>1ReB^xOeB^xOeB^xOeB^xOeB|_J_0j$M=ze{4zdpKOAKkA{yidGOyidGOyidGOoKKujoKKujoKKuj zd{2B&d{2B&d{2C`hW|XdAD`t*o>`t*o>`t*o>`vVXU{y(?z3m6XZP7N*R$`*XSQdy zXSQdyXSQdyXSQdyXSQdyXSQdyXSQdyXSQdyXSQdyXZPJR-?RJfnemy?1C;+fb3VJ@ zo>`w+Ul?B)Ul?EbUie=4Uf5o^URYjOURYl2|BL;9vHvgj|Hb~l*w+{P`eI*S?B|R9 ze6gP|_VdMlzSz$f`}txYU+m9|{duuJFZSof{=C?q7yI(^*!k}tlh%wiYhC&8pMU;; zZ~gtw?SKEc_1XSEzuy`->il=em-{pSUw`KRJ7hCM%hrl@-uds(F>BnK==^th!1`|e zv_`B^%hwJ2y5U`G-#YQ%KmYvy-unBU^ZyRp=y3m^-*1h4>-=}bCP!>?#3n~Jt!?Yf za>rd;_y0Ps1$iJ+AcmCV`!}9fAU*G*<4O_O+J+*v&*VlJ_ea|*~TbAwicC2H| zKK5L$=W?@H^DeW{>-=}$cJ_xl|2BnKuqLf3 zYucLaum9_p7X{_1EY>Z@u-_{`c1T|33RZGV{;J-scwn_txs^KW{l{{lB+X z{`qWw>+{w^=hruT^Ua=ovlrj&!8d#G&F8)?TFcg|wQg-%+t!|SU>#Y1T9?+fbz|LG z_tw9yzpRH}-+;e8T2Gx{1H-=t+y(=#!N5&_-sabUEf2V@25fm?``6&quffM(gKn_? 
[... base85 binary-literal payload for the tokenizer .model blob elided: raw binary patch data, not reviewable as text ...]
literal 0
HcmV?d00001

diff --git a/records/track_10min_16mb/2026-04-27_SmearGateBOSFix_PR1787Base_LQERAsym_PhasedTTT/train_gpt.py b/records/track_10min_16mb/2026-04-27_SmearGateBOSFix_PR1787Base_LQERAsym_PhasedTTT/train_gpt.py
new file mode 100644
index 0000000000..a7f006cf7c
--- /dev/null
+++ b/records/track_10min_16mb/2026-04-27_SmearGateBOSFix_PR1787Base_LQERAsym_PhasedTTT/train_gpt.py
@@ -0,0 +1,3555 @@
+import base64, collections, copy, fcntl, glob, io, lzma, math, os
+from pathlib import Path
+import random, re, subprocess, sys, time, uuid, numpy as np, sentencepiece as spm, torch, torch.distributed as dist, torch.nn.functional
as F +from torch import Tensor, nn +from flash_attn_interface import ( + flash_attn_func as flash_attn_3_func, + flash_attn_varlen_func, +) +from concurrent.futures import ThreadPoolExecutor +import triton +import triton.language as tl +from triton.tools.tensor_descriptor import TensorDescriptor + + +# ===== Fused softcapped cross-entropy (Triton) — training-only path ===== +# Replaces the eager +# logits_softcap = softcap * tanh(logits / softcap) +# F.cross_entropy(logits_softcap.float(), targets, reduction="mean") +# sequence with a single fused kernel that reads logits_proj once, applies +# softcap in-register, and computes (LSE, loss) in one streaming pass. The +# backward kernel mirrors the forward so there's no stored softcapped logits. +# Numerically identical to the eager path up to fp32 accumulation differences. +_FUSED_CE_LIBRARY = "pgsubmission1draft7fusedce" +_FUSED_CE_BLOCK_SIZE = 1024 +_FUSED_CE_NUM_WARPS = 4 + + +@triton.jit +def _softcapped_ce_fwd_kernel( + logits_ptr, losses_ptr, lse_ptr, targets_ptr, + stride_logits_n, stride_logits_v, + n_rows, n_cols, softcap, + block_size: tl.constexpr, +): + row_idx = tl.program_id(0).to(tl.int64) + logits_row_ptr = logits_ptr + row_idx * stride_logits_n + max_val = -float("inf") + sum_exp = 0.0 + A = 2.0 * softcap + inv_C = 2.0 / softcap + for off in range(0, n_cols, block_size): + cols = off + tl.arange(0, block_size) + mask = cols < n_cols + val = tl.load( + logits_row_ptr + cols * stride_logits_v, + mask=mask, other=-float("inf"), + ).to(tl.float32) + z = A * tl.sigmoid(val * inv_C) + z = tl.where(mask, z, -float("inf")) + curr_max = tl.max(z, axis=0) + new_max = tl.maximum(max_val, curr_max) + sum_exp = sum_exp * tl.exp(max_val - new_max) + tl.sum(tl.exp(z - new_max), axis=0) + max_val = new_max + lse = max_val + tl.log(sum_exp) + tl.store(lse_ptr + row_idx, lse) + target = tl.load(targets_ptr + row_idx).to(tl.int32) + target_val = tl.load(logits_row_ptr + target * stride_logits_v).to(tl.float32) + target_z = A * tl.sigmoid(target_val * inv_C) + tl.store(losses_ptr + row_idx, lse - target_z) + + +@triton.jit +def _softcapped_ce_bwd_kernel( + grad_logits_ptr, grad_losses_ptr, lse_ptr, logits_ptr, targets_ptr, + stride_logits_n, stride_logits_v, + stride_grad_n, stride_grad_v, + n_rows, n_cols, softcap, + block_size: tl.constexpr, +): + row_idx = tl.program_id(0).to(tl.int64) + logits_row_ptr = logits_ptr + row_idx * stride_logits_n + grad_row_ptr = grad_logits_ptr + row_idx * stride_grad_n + lse = tl.load(lse_ptr + row_idx) + grad_loss = tl.load(grad_losses_ptr + row_idx).to(tl.float32) + target = tl.load(targets_ptr + row_idx).to(tl.int32) + A = 2.0 * softcap + inv_C = 2.0 / softcap + dz_dx_scale = A * inv_C + for off in range(0, n_cols, block_size): + cols = off + tl.arange(0, block_size) + mask = cols < n_cols + val = tl.load( + logits_row_ptr + cols * stride_logits_v, + mask=mask, other=0.0, + ).to(tl.float32) + sigmoid_u = tl.sigmoid(val * inv_C) + z = A * sigmoid_u + probs = tl.exp(z - lse) + grad_z = grad_loss * (probs - tl.where(cols == target, 1.0, 0.0)) + grad_x = grad_z * (dz_dx_scale * sigmoid_u * (1.0 - sigmoid_u)) + tl.store(grad_row_ptr + cols * stride_grad_v, grad_x, mask=mask) + + +def _validate_softcapped_ce_inputs( + logits: Tensor, targets: Tensor, softcap: float, +) -> tuple[Tensor, Tensor]: + if logits.ndim != 2: + raise ValueError(f"Expected logits.ndim=2, got {logits.ndim}") + if targets.ndim != 1: + raise ValueError(f"Expected targets.ndim=1, got {targets.ndim}") + if logits.shape[0] != targets.shape[0]: + 
raise ValueError( + f"Expected matching rows, got logits={tuple(logits.shape)} targets={tuple(targets.shape)}" + ) + if not logits.is_cuda or not targets.is_cuda: + raise ValueError("softcapped_cross_entropy requires CUDA tensors") + if softcap <= 0.0: + raise ValueError(f"softcap must be positive, got {softcap}") + if logits.dtype not in (torch.float16, torch.bfloat16, torch.float32): + raise ValueError(f"Unsupported logits dtype: {logits.dtype}") + logits = logits.contiguous() + targets = targets.contiguous() + if targets.dtype != torch.int64: + targets = targets.to(dtype=torch.int64) + return logits, targets + + +@torch.library.custom_op(f"{_FUSED_CE_LIBRARY}::softcapped_ce", mutates_args=()) +def softcapped_ce_op(logits: Tensor, targets: Tensor, softcap: float) -> tuple[Tensor, Tensor]: + logits, targets = _validate_softcapped_ce_inputs(logits, targets, float(softcap)) + n_rows, n_cols = logits.shape + losses = torch.empty((n_rows,), device=logits.device, dtype=torch.float32) + lse = torch.empty((n_rows,), device=logits.device, dtype=torch.float32) + _softcapped_ce_fwd_kernel[(n_rows,)]( + logits, losses, lse, targets, + logits.stride(0), logits.stride(1), + n_rows, n_cols, float(softcap), + block_size=_FUSED_CE_BLOCK_SIZE, num_warps=_FUSED_CE_NUM_WARPS, + ) + return losses, lse + + +@softcapped_ce_op.register_fake +def _(logits: Tensor, targets: Tensor, softcap: float): + if logits.ndim != 2 or targets.ndim != 1: + raise ValueError("softcapped_ce fake impl expects 2D logits and 1D targets") + if logits.shape[0] != targets.shape[0]: + raise ValueError( + f"Expected matching rows, got logits={tuple(logits.shape)} targets={tuple(targets.shape)}" + ) + n_rows = logits.shape[0] + return ( + logits.new_empty((n_rows,), dtype=torch.float32), + logits.new_empty((n_rows,), dtype=torch.float32), + ) + + +@torch.library.custom_op(f"{_FUSED_CE_LIBRARY}::softcapped_ce_backward", mutates_args=()) +def softcapped_ce_backward_op( + logits: Tensor, targets: Tensor, lse: Tensor, grad_losses: Tensor, softcap: float, +) -> Tensor: + logits, targets = _validate_softcapped_ce_inputs(logits, targets, float(softcap)) + lse = lse.contiguous() + grad_losses = grad_losses.contiguous().to(dtype=torch.float32) + if lse.ndim != 1 or grad_losses.ndim != 1: + raise ValueError("Expected 1D lse and grad_losses") + if lse.shape[0] != logits.shape[0] or grad_losses.shape[0] != logits.shape[0]: + raise ValueError( + f"Expected row-aligned lse/grad_losses, got logits={tuple(logits.shape)} " + f"lse={tuple(lse.shape)} grad_losses={tuple(grad_losses.shape)}" + ) + grad_logits = torch.empty_like(logits) + n_rows, n_cols = logits.shape + _softcapped_ce_bwd_kernel[(n_rows,)]( + grad_logits, grad_losses, lse, logits, targets, + logits.stride(0), logits.stride(1), + grad_logits.stride(0), grad_logits.stride(1), + n_rows, n_cols, float(softcap), + block_size=_FUSED_CE_BLOCK_SIZE, num_warps=_FUSED_CE_NUM_WARPS, + ) + return grad_logits + + +@softcapped_ce_backward_op.register_fake +def _(logits: Tensor, targets: Tensor, lse: Tensor, grad_losses: Tensor, softcap: float): + if logits.ndim != 2 or targets.ndim != 1 or lse.ndim != 1 or grad_losses.ndim != 1: + raise ValueError("softcapped_ce_backward fake impl expects 2D logits and 1D row tensors") + if ( + logits.shape[0] != targets.shape[0] + or logits.shape[0] != lse.shape[0] + or logits.shape[0] != grad_losses.shape[0] + ): + raise ValueError("softcapped_ce_backward fake impl expects row-aligned tensors") + return logits.new_empty(logits.shape) + + +def 
_softcapped_ce_setup_context( + ctx: torch.autograd.function.FunctionCtx, inputs, output, +) -> None: + logits, targets, softcap = inputs + _losses, lse = output + ctx.save_for_backward(logits, targets, lse) + ctx.softcap = float(softcap) + + +def _softcapped_ce_backward( + ctx: torch.autograd.function.FunctionCtx, grad_losses: Tensor, grad_lse: "Tensor | None", +): + del grad_lse + logits, targets, lse = ctx.saved_tensors + grad_logits = torch.ops.pgsubmission1draft7fusedce.softcapped_ce_backward( + logits, targets, lse, grad_losses, ctx.softcap + ) + return grad_logits, None, None + + +softcapped_ce_op.register_autograd( + _softcapped_ce_backward, setup_context=_softcapped_ce_setup_context, +) + + +def softcapped_cross_entropy( + logits: Tensor, targets: Tensor, softcap: float, reduction: str = "mean", +) -> Tensor: + losses, _lse = torch.ops.pgsubmission1draft7fusedce.softcapped_ce( + logits, targets, float(softcap) + ) + if reduction == "none": + return losses + if reduction == "sum": + return losses.sum() + if reduction == "mean": + return losses.mean() + raise ValueError(f"Unsupported reduction={reduction!r}") + + +class Hyperparameters: + data_dir = os.environ.get("DATA_DIR", "./data/") + seed = int(os.environ.get("SEED", 1337)) + run_id = os.environ.get("RUN_ID", str(uuid.uuid4())) + iterations = int(os.environ.get("ITERATIONS", 20000)) + warmdown_frac = float(os.environ.get("WARMDOWN_FRAC", 0.75)) + warmup_steps = int(os.environ.get("WARMUP_STEPS", 20)) + train_batch_tokens = int(os.environ.get("TRAIN_BATCH_TOKENS", 786432)) + # Fused softcapped CE (Triton). Training-only — forward_logits eval path still uses + # eager softcap+F.cross_entropy. Default ON since validated as at-worst neutral. + fused_ce_enabled = bool(int(os.environ.get("FUSED_CE_ENABLED", "1"))) + train_seq_len = int(os.environ.get("TRAIN_SEQ_LEN", 2048)) + train_log_every = int(os.environ.get("TRAIN_LOG_EVERY", 500)) + max_wallclock_seconds = float(os.environ.get("MAX_WALLCLOCK_SECONDS", 6e2)) + val_batch_tokens = int(os.environ.get("VAL_BATCH_TOKENS", 524288)) + eval_seq_len = int(os.environ.get("EVAL_SEQ_LEN", 2048)) + val_loss_every = int(os.environ.get("VAL_LOSS_EVERY", 4000)) + vocab_size = int(os.environ.get("VOCAB_SIZE", 8192)) + num_layers = int(os.environ.get("NUM_LAYERS", 11)) + xsa_last_n = int(os.environ.get("XSA_LAST_N", 11)) + model_dim = int(os.environ.get("MODEL_DIM", 512)) + num_kv_heads = int(os.environ.get("NUM_KV_HEADS", 4)) + num_heads = int(os.environ.get("NUM_HEADS", 8)) + mlp_mult = float(os.environ.get("MLP_MULT", 4.0)) + skip_gates_enabled = bool(int(os.environ.get("SKIP_GATES_ENABLED", "1"))) + tie_embeddings = bool(int(os.environ.get("TIE_EMBEDDINGS", "1"))) + logit_softcap = float(os.environ.get("LOGIT_SOFTCAP", 3e1)) + rope_base = float(os.environ.get("ROPE_BASE", 1e4)) + rope_dims = int(os.environ.get("ROPE_DIMS", 16)) + rope_train_seq_len = int(os.environ.get("ROPE_TRAIN_SEQ_LEN", 2048)) + rope_yarn = bool(int(os.environ.get("ROPE_YARN", "0"))) + ln_scale = bool(int(os.environ.get("LN_SCALE", "1"))) + qk_gain_init = float(os.environ.get("QK_GAIN_INIT", 5.0)) + num_loops = int(os.environ.get("NUM_LOOPS", 2)) + loop_start = int(os.environ.get("LOOP_START", 3)) + loop_end = int(os.environ.get("LOOP_END", 5)) + enable_looping_at = float(os.environ.get("ENABLE_LOOPING_AT", 0.35)) + parallel_start_layer = int(os.environ.get("PARALLEL_START_LAYER", 8)) + parallel_final_lane = os.environ.get("PARALLEL_FINAL_LANE", "mean") + min_lr = float(os.environ.get("MIN_LR", 0.0)) + embed_lr = 
float(os.environ.get("EMBED_LR", 0.6)) + tied_embed_lr = float(os.environ.get("TIED_EMBED_LR", 0.03)) + tied_embed_init_std = float(os.environ.get("TIED_EMBED_INIT_STD", 0.005)) + matrix_lr = float(os.environ.get("MATRIX_LR", 0.026)) + scalar_lr = float(os.environ.get("SCALAR_LR", 0.02)) + muon_momentum = float(os.environ.get("MUON_MOMENTUM", 0.97)) + muon_backend_steps = int(os.environ.get("MUON_BACKEND_STEPS", 5)) + muon_momentum_warmup_start = float( + os.environ.get("MUON_MOMENTUM_WARMUP_START", 0.92) + ) + muon_momentum_warmup_steps = int(os.environ.get("MUON_MOMENTUM_WARMUP_STEPS", 1500)) + muon_row_normalize = bool(int(os.environ.get("MUON_ROW_NORMALIZE", "1"))) + beta1 = float(os.environ.get("BETA1", 0.9)) + beta2 = float(os.environ.get("BETA2", 0.95)) + adam_eps = float(os.environ.get("ADAM_EPS", 1e-08)) + grad_clip_norm = float(os.environ.get("GRAD_CLIP_NORM", 0.3)) + eval_stride = int(os.environ.get("EVAL_STRIDE", 64)) + adam_wd = float(os.environ.get("ADAM_WD", 0.02)) + muon_wd = float(os.environ.get("MUON_WD", 0.095)) + embed_wd = float(os.environ.get("EMBED_WD", 0.085)) + ema_decay = float(os.environ.get("EMA_DECAY", 0.9965)) + ttt_enabled = bool(int(os.environ.get("TTT_ENABLED", "1"))) + ttt_lora_rank = int(os.environ.get("TTT_LORA_RANK", 96)) + ttt_lora_lr = float(os.environ.get("TTT_LORA_LR", 0.0001)) + ttt_chunk_size = int(os.environ.get("TTT_CHUNK_SIZE", 48)) + ttt_eval_seq_len = int(os.environ.get("TTT_EVAL_SEQ_LEN", 2048)) + ttt_batch_size = int(os.environ.get("TTT_BATCH_SIZE", 64)) + ttt_grad_steps = int(os.environ.get("TTT_GRAD_STEPS", 1)) + ttt_weight_decay = float(os.environ.get("TTT_WEIGHT_DECAY", 1.0)) + ttt_beta1 = float(os.environ.get("TTT_BETA1", 0)) + ttt_beta2 = float(os.environ.get("TTT_BETA2", 0.999)) + ttt_k_lora = bool(int(os.environ.get("TTT_K_LORA", "1"))) + ttt_mlp_lora = bool(int(os.environ.get("TTT_MLP_LORA", "1"))) + ttt_o_lora = bool(int(os.environ.get("TTT_O_LORA", "1"))) + ttt_optimizer = os.environ.get("TTT_OPTIMIZER", "adam") + ttt_eval_batches = os.environ.get("TTT_EVAL_BATCHES", "") + val_doc_fraction = float(os.environ.get("VAL_DOC_FRACTION", 1.0)) + compressor = os.environ.get("COMPRESSOR", "brotli") + gptq_calibration_batches = int(os.environ.get("GPTQ_CALIBRATION_BATCHES", 16)) + gptq_reserve_seconds = float(os.environ.get("GPTQ_RESERVE_SECONDS", 4.0)) + phased_ttt_prefix_docs = int(os.environ.get("PHASED_TTT_PREFIX_DOCS", 2000)) + phased_ttt_num_phases = int(os.environ.get("PHASED_TTT_NUM_PHASES", 1)) + global_ttt_lr = float(os.environ.get("GLOBAL_TTT_LR", 0.001)) + global_ttt_momentum = float(os.environ.get("GLOBAL_TTT_MOMENTUM", 0.9)) + global_ttt_epochs = int(os.environ.get("GLOBAL_TTT_EPOCHS", 1)) + global_ttt_chunk_tokens = int(os.environ.get("GLOBAL_TTT_CHUNK_TOKENS", 32768)) + global_ttt_batch_seqs = int(os.environ.get("GLOBAL_TTT_BATCH_SEQS", 32)) + global_ttt_warmup_start_lr = float(os.environ.get("GLOBAL_TTT_WARMUP_START_LR", 0.0)) + global_ttt_warmup_chunks = int(os.environ.get("GLOBAL_TTT_WARMUP_CHUNKS", 0)) + global_ttt_grad_clip = float(os.environ.get("GLOBAL_TTT_GRAD_CLIP", 1.0)) + global_ttt_respect_doc_boundaries = bool(int(os.environ.get("GLOBAL_TTT_RESPECT_DOC_BOUNDARIES", "1"))) + matrix_bits = int(os.environ.get("MATRIX_BITS", 6)) + embed_bits = int(os.environ.get("EMBED_BITS", 8)) + matrix_clip_sigmas = float(os.environ.get("MATRIX_CLIP_SIGMAS", 12.85)) + embed_clip_sigmas = float(os.environ.get("EMBED_CLIP_SIGMAS", 2e1)) + mlp_clip_sigmas = float(os.environ.get("MLP_CLIP_SIGMAS", 10.0)) + attn_clip_sigmas = 
float(os.environ.get("ATTN_CLIP_SIGMAS", 13.0)) + # AttnOutGate (per-head multiplicative output gate, PR #1667 MarioPaerle). + # Zero-init weight: 2*sigmoid(0)=1 -> transparent at start. Source defaults to + # block input x ('proj'); 'q' uses raw Q projection output. + attn_out_gate_enabled = bool(int(os.environ.get("ATTN_OUT_GATE_ENABLED", "0"))) + attn_out_gate_src = os.environ.get("ATTN_OUT_GATE_SRC", "proj") + # SmearGate (input-dependent forward-1 token smear, modded-nanogpt @classiclarryd + # via PR #1667). x_t <- x_t + lam * sigmoid(W*x_t[:gate_window]) * x_{t-1}. + # lam=0 + W=0 -> transparent at init. + smear_gate_enabled = bool(int(os.environ.get("SMEAR_GATE_ENABLED", "0"))) + # Window: first GATE_WINDOW dims of the source feed the gate projection. + gate_window = int(os.environ.get("GATE_WINDOW", 12)) + # Gated Attention (Qwen, NeurIPS 2025 Best Paper, arXiv:2505.06708; + # qiuzh20/gated_attention). Per-head sigmoid gate on SDPA output, BEFORE + # out_proj. Gate input = full block input x (paper's headwise G1 variant + # driven from hidden_states). W_g shape (num_heads, dim), plain sigmoid. + # Near-zero init gives g~0.5 at step 0 (half attention output); per-block + # attn_scale (init 1.0) compensates during training. Name contains + # "attn_gate" so CONTROL_TENSOR_NAME_PATTERNS routes it to scalar AdamW. + gated_attn_enabled = bool(int(os.environ.get("GATED_ATTN_ENABLED", "0"))) + gated_attn_init_std = float(os.environ.get("GATED_ATTN_INIT_STD", 0.01)) + # Dedicated int8-per-row quantization for `attn_gate_w` tensors. These are + # small ((num_heads, dim) = (8, 512) = 4096 params) and bypass GPTQ via the + # numel<=65536 passthrough branch -> stored as fp16 (8 KB/layer, ~65 KB total + # compressed). int8-per-row cuts the raw tensor in half with negligible BPB + # impact: scales per head (8 values), symmetric quant over [-127, 127]. + # No Hessian needed (gate weights not in collect_hessians()). + gated_attn_quant_gate = bool(int(os.environ.get("GATED_ATTN_QUANT_GATE", "0"))) + # Sparse Attention Gate (modded-nanogpt-style). Keeps dense SDPA and only + # swaps the output-gate input to the first GATE_WINDOW residual dims. + # W_g: (num_heads, gate_window) = (8, 12) = 96 params/layer (~44K total), + # vs dense GatedAttn's (8, 512) = 4K/layer (~44K diff). Name "attn_gate_w" + # is shared so quant routing and int8 gate passthrough Just Work. Gate + # passthrough int8 still applies via GATED_ATTN_QUANT_GATE=1. + # Mutually exclusive with ATTN_OUT_GATE_ENABLED and GATED_ATTN_ENABLED. + sparse_attn_gate_enabled = bool(int(os.environ.get("SPARSE_ATTN_GATE_ENABLED", "0"))) + sparse_attn_gate_init_std = float(os.environ.get("SPARSE_ATTN_GATE_INIT_STD", 0.0)) + sparse_attn_gate_scale = float(os.environ.get("SPARSE_ATTN_GATE_SCALE", 1.0)) + # LQER asymmetric rank-k correction on top-K quant-error tensors (PR #1530 v2 port). + # Computes SVD of E = W_fp - W_quant, packs top-r A,B as INT2/INT4 (asym) or INTk (sym). 
+ lqer_enabled = bool(int(os.environ.get("LQER_ENABLED", "1"))) + lqer_rank = int(os.environ.get("LQER_RANK", 4)) + lqer_top_k = int(os.environ.get("LQER_TOP_K", 3)) + lqer_factor_bits = int(os.environ.get("LQER_FACTOR_BITS", 4)) + lqer_asym_enabled = bool(int(os.environ.get("LQER_ASYM_ENABLED", "1"))) + lqer_asym_group = int(os.environ.get("LQER_ASYM_GROUP", "64")) + lqer_scope = os.environ.get("LQER_SCOPE", "all") + lqer_gain_select = bool(int(os.environ.get("LQER_GAIN_SELECT", "0"))) + awq_lite_enabled = bool(int(os.environ.get("AWQ_LITE_ENABLED", "0"))) + awq_lite_bits = int(os.environ.get("AWQ_LITE_BITS", "8")) + awq_lite_group_top_k = int(os.environ.get("AWQ_LITE_GROUP_TOP_K", "1")) + awq_lite_group_size = int(os.environ.get("AWQ_LITE_GROUP_SIZE", "64")) + distributed = "RANK" in os.environ and "WORLD_SIZE" in os.environ + rank = int(os.environ.get("RANK", "0")) + world_size = int(os.environ.get("WORLD_SIZE", "1")) + local_rank = int(os.environ.get("LOCAL_RANK", "0")) + is_main_process = rank == 0 + grad_accum_steps = 8 // world_size + # CaseOps integration: optional override of dataset root + tokenizer path. + # When CASEOPS_ENABLED=1, the wrapper loads a per-token byte sidecar + # (fineweb_val_bytes_*.bin, identical shard layout to val_*.bin) and uses + # it as the canonical raw-byte budget for BPB accounting. The sidecar + # REPLACES the build_sentencepiece_luts byte-counting path entirely. + caseops_enabled = bool(int(os.environ.get("CASEOPS_ENABLED", "0"))) + _default_caseops_data = os.path.join( + data_dir, + "datasets", + "fineweb10B_sp8192_caseops", + "datasets", + "datasets", + "fineweb10B_sp8192_lossless_caps_caseops_v1_reserved", + ) + _default_caseops_tok = os.path.join( + data_dir, + "datasets", + "fineweb10B_sp8192_caseops", + "datasets", + "tokenizers", + "fineweb_8192_bpe_lossless_caps_caseops_v1_reserved.model", + ) + if caseops_enabled: + datasets_dir = os.environ.get("DATA_PATH", _default_caseops_data) + tokenizer_path = os.environ.get("TOKENIZER_PATH", _default_caseops_tok) + else: + datasets_dir = os.environ.get( + "DATA_PATH", + os.path.join(data_dir, "datasets", f"fineweb10B_sp{vocab_size}"), + ) + tokenizer_path = os.environ.get( + "TOKENIZER_PATH", + os.path.join(data_dir, "tokenizers", f"fineweb_{vocab_size}_bpe.model"), + ) + train_files = os.path.join(datasets_dir, "fineweb_train_*.bin") + val_files = os.path.join(datasets_dir, "fineweb_val_*.bin") + val_bytes_files = os.path.join(datasets_dir, "fineweb_val_bytes_*.bin") + artifact_dir = os.environ.get("ARTIFACT_DIR", "") + logfile = ( + os.path.join(artifact_dir, f"{run_id}.txt") + if artifact_dir + else f"logs/{run_id}.txt" + ) + model_path = ( + os.path.join(artifact_dir, "final_model.pt") + if artifact_dir + else "final_model.pt" + ) + quantized_model_path = ( + os.path.join(artifact_dir, "final_model.int6.ptz") + if artifact_dir + else "final_model.int6.ptz" + ) + + +_logger_hparams = None + + +def set_logging_hparams(h): + global _logger_hparams + _logger_hparams = h + + +def log(msg, console=True): + if _logger_hparams is None: + print(msg) + return + if _logger_hparams.is_main_process: + if console: + print(msg) + if _logger_hparams.logfile is not None: + with open(_logger_hparams.logfile, "a", encoding="utf-8") as f: + print(msg, file=f) + + +class ValidationData: + def __init__(self, h, device): + self.sp = spm.SentencePieceProcessor(model_file=h.tokenizer_path) + if int(self.sp.vocab_size()) != h.vocab_size: + raise ValueError( + f"VOCAB_SIZE={h.vocab_size} does not match tokenizer 
vocab_size={int(self.sp.vocab_size())}"
+            )
+        self.val_tokens = load_validation_tokens(h.val_files, h.eval_seq_len)
+        self.caseops_enabled = bool(getattr(h, "caseops_enabled", False))
+        if self.caseops_enabled:
+            self.base_bytes_lut = None
+            self.has_leading_space_lut = None
+            self.is_boundary_token_lut = None
+        else:
+            (
+                self.base_bytes_lut,
+                self.has_leading_space_lut,
+                self.is_boundary_token_lut,
+            ) = build_sentencepiece_luts(self.sp, h.vocab_size, device)
+        self.val_bytes = None
+        if self.caseops_enabled:
+            self.val_bytes = load_validation_byte_sidecar(
+                h.val_bytes_files, h.eval_seq_len, self.val_tokens.numel()
+            )
+
+
+def build_sentencepiece_luts(sp, vocab_size, device):
+    sp_vocab_size = int(sp.vocab_size())
+    assert (
+        sp.piece_to_id("▁") != sp.unk_id()
+    ), "Tokenizer must have '▁' (space) as its own token for correct BPB byte counting"
+    table_size = max(sp_vocab_size, vocab_size)
+    base_bytes_np = np.zeros((table_size,), dtype=np.int16)
+    has_leading_space_np = np.zeros((table_size,), dtype=np.bool_)
+    is_boundary_token_np = np.ones((table_size,), dtype=np.bool_)
+    for token_id in range(sp_vocab_size):
+        if sp.is_control(token_id) or sp.is_unknown(token_id) or sp.is_unused(token_id):
+            continue
+        is_boundary_token_np[token_id] = False
+        if sp.is_byte(token_id):
+            base_bytes_np[token_id] = 1
+            continue
+        piece = sp.id_to_piece(token_id)
+        if piece.startswith("▁"):
+            has_leading_space_np[token_id] = True
+            piece = piece[1:]
+        base_bytes_np[token_id] = len(piece.encode("utf-8"))
+    return (
+        torch.tensor(base_bytes_np, dtype=torch.int16, device=device),
+        torch.tensor(has_leading_space_np, dtype=torch.bool, device=device),
+        torch.tensor(is_boundary_token_np, dtype=torch.bool, device=device),
+    )
+
+
+def load_validation_tokens(pattern, seq_len):
+    # Filter out CaseOps byte sidecar shards which share the val_*.bin glob.
+    files = [
+        Path(p)
+        for p in sorted(glob.glob(pattern))
+        if "_bytes_" not in Path(p).name
+    ]
+    if not files:
+        raise FileNotFoundError(f"No files found for pattern: {pattern}")
+    tokens = torch.cat([load_data_shard(file) for file in files]).contiguous()
+    usable = (tokens.numel() - 1) // seq_len * seq_len
+    if usable <= 0:
+        raise ValueError(f"Validation split is too short for EVAL_SEQ_LEN={seq_len}")
+    return tokens[: usable + 1]
+
+
+def load_validation_byte_sidecar(pattern, seq_len, expected_len):
+    """Load CaseOps per-token byte sidecar(s). Same shard layout as token shards
+    (256 int32 header + uint16 array). Each entry = canonical raw-text byte
+    budget for that token in the corresponding val shard. Returns a CPU
+    int32 tensor sliced to match expected_len (i.e. val_tokens length)."""
+    files = [Path(p) for p in sorted(glob.glob(pattern))]
+    if not files:
+        raise FileNotFoundError(f"No byte sidecar files for pattern: {pattern}")
+    shards = [load_data_shard(file) for file in files]
+    # load_data_shard returns uint16 — that's exactly what the sidecar stores.
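+    # The int32 widening on return keeps downstream byte-budget sums safe:
+    # a single val batch (VAL_BATCH_TOKENS=524288) sums far past the uint16
+    # ceiling of 65535.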
+    bytes_full = torch.cat(shards).contiguous()
+    if bytes_full.numel() < expected_len:
+        raise ValueError(
+            f"Byte sidecar too short: {bytes_full.numel()} < val_tokens {expected_len}"
+        )
+    return bytes_full[:expected_len].to(torch.int32)
+
+
+def load_data_shard(file):
+    # Shard layout: 256-entry little-endian int32 header, then uint16 token
+    # payload; header[2] holds the token count.
+    header_bytes = 256 * np.dtype("<i4").itemsize
+    header = np.memmap(file, dtype="<i4", shape=(256,), mode="r")
+    num_tokens = int(header[2])
+    data = np.memmap(
+        file, dtype="<u2", mode="r", offset=header_bytes, shape=(num_tokens,)
+    )
+    return torch.from_numpy(np.ascontiguousarray(data))
+
+
+def _read_num_tokens(file):
+    header = np.memmap(file, dtype="<i4", shape=(256,), mode="r")
+    return int(header[2])
+
+
+_shard_memmap_cache = {}
+
+
+def _get_shard_memmap(file):
+    if file not in _shard_memmap_cache:
+        header_bytes = 256 * np.dtype("<i4").itemsize
+        _shard_memmap_cache[file] = np.memmap(
+            file, dtype="<u2", mode="r", offset=header_bytes,
+            shape=(_read_num_tokens(file),),
+        )
+    return _shard_memmap_cache[file]
+
+
+def get_next_multiple_of_n(value, n):
+    return n * ((value + n - 1) // n)
+
+
+BOS_ID = None
+
+
+def _build_cu_seqlens(starts, total_len, device, max_doc_len, bucket_size):
+    # Segment boundaries begin at 0 even if the first BOS falls mid-span.
+    if not starts or starts[0] != 0:
+        starts = [0] + list(starts)
+    seg_starts = []
+    for start, end in zip(starts, list(starts[1:]) + [total_len]):
+        if max_doc_len > 0:
+            pos = start
+            while pos < end:
+                seg_starts.append(pos)
+                pos += max_doc_len
+        else:
+            seg_starts.append(start)
+    boundaries = seg_starts + [total_len]
+    padded_len = get_next_multiple_of_n(len(boundaries), bucket_size)
+    cu = torch.full((padded_len,), total_len, dtype=torch.int32, device=device)
+    cu[: len(boundaries)] = torch.tensor(boundaries, dtype=torch.int32, device=device)
+    seg_ends = seg_starts[1:] + [total_len]
+    max_seqlen = max(end - start for start, end in zip(seg_starts, seg_ends))
+    return cu, max_seqlen
+
+class DocumentPackingLoader:
+    _shard_pool = ThreadPoolExecutor(1)
+
+    def __init__(self, h, device, cu_bucket_size=64):
+        self.rank = h.rank
+        self.world_size = h.world_size
+        self.device = device
+        self.cu_bucket_size = cu_bucket_size
+        self.max_seq_len = h.train_seq_len
+        all_files = [Path(p) for p in sorted(glob.glob(h.train_files))]
+        if not all_files:
+            raise FileNotFoundError(f"No files found for pattern: {h.train_files}")
+        self.files = all_files
+        self.file_iter = iter(self.files)
+        self._init_shard(load_data_shard(next(self.file_iter)))
+        self._next_shard = self._submit_next_shard()
+        self._batch_pool = ThreadPoolExecutor(1)
+        self._prefetch_queue = []
+
+    def _init_shard(self, tokens):
+        global BOS_ID
+        self.tokens = tokens
+        self.shard_size = tokens.numel()
+        if BOS_ID is None:
+            BOS_ID = 1
+        self.bos_idx = (
+            (tokens == BOS_ID).nonzero(as_tuple=True)[0].to(torch.int64).cpu().numpy()
+        )
+        self.cursor = int(self.bos_idx[0])
+
+    def _submit_next_shard(self):
+        try:
+            path = next(self.file_iter)
+            return self._shard_pool.submit(load_data_shard, path)
+        except StopIteration:
+            return None
+
+    def _advance_shard(self):
+        if self._next_shard is None:
+            self.file_iter = iter(self.files)
+            self._next_shard = self._shard_pool.submit(
+                load_data_shard, next(self.file_iter)
+            )
+        self._init_shard(self._next_shard.result())
+        self._next_shard = self._submit_next_shard()
+
+    def _local_doc_starts(self, local_start, total_len):
+        lo = np.searchsorted(self.bos_idx, local_start, side="left")
+        hi = np.searchsorted(self.bos_idx, local_start + total_len, side="left")
+        return (self.bos_idx[lo:hi] - local_start).tolist()
+
+    def _prepare_batch(self, num_tokens_local, max_seq_len):
+        per_rank_span = num_tokens_local + 1
+        global_span = per_rank_span * self.world_size
+        while self.cursor + global_span > self.shard_size:
+            self._advance_shard()
+        local_start = self.cursor + self.rank * per_rank_span
+        buf = self.tokens[local_start : local_start + per_rank_span]
+        inputs = torch.empty(per_rank_span - 1, dtype=torch.int64, pin_memory=True)
+        targets = torch.empty(per_rank_span - 1, dtype=torch.int64, pin_memory=True)
+        inputs.copy_(buf[:-1])
+        targets.copy_(buf[1:])
+        starts = self._local_doc_starts(local_start, inputs.numel())
+        cu_seqlens, max_seqlen = _build_cu_seqlens(
+            starts, inputs.numel(), inputs.device, max_seq_len, self.cu_bucket_size
+        )
+        cu_seqlens = cu_seqlens.pin_memory()
+        self.cursor += global_span
+        return inputs, targets, cu_seqlens, max_seqlen
+
+    def next_batch(self, global_tokens, grad_accum_steps):
+        num_tokens_local = global_tokens // (self.world_size * grad_accum_steps)
+        while len(self._prefetch_queue) < 2:
+            self._prefetch_queue.append(
self._batch_pool.submit(self._prepare_batch, num_tokens_local, self.max_seq_len)) + inputs, targets, cu_seqlens, max_seqlen = self._prefetch_queue.pop(0).result() + self._prefetch_queue.append( + self._batch_pool.submit(self._prepare_batch, num_tokens_local, self.max_seq_len)) + return ( + inputs[None].to(self.device, non_blocking=True), + targets[None].to(self.device, non_blocking=True), + cu_seqlens.to(self.device, non_blocking=True), + max_seqlen, + ) + + +class ShuffledSequenceLoader: + def __init__(self, h, device): + self.world_size = h.world_size + self.seq_len = h.train_seq_len + self.device = device + all_files = [Path(p) for p in sorted(glob.glob(h.train_files))] + if not all_files: + raise FileNotFoundError(f"No files found for pattern: {h.train_files}") + self.files = all_files[h.rank :: h.world_size] + self.rng = np.random.Generator(np.random.PCG64(h.rank)) + self.num_tokens = [_read_num_tokens(f) for f in self.files] + self.start_inds = [[] for _ in self.files] + for si in range(len(self.files)): + self._reset_shard(si) + + def _reset_shard(self, si): + max_phase = min( + self.seq_len - 1, max(0, self.num_tokens[si] - self.seq_len - 1) + ) + phase = int(self.rng.integers(max_phase + 1)) if max_phase > 0 else 0 + num_sequences = (self.num_tokens[si] - 1 - phase) // self.seq_len + sequence_order = self.rng.permutation(num_sequences) + self.start_inds[si] = (phase + sequence_order * self.seq_len).tolist() + + def next_batch(self, global_tokens, grad_accum_steps): + device_tokens = global_tokens // (self.world_size * grad_accum_steps) + device_batch_size = device_tokens // self.seq_len + remaining = np.array([len(s) for s in self.start_inds], dtype=np.float64) + x = torch.empty((device_batch_size, self.seq_len), dtype=torch.int64) + y = torch.empty((device_batch_size, self.seq_len), dtype=torch.int64) + for bi in range(device_batch_size): + total = remaining.sum() + if total <= 0: + for si in range(len(self.files)): + self._reset_shard(si) + remaining = np.array( + [len(s) for s in self.start_inds], dtype=np.float64 + ) + total = remaining.sum() + probs = remaining / total + si = int(self.rng.choice(len(self.files), p=probs)) + start_ind = self.start_inds[si].pop() + remaining[si] -= 1 + mm = _get_shard_memmap(self.files[si]) + window = torch.as_tensor( + np.array(mm[start_ind : start_ind + self.seq_len + 1], dtype=np.int64) + ) + x[bi] = window[:-1] + y[bi] = window[1:] + return x.to(self.device, non_blocking=True), y.to( + self.device, non_blocking=True + ) + + +class RMSNorm(nn.Module): + def __init__(self, eps=None): + super().__init__() + self.eps = eps + + def forward(self, x): + return F.rms_norm(x, (x.size(-1),), eps=self.eps) + + +class CastedLinear(nn.Linear): + def forward(self, x): + w = self.weight.to(x.dtype) + bias = self.bias.to(x.dtype) if self.bias is not None else None + return F.linear(x, w, bias) + + +@triton.jit +def linear_leaky_relu_square_kernel( + a_desc, + b_desc, + c_desc, + aux_desc, + M, + N, + K, + BLOCK_SIZE_M: tl.constexpr, + BLOCK_SIZE_N: tl.constexpr, + BLOCK_SIZE_K: tl.constexpr, + NUM_SMS: tl.constexpr, + FORWARD: tl.constexpr, +): + dtype = tl.bfloat16 + start_pid = tl.program_id(axis=0) + num_pid_m = tl.cdiv(M, BLOCK_SIZE_M) + num_pid_n = tl.cdiv(N, BLOCK_SIZE_N) + k_tiles = tl.cdiv(K, BLOCK_SIZE_K) + num_tiles = num_pid_m * num_pid_n + tile_id_c = start_pid - NUM_SMS + for tile_id in tl.range(start_pid, num_tiles, NUM_SMS, flatten=True): + pid_m = tile_id // num_pid_n + pid_n = tile_id % num_pid_n + offs_am = pid_m * BLOCK_SIZE_M + offs_bn 
= pid_n * BLOCK_SIZE_N + accumulator = tl.zeros((BLOCK_SIZE_M, BLOCK_SIZE_N), dtype=tl.float32) + for ki in range(k_tiles): + offs_k = ki * BLOCK_SIZE_K + a = a_desc.load([offs_am, offs_k]) + b = b_desc.load([offs_bn, offs_k]) + accumulator = tl.dot(a, b.T, accumulator) + tile_id_c += NUM_SMS + offs_am_c = offs_am + offs_bn_c = offs_bn + acc = tl.reshape(accumulator, (BLOCK_SIZE_M, 2, BLOCK_SIZE_N // 2)) + acc = tl.permute(acc, (0, 2, 1)) + acc0, acc1 = tl.split(acc) + c0 = acc0.to(dtype) + c1 = acc1.to(dtype) + if not FORWARD: + pre0 = aux_desc.load([offs_am_c, offs_bn_c]) + pre1 = aux_desc.load([offs_am_c, offs_bn_c + BLOCK_SIZE_N // 2]) + c0 = c0 * tl.where(pre0 > 0, 2.0 * pre0, 0.5 * pre0) + c1 = c1 * tl.where(pre1 > 0, 2.0 * pre1, 0.5 * pre1) + c_desc.store([offs_am_c, offs_bn_c], c0) + c_desc.store([offs_am_c, offs_bn_c + BLOCK_SIZE_N // 2], c1) + if FORWARD: + aux0 = tl.where(c0 > 0, c0, 0.5 * c0) + aux1 = tl.where(c1 > 0, c1, 0.5 * c1) + aux_desc.store([offs_am_c, offs_bn_c], aux0 * aux0) + aux_desc.store([offs_am_c, offs_bn_c + BLOCK_SIZE_N // 2], aux1 * aux1) + + +def linear_leaky_relu_square(a, b, aux=None): + M, K = a.shape + N, K2 = b.shape + assert K == K2 + c = torch.empty((M, N), device=a.device, dtype=a.dtype) + forward = aux is None + if aux is None: + aux = torch.empty((M, N), device=a.device, dtype=a.dtype) + num_sms = torch.cuda.get_device_properties(a.device).multi_processor_count + BLOCK_SIZE_M, BLOCK_SIZE_N, BLOCK_SIZE_K = 256, 128, 64 + num_stages = 4 if forward else 3 + a_desc = TensorDescriptor.from_tensor(a, [BLOCK_SIZE_M, BLOCK_SIZE_K]) + b_desc = TensorDescriptor.from_tensor(b, [BLOCK_SIZE_N, BLOCK_SIZE_K]) + c_desc = TensorDescriptor.from_tensor(c, [BLOCK_SIZE_M, BLOCK_SIZE_N // 2]) + aux_desc = TensorDescriptor.from_tensor(aux, [BLOCK_SIZE_M, BLOCK_SIZE_N // 2]) + grid = lambda _meta: ( + min(num_sms, triton.cdiv(M, BLOCK_SIZE_M) * triton.cdiv(N, BLOCK_SIZE_N)), + ) + linear_leaky_relu_square_kernel[grid]( + a_desc, + b_desc, + c_desc, + aux_desc, + M, + N, + K, + BLOCK_SIZE_M=BLOCK_SIZE_M, + BLOCK_SIZE_N=BLOCK_SIZE_N, + BLOCK_SIZE_K=BLOCK_SIZE_K, + NUM_SMS=num_sms, + FORWARD=forward, + num_stages=num_stages, + num_warps=8, + ) + if forward: + return c, aux + return c + + +class FusedLinearLeakyReLUSquareFunction(torch.autograd.Function): + @staticmethod + def forward(ctx, x, w1, w2): + x_flat = x.reshape(-1, x.shape[-1]) + pre, post = linear_leaky_relu_square(x_flat, w1) + out = F.linear(post, w2) + ctx.save_for_backward(x, w1, w2, pre, post) + return out.view(*x.shape[:-1], out.shape[-1]) + + @staticmethod + def backward(ctx, grad_output): + x, w1, w2, pre, post = ctx.saved_tensors + x_flat = x.reshape(-1, x.shape[-1]) + grad_output_flat = grad_output.reshape(-1, grad_output.shape[-1]) + dw2 = grad_output_flat.T @ post + dpre = linear_leaky_relu_square(grad_output_flat, w2.T.contiguous(), aux=pre) + dw1 = dpre.T @ x_flat + dx = dpre @ w1 + return dx.view_as(x), dw1, dw2 + + +FusedLeakyReLUSquareMLP = FusedLinearLeakyReLUSquareFunction.apply + + +class Rotary(nn.Module): + def __init__(self, dim, base=1e4, train_seq_len=1024, rope_dims=0, yarn=True): + super().__init__() + self.dim = dim + self.base = base + self.train_seq_len = train_seq_len + self.yarn = yarn + self.rope_dims = rope_dims if rope_dims > 0 else dim + inv_freq = 1.0 / base ** ( + torch.arange(0, self.rope_dims, 2, dtype=torch.float32) / self.rope_dims + ) + self.register_buffer("inv_freq", inv_freq, persistent=False) + self._seq_len_cached = 0 + self._cos_cached = None + self._sin_cached = 
None + + def forward(self, seq_len, device, dtype): + if ( + self._cos_cached is None + or self._sin_cached is None + or self._seq_len_cached < seq_len + or self._cos_cached.device != device + ): + rd = self.rope_dims + if self.yarn and seq_len > self.train_seq_len: + scale = seq_len / self.train_seq_len + new_base = self.base * scale ** (rd / (rd - 2)) + inv_freq = 1.0 / new_base ** ( + torch.arange(0, rd, 2, dtype=torch.float32, device=device) / rd + ) + else: + inv_freq = self.inv_freq.float().to(device) + t = torch.arange(seq_len, device=device, dtype=torch.float32) + freqs = torch.outer(t, inv_freq) + self._cos_cached = freqs.cos()[None, :, None, :] + self._sin_cached = freqs.sin()[None, :, None, :] + self._seq_len_cached = seq_len + return self._cos_cached[:, :seq_len].to(dtype=dtype), self._sin_cached[:, :seq_len].to(dtype=dtype) + + +def apply_rotary_emb(x, cos, sin, rope_dims=0): + if rope_dims > 0 and rope_dims < x.size(-1): + x_rope, x_pass = x[..., :rope_dims], x[..., rope_dims:] + half = rope_dims // 2 + x1, x2 = x_rope[..., :half], x_rope[..., half:] + x_rope = torch.cat((x1 * cos + x2 * sin, x1 * -sin + x2 * cos), dim=-1) + return torch.cat((x_rope, x_pass), dim=-1) + half = x.size(-1) // 2 + x1, x2 = x[..., :half], x[..., half:] + return torch.cat((x1 * cos + x2 * sin, x1 * -sin + x2 * cos), dim=-1) + + +class CausalSelfAttention(nn.Module): + def __init__( + self, dim, num_heads, num_kv_heads, rope_base, qk_gain_init, train_seq_len, yarn=True, + attn_out_gate=False, attn_out_gate_src="proj", gate_window=12, + gated_attn=False, gated_attn_init_std=0.01, + sparse_attn_gate=False, sparse_attn_gate_init_std=0.0, sparse_attn_gate_scale=1.0, + ): + super().__init__() + if dim % num_heads != 0: + raise ValueError("model_dim must be divisible by num_heads") + if num_heads % num_kv_heads != 0: + raise ValueError("num_heads must be divisible by num_kv_heads") + if int(attn_out_gate) + int(gated_attn) + int(sparse_attn_gate) > 1: + raise ValueError( + "attn_out_gate, gated_attn, and sparse_attn_gate are mutually exclusive" + ) + self.num_heads = num_heads + self.num_kv_heads = num_kv_heads + self.head_dim = dim // num_heads + if self.head_dim % 2 != 0: + raise ValueError("head_dim must be even for RoPE") + self.q_gain = nn.Parameter( + torch.full((num_heads,), qk_gain_init, dtype=torch.float32) + ) + self.rope_dims = 0 + self.rotary = Rotary(self.head_dim, base=rope_base, train_seq_len=train_seq_len, yarn=yarn) + self.use_xsa = False + # AttnOutGate (PR #1667 MarioPaerle): per-head multiplicative gate on attention + # output. CastedLinear so restore_fp32_params casts back to fp32 for GPTQ. + # _zero_init -> 2*sigmoid(0)=1 -> transparent at init. + self.attn_out_gate = attn_out_gate + self.attn_out_gate_src = attn_out_gate_src + self.gate_window = gate_window + if attn_out_gate: + self.attn_gate_proj = CastedLinear(gate_window, num_heads, bias=False) + self.attn_gate_proj._zero_init = True + # Gated Attention (arXiv:2505.06708, Qwen, NeurIPS 2025). Per-head sigmoid + # gate on SDPA output, BEFORE out_proj. Gate projection W_g: (num_heads, dim). + # Name "attn_gate_w" contains "attn_gate" substring so it matches + # CONTROL_TENSOR_NAME_PATTERNS and routes to the scalar AdamW group. + # fp32 Parameter -> restore_fp32_params path covers it via the ndim<2 OR + # name-pattern check (name matches "attn_gate"). Cast to x.dtype on use. 
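+        # Rough init-scale example, assuming ~unit-RMS block input x: with
+        # std=0.01 and dim=512, the pre-sigmoid logits have std ~ 0.01*sqrt(512)
+        # ~ 0.23 around zero, so g ~ sigmoid(0) = 0.5 and each head starts near
+        # half output (the "g~0.5 at step 0" noted above).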
+ self.gated_attn = gated_attn + if gated_attn: + W = torch.empty(num_heads, dim, dtype=torch.float32) + nn.init.normal_(W, mean=0.0, std=gated_attn_init_std) + self.attn_gate_w = nn.Parameter(W) + # Sparse attention head-output gate (modded-nanogpt style). Keeps dense SDPA + # and only narrows the gate input to the first gate_window residual dims. + # W_g: (num_heads, gate_window). y_{t,h} <- sigmoid(scale * W_g_h @ x_t[:gate_window]) * y_{t,h}. + # Shares attn_gate_w name with dense GatedAttn so the quant routing + # (CONTROL_TENSOR_NAME_PATTERNS / attn_gate_w int8 passthrough) is unchanged. + self.sparse_attn_gate = sparse_attn_gate + self.sparse_attn_gate_scale = sparse_attn_gate_scale + if sparse_attn_gate: + W = torch.empty(num_heads, gate_window, dtype=torch.float32) + if sparse_attn_gate_init_std > 0: + nn.init.normal_(W, mean=0.0, std=sparse_attn_gate_init_std) + else: + nn.init.zeros_(W) + self.attn_gate_w = nn.Parameter(W) + + def _xsa_efficient(self, y, v): + B, T, H, D = y.shape + Hkv = v.size(-2) + group = H // Hkv + y_g = y.reshape(B, T, Hkv, group, D) + vn = F.normalize(v, dim=-1).unsqueeze(-2) + proj = (y_g * vn).sum(dim=-1, keepdim=True) * vn + return (y_g - proj).reshape(B, T, H, D) + + def forward(self, x, q_w, k_w, v_w, out_w, cu_seqlens=None, max_seqlen=0): + bsz, seqlen, dim = x.shape + # q_raw kept around as a tap point for attn_out_gate_src='q' (post-projection, + # pre-reshape, pre-RoPE). + q_raw = F.linear(x, q_w.to(x.dtype)) + q = q_raw.reshape(bsz, seqlen, self.num_heads, self.head_dim) + k = F.linear(x, k_w.to(x.dtype)).reshape(bsz, seqlen, self.num_kv_heads, self.head_dim) + v = F.linear(x, v_w.to(x.dtype)).reshape(bsz, seqlen, self.num_kv_heads, self.head_dim) + q = F.rms_norm(q, (q.size(-1),)) + k = F.rms_norm(k, (k.size(-1),)) + cos, sin = self.rotary(seqlen, x.device, q.dtype) + q = apply_rotary_emb(q, cos, sin, self.rope_dims) + k = apply_rotary_emb(k, cos, sin, self.rope_dims) + q = q * self.q_gain.to(dtype=q.dtype)[None, None, :, None] + if cu_seqlens is not None: + y = flash_attn_varlen_func( + q[0], + k[0], + v[0], + cu_seqlens_q=cu_seqlens, + cu_seqlens_k=cu_seqlens, + max_seqlen_q=max_seqlen, + max_seqlen_k=max_seqlen, + causal=True, + window_size=(-1, -1), + )[None] + else: + y = flash_attn_3_func(q, k, v, causal=True) + if self.use_xsa: + y = self._xsa_efficient(y, v) + # AttnOutGate inlined (PR #1667). Inline + .contiguous() barrier so torch.compile + # fullgraph=True is happy (this avoids the @torch.compiler.disable trap that + # crashed gates v3). Per-head gate on (B,T,H,D) tensor: g shape [B,T,H], broadcast + # over D via [..., None]. zero-init weight -> 2*sigmoid(0)=1 -> transparent. + if self.attn_out_gate: + gate_src = q_raw if self.attn_out_gate_src == "q" else x + gate_in = gate_src[..., : self.gate_window].contiguous() + g = 2.0 * torch.sigmoid(self.attn_gate_proj(gate_in)) + y = y * g[..., None] + # Gated Attention (arXiv:2505.06708 G1). Inline + .contiguous() barrier so + # torch.compile fullgraph=True is happy. Per-head gate on (B,T,H,D): g shape + # [B,T,H], broadcast over D via [..., None]. Paper: g = sigmoid(x @ W_g.T) + # where W_g: (H, dim). .to(x.dtype) on fp32 param before broadcast with bf16. + if self.gated_attn: + x_c = x.contiguous() + g = torch.sigmoid(F.linear(x_c, self.attn_gate_w.to(x.dtype))) + y = y * g[..., None] + # Sparse head-output gate: narrower (gate_window) input, same shape g as GatedAttn. 
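+        # Per-head view of the gate computed below: for each head h,
+        #   y[..., h, :] *= sigmoid(scale * (x[..., :gate_window] @ W_g[h]))
+        # i.e. only the first gate_window (default 12) residual dims drive it.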
+ if self.sparse_attn_gate: + gate_in = x[..., : self.gate_window].contiguous() + g = torch.sigmoid( + self.sparse_attn_gate_scale + * F.linear(gate_in, self.attn_gate_w.to(x.dtype)) + ) + y = y * g[..., None] + y = y.reshape(bsz, seqlen, dim) + self._last_proj_input = y.detach() if getattr(self, "_calib", False) else None + return F.linear(y, out_w.to(x.dtype)) + + +class MLP(nn.Module): + def __init__(self, dim, mlp_mult): + super().__init__() + self.use_fused = True + + def forward(self, x, up_w, down_w): + if self.training and self.use_fused: + return FusedLeakyReLUSquareMLP(x, up_w.to(x.dtype), down_w.to(x.dtype)) + hidden = F.leaky_relu(F.linear(x, up_w.to(x.dtype)), negative_slope=0.5).square() + self._last_down_input = hidden.detach() if getattr(self, "_calib", False) else None + return F.linear(hidden, down_w.to(x.dtype)) + + +class Block(nn.Module): + def __init__( + self, + dim, + num_heads, + num_kv_heads, + mlp_mult, + rope_base, + qk_gain_init, + train_seq_len, + layer_idx=0, + ln_scale=False, + yarn=True, + attn_out_gate=False, + attn_out_gate_src="proj", + gate_window=12, + gated_attn=False, + gated_attn_init_std=0.01, + sparse_attn_gate=False, + sparse_attn_gate_init_std=0.0, + sparse_attn_gate_scale=1.0, + ): + super().__init__() + self.attn_norm = RMSNorm() + self.mlp_norm = RMSNorm() + self.attn = CausalSelfAttention( + dim, num_heads, num_kv_heads, rope_base, qk_gain_init, train_seq_len, yarn=yarn, + attn_out_gate=attn_out_gate, attn_out_gate_src=attn_out_gate_src, gate_window=gate_window, + gated_attn=gated_attn, gated_attn_init_std=gated_attn_init_std, + sparse_attn_gate=sparse_attn_gate, + sparse_attn_gate_init_std=sparse_attn_gate_init_std, + sparse_attn_gate_scale=sparse_attn_gate_scale, + ) + self.mlp = MLP(dim, mlp_mult) + self.attn_scale = nn.Parameter(torch.ones(dim, dtype=torch.float32)) + self.mlp_scale = nn.Parameter(torch.ones(dim, dtype=torch.float32)) + self.resid_mix = nn.Parameter( + torch.stack((torch.ones(dim), torch.zeros(dim))).float() + ) + self.ln_scale_factor = 1.0 / math.sqrt(layer_idx + 1) if ln_scale else 1.0 + + def forward(self, x, x0, q_w, k_w, v_w, out_w, up_w, down_w, cu_seqlens=None, max_seqlen=0): + mix = self.resid_mix.to(dtype=x.dtype) + x_in = mix[0][None, None, :] * x + mix[1][None, None, :] * x0 + attn_out = self.attn( + self.attn_norm(x_in) * self.ln_scale_factor, + q_w, k_w, v_w, out_w, + cu_seqlens=cu_seqlens, + max_seqlen=max_seqlen, + ) + x_out = x_in + self.attn_scale.to(dtype=x_in.dtype)[None, None, :] * attn_out + x_out = x_out + self.mlp_scale.to(dtype=x_out.dtype)[ + None, None, : + ] * self.mlp(self.mlp_norm(x_out) * self.ln_scale_factor, up_w, down_w) + return x_out + +class GPT(nn.Module): + def __init__(self, h): + super().__init__() + if h.logit_softcap <= 0.0: + raise ValueError(f"logit_softcap must be positive, got {h.logit_softcap}") + self.tie_embeddings = h.tie_embeddings + self.tied_embed_init_std = h.tied_embed_init_std + self.logit_softcap = h.logit_softcap + self.fused_ce_enabled = bool(h.fused_ce_enabled) + self.tok_emb = nn.Embedding(h.vocab_size, h.model_dim) + self.num_layers = h.num_layers + head_dim = h.model_dim // h.num_heads + kv_dim = h.num_kv_heads * head_dim + hidden_dim = int(h.mlp_mult * h.model_dim) + self.qo_bank = nn.Parameter(torch.empty(2 * h.num_layers, h.model_dim, h.model_dim)) + self.kv_bank = nn.Parameter(torch.empty(2 * h.num_layers, kv_dim, h.model_dim)) + self.mlp_up_bank = nn.Parameter(torch.empty(h.num_layers, hidden_dim, h.model_dim)) + self.mlp_down_bank = 
nn.Parameter(torch.empty(h.num_layers, h.model_dim, hidden_dim)) + self.num_encoder_layers = h.num_layers // 2 + self.num_decoder_layers = h.num_layers - self.num_encoder_layers + self.blocks = nn.ModuleList( + [ + Block( + h.model_dim, + h.num_heads, + h.num_kv_heads, + h.mlp_mult, + h.rope_base, + h.qk_gain_init, + h.train_seq_len, + layer_idx=i, + ln_scale=h.ln_scale, + yarn=h.rope_yarn, + attn_out_gate=h.attn_out_gate_enabled, + attn_out_gate_src=h.attn_out_gate_src, + gate_window=h.gate_window, + gated_attn=h.gated_attn_enabled, + gated_attn_init_std=h.gated_attn_init_std, + sparse_attn_gate=h.sparse_attn_gate_enabled, + sparse_attn_gate_init_std=h.sparse_attn_gate_init_std, + sparse_attn_gate_scale=h.sparse_attn_gate_scale, + ) + for i in range(h.num_layers) + ] + ) + if h.rope_dims > 0: + head_dim = h.model_dim // h.num_heads + for block in self.blocks: + block.attn.rope_dims = h.rope_dims + block.attn.rotary = Rotary( + head_dim, + base=h.rope_base, + train_seq_len=h.train_seq_len, + rope_dims=h.rope_dims, + yarn=h.rope_yarn, + ) + self.final_norm = RMSNorm() + self.lm_head = ( + None + if h.tie_embeddings + else CastedLinear(h.model_dim, h.vocab_size, bias=False) + ) + if self.lm_head is not None: + self.lm_head._zero_init = True + if h.xsa_last_n > 0: + for i in range(max(0, h.num_layers - h.xsa_last_n), h.num_layers): + self.blocks[i].attn.use_xsa = True + self.looping_active = False + if h.num_loops > 0: + loop_seg = list(range(h.loop_start, h.loop_end + 1)) + all_indices = list(range(h.loop_start)) + for _ in range(h.num_loops + 1): + all_indices.extend(loop_seg) + all_indices.extend(range(h.loop_end + 1, h.num_layers)) + num_enc = len(all_indices) // 2 + self.encoder_indices = all_indices[:num_enc] + self.decoder_indices = all_indices[num_enc:] + else: + self.encoder_indices = list(range(self.num_encoder_layers)) + self.decoder_indices = list(range(self.num_encoder_layers, h.num_layers)) + self.num_skip_weights = min( + len(self.encoder_indices), len(self.decoder_indices) + ) + self.skip_weights = nn.Parameter( + torch.ones(self.num_skip_weights, h.model_dim, dtype=torch.float32) + ) + self.skip_gates = ( + nn.Parameter( + torch.zeros(self.num_skip_weights, h.model_dim, dtype=torch.float32) + ) + if h.skip_gates_enabled + else None + ) + self.parallel_start_layer = h.parallel_start_layer + self.parallel_final_lane = h.parallel_final_lane.lower() + self.parallel_post_lambdas = nn.Parameter( + torch.ones(h.num_layers, 2, 2, dtype=torch.float32) + ) + self.parallel_resid_lambdas = nn.Parameter( + torch.full((h.num_layers, 2), 1.1, dtype=torch.float32) + ) + # SmearGate (PR #1667 / modded-nanogpt @classiclarryd): + # x_t <- x_t + lam * sigmoid(W * x_t[:gate_window]) * x_{t-1}. + # Per-token forward-1 smear of the embedding lane. W zero-init + lam=0 -> + # transparent at init. Uses CastedLinear so restore_fp32_params handles dtype. 
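+        # Note: the BOS masking that stops cross-document smear is applied at
+        # the gate's use sites (_forward_hidden / forward_ttt), not here; init
+        # only allocates the gate and leaves it transparent (W=0, lam=0).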
+ self.smear_gate_enabled = h.smear_gate_enabled + if self.smear_gate_enabled: + self.smear_window = h.gate_window + self.smear_gate = CastedLinear(self.smear_window, 1, bias=False) + self.smear_gate._zero_init = True + self.smear_lambda = nn.Parameter(torch.zeros(1, dtype=torch.float32)) + self._init_weights() + + def _init_weights(self): + if self.tie_embeddings: + nn.init.normal_(self.tok_emb.weight, mean=0.0, std=self.tied_embed_init_std) + n = self.num_layers + proj_scale = 1.0 / math.sqrt(2 * n) + for i in range(n): + nn.init.orthogonal_(self.qo_bank.data[i], gain=1.0) + nn.init.zeros_(self.qo_bank.data[n + i]) + self.qo_bank.data[n + i].mul_(proj_scale) + nn.init.orthogonal_(self.kv_bank.data[i], gain=1.0) + nn.init.orthogonal_(self.kv_bank.data[n + i], gain=1.0) + for i in range(n): + nn.init.orthogonal_(self.mlp_up_bank.data[i], gain=1.0) + nn.init.zeros_(self.mlp_down_bank.data[i]) + self.mlp_down_bank.data[i].mul_(proj_scale) + for name, module in self.named_modules(): + if isinstance(module, nn.Linear): + if getattr(module, "_zero_init", False): + nn.init.zeros_(module.weight) + elif ( + module.weight.ndim == 2 + and module.weight.shape[0] >= 64 + and module.weight.shape[1] >= 64 + ): + nn.init.orthogonal_(module.weight, gain=1.0) + + def _bank_weights(self, i): + n = self.num_layers + return ( + self.qo_bank[i], + self.kv_bank[i], + self.kv_bank[n + i], + self.qo_bank[n + i], + self.mlp_up_bank[i], + self.mlp_down_bank[i], + ) + + def _parallel_block( + self, block_idx, lane0, lane1, x0, + q_w, k_w, v_w, out_w, up_w, down_w, + cu_seqlens=None, max_seqlen=0, + ): + block = self.blocks[block_idx] + mix = block.resid_mix.to(dtype=lane0.dtype) + attn_read = mix[0][None, None, :] * lane0 + mix[1][None, None, :] * x0 + attn_out = block.attn( + block.attn_norm(attn_read) * block.ln_scale_factor, + q_w, k_w, v_w, out_w, + cu_seqlens=cu_seqlens, max_seqlen=max_seqlen, + ) + attn_out = block.attn_scale.to(dtype=attn_out.dtype)[None, None, :] * attn_out + mlp_read = lane1 + mlp_out = block.mlp_scale.to(dtype=lane1.dtype)[None, None, :] * block.mlp( + block.mlp_norm(mlp_read) * block.ln_scale_factor, up_w, down_w + ) + attn_resid = self.parallel_resid_lambdas[block_idx, 0].to(dtype=lane0.dtype) + attn_post = self.parallel_post_lambdas[block_idx, 0].to(dtype=lane0.dtype) + mlp_resid = self.parallel_resid_lambdas[block_idx, 1].to(dtype=lane0.dtype) + mlp_post = self.parallel_post_lambdas[block_idx, 1].to(dtype=lane0.dtype) + lane0 = attn_resid * lane0 + attn_post[0] * attn_out + mlp_post[0] * mlp_out + lane1 = mlp_resid * lane1 + attn_post[1] * attn_out + mlp_post[1] * mlp_out + return lane0, lane1 + + def _final_parallel_hidden(self, lane0, lane1): + if self.parallel_final_lane == "mlp": + return lane1 + if self.parallel_final_lane == "attn": + return lane0 + return 0.5 * (lane0 + lane1) + + def _forward_hidden(self, input_ids, cu_seqlens=None, max_seqlen=0): + """Run the encoder/decoder stack to the final RMSNorm; returns pre-projection hidden. + Shared by eval (softcap+projection via forward_logits) and train (fused CE path).""" + x = self.tok_emb(input_ids) + # SmearGate (PR #1667). lam=0 + W=0 -> identity at init. + # Cross-doc leak fix: zero the prev-token smear at any position whose current token + # is BOS, so the BOS embedding starting doc N+1 in a packed stream is not + # contaminated by doc N's last token (audited issue on PR#1797 base). 
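+        # Masked update, for every position t >= 1 (BOS_ID is 1 here):
+        #   x_t <- x_t + lam * sigmoid(W @ x_t[:window]) * x_{t-1} * [x_t != BOS]
+        # so the BOS embedding that opens doc N+1 never mixes in doc N's tail.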
+ if self.smear_gate_enabled: + sl = self.smear_lambda.to(dtype=x.dtype) + gate_in = x[:, 1:, : self.smear_window].contiguous() + g = sl * torch.sigmoid(self.smear_gate(gate_in)) + not_bos = (input_ids[:, 1:] != BOS_ID).to(x.dtype).unsqueeze(-1) + x = torch.cat([x[:, :1], x[:, 1:] + g * x[:, :-1] * not_bos], dim=1) + x = F.rms_norm(x, (x.size(-1),)) + x0 = x + skips = [] + enc_iter = ( + self.encoder_indices + if self.looping_active + else range(self.num_encoder_layers) + ) + dec_iter = ( + self.decoder_indices + if self.looping_active + else range( + self.num_encoder_layers, + self.num_encoder_layers + self.num_decoder_layers, + ) + ) + for i in enc_iter: + q_w, k_w, v_w, out_w, up_w, down_w = self._bank_weights(i) + x = self.blocks[i](x, x0, q_w, k_w, v_w, out_w, up_w, down_w, cu_seqlens=cu_seqlens, max_seqlen=max_seqlen) + skips.append(x) + psl = self.parallel_start_layer + lane0 = None + lane1 = None + for skip_idx, i in enumerate(dec_iter): + q_w, k_w, v_w, out_w, up_w, down_w = self._bank_weights(i) + if i >= psl and psl > 0: + if lane0 is None: + lane0 = x + lane1 = x + if skip_idx < self.num_skip_weights and skips: + skip = skips.pop() + w = self.skip_weights[skip_idx].to(dtype=lane0.dtype)[None, None, :] + if self.skip_gates is not None: + g = torch.sigmoid(self.skip_gates[skip_idx].to(dtype=lane0.dtype))[None, None, :] + lane0 = torch.lerp(w * skip, lane0, g) + else: + lane0 = lane0 + w * skip + lane0, lane1 = self._parallel_block( + i, lane0, lane1, x0, q_w, k_w, v_w, out_w, up_w, down_w, + cu_seqlens=cu_seqlens, max_seqlen=max_seqlen, + ) + else: + if skip_idx < self.num_skip_weights and skips: + scaled_skip = ( + self.skip_weights[skip_idx].to(dtype=x.dtype)[None, None, :] + * skips.pop() + ) + if self.skip_gates is not None: + g = torch.sigmoid(self.skip_gates[skip_idx].to(dtype=x.dtype))[None, None, :] + x = torch.lerp(scaled_skip, x, g) + else: + x = x + scaled_skip + x = self.blocks[i](x, x0, q_w, k_w, v_w, out_w, up_w, down_w, cu_seqlens=cu_seqlens, max_seqlen=max_seqlen) + if lane0 is not None: + x = self._final_parallel_hidden(lane0, lane1) + x = self.final_norm(x) + return x + + def _project_logits(self, hidden): + if self.tie_embeddings: + return F.linear(hidden, self.tok_emb.weight) + return self.lm_head(hidden) + + def forward_logits(self, input_ids, cu_seqlens=None, max_seqlen=0): + hidden = self._forward_hidden(input_ids, cu_seqlens=cu_seqlens, max_seqlen=max_seqlen) + logits_proj = self._project_logits(hidden) + return self.logit_softcap * torch.tanh(logits_proj / self.logit_softcap) + + def forward(self, input_ids, target_ids, cu_seqlens=None, max_seqlen=0): + hidden = self._forward_hidden(input_ids, cu_seqlens=cu_seqlens, max_seqlen=max_seqlen) + logits_proj = self._project_logits(hidden) + flat_targets = target_ids.reshape(-1) + # Fused softcapped-CE kernel (training path only). Applies softcap inside the + # Triton kernel; takes pre-softcap logits_proj. Non-fused path matches stock + # PR-1736 numerics exactly (softcap in fp32, then F.cross_entropy on fp32). 
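+        # Both branches compute the same quantity:
+        #   z_cap = cap * tanh(z / cap);  loss = CE(z_cap, targets), mean-reduced.
+        # The fused kernel folds the softcap into the CE; the fallback
+        # materializes z_cap in fp32 first.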
+ if self.fused_ce_enabled: + return softcapped_cross_entropy( + logits_proj.reshape(-1, logits_proj.size(-1)), + flat_targets, + self.logit_softcap, + reduction="mean", + ) + logits = self.logit_softcap * torch.tanh(logits_proj / self.logit_softcap) + return F.cross_entropy( + logits.reshape(-1, logits.size(-1)).float(), + flat_targets, + reduction="mean", + ) + + def forward_ttt(self, input_ids, target_ids, lora): + x = self.tok_emb(input_ids) + # SmearGate on the TTT path — same inline compute as forward_logits. + # Cross-doc leak fix: see _forward_hidden comment. + if self.smear_gate_enabled: + sl = self.smear_lambda.to(dtype=x.dtype) + gate_in = x[:, 1:, : self.smear_window].contiguous() + g = sl * torch.sigmoid(self.smear_gate(gate_in)) + not_bos = (input_ids[:, 1:] != BOS_ID).to(x.dtype).unsqueeze(-1) + x = torch.cat([x[:, :1], x[:, 1:] + g * x[:, :-1] * not_bos], dim=1) + x = F.rms_norm(x, (x.size(-1),)) + x0 = x + skips = [] + enc_iter = ( + self.encoder_indices + if self.looping_active + else list(range(self.num_encoder_layers)) + ) + dec_iter = ( + self.decoder_indices + if self.looping_active + else list( + range( + self.num_encoder_layers, + self.num_encoder_layers + self.num_decoder_layers, + ) + ) + ) + slot = 0 + for i in enc_iter: + q_w, k_w, v_w, out_w, up_w, down_w = self._bank_weights(i) + x = self._block_with_lora(self.blocks[i], x, x0, lora, slot, q_w, k_w, v_w, out_w, up_w, down_w) + slot += 1 + skips.append(x) + psl = self.parallel_start_layer + lane0 = None + lane1 = None + for skip_idx, i in enumerate(dec_iter): + q_w, k_w, v_w, out_w, up_w, down_w = self._bank_weights(i) + if i >= psl and psl > 0: + if lane0 is None: + lane0 = x + lane1 = x + if skip_idx < self.num_skip_weights and skips: + skip = skips.pop() + w = self.skip_weights[skip_idx].to(dtype=lane0.dtype)[None, None, :] + if self.skip_gates is not None: + g = torch.sigmoid(self.skip_gates[skip_idx].to(dtype=lane0.dtype))[None, None, :] + lane0 = torch.lerp(w * skip, lane0, g) + else: + lane0 = lane0 + w * skip + lane0, lane1 = self._parallel_block_with_lora( + i, lane0, lane1, x0, lora, slot, + q_w, k_w, v_w, out_w, up_w, down_w, + ) + else: + if skip_idx < self.num_skip_weights and skips: + scaled_skip = ( + self.skip_weights[skip_idx].to(dtype=x.dtype)[None, None, :] + * skips.pop() + ) + if self.skip_gates is not None: + g = torch.sigmoid(self.skip_gates[skip_idx].to(dtype=x.dtype))[None, None, :] + x = torch.lerp(scaled_skip, x, g) + else: + x = x + scaled_skip + x = self._block_with_lora(self.blocks[i], x, x0, lora, slot, q_w, k_w, v_w, out_w, up_w, down_w) + slot += 1 + if lane0 is not None: + x = self._final_parallel_hidden(lane0, lane1) + x = self.final_norm(x) + if self.tie_embeddings: + logits = F.linear(x, self.tok_emb.weight) + else: + logits = self.lm_head(x) + logits = logits + lora.lm_head_lora(x) + logits = self.logit_softcap * torch.tanh(logits / self.logit_softcap) + bsz, sl, V = logits.shape + return F.cross_entropy( + logits.float().reshape(-1, V), target_ids.reshape(-1), reduction="none" + ).reshape(bsz, sl) + + def _block_with_lora(self, block, x, x0, lora, slot, q_w, k_w, v_w, out_w, up_w, down_w): + mix = block.resid_mix.to(dtype=x.dtype) + x_in = mix[0][None, None, :] * x + mix[1][None, None, :] * x0 + n = block.attn_norm(x_in) * block.ln_scale_factor + attn = block.attn + bsz, seqlen, dim = n.shape + # Keep raw Q for AttnOutGate src='q' (matches forward path semantics). 
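+        # Equivalent algebra per batch element b (scale = alpha / rank from
+        # BatchedLinearLoRA): q_raw = n @ W_q.T + scale * (n @ A_b.T) @ B_b.T,
+        # i.e. an additive low-rank delta on the frozen bank projection.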
+ q_raw = F.linear(n, q_w.to(n.dtype)) + lora.q_loras[slot](n) + q = q_raw.reshape(bsz, seqlen, attn.num_heads, attn.head_dim) + k = F.linear(n, k_w.to(n.dtype)) + if lora.k_loras is not None: + k = k + lora.k_loras[slot](n) + k = k.reshape(bsz, seqlen, attn.num_kv_heads, attn.head_dim) + v = (F.linear(n, v_w.to(n.dtype)) + lora.v_loras[slot](n)).reshape( + bsz, seqlen, attn.num_kv_heads, attn.head_dim + ) + q = F.rms_norm(q, (q.size(-1),)) + k = F.rms_norm(k, (k.size(-1),)) + cos, sin = attn.rotary(seqlen, n.device, q.dtype) + q = apply_rotary_emb(q, cos, sin, attn.rope_dims) + k = apply_rotary_emb(k, cos, sin, attn.rope_dims) + q = q * attn.q_gain.to(dtype=q.dtype)[None, None, :, None] + y = flash_attn_3_func(q, k, v, causal=True) + if attn.use_xsa: + y = attn._xsa_efficient(y, v) + # AttnOutGate (TTT path) — inline + .contiguous() barrier, same as the eval path. + if attn.attn_out_gate: + gate_src = q_raw if attn.attn_out_gate_src == "q" else n + gate_in = gate_src[..., : attn.gate_window].contiguous() + g = 2.0 * torch.sigmoid(attn.attn_gate_proj(gate_in)) + y = y * g[..., None] + # Gated Attention (TTT path). Gate input is n (post-norm block input), same + # as eval path. .to(n.dtype) on fp32 param before bf16 broadcast. + if attn.gated_attn: + n_c = n.contiguous() + g = torch.sigmoid(F.linear(n_c, attn.attn_gate_w.to(n.dtype))) + y = y * g[..., None] + # Sparse attention head-output gate (TTT path) — must match the eval path in + # forward() exactly, else training (which applied the gate) and TTT eval (which + # skipped it) produce mismatched representations and catastrophic BPB regression. + if attn.sparse_attn_gate: + gate_in = n[..., : attn.gate_window].contiguous() + g = torch.sigmoid( + attn.sparse_attn_gate_scale + * F.linear(gate_in, attn.attn_gate_w.to(n.dtype)) + ) + y = y * g[..., None] + y = y.reshape(bsz, seqlen, dim) + attn_out = F.linear(y, out_w.to(n.dtype)) + if lora.o_loras is not None: + attn_out = attn_out + lora.o_loras[slot](n) + x_out = x_in + block.attn_scale.to(dtype=x_in.dtype)[None, None, :] * attn_out + mlp_n = block.mlp_norm(x_out) * block.ln_scale_factor + mlp_out = block.mlp(mlp_n, up_w, down_w) + if lora.mlp_loras is not None: + mlp_out = mlp_out + lora.mlp_loras[slot](mlp_n) + x_out = x_out + block.mlp_scale.to(dtype=x_out.dtype)[None, None, :] * mlp_out + return x_out + + def _parallel_block_with_lora( + self, block_idx, lane0, lane1, x0, lora, slot, + q_w, k_w, v_w, out_w, up_w, down_w, + ): + block = self.blocks[block_idx] + mix = block.resid_mix.to(dtype=lane0.dtype) + attn_read = mix[0][None, None, :] * lane0 + mix[1][None, None, :] * x0 + n = block.attn_norm(attn_read) * block.ln_scale_factor + attn = block.attn + bsz, seqlen, dim = n.shape + q_raw = F.linear(n, q_w.to(n.dtype)) + lora.q_loras[slot](n) + q = q_raw.reshape(bsz, seqlen, attn.num_heads, attn.head_dim) + k = F.linear(n, k_w.to(n.dtype)) + if lora.k_loras is not None: + k = k + lora.k_loras[slot](n) + k = k.reshape(bsz, seqlen, attn.num_kv_heads, attn.head_dim) + v = (F.linear(n, v_w.to(n.dtype)) + lora.v_loras[slot](n)).reshape( + bsz, seqlen, attn.num_kv_heads, attn.head_dim + ) + q = F.rms_norm(q, (q.size(-1),)) + k = F.rms_norm(k, (k.size(-1),)) + cos, sin = attn.rotary(seqlen, n.device, q.dtype) + q = apply_rotary_emb(q, cos, sin, attn.rope_dims) + k = apply_rotary_emb(k, cos, sin, attn.rope_dims) + q = q * attn.q_gain.to(dtype=q.dtype)[None, None, :, None] + y = flash_attn_3_func(q, k, v, causal=True) + if attn.use_xsa: + y = attn._xsa_efficient(y, v) + # AttnOutGate (TTT 
parallel path) — inline + .contiguous() barrier. + if attn.attn_out_gate: + gate_src = q_raw if attn.attn_out_gate_src == "q" else n + gate_in = gate_src[..., : attn.gate_window].contiguous() + g = 2.0 * torch.sigmoid(attn.attn_gate_proj(gate_in)) + y = y * g[..., None] + # Gated Attention (TTT parallel path). Gate input is n (post-norm block input). + if attn.gated_attn: + n_c = n.contiguous() + g = torch.sigmoid(F.linear(n_c, attn.attn_gate_w.to(n.dtype))) + y = y * g[..., None] + # Sparse attention head-output gate (TTT parallel path) — must match the + # eval path in forward() to keep train/eval semantics in sync. + if attn.sparse_attn_gate: + gate_in = n[..., : attn.gate_window].contiguous() + g = torch.sigmoid( + attn.sparse_attn_gate_scale + * F.linear(gate_in, attn.attn_gate_w.to(n.dtype)) + ) + y = y * g[..., None] + y = y.reshape(bsz, seqlen, dim) + attn_out = F.linear(y, out_w.to(n.dtype)) + if lora.o_loras is not None: + attn_out = attn_out + lora.o_loras[slot](n) + attn_out = block.attn_scale.to(dtype=attn_out.dtype)[None, None, :] * attn_out + mlp_read = lane1 + mlp_n = block.mlp_norm(mlp_read) * block.ln_scale_factor + mlp_out = block.mlp(mlp_n, up_w, down_w) + if lora.mlp_loras is not None: + mlp_out = mlp_out + lora.mlp_loras[slot](mlp_n) + mlp_out = block.mlp_scale.to(dtype=lane1.dtype)[None, None, :] * mlp_out + attn_resid = self.parallel_resid_lambdas[block_idx, 0].to(dtype=lane0.dtype) + attn_post = self.parallel_post_lambdas[block_idx, 0].to(dtype=lane0.dtype) + mlp_resid = self.parallel_resid_lambdas[block_idx, 1].to(dtype=lane0.dtype) + mlp_post = self.parallel_post_lambdas[block_idx, 1].to(dtype=lane0.dtype) + lane0 = attn_resid * lane0 + attn_post[0] * attn_out + mlp_post[0] * mlp_out + lane1 = mlp_resid * lane1 + attn_post[1] * attn_out + mlp_post[1] * mlp_out + return lane0, lane1 + + +class BatchedLinearLoRA(nn.Module): + # PR-1767: rank-scaled output (alpha/rank), like standard LoRA. Decouples + # effective magnitude from rank so changing rank does not change LR scale. + _ALPHA = float(os.environ.get("TTT_LORA_ALPHA", "144")) + # PR-1767: optionally keep A warm across per-doc resets (only B is zeroed). + # Accumulates useful feature directions across documents within a TTT phase. 
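+    # Since forward() is scale * (x @ A.T) @ B.T and reset() always zeroes B,
+    # the adapter output returns exactly to 0 at each reset even when A is
+    # kept warm; only the learned input basis persists.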
+    _WARM_START_A = bool(int(os.environ.get("TTT_WARM_START_A", "1")))
+
+    def __init__(self, bsz, in_features, out_features, rank):
+        super().__init__()
+        self._bound = 1.0 / math.sqrt(in_features)
+        self._scale = self._ALPHA / rank
+        self.A = nn.Parameter(
+            torch.empty(bsz, rank, in_features).uniform_(-self._bound, self._bound)
+        )
+        self.B = nn.Parameter(torch.zeros(bsz, out_features, rank))
+
+    def reset(self):
+        with torch.no_grad():
+            if not self._WARM_START_A:
+                self.A.uniform_(-self._bound, self._bound)
+            self.B.zero_()
+
+    def forward(self, x):
+        return ((x @ self.A.transpose(1, 2)) @ self.B.transpose(1, 2)) * self._scale
+
+
+class BatchedTTTLoRA(nn.Module):
+    def __init__(self, bsz, model, rank, k_lora=True, mlp_lora=True, o_lora=True):
+        super().__init__()
+        self.bsz = bsz
+        dim = model.qo_bank.shape[-1]
+        vocab = model.tok_emb.num_embeddings
+        if getattr(model, "looping_active", False):
+            num_slots = len(model.encoder_indices) + len(model.decoder_indices)
+        else:
+            num_slots = len(model.blocks)
+        kv_dim = model.blocks[0].attn.num_kv_heads * (
+            dim // model.blocks[0].attn.num_heads
+        )
+        embed_dim = model.tok_emb.embedding_dim
+        self.lm_head_lora = BatchedLinearLoRA(bsz, embed_dim, vocab, rank)
+        self.q_loras = nn.ModuleList(
+            [BatchedLinearLoRA(bsz, dim, dim, rank) for _ in range(num_slots)]
+        )
+        self.v_loras = nn.ModuleList(
+            [BatchedLinearLoRA(bsz, dim, kv_dim, rank) for _ in range(num_slots)]
+        )
+        self.k_loras = (
+            nn.ModuleList(
+                [BatchedLinearLoRA(bsz, dim, kv_dim, rank) for _ in range(num_slots)]
+            )
+            if k_lora
+            else None
+        )
+        self.mlp_loras = (
+            nn.ModuleList(
+                [BatchedLinearLoRA(bsz, dim, dim, rank) for _ in range(num_slots)]
+            )
+            if mlp_lora
+            else None
+        )
+        self.o_loras = (
+            nn.ModuleList(
+                [BatchedLinearLoRA(bsz, dim, dim, rank) for _ in range(num_slots)]
+            )
+            if o_lora
+            else None
+        )
+
+    def reset(self):
+        with torch.no_grad():
+            self.lm_head_lora.reset()
+            for loras in [self.q_loras, self.v_loras, self.k_loras,
+                          self.mlp_loras, self.o_loras]:
+                if loras is not None:
+                    for lora in loras:
+                        lora.reset()
+
+
+# Polar Express per-iteration minimax Newton-Schulz coefficients (PR #1344).
+# Replaces the fixed (3.4445, -4.775, 2.0315) coefficients of stock Muon.
+# Applied at backend_steps=5. Requesting more than 5 steps is capped by the
+# slice guard below: only these five tuples run, one per iteration (the
+# final tuple is not repeated).
+_PE_COEFFS = ( + (8.156554524902461, -22.48329292557795, 15.878769915207462), + (4.042929935166739, -2.808917465908714, 0.5000178451051316), + (3.8916678022926607, -2.772484153217685, 0.5060648178503393), + (3.285753657755655, -2.3681294933425376, 0.46449024233003106), + (2.3465413258596377, -1.7097828382687081, 0.42323551169305323), +) + + +@torch.compile +def zeropower_via_newtonschulz5(G, steps=10, eps=1e-07): + was_2d = G.ndim == 2 + if was_2d: + G = G.unsqueeze(0) + X = G.bfloat16() + transposed = X.size(-2) > X.size(-1) + if transposed: + X = X.mT + X = X / (X.norm(dim=(-2, -1), keepdim=True) + eps) + coeffs = _PE_COEFFS[:steps] if steps <= len(_PE_COEFFS) else _PE_COEFFS + for a, b, c in coeffs: + A = X @ X.mT + B = b * A + c * (A @ A) + X = a * X + B @ X + if transposed: + X = X.mT + if was_2d: + X = X.squeeze(0) + return X + + +class Muon(torch.optim.Optimizer): + def __init__( + self, + params, + lr, + momentum, + backend_steps, + nesterov=True, + weight_decay=0.0, + row_normalize=False, + ): + super().__init__( + params, + dict( + lr=lr, + momentum=momentum, + backend_steps=backend_steps, + nesterov=nesterov, + weight_decay=weight_decay, + row_normalize=row_normalize, + ), + ) + self._built = False + + def _build(self): + self._distributed = dist.is_available() and dist.is_initialized() + self._world_size = dist.get_world_size() if self._distributed else 1 + self._rank = dist.get_rank() if self._distributed else 0 + ws = self._world_size + self._bank_meta = [] + for group in self.param_groups: + for p in group["params"]: + B = p.shape[0] + padded_B = ((B + ws - 1) // ws) * ws + shard_B = padded_B // ws + tail = p.shape[1:] + dev = p.device + self._bank_meta.append({ + "p": p, + "B": B, + "padded_grad": torch.zeros(padded_B, *tail, device=dev, dtype=torch.bfloat16), + "shard": torch.zeros(shard_B, *tail, device=dev, dtype=torch.bfloat16), + "shard_mom": torch.zeros(shard_B, *tail, device=dev, dtype=torch.bfloat16), + "full_update": torch.zeros(padded_B, *tail, device=dev, dtype=torch.bfloat16), + "scale": max(1, p.shape[-2] / p.shape[-1]) ** 0.5, + }) + self._bank_meta.sort(key=lambda m: -m["p"].numel()) + self._built = True + + def launch_reduce_scatters(self): + if not self._built: + self._build() + if not self._distributed: + return + self._rs_futures = [] + for m in self._bank_meta: + p = m["p"] + if p.grad is None: + self._rs_futures.append(None) + continue + pg = m["padded_grad"] + pg[: m["B"]].copy_(p.grad) + fut = dist.reduce_scatter_tensor( + m["shard"], pg, op=dist.ReduceOp.AVG, async_op=True + ) + self._rs_futures.append(fut) + + @torch.no_grad() + def step(self, closure=None): + loss = None + if closure is not None: + with torch.enable_grad(): + loss = closure() + if not self._built: + self._build() + for group in self.param_groups: + lr = group["lr"] + momentum = group["momentum"] + backend_steps = group["backend_steps"] + nesterov = group["nesterov"] + wd = group.get("weight_decay", 0.0) + row_normalize = group.get("row_normalize", False) + prev_ag_handle = None + prev_m = None + sharded = self._distributed and hasattr(self, "_rs_futures") + for idx, m in enumerate(self._bank_meta): + p = m["p"] + if p.grad is None: + continue + if prev_ag_handle is not None: + prev_ag_handle.wait() + pp = prev_m["p"] + upd = prev_m["full_update"][: prev_m["B"]] + if wd > 0.0: + pp.data.mul_(1.0 - lr * wd) + pp.add_(upd, alpha=-lr * prev_m["scale"]) + if sharded and self._rs_futures[idx] is not None: + self._rs_futures[idx].wait() + g = m["shard"] + buf = m["shard_mom"] + else: + g 
= p.grad.bfloat16() + state = self.state[p] + if "momentum_buffer" not in state: + state["momentum_buffer"] = torch.zeros_like(g) + buf = state["momentum_buffer"] + buf.mul_(momentum).add_(g) + if nesterov: + update = g.add(buf, alpha=momentum) + else: + update = buf + if row_normalize: + rn = update.float().norm(dim=-1, keepdim=True).clamp_min(1e-07) + update = update / rn.to(update.dtype) + update = zeropower_via_newtonschulz5(update, steps=backend_steps) + if sharded: + prev_ag_handle = dist.all_gather_into_tensor( + m["full_update"], update, async_op=True + ) + prev_m = m + else: + if wd > 0.0: + p.data.mul_(1.0 - lr * wd) + p.add_(update, alpha=-lr * m["scale"]) + if prev_ag_handle is not None: + prev_ag_handle.wait() + pp = prev_m["p"] + upd = prev_m["full_update"][: prev_m["B"]] + if wd > 0.0: + pp.data.mul_(1.0 - lr * wd) + pp.add_(upd, alpha=-lr * prev_m["scale"]) + if hasattr(self, "_rs_futures"): + del self._rs_futures + return loss + + +CONTROL_TENSOR_NAME_PATTERNS = tuple( + pattern + for pattern in os.environ.get( + "CONTROL_TENSOR_NAME_PATTERNS", + "attn_scale,attn_scales,mlp_scale,mlp_scales,resid_mix,resid_mixes,q_gain,skip_weight,skip_weights,skip_gates,parallel_post_lambdas,parallel_resid_lambdas,attn_gate_proj,attn_gate_w,smear_gate,smear_lambda", + ).split(",") + if pattern +) + + +PACKED_REPLICATED_GRAD_MAX_NUMEL = 1 << 15 + + +class Optimizers: + def __init__(self, h, base_model): + matrix_params = [ + base_model.qo_bank, + base_model.kv_bank, + base_model.mlp_up_bank, + base_model.mlp_down_bank, + ] + block_named_params = list(base_model.blocks.named_parameters()) + scalar_params = [ + p + for (name, p) in block_named_params + if p.ndim < 2 + or any(pattern in name for pattern in CONTROL_TENSOR_NAME_PATTERNS) + ] + if base_model.skip_weights.numel() > 0: + scalar_params.append(base_model.skip_weights) + if base_model.skip_gates is not None and base_model.skip_gates.numel() > 0: + scalar_params.append(base_model.skip_gates) + if base_model.parallel_post_lambdas is not None: + scalar_params.append(base_model.parallel_post_lambdas) + if base_model.parallel_resid_lambdas is not None: + scalar_params.append(base_model.parallel_resid_lambdas) + # SmearGate params live on GPT root (not in .blocks), so add them by hand. + # Both are tiny (gate_window scalars + 1 lambda). Optimized via scalar Adam. 
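+        # Their names ("smear_gate", "smear_lambda") also appear in
+        # CONTROL_TENSOR_NAME_PATTERNS above, so restore_fp32_params keeps
+        # them in fp32 alongside the other control tensors.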
+ if getattr(base_model, "smear_gate_enabled", False): + scalar_params.append(base_model.smear_gate.weight) + scalar_params.append(base_model.smear_lambda) + token_lr = h.tied_embed_lr if h.tie_embeddings else h.embed_lr + tok_params = [ + {"params": [base_model.tok_emb.weight], "lr": token_lr, "base_lr": token_lr} + ] + self.optimizer_tok = torch.optim.AdamW( + tok_params, + betas=(h.beta1, h.beta2), + eps=h.adam_eps, + weight_decay=h.embed_wd, + fused=True, + ) + self.optimizer_muon = Muon( + matrix_params, + lr=h.matrix_lr, + momentum=h.muon_momentum, + backend_steps=h.muon_backend_steps, + weight_decay=h.muon_wd, + row_normalize=h.muon_row_normalize, + ) + for group in self.optimizer_muon.param_groups: + group["base_lr"] = h.matrix_lr + self.optimizer_scalar = torch.optim.AdamW( + [{"params": scalar_params, "lr": h.scalar_lr, "base_lr": h.scalar_lr}], + betas=(h.beta1, h.beta2), + eps=h.adam_eps, + weight_decay=h.adam_wd, + fused=True, + ) + self.optimizers = [ + self.optimizer_tok, + self.optimizer_muon, + self.optimizer_scalar, + ] + self.replicated_params = list(tok_params[0]["params"]) + self.replicated_params.extend(scalar_params) + self.replicated_large_params = [] + self.replicated_packed_params = [] + for p in self.replicated_params: + if p.numel() <= PACKED_REPLICATED_GRAD_MAX_NUMEL: + self.replicated_packed_params.append(p) + else: + self.replicated_large_params.append(p) + self._aux_stream = torch.cuda.Stream() + + def __iter__(self): + return iter(self.optimizers) + + def zero_grad_all(self): + for opt in self.optimizers: + opt.zero_grad(set_to_none=True) + + def _all_reduce_packed_grads(self): + grads_by_key = collections.defaultdict(list) + for p in self.replicated_packed_params: + if p.grad is not None: + grads_by_key[(p.grad.device, p.grad.dtype)].append(p.grad) + for grads in grads_by_key.values(): + flat = torch.empty( + sum(g.numel() for g in grads), + device=grads[0].device, + dtype=grads[0].dtype, + ) + offset = 0 + for g in grads: + n = g.numel() + flat[offset : offset + n].copy_(g.contiguous().view(-1)) + offset += n + dist.all_reduce(flat, op=dist.ReduceOp.AVG) + offset = 0 + for g in grads: + n = g.numel() + g.copy_(flat[offset : offset + n].view_as(g)) + offset += n + + def step(self, distributed=False): + self.optimizer_muon.launch_reduce_scatters() + if distributed: + reduce_handles = [ + dist.all_reduce(p.grad, op=dist.ReduceOp.AVG, async_op=True) + for p in self.replicated_large_params + if p.grad is not None + ] + self._all_reduce_packed_grads() + for handle in reduce_handles: + handle.wait() + self._aux_stream.wait_stream(torch.cuda.current_stream()) + with torch.cuda.stream(self._aux_stream): + self.optimizer_tok.step() + self.optimizer_scalar.step() + self.optimizer_muon.step() + torch.cuda.current_stream().wait_stream(self._aux_stream) + self.zero_grad_all() + + +def restore_fp32_params(model): + for module in model.modules(): + if isinstance(module, CastedLinear): + module.float() + for name, param in model.named_parameters(): + if ( + param.ndim < 2 + or any(pattern in name for pattern in CONTROL_TENSOR_NAME_PATTERNS) + ) and param.dtype != torch.float32: + param.data = param.data.float() + if hasattr(model, "qo_bank") and model.qo_bank is not None: + model.qo_bank.data = model.qo_bank.data.float() + model.kv_bank.data = model.kv_bank.data.float() + model.mlp_up_bank.data = model.mlp_up_bank.data.float() + model.mlp_down_bank.data = model.mlp_down_bank.data.float() + + +def collect_hessians(model, train_loader, h, device, n_calibration_batches=64): 
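+    """Accumulate GPTQ Hessians and activation stats over calibration batches.
+
+    Sketch of what each forward hook contributes, for activations X of shape
+    (tokens, in_features) feeding a given weight:
+
+        H[name]      += X.T @ X            # (in, in); divided by batch count at the end
+        sumsq[name]  += X.square().sum(0)  # per-channel energy
+        rms[name]     = sqrt(sumsq / count)  # returned as act_stats for AWQ grouping
+    """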
+ hessians = {} + act_sumsq = {} + act_counts = {} + hooks = [] + for i, block in enumerate(model.blocks): + block.attn._calib = True + block.mlp._calib = True + block.mlp.use_fused = False + + def make_attn_hook(layer_idx): + def hook_fn(module, inp, out): + x = inp[0].detach().float() + if x.ndim == 3: + x = x.reshape(-1, x.shape[-1]) + x_sq = x.square().sum(dim=0) + x_count = x.shape[0] + for suffix in ["c_q", "c_k", "c_v"]: + name = f"blocks.{layer_idx}.attn.{suffix}.weight" + if name not in hessians: + hessians[name] = torch.zeros( + x.shape[1], x.shape[1], dtype=torch.float32, device=device + ) + hessians[name].addmm_(x.T, x) + if name not in act_sumsq: + act_sumsq[name] = torch.zeros( + x.shape[1], dtype=torch.float32, device=device + ) + act_counts[name] = 0 + act_sumsq[name] += x_sq + act_counts[name] += x_count + y = module._last_proj_input + if y is not None: + y = y.float() + if y.ndim == 3: + y = y.reshape(-1, y.shape[-1]) + name = f"blocks.{layer_idx}.attn.proj.weight" + if name not in hessians: + hessians[name] = torch.zeros( + y.shape[1], y.shape[1], dtype=torch.float32, device=device + ) + hessians[name].addmm_(y.T, y) + if name not in act_sumsq: + act_sumsq[name] = torch.zeros( + y.shape[1], dtype=torch.float32, device=device + ) + act_counts[name] = 0 + act_sumsq[name] += y.square().sum(dim=0) + act_counts[name] += y.shape[0] + return hook_fn + + def make_mlp_hook(layer_idx): + def hook_fn(module, inp, out): + x = inp[0].detach().float() + if x.ndim == 3: + x = x.reshape(-1, x.shape[-1]) + name = f"blocks.{layer_idx}.mlp.fc.weight" + if name not in hessians: + hessians[name] = torch.zeros( + x.shape[1], x.shape[1], dtype=torch.float32, device=device + ) + hessians[name].addmm_(x.T, x) + if name not in act_sumsq: + act_sumsq[name] = torch.zeros( + x.shape[1], dtype=torch.float32, device=device + ) + act_counts[name] = 0 + act_sumsq[name] += x.square().sum(dim=0) + act_counts[name] += x.shape[0] + h_act = module._last_down_input + if h_act is not None: + h_act = h_act.float() + if h_act.ndim == 3: + h_act = h_act.reshape(-1, h_act.shape[-1]) + name = f"blocks.{layer_idx}.mlp.proj.weight" + if name not in hessians: + hessians[name] = torch.zeros( + h_act.shape[1], h_act.shape[1], dtype=torch.float32, device=device + ) + hessians[name].addmm_(h_act.T, h_act) + if name not in act_sumsq: + act_sumsq[name] = torch.zeros( + h_act.shape[1], dtype=torch.float32, device=device + ) + act_counts[name] = 0 + act_sumsq[name] += h_act.square().sum(dim=0) + act_counts[name] += h_act.shape[0] + return hook_fn + + for i, block in enumerate(model.blocks): + hooks.append(block.attn.register_forward_hook(make_attn_hook(i))) + hooks.append(block.mlp.register_forward_hook(make_mlp_hook(i))) + + # Hessian hooks for embedding factorization projection layers + def make_linear_input_hook(weight_name): + def hook_fn(module, inp, out): + x = inp[0].detach().float() + if x.ndim == 3: + x = x.reshape(-1, x.shape[-1]) + if weight_name not in hessians: + hessians[weight_name] = torch.zeros( + x.shape[1], x.shape[1], dtype=torch.float32, device=device + ) + hessians[weight_name].addmm_(x.T, x) + return hook_fn + + if model.tie_embeddings: + hook_module = model.final_norm + + def make_output_hook(name): + def hook_fn(module, inp, out): + x = out.detach().float() + if x.ndim == 3: + x = x.reshape(-1, x.shape[-1]) + if name not in hessians: + hessians[name] = torch.zeros( + x.shape[1], x.shape[1], dtype=torch.float32, device=device + ) + hessians[name].addmm_(x.T, x) + if name not in act_sumsq: + 
act_sumsq[name] = torch.zeros( + x.shape[1], dtype=torch.float32, device=device + ) + act_counts[name] = 0 + act_sumsq[name] += x.square().sum(dim=0) + act_counts[name] += x.shape[0] + return hook_fn + + hooks.append( + hook_module.register_forward_hook(make_output_hook("tok_emb.weight")) + ) + model.eval() + with torch.no_grad(): + for _ in range(n_calibration_batches): + x, _ = train_loader.next_batch(h.train_batch_tokens, h.grad_accum_steps) + model.forward_logits(x) + for hook in hooks: + hook.remove() + for i, block in enumerate(model.blocks): + block.attn._calib = False + block.mlp._calib = False + block.mlp.use_fused = True + for name in hessians: + hessians[name] = hessians[name].cpu() / n_calibration_batches + act_stats = {} + for name, sumsq in act_sumsq.items(): + count = max(act_counts.get(name, 0), 1) + act_stats[name] = (sumsq / count).sqrt().cpu() + return hessians, act_stats + + +def gptq_quantize_weight( + w, + H, + clip_sigmas=3.0, + clip_range=63, + block_size=128, + protect_groups=None, + group_size=None, + protect_clip_range=None, +): + W_orig = w.float().clone() + rows, cols = W_orig.shape + H = H.float().clone() + dead = torch.diag(H) == 0 + H[dead, dead] = 1 + damp = 0.01 * H.diag().mean() + H.diagonal().add_(damp) + perm = torch.argsort(H.diag(), descending=True) + invperm = torch.argsort(perm) + W_perm = W_orig[:, perm].clone() + W_perm[:, dead[perm]] = 0 + H = H[perm][:, perm] + Hinv = torch.cholesky_inverse(torch.linalg.cholesky(H)) + Hinv = torch.linalg.cholesky(Hinv, upper=True) + row_std = W_orig.std(dim=1) + s = (clip_sigmas * row_std / clip_range).clamp_min(1e-10).to(torch.float16) + sf = s.float() + protect_meta = None + protect_mask_perm = None + s_hi = None + sf_hi = None + if ( + protect_groups + and group_size is not None + and protect_clip_range is not None + and protect_clip_range > clip_range + ): + protect_mask = torch.zeros(cols, dtype=torch.bool) + starts = [] + for (start, end) in protect_groups: + if start < 0 or end > cols or end <= start: + continue + protect_mask[start:end] = True + starts.append(start) + if starts: + protect_mask_perm = protect_mask[perm] + s_hi = (clip_sigmas * row_std / protect_clip_range).clamp_min(1e-10).to( + torch.float16 + ) + sf_hi = s_hi.float() + protect_meta = { + "starts": torch.tensor(starts, dtype=torch.int16), + "size": int(group_size), + "s_hi": s_hi, + } + Q = torch.zeros(rows, cols, dtype=torch.int8) + W_work = W_perm.clone() + for i1 in range(0, cols, block_size): + i2 = min(i1 + block_size, cols) + W_block = W_work[:, i1:i2].clone() + Hinv_block = Hinv[i1:i2, i1:i2] + Err = torch.zeros(rows, i2 - i1) + for j in range(i2 - i1): + w_col = W_block[:, j] + d = Hinv_block[j, j] + if protect_mask_perm is not None and bool(protect_mask_perm[i1 + j]): + q_col = torch.clamp( + torch.round(w_col / sf_hi), + -protect_clip_range, + protect_clip_range, + ) + w_recon = q_col.float() * sf_hi + else: + q_col = torch.clamp(torch.round(w_col / sf), -clip_range, clip_range) + w_recon = q_col.float() * sf + Q[:, i1 + j] = q_col.to(torch.int8) + err = (w_col - w_recon) / d + Err[:, j] = err + W_block[:, j:] -= err.unsqueeze(1) * Hinv_block[j, j:].unsqueeze(0) + if i2 < cols: + W_work[:, i2:] -= Err @ Hinv[i1:i2, i2:] + return Q[:, invperm], s, protect_meta + + +def _quantize_gate_int8_row(w): + # Symmetric int8-per-row quantization for small gate tensors. w shape + # (R, C) -> (R,) scales in fp16, int8 values in [-127, 127]. Single scale + # per row keeps accuracy high while halving storage vs fp16. 
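+    # Worked example: a row whose max |w| is 0.42 gets s = 0.42 / 127 ~ 3.3e-3,
+    # so the symmetric round-trip error is at most s/2 ~ 1.7e-3 per element.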
+ W = w.float().contiguous() + row_max = W.abs().amax(dim=1).clamp_min(1e-10) + s = (row_max / 127.0).to(torch.float16) + sf = s.float().view(-1, 1) + q = torch.clamp(torch.round(W / sf), -127, 127).to(torch.int8) + return q, s + + +def _lqer_pack(A, B, bits): + rng = 2 ** (bits - 1) - 1 + sA = (A.abs().amax(dim=1).clamp_min(1e-10) / rng).to(torch.float16) + sB = (B.abs().amax(dim=1).clamp_min(1e-10) / rng).to(torch.float16) + qA = torch.clamp(torch.round(A / sA.float().view(-1, 1)), -rng, rng).to(torch.int8) + qB = torch.clamp(torch.round(B / sB.float().view(-1, 1)), -rng, rng).to(torch.int8) + return qA, sA, qB, sB + + +def _lqer_pack_asym(A, B, g=64): + # A: INT2 per-matrix scalar (signed [-2,1], scale = |A|max/1.5). + sA = (A.abs().amax().clamp_min(1e-10) / 1.5).to(torch.float16) + qA = torch.clamp(torch.round(A / sA.float()), -2, 1).to(torch.int8) + # B: INT4 groupwise g over flattened B (signed [-8,7], per-group scale). + Bf = B.reshape(-1, g) + Bmax = Bf.abs().amax(dim=-1, keepdim=True).clamp_min(1e-10) + sB = (Bmax / 7.5).to(torch.float16).reshape(-1) + qB = torch.clamp(torch.round(Bf / sB.float().reshape(-1, 1)), -8, 7).to( + torch.int8 + ).reshape(B.shape) + return qA, sA, qB, sB + + +def _lqer_fit_quantized(E, h): + U, S, Vh = torch.linalg.svd(E, full_matrices=False) + r = min(h.lqer_rank, S.numel()) + if r <= 0: + return None + A = (U[:, :r] * S[:r]).contiguous() + B = Vh[:r, :].contiguous() + asym_on = bool(getattr(h, "lqer_asym_enabled", False)) + asym_g = int(getattr(h, "lqer_asym_group", 64)) + if asym_on and B.numel() % asym_g == 0: + qA, sA, qB, sB = _lqer_pack_asym(A, B, asym_g) + A_hat = qA.float() * float(sA) + g_sz = qB.numel() // sB.numel() + B_hat = (qB.reshape(-1, g_sz).float() * sB.float().view(-1, 1)).reshape( + qB.shape + ) + return { + "kind": "asym", + "qA": qA, + "sA": sA, + "qB": qB, + "sB": sB, + "delta": A_hat @ B_hat, + } + qA, sA, qB, sB = _lqer_pack(A, B, h.lqer_factor_bits) + A_hat = qA.float() * sA.float().view(-1, 1) + B_hat = qB.float() * sB.float().view(-1, 1) + return { + "kind": "sym", + "qA": qA, + "sA": sA, + "qB": qB, + "sB": sB, + "delta": A_hat @ B_hat, + } + + +def _awq_lite_group_candidates(w, act_rms, group_size): + cols = w.shape[1] + n_groups = cols // group_size + if n_groups <= 0: + return [] + weight_score = w.float().abs().mean(dim=0) + saliency = act_rms.float() * weight_score + cands = [] + for gi in range(n_groups): + start = gi * group_size + end = start + group_size + score = float(saliency[start:end].sum()) + cands.append((score, start, end)) + return cands + + +def gptq_mixed_quantize(state_dict, hessians, act_stats, h): + result = {} + meta = {} + quant_gate = bool(getattr(h, "gated_attn_quant_gate", False)) + lqer_on = bool(getattr(h, "lqer_enabled", False)) + awq_on = bool(getattr(h, "awq_lite_enabled", False)) + lqer_cands = {} + awq_selected = collections.defaultdict(list) + if awq_on: + awq_cands = [] + for (name, tensor) in state_dict.items(): + t = tensor.detach().cpu().contiguous() + if t.is_floating_point() and t.numel() > 65536 and name in act_stats: + bits = h.embed_bits if "tok_emb" in name else h.matrix_bits + if bits < h.awq_lite_bits: + for score, start, end in _awq_lite_group_candidates( + t, act_stats[name], h.awq_lite_group_size + ): + awq_cands.append((score, name, start, end)) + awq_cands.sort(key=lambda x: -x[0]) + for (_score, name, start, end) in awq_cands[: h.awq_lite_group_top_k]: + awq_selected[name].append((start, end)) + for (name, tensor) in state_dict.items(): + t = 
tensor.detach().cpu().contiguous() + # Dedicated int8-per-row path for attn_gate_w (bypasses both GPTQ and + # fp16 passthrough). Applied BEFORE the numel<=65536 passthrough check + # so the gate tensor is routed here instead of to fp16. + if ( + quant_gate + and t.is_floating_point() + and t.ndim == 2 + and name.endswith(".attn_gate_w") + # Dense GatedAttn: (num_heads, dim) = (8, 512) = 4096. + # Sparse gate: (num_heads, gate_window) = (8, 12) = 96. + # Both need int8-per-row routing; the 1024 lower bound in stock + # PR-1736 presumed dense-only. Widen to catch both. + and 32 <= t.numel() <= 8192 + ): + gq, gs = _quantize_gate_int8_row(t) + result[name + ".gq"] = gq + result[name + ".gs"] = gs + meta[name] = "gate_int8_row" + continue + if not t.is_floating_point() or t.numel() <= 65536: + result[name] = t.to(torch.float16) if t.is_floating_point() else t + meta[name] = "passthrough (float16)" + continue + if "tok_emb" in name: + cs = h.embed_clip_sigmas + elif ".mlp." in name: + cs = h.mlp_clip_sigmas + elif ".attn." in name: + cs = h.attn_clip_sigmas + else: + cs = h.matrix_clip_sigmas + bits = h.embed_bits if "tok_emb" in name else h.matrix_bits + clip_range = 2 ** (bits - 1) - 1 + q, s, protect_meta = gptq_quantize_weight( + t, + hessians[name], + clip_sigmas=cs, + clip_range=clip_range, + protect_groups=awq_selected.get(name), + group_size=h.awq_lite_group_size if name in awq_selected else None, + protect_clip_range=(2 ** (h.awq_lite_bits - 1) - 1) + if name in awq_selected + else None, + ) + result[name + ".q"] = q + result[name + ".scale"] = s + meta[name] = f"gptq (int{bits})" + W_q = q.float() * s.float().view(-1, 1) + if protect_meta is not None: + result[name + ".awqg_start"] = protect_meta["starts"] + result[name + ".awqg_s_hi"] = protect_meta["s_hi"] + result[name + ".awqg_size"] = torch.tensor( + protect_meta["size"], dtype=torch.int16 + ) + meta[name] = meta[name] + f"+awqgrpint{h.awq_lite_bits}" + gsz = protect_meta["size"] + for start in protect_meta["starts"].tolist(): + W_q[:, start : start + gsz] = ( + q[:, start : start + gsz].float() + * protect_meta["s_hi"].float().view(-1, 1) + ) + if lqer_on: + # LQER is fit on top of the fully realized GPTQ base, which already + # includes any higher-precision AWQ-protected groups. + scope = str(getattr(h, "lqer_scope", "all")).lower() + scope_ok = ( + scope == "all" + or (scope == "mlp" and ".mlp." in name) + or (scope == "attn" and ".attn." 
in name) + or (scope == "embed" and "tok_emb" in name) + ) + if scope_ok: + E = t.float() - W_q + err_norm = float(E.norm()) + if err_norm > 0: + lqer_cands[name] = (E, err_norm) + if lqer_on and lqer_cands: + if bool(getattr(h, "lqer_gain_select", False)): + scored = [] + for (name, (E, base_err)) in lqer_cands.items(): + fit = _lqer_fit_quantized(E, h) + if fit is None: + continue + new_err = float((E - fit["delta"]).norm()) + gain = base_err - new_err + if gain > 0: + scored.append((gain, name, fit)) + scored.sort(key=lambda x: -x[0]) + for (_gain, name, fit) in scored[: h.lqer_top_k]: + if fit["kind"] == "asym": + result[name + ".lqA_a"] = fit["qA"] + result[name + ".lqAs_a"] = fit["sA"] + result[name + ".lqB_a"] = fit["qB"] + result[name + ".lqBs_a"] = fit["sB"] + meta[name] = meta[name] + "+lqer_asym" + else: + result[name + ".lqA"] = fit["qA"] + result[name + ".lqAs"] = fit["sA"] + result[name + ".lqB"] = fit["qB"] + result[name + ".lqBs"] = fit["sB"] + meta[name] = meta[name] + "+lqer" + else: + top = sorted(lqer_cands.items(), key=lambda kv: -kv[1][1])[: h.lqer_top_k] + asym_on = bool(getattr(h, "lqer_asym_enabled", False)) + asym_g = int(getattr(h, "lqer_asym_group", 64)) + for (name, (E, _)) in top: + U, S, Vh = torch.linalg.svd(E, full_matrices=False) + r = min(h.lqer_rank, S.numel()) + A = (U[:, :r] * S[:r]).contiguous() + B = Vh[:r, :].contiguous() + if asym_on and B.numel() % asym_g == 0: + qA, sA, qB, sB = _lqer_pack_asym(A, B, asym_g) + result[name + ".lqA_a"] = qA + result[name + ".lqAs_a"] = sA + result[name + ".lqB_a"] = qB + result[name + ".lqBs_a"] = sB + meta[name] = meta[name] + "+lqer_asym" + else: + qA, sA, qB, sB = _lqer_pack(A, B, h.lqer_factor_bits) + result[name + ".lqA"] = qA + result[name + ".lqAs"] = sA + result[name + ".lqB"] = qB + result[name + ".lqBs"] = sB + meta[name] = meta[name] + "+lqer" + categories = collections.defaultdict(set) + for (name, cat) in meta.items(): + short = re.sub("\\.\\d+$", "", re.sub("blocks\\.\\d+", "blocks", name)) + categories[cat].add(short) + log("Quantized weights:") + for cat in sorted(categories): + log(f" {cat}: {', '.join(sorted(categories[cat]))}") + return result, meta + +def dequantize_mixed(result, meta, template_sd): + out = {} + for (name, orig) in template_sd.items(): + info = meta.get(name) + if info is None: + continue + orig_dtype = orig.dtype + if "passthrough" in info: + t = result[name] + if t.dtype == torch.float16 and orig_dtype in ( + torch.float32, + torch.bfloat16, + ): + t = t.to(orig_dtype) + out[name] = t + continue + if info == "gate_int8_row": + gq = result[name + ".gq"] + gs = result[name + ".gs"] + out[name] = (gq.float() * gs.float().view(-1, 1)).to(orig_dtype) + continue + q, s = result[name + ".q"], result[name + ".scale"] + if s.ndim > 0: + W = q.float() * s.float().view(q.shape[0], *[1] * (q.ndim - 1)) + else: + W = q.float() * float(s.item()) + if "awqgrpint" in info: + starts = result[name + ".awqg_start"].tolist() + s_hi = result[name + ".awqg_s_hi"].float() + gsz = int(result[name + ".awqg_size"].item()) + for start in starts: + W[:, start : start + gsz] = ( + q[:, start : start + gsz].float() * s_hi.view(-1, 1) + ) + if "lqer_asym" in info: + qA_t = result[name + ".lqA_a"] + sA_t = result[name + ".lqAs_a"] + qB_t = result[name + ".lqB_a"] + sB_t = result[name + ".lqBs_a"] + qA = qA_t.float() * float(sA_t) + g_sz = qB_t.numel() // sB_t.numel() + qB = (qB_t.reshape(-1, g_sz).float() * sB_t.float().view(-1, 1)).reshape( + qB_t.shape + ) + W = W + qA @ qB + elif "lqer" in info: + qA = 
result[name + ".lqA"].float() * result[name + ".lqAs"].float().view(-1, 1) + qB = result[name + ".lqB"].float() * result[name + ".lqBs"].float().view(-1, 1) + W = W + qA @ qB + out[name] = W.to(orig_dtype) + return out + + +_BSHF_MAGIC = b"BSHF" + + +# ── Per-group lrzip compression (ported from PR#1586 via PR#1667/1729) ──────── + +_GROUP_ORDER = [ + "_tok_emb.weight.q", + "attn.c_k.weight.q", "attn.c_q.weight.q", + "attn.c_v.weight.q", "attn.proj.weight.q", + "mlp.fc.weight.q", "mlp.proj.weight.q", +] +_SIMSORT_KEYS = {"_tok_emb.weight.q", "attn.c_q.weight.q", "mlp.fc.weight.q"} +_PACK_MAGIC = b"PGRP" + + +def _similarity_sort_l1(matrix): + import numpy as _np + n = matrix.shape[0] + used = _np.zeros(n, dtype=bool) + order = [0] + used[0] = True + cur = matrix[0].astype(_np.float32) + for _ in range(n - 1): + dists = _np.sum(_np.abs(matrix[~used].astype(_np.float32) - cur), axis=1) + unused = _np.where(~used)[0] + best = unused[_np.argmin(dists)] + order.append(best) + used[best] = True + cur = matrix[best].astype(_np.float32) + return _np.array(order, dtype=_np.uint16) + + +def _lrzip_compress(data, tmpdir, label): + inp = os.path.join(tmpdir, f"{label}.bin") + out = f"{inp}.lrz" + with open(inp, "wb") as f: + f.write(data) + subprocess.run(["lrzip", "-z", "-L", "9", "-o", out, inp], capture_output=True, check=True) + with open(out, "rb") as f: + result = f.read() + os.remove(inp); os.remove(out) + return result + + +def _lrzip_decompress(data, tmpdir, label): + inp = os.path.join(tmpdir, f"{label}.lrz") + out = os.path.join(tmpdir, f"{label}.bin") + with open(inp, "wb") as f: + f.write(data) + subprocess.run(["lrzip", "-d", "-f", "-o", out, inp], capture_output=True, check=True) + with open(out, "rb") as f: + result = f.read() + os.remove(inp); os.remove(out) + return result + + +def _pack_streams(streams): + import struct + n = len(streams) + hdr = _PACK_MAGIC + struct.pack("= 2 + docs.append((start, end - start)) + return docs + + +def _build_ttt_global_batches(doc_entries, h, ascending=False): + batch_size = h.ttt_batch_size + global_doc_entries = sorted(doc_entries, key=lambda x: x[1][1]) + global_batches = [ + global_doc_entries[i : i + batch_size] + for i in range(0, len(global_doc_entries), batch_size) + ] + indexed = list(enumerate(global_batches)) + if not ascending: + indexed.sort(key=lambda ib: -max(dl for _, (_, dl) in ib[1])) + return indexed + + +def _init_batch_counter(path): + with open(path, "wb") as f: + f.write((0).to_bytes(4, "little")) + + +def _claim_next_batch(counter_path, queue_len): + try: + with open(counter_path, "r+b") as f: + fcntl.flock(f, fcntl.LOCK_EX) + idx = int.from_bytes(f.read(4), "little") + f.seek(0) + f.write((idx + 1).to_bytes(4, "little")) + f.flush() + except FileNotFoundError: + return queue_len + return idx + + +def _compute_chunk_window(ci, pred_len, num_chunks, chunk_size, eval_seq_len): + chunk_end = pred_len if ci == num_chunks - 1 else (ci + 1) * chunk_size + win_start = max(0, chunk_end - eval_seq_len) + win_len = chunk_end - win_start + chunk_start = ci * chunk_size + chunk_offset = chunk_start - win_start + chunk_len = chunk_end - chunk_start + return win_start, win_len, chunk_offset, chunk_len + + +def _accumulate_bpb( + ptl, + x, + y, + chunk_offsets, + chunk_lens, + pos_idx, + base_bytes_lut, + has_leading_space_lut, + is_boundary_token_lut, + loss_sum, + byte_sum, + token_count, + y_bytes=None, +): + pos = pos_idx[: x.size(1)].unsqueeze(0) + mask = ( + (chunk_lens.unsqueeze(1) > 0) + & (pos >= chunk_offsets.unsqueeze(1)) + & 
(pos < (chunk_offsets + chunk_lens).unsqueeze(1)) + ) + mask_f64 = mask.to(torch.float64) + if y_bytes is not None: + tok_bytes = y_bytes.to(torch.float64) + else: + tok_bytes = base_bytes_lut[y].to(torch.float64) + tok_bytes += (has_leading_space_lut[y] & ~is_boundary_token_lut[x]).to( + torch.float64 + ) + loss_sum += (ptl.to(torch.float64) * mask_f64).sum() + byte_sum += (tok_bytes * mask_f64).sum() + token_count += chunk_lens.to(torch.float64).sum() + + +def _loss_bpb_from_sums(loss_sum, token_count, byte_sum): + val_loss = (loss_sum / token_count).item() + val_bpb = val_loss / math.log(2.0) * (token_count.item() / byte_sum.item()) + return val_loss, val_bpb + + +def _add_to_counter(path, delta): + try: + with open(path, "r+b") as f: + fcntl.flock(f, fcntl.LOCK_EX) + cur = int.from_bytes(f.read(8), "little", signed=True) + cur += int(delta) + f.seek(0) + f.write(int(cur).to_bytes(8, "little", signed=True)) + f.flush() + return cur + except FileNotFoundError: + return int(delta) + + +def _init_int64_counter(path): + with open(path, "wb") as f: + f.write((0).to_bytes(8, "little", signed=True)) + + +def _select_ttt_doc_entries(docs, h): + doc_entries = list(enumerate(docs)) + if h.val_doc_fraction < 1.0: + sample_n = max(1, int(round(len(docs) * h.val_doc_fraction))) + sampled_indices = sorted( + random.Random(h.seed).sample(range(len(docs)), sample_n) + ) + return [(i, docs[i]) for i in sampled_indices] + return doc_entries + + +def train_val_ttt_global_sgd_distributed(h, device, val_data, base_model, val_tokens, batch_seqs=None): + global BOS_ID + if BOS_ID is None: + BOS_ID = 1 + base_model.eval() + seq_len = h.eval_seq_len + total_tokens = val_tokens.numel() - 1 + ttt_chunk = h.global_ttt_chunk_tokens + batch_seqs = h.global_ttt_batch_seqs if batch_seqs is None else batch_seqs + num_chunks = (total_tokens + ttt_chunk - 1) // ttt_chunk + ttt_params = [p for p in base_model.parameters()] + for p in ttt_params: + p.requires_grad_(True) + optimizer = torch.optim.SGD( + ttt_params, lr=h.global_ttt_lr, momentum=h.global_ttt_momentum + ) + t_start = time.perf_counter() + for ci in range(num_chunks): + chunk_start = ci * ttt_chunk + chunk_end = min((ci + 1) * ttt_chunk, total_tokens) + is_last_chunk = ci == num_chunks - 1 + if is_last_chunk or h.global_ttt_epochs <= 0: + continue + base_model.train() + chunk_seqs = (chunk_end - chunk_start) // seq_len + if chunk_seqs <= 0: + continue + warmup_chunks = max(0, min(h.global_ttt_warmup_chunks, num_chunks - 1)) + if warmup_chunks > 0 and ci < warmup_chunks: + warmup_denom = max(warmup_chunks - 1, 1) + warmup_t = ci / warmup_denom + lr_now = ( + h.global_ttt_warmup_start_lr + + (h.global_ttt_lr - h.global_ttt_warmup_start_lr) * warmup_t + ) + else: + decay_steps = max(num_chunks - 1 - warmup_chunks, 1) + decay_ci = max(ci - warmup_chunks, 0) + lr_now = h.global_ttt_lr * 0.5 * ( + 1.0 + math.cos(math.pi * decay_ci / decay_steps) + ) + for pg in optimizer.param_groups: + pg["lr"] = lr_now + my_seq_s = chunk_seqs * h.rank // h.world_size + my_seq_e = chunk_seqs * (h.rank + 1) // h.world_size + my_chunk_seqs = my_seq_e - my_seq_s + for _ in range(h.global_ttt_epochs): + for bs in range(0, my_chunk_seqs, batch_seqs): + be = min(bs + batch_seqs, my_chunk_seqs) + actual_bs = my_seq_s + bs + start_tok = chunk_start + actual_bs * seq_len + end_tok = chunk_start + (my_seq_s + be) * seq_len + 1 + if end_tok > val_tokens.numel(): + continue + local = val_tokens[start_tok:end_tok].to(device=device, dtype=torch.int64) + x_flat = local[:-1] + y_flat = local[1:] + 
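+                # local spans (be - bs) * seq_len + 1 tokens, so x/y form the
+                # standard next-token pair: y_flat[i] = local[i + 1] is the
+                # target for x_flat[i] = local[i].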
optimizer.zero_grad(set_to_none=True) + with torch.enable_grad(): + with torch.autocast(device_type="cuda", dtype=torch.bfloat16): + if h.global_ttt_respect_doc_boundaries: + bos_pos = (x_flat == BOS_ID).nonzero(as_tuple=True)[0].tolist() + cu_seqlens, max_seqlen = _build_cu_seqlens( + bos_pos, x_flat.numel(), x_flat.device, h.eval_seq_len, 64 + ) + loss = base_model( + x_flat[None], + y_flat[None], + cu_seqlens=cu_seqlens, + max_seqlen=max_seqlen, + ) + else: + x = x_flat.reshape(-1, seq_len) + y = y_flat.reshape(-1, seq_len) + loss = base_model(x, y) + loss.backward() + if dist.is_available() and dist.is_initialized(): + for p in ttt_params: + if p.grad is not None: + dist.all_reduce(p.grad, op=dist.ReduceOp.SUM) + p.grad.mul_(1.0 / h.world_size) + if h.global_ttt_grad_clip > 0: + torch.nn.utils.clip_grad_norm_(ttt_params, h.global_ttt_grad_clip) + optimizer.step() + base_model.eval() + if h.rank == 0: + elapsed = time.perf_counter() - t_start + log( + f"tttg: c{ci+1}/{num_chunks} lr:{lr_now:.6f} t:{elapsed:.1f}s" + ) + for p in base_model.parameters(): + p.requires_grad_(True) + base_model.eval() + + +def eval_val_ttt_phased(h, base_model, device, val_data, forward_ttt_train): + global BOS_ID + if BOS_ID is None: + BOS_ID = 1 + base_model.eval() + for p in base_model.parameters(): + p.requires_grad_(False) + all_tokens = val_data.val_tokens + all_tokens_idx = all_tokens.to(torch.int32) + docs = _find_docs(all_tokens) + doc_entries = _select_ttt_doc_entries(docs, h) + prefix_doc_limit = max(0, min(len(doc_entries), int(h.phased_ttt_prefix_docs))) + num_phases = max(1, int(h.phased_ttt_num_phases)) + phase_boundaries = [] + for pi in range(num_phases): + boundary = prefix_doc_limit * (pi + 1) // num_phases + phase_boundaries.append(boundary) + current_phase = 0 + current_phase_boundary = phase_boundaries[0] + log( + "ttt_phased:" + f" total_docs:{len(doc_entries)} prefix_docs:{prefix_doc_limit} " + f"suffix_docs:{len(doc_entries) - prefix_doc_limit}" + f" num_phases:{num_phases} boundaries:{phase_boundaries}" + ) + chunk_size, eval_seq_len = h.ttt_chunk_size, h.ttt_eval_seq_len + eval_batch_set = None + if h.ttt_eval_batches: + eval_batch_set = set(int(x) for x in h.ttt_eval_batches.split(",") if x.strip()) + use_ascending = eval_batch_set is not None + global_batches_sorted = _build_ttt_global_batches( + doc_entries, h, ascending=use_ascending + ) + queue_len = len(global_batches_sorted) + counter_path = f"/tmp/ttt_counter_{h.run_id}" + prefix_counter_path = f"/tmp/ttt_prefix_counter_{h.run_id}" + pause_flag_path = f"/tmp/ttt_pause_flag_{h.run_id}" + if h.rank == 0: + _init_batch_counter(counter_path) + _init_int64_counter(prefix_counter_path) + try: + os.remove(pause_flag_path) + except FileNotFoundError: + pass + if dist.is_available() and dist.is_initialized(): + path_list = [counter_path, prefix_counter_path, pause_flag_path] + dist.broadcast_object_list(path_list, src=0) + counter_path, prefix_counter_path, pause_flag_path = path_list + dist.barrier() + loss_sum = torch.zeros((), device=device, dtype=torch.float64) + byte_sum = torch.zeros((), device=device, dtype=torch.float64) + token_count = torch.zeros((), device=device, dtype=torch.float64) + t_start = time.perf_counter() + reusable_lora = BatchedTTTLoRA( + h.ttt_batch_size, base_model, h.ttt_lora_rank, + k_lora=h.ttt_k_lora, mlp_lora=h.ttt_mlp_lora, o_lora=h.ttt_o_lora, + ).to(device) + + def _build_opt(lora): + if h.ttt_optimizer == "sgd": + return torch.optim.SGD( + lora.parameters(), lr=h.ttt_lora_lr, + 
momentum=h.ttt_beta1, weight_decay=h.ttt_weight_decay, + ) + return torch.optim.AdamW( + lora.parameters(), lr=h.ttt_lora_lr, + betas=(h.ttt_beta1, h.ttt_beta2), + eps=1e-10, weight_decay=h.ttt_weight_decay, fused=True, + ) + + reusable_opt = _build_opt(reusable_lora) + local_scored_docs = [] + global_ttt_done = prefix_doc_limit == 0 + try: + while True: + queue_idx = _claim_next_batch(counter_path, queue_len) + if queue_idx >= queue_len: + break + orig_batch_idx, batch_entries = global_batches_sorted[queue_idx] + batch = [doc for _, doc in batch_entries] + bsz = len(batch) + prev_loss = loss_sum.item() + prev_bytes = byte_sum.item() + prev_tokens = token_count.item() + if bsz == reusable_lora.bsz: + reusable_lora.reset() + for s in reusable_opt.state.values(): + for k, v in s.items(): + if isinstance(v, torch.Tensor): + v.zero_() + elif k == "step": + s[k] = 0 + cur_lora = reusable_lora + cur_opt = reusable_opt + else: + cur_lora = BatchedTTTLoRA( + bsz, base_model, h.ttt_lora_rank, + k_lora=h.ttt_k_lora, mlp_lora=h.ttt_mlp_lora, o_lora=h.ttt_o_lora, + ).to(device) + cur_opt = _build_opt(cur_lora) + pred_lens = [doc_len - 1 for _, doc_len in batch] + num_chunks = [(pl + chunk_size - 1) // chunk_size for pl in pred_lens] + max_nc = max(num_chunks) + num_chunks_t = torch.tensor(num_chunks, dtype=torch.int64, device=device) + for ci in range(max_nc): + active = [ci < nc for nc in num_chunks] + needs_train = any(ci < nc - 1 for nc in num_chunks) + tok_starts = torch.zeros(bsz, dtype=torch.int64) + tok_wls = torch.zeros(bsz, dtype=torch.int64) + chunk_offsets_cpu = torch.zeros(bsz, dtype=torch.int64) + chunk_lens_cpu = torch.zeros(bsz, dtype=torch.int64) + for b in range(bsz): + if not active[b]: + continue + doc_start, doc_len = batch[b] + win_start, win_len, chunk_offset, chunk_len = _compute_chunk_window( + ci, pred_lens[b], num_chunks[b], chunk_size, eval_seq_len + ) + tok_starts[b] = doc_start + win_start + tok_wls[b] = win_len + chunk_offsets_cpu[b] = chunk_offset + chunk_lens_cpu[b] = chunk_len + _, context_size, chunk_offset, _ = _compute_chunk_window( + ci, (ci + 1) * chunk_size, ci + 1, chunk_size, eval_seq_len + ) + col_idx = torch.arange(context_size + 1) + idx = tok_starts.unsqueeze(1) + col_idx.unsqueeze(0) + idx.clamp_(max=all_tokens.numel() - 1) + gathered_gpu = all_tokens_idx[idx].to( + device=device, dtype=torch.int64, non_blocking=True + ) + valid = (col_idx[:context_size].unsqueeze(0) < tok_wls.unsqueeze(1)).to( + device, non_blocking=True + ) + chunk_offsets = chunk_offsets_cpu.to(device, non_blocking=True) + chunk_lens = chunk_lens_cpu.to(device, non_blocking=True) + x = torch.where(valid, gathered_gpu[:, :context_size], 0) + y = torch.where(valid, gathered_gpu[:, 1 : context_size + 1], 0) + ctx_pos = torch.arange(context_size, device=device, dtype=torch.int64) + with torch.autocast(device_type="cuda", dtype=torch.bfloat16): + per_tok_loss = forward_ttt_train(x, y, lora=cur_lora) + # CaseOps sidecar-driven byte budget. Mirror the index pattern + # used to build y from all_tokens: y[b, j] corresponds to the + # token at global position tok_starts[b] + 1 + j (when valid). 
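+                # Hypothetical worked example (the indices are invented for
+                # illustration, not taken from this run): with the sidecar
+                # enabled and tok_starts[b] = 100, target y[b, 0] is the token
+                # at global position 101, so its byte cost is read directly:
+                #     y_bytes_arg[b, 0] = val_bytes[101]
+                # Without the sidecar, the LUT fallback in _accumulate_bpb
+                # charges base_bytes_lut[y] plus one extra byte whenever y has
+                # a leading space and x is not a boundary token.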
+ y_bytes_arg = None + if val_data.caseops_enabled and val_data.val_bytes is not None: + y_idx = ( + tok_starts.unsqueeze(1) + + 1 + + col_idx[:context_size].unsqueeze(0) + ) + y_idx = y_idx.clamp_(max=val_data.val_bytes.numel() - 1) + y_bytes_arg = val_data.val_bytes[y_idx].to( + device=device, dtype=torch.int32, non_blocking=True + ) + # Mirror the `valid` masking used for y so out-of-range tokens + # contribute zero bytes (matches y=0 substitution above). + y_bytes_arg = torch.where( + valid, y_bytes_arg, torch.zeros_like(y_bytes_arg) + ) + with torch.no_grad(): + _accumulate_bpb( + per_tok_loss, + x, + y, + chunk_offsets, + chunk_lens, + ctx_pos, + val_data.base_bytes_lut, + val_data.has_leading_space_lut, + val_data.is_boundary_token_lut, + loss_sum, + byte_sum, + token_count, + y_bytes=y_bytes_arg, + ) + if needs_train: + activate_chunk_mask = (num_chunks_t - 1 > ci).float() + for gi in range(h.ttt_grad_steps): + if gi > 0: + with torch.autocast(device_type="cuda", dtype=torch.bfloat16): + per_tok_loss = forward_ttt_train(x, y, lora=cur_lora) + per_doc = per_tok_loss[ + :, chunk_offset : chunk_offset + chunk_size + ].mean(dim=-1) + cur_opt.zero_grad(set_to_none=True) + (per_doc * activate_chunk_mask).sum().backward() + cur_opt.step() + else: + del per_tok_loss + batch_num = orig_batch_idx + 1 + doc_lens = [dl for _, dl in batch] + should_report = batch_num in eval_batch_set if eval_batch_set is not None else True + if should_report: + cur_tokens = token_count.item() + cur_loss_val = loss_sum.item() + cur_bytes_val = byte_sum.item() + dt = cur_tokens - prev_tokens + db = cur_bytes_val - prev_bytes + if dt > 0 and db > 0: + b_loss = (cur_loss_val - prev_loss) / dt + b_bpb = b_loss / math.log(2.0) * (dt / db) + else: + b_loss = b_bpb = 0.0 + r_loss = cur_loss_val / max(cur_tokens, 1) + r_bpb = r_loss / math.log(2.0) * (cur_tokens / max(cur_bytes_val, 1)) + elapsed = time.perf_counter() - t_start + log( + f"ttp: b{batch_num}/{queue_len} bl:{b_loss:.4f} bb:{b_bpb:.4f} " + f"rl:{r_loss:.4f} rb:{r_bpb:.4f} dl:{min(doc_lens)}-{max(doc_lens)} " + f"gd:{int(global_ttt_done)}" + ) + if not global_ttt_done: + local_scored_docs.extend( + (orig_batch_idx, pos, doc_start, doc_len) + for pos, (doc_start, doc_len) in enumerate(batch) + ) + prefix_done = _add_to_counter(prefix_counter_path, len(batch_entries)) + if prefix_done >= current_phase_boundary: + try: + with open(pause_flag_path, "x"): + pass + except FileExistsError: + pass + should_pause = os.path.exists(pause_flag_path) + if should_pause: + if dist.is_available() and dist.is_initialized(): + dist.barrier() + gathered_scored_docs = [None] * h.world_size + if dist.is_available() and dist.is_initialized(): + dist.all_gather_object(gathered_scored_docs, local_scored_docs) + else: + gathered_scored_docs = [local_scored_docs] + scored_docs_for_global = [] + for rank_docs in gathered_scored_docs: + if rank_docs: + scored_docs_for_global.extend(rank_docs) + scored_docs_for_global.sort(key=lambda x: (x[0], x[1])) + scored_docs_for_global = scored_docs_for_global[:current_phase_boundary] + scored_token_chunks = [ + val_data.val_tokens[doc_start : doc_start + doc_len] + for _, _, doc_start, doc_len in scored_docs_for_global + ] + if scored_token_chunks: + global_ttt_tokens = torch.cat(scored_token_chunks) + else: + global_ttt_tokens = val_data.val_tokens[:0] + if h.rank == 0: + prefix_done = 0 + try: + with open(prefix_counter_path, "rb") as f: + prefix_done = int.from_bytes( + f.read(8), "little", signed=True + ) + except FileNotFoundError: + pass + 
log(
+                            f"ttpp: phase:{current_phase + 1}/{num_phases} pd:{prefix_done} "
+                            f"gd:{len(scored_docs_for_global)} "
+                            f"t:{time.perf_counter() - t_start:.1f}s"
+                        )
+                train_val_ttt_global_sgd_distributed(
+                    h, device, val_data, base_model, global_ttt_tokens
+                )
+                for p in base_model.parameters():
+                    p.requires_grad_(False)
+                reusable_lora = BatchedTTTLoRA(
+                    h.ttt_batch_size, base_model, h.ttt_lora_rank,
+                    k_lora=h.ttt_k_lora, mlp_lora=h.ttt_mlp_lora, o_lora=h.ttt_o_lora,
+                ).to(device)
+                reusable_opt = _build_opt(reusable_lora)
+                current_phase += 1
+                if current_phase >= num_phases:
+                    global_ttt_done = True
+                else:
+                    current_phase_boundary = phase_boundaries[current_phase]
+                if h.rank == 0:
+                    try:
+                        os.remove(pause_flag_path)
+                    except FileNotFoundError:
+                        pass
+                if dist.is_available() and dist.is_initialized():
+                    dist.barrier()
+                if h.rank == 0:
+                    log(f"ttpr: phase:{current_phase}/{num_phases} t:{time.perf_counter() - t_start:.1f}s")
+            del cur_lora, cur_opt
+    finally:
+        pass
+    if dist.is_available() and dist.is_initialized():
+        dist.all_reduce(loss_sum, op=dist.ReduceOp.SUM)
+        dist.all_reduce(byte_sum, op=dist.ReduceOp.SUM)
+        dist.all_reduce(token_count, op=dist.ReduceOp.SUM)
+    for p in base_model.parameters():
+        p.requires_grad_(True)
+    base_model.train()
+    return _loss_bpb_from_sums(loss_sum, token_count, byte_sum)
+
+
+def timed_eval(label, fn, *args, **kwargs):
+    torch.cuda.synchronize()
+    t0 = time.perf_counter()
+    val_loss, val_bpb = fn(*args, **kwargs)
+    torch.cuda.synchronize()
+    elapsed_ms = 1e3 * (time.perf_counter() - t0)
+    log(
+        f"{label} val_loss:{val_loss:.8f} val_bpb:{val_bpb:.8f} eval_time:{elapsed_ms:.0f}ms"
+    )
+    return val_loss, val_bpb
+
+
+def train_model(h, device, val_data):
+    base_model = GPT(h).to(device).bfloat16()
+    restore_fp32_params(base_model)
+    compiled_model = torch.compile(base_model, dynamic=False, fullgraph=True)
+    compiled_forward_logits = torch.compile(
+        base_model.forward_logits, dynamic=False, fullgraph=True
+    )
+    model = compiled_model
+    log(f"model_params:{sum(p.numel() for p in base_model.parameters())}")
+    optimizers = Optimizers(h, base_model)
+    train_loader = DocumentPackingLoader(h, device)
+    max_wallclock_ms = (
+        1e3 * h.max_wallclock_seconds if h.max_wallclock_seconds > 0 else None
+    )
+    if max_wallclock_ms is not None:
+        max_wallclock_ms -= h.gptq_reserve_seconds * 1e3
+        log(
+            f"gptq:reserving {h.gptq_reserve_seconds:.0f}s, effective={max_wallclock_ms:.0f}ms"
+        )
+
+    def training_frac(step, elapsed_ms):
+        if max_wallclock_ms is None:
+            return step / max(h.iterations, 1)
+        return elapsed_ms / max(max_wallclock_ms, 1e-09)
+
+    def lr_mul(frac):
+        if h.warmdown_frac <= 0:
+            return 1.0
+        if frac >= 1.0 - h.warmdown_frac:
+            return max((1.0 - frac) / h.warmdown_frac, h.min_lr)
+        return 1.0
+
+    _clip_params = [p for p in base_model.parameters() if p.requires_grad]
+
+    def step_fn(step, lr_scale):
+        train_loss = torch.zeros((), device=device)
+        for micro_step in range(h.grad_accum_steps):
+            x, y, cu_seqlens, _max_seqlen = train_loader.next_batch(
+                h.train_batch_tokens, h.grad_accum_steps
+            )
+            with torch.autocast(device_type="cuda", dtype=torch.bfloat16, enabled=True):
+                loss = model(x, y, cu_seqlens=cu_seqlens, max_seqlen=h.train_seq_len)
+            train_loss += loss.detach()
+            (loss / h.grad_accum_steps).backward()
+        train_loss /= h.grad_accum_steps
+        # Linearly ramp Muon momentum from its warmup value to its final
+        # value over the first muon_momentum_warmup_steps steps.
+        if step <= h.muon_momentum_warmup_steps:
+            frac = (
+                min(step / h.muon_momentum_warmup_steps, 1.0)
+                if h.muon_momentum_warmup_steps > 0
+                else 1.0
+            )
+            muon_momentum = (
+                1 - frac
+            ) * h.muon_momentum_warmup_start + frac * h.muon_momentum
+            for group in optimizers.optimizer_muon.param_groups:
+                group["momentum"] = muon_momentum
+        for opt in optimizers:
+            for group in opt.param_groups:
+                group["lr"] = group["base_lr"] * lr_scale
+        if h.grad_clip_norm > 0:
+            torch.nn.utils.clip_grad_norm_(_clip_params, h.grad_clip_norm)
+        optimizers.step(distributed=h.distributed)
+        return train_loss
+
+    if h.warmup_steps > 0:
+        initial_model_state = {
+            name: tensor.detach().cpu().clone()
+            for (name, tensor) in base_model.state_dict().items()
+        }
+        initial_optimizer_states = [
+            copy.deepcopy(opt.state_dict()) for opt in optimizers
+        ]
+        model.train()
+        num_tokens_local = h.train_batch_tokens // h.world_size
+        for blk in base_model.blocks:
+            blk.attn.rotary(num_tokens_local, device, torch.bfloat16)
+        cu_bucket_size = train_loader.cu_bucket_size
+        warmup_cu_buckets = tuple(cu_bucket_size * i for i in range(1, 5))
+        warmup_cu_iters = 3
+        x, y, cu_seqlens, _ = train_loader.next_batch(
+            h.train_batch_tokens, h.grad_accum_steps
+        )
+        log(f"warmup_cu_buckets:{','.join(str(b) for b in warmup_cu_buckets)} iters_each:{warmup_cu_iters}")
+
+        # Exercise forward+backward once per cu_seqlens bucket shape so
+        # torch.compile does not recompile during the timed run.
+        def _run_cu_bucket_warmup():
+            for bucket_len in warmup_cu_buckets:
+                boundaries = list(range(0, x.size(1), max(h.train_seq_len, 1)))
+                if boundaries[-1] != x.size(1):
+                    boundaries.append(x.size(1))
+                cu = torch.full((bucket_len,), x.size(1), dtype=torch.int32, device=device)
+                cu[: len(boundaries)] = torch.tensor(boundaries, dtype=torch.int32, device=device)
+                for _ in range(warmup_cu_iters):
+                    optimizers.zero_grad_all()
+                    with torch.autocast(device_type="cuda", dtype=torch.bfloat16, enabled=True):
+                        wloss = model(x, y, cu_seqlens=cu, max_seqlen=h.train_seq_len)
+                    (wloss / h.grad_accum_steps).backward()
+            optimizers.zero_grad_all()
+
+        _run_cu_bucket_warmup()
+        if h.num_loops > 0:
+            base_model.looping_active = True
+            _run_cu_bucket_warmup()
+            base_model.looping_active = False
+        for warmup_step in range(h.warmup_steps):
+            step_fn(warmup_step, 1.0)
+            if (
+                warmup_step <= 5
+                or (warmup_step + 1) % 10 == 0
+                or warmup_step + 1 == h.warmup_steps
+            ):
+                log(f"warmup_step: {warmup_step+1}/{h.warmup_steps}")
+        if h.num_loops > 0:
+            base_model.looping_active = True
+            log(
+                f"loop_warmup:enabled encoder:{base_model.encoder_indices} decoder:{base_model.decoder_indices}"
+            )
+            for warmup_step in range(h.warmup_steps):
+                step_fn(warmup_step, 1.0)
+                if (
+                    warmup_step <= 5
+                    or (warmup_step + 1) % 10 == 0
+                    or warmup_step + 1 == h.warmup_steps
+                ):
+                    log(f"loop_warmup_step: {warmup_step+1}/{h.warmup_steps}")
+            base_model.looping_active = False
+        base_model.load_state_dict(initial_model_state, strict=True)
+        for (opt, state) in zip(optimizers, initial_optimizer_states, strict=True):
+            opt.load_state_dict(state)
+        optimizers.zero_grad_all()
+        train_loader = DocumentPackingLoader(h, device)
+    _live_state = base_model.state_dict(keep_vars=True)
+    ema_state = {
+        name: t.detach().float().clone()
+        for (name, t) in _live_state.items()
+    }
+    _ema_pairs = [(ema_state[name], t) for (name, t) in _live_state.items()]
+    ema_decay = h.ema_decay
+    training_time_ms = 0.0
+    forced_stop_step = int(os.environ.get("FORCE_STOP_STEP", "0"))
+    stop_after_step = forced_stop_step if forced_stop_step > 0 else None
+    torch.cuda.synchronize()
+    t0 = time.perf_counter()
+    step = 0
+    while True:
+        last_step = (
+            step == h.iterations
+            or (stop_after_step is not None and step >= stop_after_step)
+        )
+        should_validate = (
+            last_step or (h.val_loss_every > 0 and step % h.val_loss_every == 0)
+        )
+        if 
should_validate: + torch.cuda.synchronize() + training_time_ms += 1e3 * (time.perf_counter() - t0) + val_loss, val_bpb = eval_val( + h, device, val_data, model, compiled_forward_logits + ) + log( + f"{step}/{h.iterations} val_loss: {val_loss:.4f} val_bpb: {val_bpb:.4f}" + ) + torch.cuda.synchronize() + t0 = time.perf_counter() + if last_step: + if stop_after_step is not None and step < h.iterations: + log( + f"stopping_early: wallclock_cap train_time: {training_time_ms:.0f}ms step: {step}/{h.iterations}" + ) + break + elapsed_ms = training_time_ms + 1e3 * (time.perf_counter() - t0) + frac = training_frac(step, elapsed_ms) + scale = lr_mul(frac) + if ( + h.num_loops > 0 + and not base_model.looping_active + and frac >= h.enable_looping_at + ): + base_model.looping_active = True + log( + f"layer_loop:enabled step:{step} frac:{frac:.3f} encoder:{base_model.encoder_indices} decoder:{base_model.decoder_indices}" + ) + train_loss = step_fn(step, scale) + with torch.no_grad(): + for ema_t, t in _ema_pairs: + ema_t.mul_(ema_decay).add_(t.detach(), alpha=1.0 - ema_decay) + step += 1 + approx_training_time_ms = training_time_ms + 1e3 * (time.perf_counter() - t0) + should_log_train = h.train_log_every > 0 and ( + step <= 5 or step % h.train_log_every == 0 or stop_after_step is not None + ) + if should_log_train: + tok_per_sec = step * h.train_batch_tokens / (approx_training_time_ms / 1e3) + log( + f"{step}/{h.iterations} train_loss: {train_loss.item():.4f} train_time: {approx_training_time_ms/60000:.1f}m tok/s: {tok_per_sec:.0f}" + ) + reached_cap = ( + forced_stop_step <= 0 + and max_wallclock_ms is not None + and approx_training_time_ms >= max_wallclock_ms + ) + if h.distributed and forced_stop_step <= 0 and max_wallclock_ms is not None: + reached_cap_tensor = torch.tensor(int(reached_cap), device=device) + dist.all_reduce(reached_cap_tensor, op=dist.ReduceOp.MAX) + reached_cap = bool(reached_cap_tensor.item()) + if stop_after_step is None and reached_cap: + stop_after_step = step + log( + f"peak memory allocated: {torch.cuda.max_memory_allocated()//1024//1024} MiB reserved: {torch.cuda.max_memory_reserved()//1024//1024} MiB" + ) + log("ema:applying EMA weights") + current_state = base_model.state_dict() + avg_state = { + name: t.to(dtype=current_state[name].dtype) for (name, t) in ema_state.items() + } + base_model.load_state_dict(avg_state, strict=True) + return base_model, compiled_model, compiled_forward_logits + + +def train_and_eval(h, device): + global BOS_ID + random.seed(h.seed) + np.random.seed(h.seed) + torch.manual_seed(h.seed) + torch.cuda.manual_seed_all(h.seed) + if h.artifact_dir and h.is_main_process: + os.makedirs(h.artifact_dir, exist_ok=True) + val_data = ValidationData(h, device) + log( + f"train_shards: {len(list(Path(h.datasets_dir).resolve().glob('fineweb_train_*.bin')))}" + ) + log(f"val_tokens: {val_data.val_tokens.numel()-1}") + # TTT_EVAL_ONLY: skip training + GPTQ, jump straight to TTT eval on a + # pre-existing quantized artifact. Used to test TTT-only improvements + # (e.g., PR-1767's alpha/warm-start/WD) without retraining. 
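+    # Sketch of the intended invocation (the launch command is illustrative,
+    # not taken from this run): with a quantized artifact already on disk,
+    #     TTT_EVAL_ONLY=1 torchrun --nproc_per_node=8 train_gpt.py
+    # deserializes that artifact below and jumps straight to the phased TTT
+    # eval, skipping train_model, serialization, and GPTQ entirely.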
+ ttt_eval_only = os.environ.get("TTT_EVAL_ONLY", "0") == "1" + quantize_only = os.environ.get("QUANTIZE_ONLY", "0") == "1" + if ttt_eval_only: + log("TTT_EVAL_ONLY=1 — skipping training + GPTQ, loading saved artifact for TTT eval") + log(f"ttt_lora_alpha: {BatchedLinearLoRA._ALPHA}") + log(f"ttt_warm_start_a: {BatchedLinearLoRA._WARM_START_A}") + log(f"ttt_weight_decay: {h.ttt_weight_decay}") + elif quantize_only: + log("QUANTIZE_ONLY=1 — skipping training, loading saved full-precision checkpoint") + log(f"quantize_only checkpoint: {h.model_path}") + if BOS_ID is None: + BOS_ID = 1 + base_model = GPT(h).to(device).bfloat16() + state = torch.load(h.model_path, map_location="cpu") + base_model.load_state_dict(state, strict=True) + del state + serialize(h, base_model, Path(__file__).read_text(encoding="utf-8")) + if h.distributed: + dist.barrier() + else: + base_model, compiled_model, compiled_forward_logits = train_model( + h, device, val_data + ) + torch._dynamo.reset() + timed_eval( + "diagnostic pre-quantization post-ema", + eval_val, + h, + device, + val_data, + compiled_model, + compiled_forward_logits, + ) + if os.environ.get("PREQUANT_ONLY", "0") == "1": + log("PREQUANT_ONLY=1 — skipping serialize/GPTQ/post-quant eval/TTT") + return + serialize(h, base_model, Path(__file__).read_text(encoding="utf-8")) + if h.distributed: + dist.barrier() + eval_model = deserialize(h, device) + if h.num_loops > 0: + eval_model.looping_active = True + if not ttt_eval_only: + compiled_model = torch.compile(eval_model, dynamic=False, fullgraph=True) + compiled_forward_logits = torch.compile( + eval_model.forward_logits, dynamic=False, fullgraph=True + ) + timed_eval( + "diagnostic quantized", + eval_val, + h, + device, + val_data, + compiled_model, + compiled_forward_logits, + ) + del eval_model + if h.ttt_enabled: + if not ttt_eval_only: + del compiled_model + if ttt_eval_only: + del eval_model + torch._dynamo.reset() + torch.cuda.empty_cache() + ttt_model = deserialize(h, device) + if h.num_loops > 0: + ttt_model.looping_active = True + for p in ttt_model.parameters(): + p.requires_grad_(False) + + if h.rope_yarn: + _yarn_seqlen = h.train_batch_tokens // h.grad_accum_steps + for block in ttt_model.blocks: + block.attn.rotary(_yarn_seqlen, device, torch.bfloat16) + else: + for block in ttt_model.blocks: + block.attn.rotary._cos_cached = None + block.attn.rotary._sin_cached = None + block.attn.rotary._seq_len_cached = 0 + block.attn.rotary(h.ttt_eval_seq_len, device, torch.bfloat16) + + def _fwd_ttt_inner(input_ids, target_ids, lora): + return ttt_model.forward_ttt(input_ids, target_ids, lora=lora) + + _fwd_ttt_compiled_inner = None + + def _fwd_ttt(input_ids, target_ids, lora): + nonlocal _fwd_ttt_compiled_inner + if _fwd_ttt_compiled_inner is None: + _fwd_ttt_compiled_inner = torch.compile(_fwd_ttt_inner, dynamic=True) + return _fwd_ttt_compiled_inner(input_ids, target_ids, lora=lora) + + fwd_ttt_compiled = _fwd_ttt + log(f"ttt_lora:warming up compile (random tokens, no val data)") + if BOS_ID is None: + BOS_ID = 1 + t_warmup = time.perf_counter() + warmup_bszes = [h.ttt_batch_size] + for bsz in warmup_bszes: + wl = BatchedTTTLoRA( + bsz, ttt_model, h.ttt_lora_rank, + k_lora=h.ttt_k_lora, mlp_lora=h.ttt_mlp_lora, o_lora=h.ttt_o_lora, + ).to(device) + wo = torch.optim.AdamW( + wl.parameters(), + lr=h.ttt_lora_lr, + betas=(h.ttt_beta1, h.ttt_beta2), + eps=1e-10, + weight_decay=h.ttt_weight_decay, + fused=True, + ) + for ctx_len in (h.ttt_chunk_size, h.ttt_eval_seq_len): + xw = torch.randint(0, 
h.vocab_size, (bsz, ctx_len), device=device, dtype=torch.int64) + yw = torch.randint(0, h.vocab_size, (bsz, ctx_len), device=device, dtype=torch.int64) + with torch.autocast(device_type="cuda", dtype=torch.bfloat16): + ptl = fwd_ttt_compiled(xw, yw, lora=wl) + ptl[:, : min(h.ttt_chunk_size, ctx_len)].mean(dim=-1).sum().backward() + wo.step() + wo.zero_grad(set_to_none=True) + del wl, wo + torch.cuda.empty_cache() + compile_elapsed = time.perf_counter() - t_warmup + log(f"ttt_lora:compile warmup done ({compile_elapsed:.1f}s)") + log("\nbeginning TTT eval timer") + torch.cuda.synchronize() + t_ttt = time.perf_counter() + ttt_val_loss, ttt_val_bpb = eval_val_ttt_phased( + h, ttt_model, device, val_data, forward_ttt_train=fwd_ttt_compiled + ) + torch.cuda.synchronize() + ttt_eval_elapsed = time.perf_counter() - t_ttt + log( + "quantized_ttt_phased " + f"val_loss:{ttt_val_loss:.8f} val_bpb:{ttt_val_bpb:.8f} " + f"eval_time:{1e3*ttt_eval_elapsed:.0f}ms" + ) + log(f"total_eval_time:{ttt_eval_elapsed:.1f}s") + del ttt_model + + +def main(): + world_size = int(os.environ.get("WORLD_SIZE", "1")) + local_rank = int(os.environ.get("LOCAL_RANK", "0")) + distributed = "RANK" in os.environ and "WORLD_SIZE" in os.environ + if not torch.cuda.is_available(): + raise RuntimeError("CUDA is required") + if world_size <= 0: + raise ValueError(f"WORLD_SIZE must be positive, got {world_size}") + if 8 % world_size != 0: + raise ValueError( + f"WORLD_SIZE={world_size} must divide 8 so grad_accum_steps stays integral" + ) + device = torch.device("cuda", local_rank) + torch.cuda.set_device(device) + if distributed: + dist.init_process_group(backend="nccl", device_id=device) + dist.barrier() + torch.backends.cuda.matmul.allow_tf32 = True + torch.backends.cudnn.allow_tf32 = True + torch.set_float32_matmul_precision("high") + from torch.backends.cuda import ( + enable_cudnn_sdp, + enable_flash_sdp, + enable_math_sdp, + enable_mem_efficient_sdp, + ) + + enable_cudnn_sdp(False) + enable_flash_sdp(True) + enable_mem_efficient_sdp(False) + enable_math_sdp(False) + torch._dynamo.config.optimize_ddp = False + torch._dynamo.config.cache_size_limit = 64 + h = Hyperparameters() + set_logging_hparams(h) + if h.is_main_process: + os.makedirs(h.artifact_dir if h.artifact_dir else "logs", exist_ok=True) + log(100 * "=", console=False) + log("Hyperparameters:", console=True) + for (k, v) in sorted(vars(type(h)).items()): + if not k.startswith("_"): + log(f" {k}: {v}", console=True) + log("=" * 100, console=False) + log("Source code:", console=False) + log("=" * 100, console=False) + with open(__file__, "r", encoding="utf-8") as _src: + log(_src.read(), console=False) + log("=" * 100, console=False) + log(f"Running Python {sys.version}", console=False) + log(f"Running PyTorch {torch.__version__}", console=False) + log("=" * 100, console=False) + train_and_eval(h, device) + if distributed: + dist.destroy_process_group() + + +if __name__ == "__main__": + main() diff --git a/records/track_10min_16mb/2020-04-29_AWQ_lite_mixedprecision_GPTQ/train_seed1234.log b/records/track_10min_16mb/2020-04-29_AWQ_lite_mixedprecision_GPTQ/train_seed1234.log new file mode 100644 index 0000000000..73879a8861 --- /dev/null +++ b/records/track_10min_16mb/2020-04-29_AWQ_lite_mixedprecision_GPTQ/train_seed1234.log @@ -0,0 +1,947 @@ +=== Starting training: SEED=1234 RUN_ID=awq_seed1234_20260429_055105 === +=== Artifact dir: /workspace/pargolf-work/runs/awq_seed1234_20260429_055105 === +=== NPROC=8 === +W0429 05:51:07.057000 272044 torch/distributed/run.py:803] 
+W0429 05:51:07.057000 272044 torch/distributed/run.py:803] ***************************************** +W0429 05:51:07.057000 272044 torch/distributed/run.py:803] Setting OMP_NUM_THREADS environment variable for each process to be 1 in default, to avoid your system being overloaded, please further tune the variable for optimal performance in your application as needed. +W0429 05:51:07.057000 272044 torch/distributed/run.py:803] ***************************************** +Hyperparameters: + adam_eps: 1e-08 + adam_wd: 0.02 + artifact_dir: /workspace/pargolf-work/runs/awq_seed1234_20260429_055105 + attn_clip_sigmas: 13.0 + attn_out_gate_enabled: False + attn_out_gate_src: proj + awq_lite_bits: 8 + awq_lite_enabled: True + awq_lite_group_size: 64 + awq_lite_group_top_k: 1 + beta1: 0.9 + beta2: 0.99 + caseops_enabled: True + compressor: pergroup + data_dir: ./data/ + datasets_dir: /workspace/pargolf-work/data/datasets/fineweb10B_sp8192_lossless_caps_caseops_v1_reserved/datasets/fineweb10B_sp8192_lossless_caps_caseops_v1_reserved + distributed: True + ema_decay: 0.9965 + embed_bits: 7 + embed_clip_sigmas: 14.0 + embed_lr: 0.6 + embed_wd: 0.085 + enable_looping_at: 0.35 + eval_seq_len: 2048 + eval_stride: 64 + fused_ce_enabled: True + gate_window: 12 + gated_attn_enabled: False + gated_attn_init_std: 0.01 + gated_attn_quant_gate: True + global_ttt_batch_seqs: 32 + global_ttt_chunk_tokens: 32768 + global_ttt_epochs: 1 + global_ttt_grad_clip: 1.0 + global_ttt_lr: 0.001 + global_ttt_momentum: 0.9 + global_ttt_respect_doc_boundaries: True + global_ttt_warmup_chunks: 0 + global_ttt_warmup_start_lr: 0.0 + gptq_calibration_batches: 16 + gptq_reserve_seconds: 0.5 + grad_accum_steps: 1 + grad_clip_norm: 0.3 + is_main_process: True + iterations: 20000 + ln_scale: True + local_rank: 0 + logfile: /workspace/pargolf-work/runs/awq_seed1234_20260429_055105/awq_seed1234_20260429_055105.txt + logit_softcap: 30.0 + loop_end: 5 + loop_start: 3 + lqer_asym_enabled: True + lqer_asym_group: 64 + lqer_enabled: True + lqer_factor_bits: 4 + lqer_gain_select: False + lqer_rank: 4 + lqer_scope: all + lqer_top_k: 3 + matrix_bits: 6 + matrix_clip_sigmas: 12.85 + matrix_lr: 0.026 + max_wallclock_seconds: 600.0 + min_lr: 0.1 + mlp_clip_sigmas: 11.5 + mlp_mult: 4.0 + model_dim: 512 + model_path: /workspace/pargolf-work/runs/awq_seed1234_20260429_055105/final_model.pt + muon_backend_steps: 5 + muon_momentum: 0.97 + muon_momentum_warmup_start: 0.92 + muon_momentum_warmup_steps: 1500 + muon_row_normalize: True + muon_wd: 0.095 + num_heads: 8 + num_kv_heads: 4 + num_layers: 11 + num_loops: 2 + parallel_final_lane: mean + parallel_start_layer: 8 + phased_ttt_num_phases: 3 + phased_ttt_prefix_docs: 2500 + qk_gain_init: 5.0 + quantized_model_path: /workspace/pargolf-work/runs/awq_seed1234_20260429_055105/final_model.int6.ptz + rank: 0 + rope_base: 10000.0 + rope_dims: 16 + rope_train_seq_len: 2048 + rope_yarn: False + run_id: awq_seed1234_20260429_055105 + scalar_lr: 0.02 + seed: 1234 + skip_gates_enabled: True + smear_gate_enabled: True + sparse_attn_gate_enabled: True + sparse_attn_gate_init_std: 0.0 + sparse_attn_gate_scale: 0.5 + tie_embeddings: True + tied_embed_init_std: 0.005 + tied_embed_lr: 0.03 + tokenizer_path: /workspace/pargolf/records/track_10min_16mb/2026-04-27_SP8192_LQER_SparseGate_BOSSmearFix_9HpStack_1.0611/tokenizers/fineweb_8192_bpe_lossless_caps_caseops_v1_reserved.model + train_batch_tokens: 786432 + train_files: 
/workspace/pargolf-work/data/datasets/fineweb10B_sp8192_lossless_caps_caseops_v1_reserved/datasets/fineweb10B_sp8192_lossless_caps_caseops_v1_reserved/fineweb_train_*.bin + train_log_every: 500 + train_seq_len: 2048 + ttt_batch_size: 64 + ttt_beta1: 0.0 + ttt_beta2: 0.99 + ttt_chunk_size: 48 + ttt_enabled: True + ttt_eval_batches: + ttt_eval_seq_len: 2048 + ttt_grad_steps: 1 + ttt_k_lora: True + ttt_lora_lr: 0.0001 + ttt_lora_rank: 80 + ttt_mlp_lora: True + ttt_o_lora: True + ttt_optimizer: adam + ttt_weight_decay: 0.5 + val_batch_tokens: 524288 + val_bytes_files: /workspace/pargolf-work/data/datasets/fineweb10B_sp8192_lossless_caps_caseops_v1_reserved/datasets/fineweb10B_sp8192_lossless_caps_caseops_v1_reserved/fineweb_val_bytes_*.bin + val_doc_fraction: 1.0 + val_files: /workspace/pargolf-work/data/datasets/fineweb10B_sp8192_lossless_caps_caseops_v1_reserved/datasets/fineweb10B_sp8192_lossless_caps_caseops_v1_reserved/fineweb_val_*.bin + val_loss_every: 4000 + vocab_size: 8192 + warmdown_frac: 0.85 + warmup_steps: 20 + world_size: 8 + xsa_last_n: 11 +train_shards: 80 +val_tokens: 47851520 +model_params:35945671 +gptq:reserving 0s, effective=599500ms +warmup_cu_buckets:64,128,192,256 iters_each:3 +warmup_step: 1/20 +warmup_step: 2/20 +warmup_step: 3/20 +warmup_step: 4/20 +warmup_step: 5/20 +warmup_step: 6/20 +warmup_step: 10/20 +warmup_step: 20/20 +loop_warmup:enabled encoder:[0, 1, 2, 3, 4, 5, 3, 4] decoder:[5, 3, 4, 5, 6, 7, 8, 9, 10] +loop_warmup_step: 1/20 +loop_warmup_step: 2/20 +loop_warmup_step: 3/20 +loop_warmup_step: 4/20 +loop_warmup_step: 5/20 +loop_warmup_step: 6/20 +loop_warmup_step: 10/20 +loop_warmup_step: 20/20 +0/20000 val_loss: 9.0008 val_bpb: 4.1127 +1/20000 train_loss: 9.0048 train_time: 0.0m tok/s: 17443421 +2/20000 train_loss: 12.7447 train_time: 0.0m tok/s: 11135875 +3/20000 train_loss: 10.3870 train_time: 0.0m tok/s: 9840663 +4/20000 train_loss: 9.1924 train_time: 0.0m tok/s: 9247123 +5/20000 train_loss: 7.9185 train_time: 0.0m tok/s: 8954019 +500/20000 train_loss: 2.6288 train_time: 0.8m tok/s: 8301535 +1000/20000 train_loss: 2.7393 train_time: 1.6m tok/s: 8296244 +1500/20000 train_loss: 2.7178 train_time: 2.4m tok/s: 8296853 +2000/20000 train_loss: 2.6255 train_time: 3.2m tok/s: 8291262 +layer_loop:enabled step:2212 frac:0.350 encoder:[0, 1, 2, 3, 4, 5, 3, 4] decoder:[5, 3, 4, 5, 6, 7, 8, 9, 10] +2500/20000 train_loss: 2.4698 train_time: 4.2m tok/s: 7864474 +3000/20000 train_loss: 2.4962 train_time: 5.4m tok/s: 7320722 +3500/20000 train_loss: 2.6220 train_time: 6.5m tok/s: 7021693 +4000/20000 train_loss: 2.3670 train_time: 7.7m tok/s: 6795873 +4000/20000 val_loss: 2.4308 val_bpb: 1.1107 +4500/20000 train_loss: 2.5373 train_time: 8.9m tok/s: 6645807 +4982/20000 val_loss: 2.3576 val_bpb: 1.0772 +stopping_early: wallclock_cap train_time: 599654ms step: 4982/20000 +peak memory allocated: 41707 MiB reserved: 47048 MiB +ema:applying EMA weights +diagnostic pre-quantization post-ema val_loss:2.33178511 val_bpb:1.06546561 eval_time:8209ms +Serialized model: 135417533 bytes +Code size (uncompressed): 168543 bytes +Code size (compressed): 33787 bytes +GPTQ:collecting Hessians from calibration data... 
+GPTQ:collected 67 Hessians in 4.0s +Quantized weights: + gate_int8_row: blocks.attn.attn_gate_w + gptq (int6): blocks.attn.c_k.weight, blocks.attn.c_q.weight, blocks.attn.c_v.weight, blocks.attn.proj.weight, blocks.mlp.fc.weight, blocks.mlp.proj.weight + gptq (int6)+lqer_asym: blocks.mlp.fc.weight + gptq (int7)+awqgrpint8+lqer_asym: tok_emb.weight + passthrough (float16): blocks.attn.q_gain, blocks.attn_scale, blocks.mlp_scale, blocks.resid_mix, parallel_post_lambdas, parallel_resid_lambdas, skip_gates, skip_weights, smear_gate.weight, smear_lambda +Serialize: per-group lrzip compression... +Serialize: per-group compression done in 108.1s +Serialized model quantized+pergroup: 15942886 bytes +Total submission size quantized+pergroup: 15976673 bytes +Deserialize: per-group lrzip decompression... +Deserialize: decompression done in 19.4s +diagnostic quantized val_loss:2.35052869 val_bpb:1.07403014 eval_time:13429ms +Deserialize: per-group lrzip decompression... +Deserialize: decompression done in 19.7s +ttt_lora:warming up compile (random tokens, no val data) +ttt_lora:compile warmup done (97.9s) + +beginning TTT eval timer +ttt_phased: total_docs:50000 prefix_docs:2500 suffix_docs:47500 num_phases:3 boundaries:[833, 1666, 2500] +ttp: b780/782 bl:2.2373 bb:1.0778 rl:2.2373 rb:1.0778 dl:13091-17244 gd:0 +ttp: b765/782 bl:2.3181 bb:1.0841 rl:2.2561 rb:1.0793 dl:4393-4510 gd:0 +ttpp: phase:1/3 pd:1296 gd:833 t:169.6s +tttg: c1/131 lr:0.001000 t:2.4s +tttg: c2/131 lr:0.001000 t:2.5s +tttg: c3/131 lr:0.000999 t:2.5s +tttg: c4/131 lr:0.000999 t:2.6s +tttg: c5/131 lr:0.000998 t:2.7s +tttg: c6/131 lr:0.000996 t:2.7s +tttg: c7/131 lr:0.000995 t:2.8s +tttg: c8/131 lr:0.000993 t:2.9s +tttg: c9/131 lr:0.000991 t:2.9s +tttg: c10/131 lr:0.000988 t:3.0s +tttg: c11/131 lr:0.000985 t:3.1s +tttg: c12/131 lr:0.000982 t:3.1s +tttg: c13/131 lr:0.000979 t:3.2s +tttg: c14/131 lr:0.000976 t:3.3s +tttg: c15/131 lr:0.000972 t:3.3s +tttg: c16/131 lr:0.000968 t:3.4s +tttg: c17/131 lr:0.000963 t:3.5s +tttg: c18/131 lr:0.000958 t:3.5s +tttg: c19/131 lr:0.000953 t:3.6s +tttg: c20/131 lr:0.000948 t:3.7s +tttg: c21/131 lr:0.000943 t:3.8s +tttg: c22/131 lr:0.000937 t:3.8s +tttg: c23/131 lr:0.000931 t:3.9s +tttg: c24/131 lr:0.000925 t:4.0s +tttg: c25/131 lr:0.000918 t:4.0s +tttg: c26/131 lr:0.000911 t:4.1s +tttg: c27/131 lr:0.000905 t:4.2s +tttg: c28/131 lr:0.000897 t:4.2s +tttg: c29/131 lr:0.000890 t:5.8s +tttg: c30/131 lr:0.000882 t:5.9s +tttg: c31/131 lr:0.000874 t:5.9s +tttg: c32/131 lr:0.000866 t:6.0s +tttg: c33/131 lr:0.000858 t:6.1s +tttg: c34/131 lr:0.000849 t:6.1s +tttg: c35/131 lr:0.000841 t:6.2s +tttg: c36/131 lr:0.000832 t:6.3s +tttg: c37/131 lr:0.000822 t:6.3s +tttg: c38/131 lr:0.000813 t:6.4s +tttg: c39/131 lr:0.000804 t:6.5s +tttg: c40/131 lr:0.000794 t:6.5s +tttg: c41/131 lr:0.000784 t:6.6s +tttg: c42/131 lr:0.000774 t:6.7s +tttg: c43/131 lr:0.000764 t:6.7s +tttg: c44/131 lr:0.000753 t:6.8s +tttg: c45/131 lr:0.000743 t:6.9s +tttg: c46/131 lr:0.000732 t:7.0s +tttg: c47/131 lr:0.000722 t:7.0s +tttg: c48/131 lr:0.000711 t:7.1s +tttg: c49/131 lr:0.000700 t:7.2s +tttg: c50/131 lr:0.000689 t:7.2s +tttg: c51/131 lr:0.000677 t:7.3s +tttg: c52/131 lr:0.000666 t:7.4s +tttg: c53/131 lr:0.000655 t:7.4s +tttg: c54/131 lr:0.000643 t:7.5s +tttg: c55/131 lr:0.000631 t:7.6s +tttg: c56/131 lr:0.000620 t:7.6s +tttg: c57/131 lr:0.000608 t:7.7s +tttg: c58/131 lr:0.000596 t:7.8s +tttg: c59/131 lr:0.000584 t:7.8s +tttg: c60/131 lr:0.000572 t:7.9s +tttg: c61/131 lr:0.000560 t:8.0s +tttg: c62/131 lr:0.000548 t:8.1s +tttg: c63/131 
lr:0.000536 t:8.1s +tttg: c64/131 lr:0.000524 t:8.2s +tttg: c65/131 lr:0.000512 t:8.3s +tttg: c66/131 lr:0.000500 t:8.3s +tttg: c67/131 lr:0.000488 t:8.4s +tttg: c68/131 lr:0.000476 t:8.5s +tttg: c69/131 lr:0.000464 t:8.6s +tttg: c70/131 lr:0.000452 t:8.6s +tttg: c71/131 lr:0.000440 t:8.7s +tttg: c72/131 lr:0.000428 t:8.8s +tttg: c73/131 lr:0.000416 t:8.8s +tttg: c74/131 lr:0.000404 t:8.9s +tttg: c75/131 lr:0.000392 t:9.0s +tttg: c76/131 lr:0.000380 t:9.0s +tttg: c77/131 lr:0.000369 t:9.1s +tttg: c78/131 lr:0.000357 t:9.2s +tttg: c79/131 lr:0.000345 t:9.2s +tttg: c80/131 lr:0.000334 t:9.3s +tttg: c81/131 lr:0.000323 t:9.4s +tttg: c82/131 lr:0.000311 t:9.4s +tttg: c83/131 lr:0.000300 t:9.5s +tttg: c84/131 lr:0.000289 t:9.6s +tttg: c85/131 lr:0.000278 t:9.6s +tttg: c86/131 lr:0.000268 t:9.7s +tttg: c87/131 lr:0.000257 t:9.8s +tttg: c88/131 lr:0.000247 t:9.8s +tttg: c89/131 lr:0.000236 t:9.9s +tttg: c90/131 lr:0.000226 t:10.0s +tttg: c91/131 lr:0.000216 t:10.0s +tttg: c92/131 lr:0.000206 t:10.1s +tttg: c93/131 lr:0.000196 t:10.2s +tttg: c94/131 lr:0.000187 t:10.3s +tttg: c95/131 lr:0.000178 t:10.3s +tttg: c96/131 lr:0.000168 t:10.4s +tttg: c97/131 lr:0.000159 t:10.5s +tttg: c98/131 lr:0.000151 t:10.5s +tttg: c99/131 lr:0.000142 t:10.6s +tttg: c100/131 lr:0.000134 t:10.7s +tttg: c101/131 lr:0.000126 t:10.7s +tttg: c102/131 lr:0.000118 t:10.8s +tttg: c103/131 lr:0.000110 t:12.3s +tttg: c104/131 lr:0.000103 t:12.4s +tttg: c105/131 lr:0.000095 t:12.5s +tttg: c106/131 lr:0.000089 t:12.6s +tttg: c107/131 lr:0.000082 t:12.6s +tttg: c108/131 lr:0.000075 t:12.7s +tttg: c109/131 lr:0.000069 t:12.8s +tttg: c110/131 lr:0.000063 t:12.8s +tttg: c111/131 lr:0.000057 t:12.9s +tttg: c112/131 lr:0.000052 t:13.0s +tttg: c113/131 lr:0.000047 t:13.0s +tttg: c114/131 lr:0.000042 t:13.1s +tttg: c115/131 lr:0.000037 t:13.2s +tttg: c116/131 lr:0.000032 t:13.2s +tttg: c117/131 lr:0.000028 t:13.3s +tttg: c118/131 lr:0.000024 t:13.4s +tttg: c119/131 lr:0.000021 t:13.4s +tttg: c120/131 lr:0.000018 t:13.5s +tttg: c121/131 lr:0.000015 t:13.6s +tttg: c122/131 lr:0.000012 t:13.6s +tttg: c123/131 lr:0.000009 t:13.7s +tttg: c124/131 lr:0.000007 t:13.8s +tttg: c125/131 lr:0.000005 t:13.8s +tttg: c126/131 lr:0.000004 t:13.9s +tttg: c127/131 lr:0.000002 t:14.0s +tttg: c128/131 lr:0.000001 t:14.0s +tttg: c129/131 lr:0.000001 t:14.1s +tttg: c130/131 lr:0.000000 t:14.2s +ttpr: phase:1/3 t:185.3s +ttp: b754/782 bl:2.2870 bb:1.0579 rl:2.2607 rb:1.0760 dl:3345-3397 gd:0 +ttp: b751/782 bl:2.3137 bb:1.0358 rl:2.2673 rb:1.0707 dl:3150-3221 gd:0 +ttpp: phase:2/3 pd:2128 gd:1666 t:251.4s +tttg: c1/219 lr:0.001000 t:0.1s +tttg: c2/219 lr:0.001000 t:0.1s +tttg: c3/219 lr:0.001000 t:0.2s +tttg: c4/219 lr:0.001000 t:0.3s +tttg: c5/219 lr:0.000999 t:0.3s +tttg: c6/219 lr:0.000999 t:0.4s +tttg: c7/219 lr:0.000998 t:0.5s +tttg: c8/219 lr:0.000997 t:0.5s +tttg: c9/219 lr:0.000997 t:0.6s +tttg: c10/219 lr:0.000996 t:0.7s +tttg: c11/219 lr:0.000995 t:0.7s +tttg: c12/219 lr:0.000994 t:0.8s +tttg: c13/219 lr:0.000993 t:0.9s +tttg: c14/219 lr:0.000991 t:1.0s +tttg: c15/219 lr:0.000990 t:1.0s +tttg: c16/219 lr:0.000988 t:1.1s +tttg: c17/219 lr:0.000987 t:1.2s +tttg: c18/219 lr:0.000985 t:1.2s +tttg: c19/219 lr:0.000983 t:1.3s +tttg: c20/219 lr:0.000981 t:1.4s +tttg: c21/219 lr:0.000979 t:1.4s +tttg: c22/219 lr:0.000977 t:1.5s +tttg: c23/219 lr:0.000975 t:1.6s +tttg: c24/219 lr:0.000973 t:1.6s +tttg: c25/219 lr:0.000970 t:1.7s +tttg: c26/219 lr:0.000968 t:1.8s +tttg: c27/219 lr:0.000965 t:1.8s +tttg: c28/219 lr:0.000963 t:1.9s +tttg: c29/219 lr:0.000960 
t:2.0s +tttg: c30/219 lr:0.000957 t:2.0s +tttg: c31/219 lr:0.000954 t:2.1s +tttg: c32/219 lr:0.000951 t:2.2s +tttg: c33/219 lr:0.000948 t:2.3s +tttg: c34/219 lr:0.000945 t:2.3s +tttg: c35/219 lr:0.000941 t:2.4s +tttg: c36/219 lr:0.000938 t:2.5s +tttg: c37/219 lr:0.000934 t:2.5s +tttg: c38/219 lr:0.000931 t:2.6s +tttg: c39/219 lr:0.000927 t:2.7s +tttg: c40/219 lr:0.000923 t:2.7s +tttg: c41/219 lr:0.000919 t:2.8s +tttg: c42/219 lr:0.000915 t:2.9s +tttg: c43/219 lr:0.000911 t:2.9s +tttg: c44/219 lr:0.000907 t:3.0s +tttg: c45/219 lr:0.000903 t:3.1s +tttg: c46/219 lr:0.000898 t:3.1s +tttg: c47/219 lr:0.000894 t:3.2s +tttg: c48/219 lr:0.000890 t:3.3s +tttg: c49/219 lr:0.000885 t:3.3s +tttg: c50/219 lr:0.000880 t:3.4s +tttg: c51/219 lr:0.000876 t:3.5s +tttg: c52/219 lr:0.000871 t:3.5s +tttg: c53/219 lr:0.000866 t:3.6s +tttg: c54/219 lr:0.000861 t:3.7s +tttg: c55/219 lr:0.000856 t:3.8s +tttg: c56/219 lr:0.000851 t:3.8s +tttg: c57/219 lr:0.000846 t:3.9s +tttg: c58/219 lr:0.000841 t:4.0s +tttg: c59/219 lr:0.000835 t:4.0s +tttg: c60/219 lr:0.000830 t:4.1s +tttg: c61/219 lr:0.000824 t:4.2s +tttg: c62/219 lr:0.000819 t:4.2s +tttg: c63/219 lr:0.000813 t:4.3s +tttg: c64/219 lr:0.000808 t:4.4s +tttg: c65/219 lr:0.000802 t:4.4s +tttg: c66/219 lr:0.000796 t:4.5s +tttg: c67/219 lr:0.000790 t:4.6s +tttg: c68/219 lr:0.000784 t:4.6s +tttg: c69/219 lr:0.000779 t:4.7s +tttg: c70/219 lr:0.000773 t:4.8s +tttg: c71/219 lr:0.000766 t:4.8s +tttg: c72/219 lr:0.000760 t:4.9s +tttg: c73/219 lr:0.000754 t:5.0s +tttg: c74/219 lr:0.000748 t:5.1s +tttg: c75/219 lr:0.000742 t:5.1s +tttg: c76/219 lr:0.000735 t:5.2s +tttg: c77/219 lr:0.000729 t:5.3s +tttg: c78/219 lr:0.000722 t:5.3s +tttg: c79/219 lr:0.000716 t:5.4s +tttg: c80/219 lr:0.000709 t:5.5s +tttg: c81/219 lr:0.000703 t:5.5s +tttg: c82/219 lr:0.000696 t:5.6s +tttg: c83/219 lr:0.000690 t:5.7s +tttg: c84/219 lr:0.000683 t:5.7s +tttg: c85/219 lr:0.000676 t:5.8s +tttg: c86/219 lr:0.000670 t:5.9s +tttg: c87/219 lr:0.000663 t:5.9s +tttg: c88/219 lr:0.000656 t:6.0s +tttg: c89/219 lr:0.000649 t:6.1s +tttg: c90/219 lr:0.000642 t:6.1s +tttg: c91/219 lr:0.000635 t:6.2s +tttg: c92/219 lr:0.000628 t:6.3s +tttg: c93/219 lr:0.000621 t:6.3s +tttg: c94/219 lr:0.000614 t:6.4s +tttg: c95/219 lr:0.000607 t:6.5s +tttg: c96/219 lr:0.000600 t:6.6s +tttg: c97/219 lr:0.000593 t:6.6s +tttg: c98/219 lr:0.000586 t:6.7s +tttg: c99/219 lr:0.000579 t:6.8s +tttg: c100/219 lr:0.000572 t:6.8s +tttg: c101/219 lr:0.000565 t:6.9s +tttg: c102/219 lr:0.000558 t:7.0s +tttg: c103/219 lr:0.000550 t:7.0s +tttg: c104/219 lr:0.000543 t:7.1s +tttg: c105/219 lr:0.000536 t:7.2s +tttg: c106/219 lr:0.000529 t:7.2s +tttg: c107/219 lr:0.000522 t:7.3s +tttg: c108/219 lr:0.000514 t:7.4s +tttg: c109/219 lr:0.000507 t:7.4s +tttg: c110/219 lr:0.000500 t:7.5s +tttg: c111/219 lr:0.000493 t:7.6s +tttg: c112/219 lr:0.000486 t:7.6s +tttg: c113/219 lr:0.000478 t:7.7s +tttg: c114/219 lr:0.000471 t:7.8s +tttg: c115/219 lr:0.000464 t:7.8s +tttg: c116/219 lr:0.000457 t:7.9s +tttg: c117/219 lr:0.000450 t:8.0s +tttg: c118/219 lr:0.000442 t:8.1s +tttg: c119/219 lr:0.000435 t:8.1s +tttg: c120/219 lr:0.000428 t:8.2s +tttg: c121/219 lr:0.000421 t:8.3s +tttg: c122/219 lr:0.000414 t:8.3s +tttg: c123/219 lr:0.000407 t:8.4s +tttg: c124/219 lr:0.000400 t:8.5s +tttg: c125/219 lr:0.000393 t:8.5s +tttg: c126/219 lr:0.000386 t:8.6s +tttg: c127/219 lr:0.000379 t:8.7s +tttg: c128/219 lr:0.000372 t:8.7s +tttg: c129/219 lr:0.000365 t:8.8s +tttg: c130/219 lr:0.000358 t:8.9s +tttg: c131/219 lr:0.000351 t:9.0s +tttg: c132/219 lr:0.000344 t:9.0s +tttg: 
c133/219 lr:0.000337 t:9.1s +tttg: c134/219 lr:0.000330 t:9.2s +tttg: c135/219 lr:0.000324 t:9.2s +tttg: c136/219 lr:0.000317 t:9.3s +tttg: c137/219 lr:0.000310 t:9.4s +tttg: c138/219 lr:0.000304 t:9.4s +tttg: c139/219 lr:0.000297 t:9.5s +tttg: c140/219 lr:0.000291 t:9.6s +tttg: c141/219 lr:0.000284 t:9.6s +tttg: c142/219 lr:0.000278 t:9.7s +tttg: c143/219 lr:0.000271 t:9.8s +tttg: c144/219 lr:0.000265 t:9.8s +tttg: c145/219 lr:0.000258 t:9.9s +tttg: c146/219 lr:0.000252 t:10.0s +tttg: c147/219 lr:0.000246 t:10.1s +tttg: c148/219 lr:0.000240 t:10.1s +tttg: c149/219 lr:0.000234 t:10.2s +tttg: c150/219 lr:0.000227 t:10.3s +tttg: c151/219 lr:0.000221 t:10.3s +tttg: c152/219 lr:0.000216 t:10.4s +tttg: c153/219 lr:0.000210 t:10.5s +tttg: c154/219 lr:0.000204 t:10.5s +tttg: c155/219 lr:0.000198 t:10.6s +tttg: c156/219 lr:0.000192 t:10.7s +tttg: c157/219 lr:0.000187 t:10.7s +tttg: c158/219 lr:0.000181 t:10.8s +tttg: c159/219 lr:0.000176 t:10.9s +tttg: c160/219 lr:0.000170 t:11.0s +tttg: c161/219 lr:0.000165 t:11.0s +tttg: c162/219 lr:0.000159 t:11.1s +tttg: c163/219 lr:0.000154 t:11.2s +tttg: c164/219 lr:0.000149 t:11.2s +tttg: c165/219 lr:0.000144 t:11.3s +tttg: c166/219 lr:0.000139 t:11.4s +tttg: c167/219 lr:0.000134 t:11.4s +tttg: c168/219 lr:0.000129 t:11.5s +tttg: c169/219 lr:0.000124 t:11.6s +tttg: c170/219 lr:0.000120 t:11.7s +tttg: c171/219 lr:0.000115 t:11.7s +tttg: c172/219 lr:0.000110 t:11.8s +tttg: c173/219 lr:0.000106 t:11.9s +tttg: c174/219 lr:0.000102 t:11.9s +tttg: c175/219 lr:0.000097 t:12.0s +tttg: c176/219 lr:0.000093 t:12.1s +tttg: c177/219 lr:0.000089 t:12.1s +tttg: c178/219 lr:0.000085 t:12.2s +tttg: c179/219 lr:0.000081 t:12.3s +tttg: c180/219 lr:0.000077 t:12.3s +tttg: c181/219 lr:0.000073 t:12.4s +tttg: c182/219 lr:0.000069 t:12.5s +tttg: c183/219 lr:0.000066 t:12.6s +tttg: c184/219 lr:0.000062 t:12.6s +tttg: c185/219 lr:0.000059 t:12.7s +tttg: c186/219 lr:0.000055 t:12.8s +tttg: c187/219 lr:0.000052 t:12.8s +tttg: c188/219 lr:0.000049 t:12.9s +tttg: c189/219 lr:0.000046 t:13.0s +tttg: c190/219 lr:0.000043 t:13.0s +tttg: c191/219 lr:0.000040 t:13.1s +tttg: c192/219 lr:0.000037 t:13.2s +tttg: c193/219 lr:0.000035 t:13.2s +tttg: c194/219 lr:0.000032 t:13.3s +tttg: c195/219 lr:0.000030 t:13.4s +tttg: c196/219 lr:0.000027 t:13.4s +tttg: c197/219 lr:0.000025 t:13.5s +tttg: c198/219 lr:0.000023 t:13.6s +tttg: c199/219 lr:0.000021 t:13.6s +tttg: c200/219 lr:0.000019 t:13.7s +tttg: c201/219 lr:0.000017 t:13.8s +tttg: c202/219 lr:0.000015 t:13.8s +tttg: c203/219 lr:0.000013 t:13.9s +tttg: c204/219 lr:0.000012 t:14.0s +tttg: c205/219 lr:0.000010 t:14.1s +tttg: c206/219 lr:0.000009 t:14.1s +tttg: c207/219 lr:0.000007 t:14.2s +tttg: c208/219 lr:0.000006 t:14.3s +tttg: c209/219 lr:0.000005 t:14.3s +tttg: c210/219 lr:0.000004 t:14.4s +tttg: c211/219 lr:0.000003 t:14.5s +tttg: c212/219 lr:0.000003 t:14.5s +tttg: c213/219 lr:0.000002 t:14.6s +tttg: c214/219 lr:0.000001 t:14.7s +tttg: c215/219 lr:0.000001 t:14.7s +tttg: c216/219 lr:0.000000 t:14.8s +tttg: c217/219 lr:0.000000 t:14.9s +tttg: c218/219 lr:0.000000 t:15.0s +ttpr: phase:2/3 t:267.9s +ttp: b741/782 bl:2.3227 bb:1.0416 rl:2.2726 rb:1.0678 dl:2686-2730 gd:0 +ttp: b740/782 bl:2.2599 bb:1.0374 rl:2.2715 rb:1.0652 dl:2653-2686 gd:0 +ttpp: phase:3/3 pd:2960 gd:2500 t:283.1s +tttg: c1/289 lr:0.001000 t:0.1s +tttg: c2/289 lr:0.001000 t:0.1s +tttg: c3/289 lr:0.001000 t:0.2s +tttg: c4/289 lr:0.001000 t:0.3s +tttg: c5/289 lr:0.001000 t:0.3s +tttg: c6/289 lr:0.000999 t:0.4s +tttg: c7/289 lr:0.000999 t:0.5s +tttg: c8/289 lr:0.000999 t:0.5s 
+tttg: c9/289 lr:0.000998 t:0.6s +tttg: c10/289 lr:0.000998 t:0.7s +tttg: c11/289 lr:0.000997 t:0.8s +tttg: c12/289 lr:0.000996 t:0.8s +tttg: c13/289 lr:0.000996 t:0.9s +tttg: c14/289 lr:0.000995 t:1.0s +tttg: c15/289 lr:0.000994 t:1.0s +tttg: c16/289 lr:0.000993 t:1.1s +tttg: c17/289 lr:0.000992 t:1.2s +tttg: c18/289 lr:0.000991 t:1.2s +tttg: c19/289 lr:0.000990 t:1.3s +tttg: c20/289 lr:0.000989 t:1.4s +tttg: c21/289 lr:0.000988 t:1.4s +tttg: c22/289 lr:0.000987 t:1.5s +tttg: c23/289 lr:0.000986 t:1.6s +tttg: c24/289 lr:0.000984 t:1.6s +tttg: c25/289 lr:0.000983 t:1.7s +tttg: c26/289 lr:0.000982 t:1.8s +tttg: c27/289 lr:0.000980 t:1.8s +tttg: c28/289 lr:0.000978 t:1.9s +tttg: c29/289 lr:0.000977 t:2.0s +tttg: c30/289 lr:0.000975 t:2.0s +tttg: c31/289 lr:0.000973 t:2.1s +tttg: c32/289 lr:0.000972 t:2.2s +tttg: c33/289 lr:0.000970 t:2.2s +tttg: c34/289 lr:0.000968 t:2.3s +tttg: c35/289 lr:0.000966 t:2.4s +tttg: c36/289 lr:0.000964 t:2.4s +tttg: c37/289 lr:0.000962 t:2.5s +tttg: c38/289 lr:0.000960 t:2.6s +tttg: c39/289 lr:0.000958 t:2.7s +tttg: c40/289 lr:0.000955 t:2.7s +tttg: c41/289 lr:0.000953 t:2.8s +tttg: c42/289 lr:0.000951 t:2.9s +tttg: c43/289 lr:0.000948 t:2.9s +tttg: c44/289 lr:0.000946 t:3.0s +tttg: c45/289 lr:0.000944 t:3.1s +tttg: c46/289 lr:0.000941 t:3.1s +tttg: c47/289 lr:0.000938 t:3.2s +tttg: c48/289 lr:0.000936 t:3.3s +tttg: c49/289 lr:0.000933 t:3.3s +tttg: c50/289 lr:0.000930 t:3.4s +tttg: c51/289 lr:0.000927 t:3.5s +tttg: c52/289 lr:0.000925 t:3.5s +tttg: c53/289 lr:0.000922 t:3.6s +tttg: c54/289 lr:0.000919 t:3.7s +tttg: c55/289 lr:0.000916 t:3.7s +tttg: c56/289 lr:0.000913 t:3.8s +tttg: c57/289 lr:0.000910 t:3.9s +tttg: c58/289 lr:0.000906 t:3.9s +tttg: c59/289 lr:0.000903 t:4.0s +tttg: c60/289 lr:0.000900 t:4.1s +tttg: c61/289 lr:0.000897 t:4.1s +tttg: c62/289 lr:0.000893 t:4.2s +tttg: c63/289 lr:0.000890 t:4.3s +tttg: c64/289 lr:0.000887 t:4.4s +tttg: c65/289 lr:0.000883 t:4.4s +tttg: c66/289 lr:0.000879 t:4.5s +tttg: c67/289 lr:0.000876 t:4.6s +tttg: c68/289 lr:0.000872 t:4.6s +tttg: c69/289 lr:0.000869 t:4.7s +tttg: c70/289 lr:0.000865 t:4.8s +tttg: c71/289 lr:0.000861 t:4.8s +tttg: c72/289 lr:0.000857 t:4.9s +tttg: c73/289 lr:0.000854 t:5.0s +tttg: c74/289 lr:0.000850 t:5.0s +tttg: c75/289 lr:0.000846 t:5.1s +tttg: c76/289 lr:0.000842 t:5.2s +tttg: c77/289 lr:0.000838 t:5.2s +tttg: c78/289 lr:0.000834 t:5.3s +tttg: c79/289 lr:0.000830 t:5.4s +tttg: c80/289 lr:0.000826 t:5.4s +tttg: c81/289 lr:0.000821 t:5.5s +tttg: c82/289 lr:0.000817 t:5.6s +tttg: c83/289 lr:0.000813 t:5.6s +tttg: c84/289 lr:0.000809 t:5.7s +tttg: c85/289 lr:0.000804 t:5.8s +tttg: c86/289 lr:0.000800 t:5.8s +tttg: c87/289 lr:0.000796 t:5.9s +tttg: c88/289 lr:0.000791 t:6.0s +tttg: c89/289 lr:0.000787 t:6.1s +tttg: c90/289 lr:0.000782 t:6.1s +tttg: c91/289 lr:0.000778 t:6.2s +tttg: c92/289 lr:0.000773 t:6.3s +tttg: c93/289 lr:0.000769 t:6.3s +tttg: c94/289 lr:0.000764 t:6.4s +tttg: c95/289 lr:0.000759 t:6.5s +tttg: c96/289 lr:0.000755 t:6.5s +tttg: c97/289 lr:0.000750 t:6.6s +tttg: c98/289 lr:0.000745 t:6.7s +tttg: c99/289 lr:0.000740 t:6.7s +tttg: c100/289 lr:0.000736 t:6.8s +tttg: c101/289 lr:0.000731 t:6.9s +tttg: c102/289 lr:0.000726 t:6.9s +tttg: c103/289 lr:0.000721 t:7.0s +tttg: c104/289 lr:0.000716 t:7.1s +tttg: c105/289 lr:0.000711 t:7.1s +tttg: c106/289 lr:0.000706 t:7.2s +tttg: c107/289 lr:0.000701 t:7.3s +tttg: c108/289 lr:0.000696 t:7.4s +tttg: c109/289 lr:0.000691 t:7.4s +tttg: c110/289 lr:0.000686 t:7.5s +tttg: c111/289 lr:0.000681 t:7.6s +tttg: c112/289 lr:0.000676 t:7.6s +tttg: 
c113/289 lr:0.000671 t:7.7s +tttg: c114/289 lr:0.000666 t:7.8s +tttg: c115/289 lr:0.000661 t:7.8s +tttg: c116/289 lr:0.000656 t:7.9s +tttg: c117/289 lr:0.000650 t:8.0s +tttg: c118/289 lr:0.000645 t:8.0s +tttg: c119/289 lr:0.000640 t:8.1s +tttg: c120/289 lr:0.000635 t:8.2s +tttg: c121/289 lr:0.000629 t:8.2s +tttg: c122/289 lr:0.000624 t:8.3s +tttg: c123/289 lr:0.000619 t:8.4s +tttg: c124/289 lr:0.000614 t:8.4s +tttg: c125/289 lr:0.000608 t:8.5s +tttg: c126/289 lr:0.000603 t:8.6s +tttg: c127/289 lr:0.000598 t:8.6s +tttg: c128/289 lr:0.000592 t:8.7s +tttg: c129/289 lr:0.000587 t:8.8s +tttg: c130/289 lr:0.000581 t:8.8s +tttg: c131/289 lr:0.000576 t:8.9s +tttg: c132/289 lr:0.000571 t:9.0s +tttg: c133/289 lr:0.000565 t:9.0s +tttg: c134/289 lr:0.000560 t:9.1s +tttg: c135/289 lr:0.000554 t:9.2s +tttg: c136/289 lr:0.000549 t:9.3s +tttg: c137/289 lr:0.000544 t:9.3s +tttg: c138/289 lr:0.000538 t:9.4s +tttg: c139/289 lr:0.000533 t:9.5s +tttg: c140/289 lr:0.000527 t:9.5s +tttg: c141/289 lr:0.000522 t:9.6s +tttg: c142/289 lr:0.000516 t:9.7s +tttg: c143/289 lr:0.000511 t:9.7s +tttg: c144/289 lr:0.000505 t:9.8s +tttg: c145/289 lr:0.000500 t:9.9s +tttg: c146/289 lr:0.000495 t:9.9s +tttg: c147/289 lr:0.000489 t:10.0s +tttg: c148/289 lr:0.000484 t:10.1s +tttg: c149/289 lr:0.000478 t:10.1s +tttg: c150/289 lr:0.000473 t:10.2s +tttg: c151/289 lr:0.000467 t:10.3s +tttg: c152/289 lr:0.000462 t:10.3s +tttg: c153/289 lr:0.000456 t:10.4s +tttg: c154/289 lr:0.000451 t:10.5s +tttg: c155/289 lr:0.000446 t:10.5s +tttg: c156/289 lr:0.000440 t:10.6s +tttg: c157/289 lr:0.000435 t:10.7s +tttg: c158/289 lr:0.000429 t:10.7s +tttg: c159/289 lr:0.000424 t:10.8s +tttg: c160/289 lr:0.000419 t:10.9s +tttg: c161/289 lr:0.000413 t:11.0s +tttg: c162/289 lr:0.000408 t:11.0s +tttg: c163/289 lr:0.000402 t:11.1s +tttg: c164/289 lr:0.000397 t:11.2s +tttg: c165/289 lr:0.000392 t:11.2s +tttg: c166/289 lr:0.000386 t:11.3s +tttg: c167/289 lr:0.000381 t:11.4s +tttg: c168/289 lr:0.000376 t:11.4s +tttg: c169/289 lr:0.000371 t:11.5s +tttg: c170/289 lr:0.000365 t:11.6s +tttg: c171/289 lr:0.000360 t:11.6s +tttg: c172/289 lr:0.000355 t:11.7s +tttg: c173/289 lr:0.000350 t:11.8s +tttg: c174/289 lr:0.000344 t:11.8s +tttg: c175/289 lr:0.000339 t:11.9s +tttg: c176/289 lr:0.000334 t:12.0s +tttg: c177/289 lr:0.000329 t:12.0s +tttg: c178/289 lr:0.000324 t:12.1s +tttg: c179/289 lr:0.000319 t:12.2s +tttg: c180/289 lr:0.000314 t:12.2s +tttg: c181/289 lr:0.000309 t:12.3s +tttg: c182/289 lr:0.000304 t:12.4s +tttg: c183/289 lr:0.000299 t:12.4s +tttg: c184/289 lr:0.000294 t:12.5s +tttg: c185/289 lr:0.000289 t:12.6s +tttg: c186/289 lr:0.000284 t:12.7s +tttg: c187/289 lr:0.000279 t:12.7s +tttg: c188/289 lr:0.000274 t:12.8s +tttg: c189/289 lr:0.000269 t:12.9s +tttg: c190/289 lr:0.000264 t:12.9s +tttg: c191/289 lr:0.000260 t:13.0s +tttg: c192/289 lr:0.000255 t:13.1s +tttg: c193/289 lr:0.000250 t:13.1s +tttg: c194/289 lr:0.000245 t:13.2s +tttg: c195/289 lr:0.000241 t:13.3s +tttg: c196/289 lr:0.000236 t:13.3s +tttg: c197/289 lr:0.000231 t:13.4s +tttg: c198/289 lr:0.000227 t:13.5s +tttg: c199/289 lr:0.000222 t:13.5s +tttg: c200/289 lr:0.000218 t:13.6s +tttg: c201/289 lr:0.000213 t:13.7s +tttg: c202/289 lr:0.000209 t:13.8s +tttg: c203/289 lr:0.000204 t:13.8s +tttg: c204/289 lr:0.000200 t:13.9s +tttg: c205/289 lr:0.000196 t:14.0s +tttg: c206/289 lr:0.000191 t:14.0s +tttg: c207/289 lr:0.000187 t:14.1s +tttg: c208/289 lr:0.000183 t:14.2s +tttg: c209/289 lr:0.000179 t:14.2s +tttg: c210/289 lr:0.000174 t:14.3s +tttg: c211/289 lr:0.000170 t:14.4s +tttg: c212/289 lr:0.000166 
t:14.4s +tttg: c213/289 lr:0.000162 t:14.5s +tttg: c214/289 lr:0.000158 t:14.6s +tttg: c215/289 lr:0.000154 t:14.6s +tttg: c216/289 lr:0.000150 t:14.7s +tttg: c217/289 lr:0.000146 t:14.8s +tttg: c218/289 lr:0.000143 t:14.8s +tttg: c219/289 lr:0.000139 t:14.9s +tttg: c220/289 lr:0.000135 t:15.0s +tttg: c221/289 lr:0.000131 t:15.1s +tttg: c222/289 lr:0.000128 t:15.1s +tttg: c223/289 lr:0.000124 t:15.2s +tttg: c224/289 lr:0.000121 t:15.3s +tttg: c225/289 lr:0.000117 t:15.3s +tttg: c226/289 lr:0.000113 t:15.4s +tttg: c227/289 lr:0.000110 t:15.5s +tttg: c228/289 lr:0.000107 t:15.5s +tttg: c229/289 lr:0.000103 t:15.6s +tttg: c230/289 lr:0.000100 t:15.7s +tttg: c231/289 lr:0.000097 t:15.7s +tttg: c232/289 lr:0.000094 t:15.8s +tttg: c233/289 lr:0.000090 t:15.9s +tttg: c234/289 lr:0.000087 t:15.9s +tttg: c235/289 lr:0.000084 t:16.0s +tttg: c236/289 lr:0.000081 t:16.1s +tttg: c237/289 lr:0.000078 t:16.1s +tttg: c238/289 lr:0.000075 t:16.2s +tttg: c239/289 lr:0.000073 t:16.3s +tttg: c240/289 lr:0.000070 t:16.3s +tttg: c241/289 lr:0.000067 t:16.4s +tttg: c242/289 lr:0.000064 t:16.5s +tttg: c243/289 lr:0.000062 t:16.5s +tttg: c244/289 lr:0.000059 t:16.6s +tttg: c245/289 lr:0.000056 t:16.7s +tttg: c246/289 lr:0.000054 t:16.8s +tttg: c247/289 lr:0.000052 t:16.8s +tttg: c248/289 lr:0.000049 t:16.9s +tttg: c249/289 lr:0.000047 t:17.0s +tttg: c250/289 lr:0.000045 t:17.0s +tttg: c251/289 lr:0.000042 t:17.1s +tttg: c252/289 lr:0.000040 t:17.2s +tttg: c253/289 lr:0.000038 t:17.2s +tttg: c254/289 lr:0.000036 t:17.3s +tttg: c255/289 lr:0.000034 t:17.4s +tttg: c256/289 lr:0.000032 t:17.4s +tttg: c257/289 lr:0.000030 t:17.5s +tttg: c258/289 lr:0.000028 t:17.6s +tttg: c259/289 lr:0.000027 t:17.6s +tttg: c260/289 lr:0.000025 t:17.7s +tttg: c261/289 lr:0.000023 t:17.8s +tttg: c262/289 lr:0.000022 t:17.8s +tttg: c263/289 lr:0.000020 t:17.9s +tttg: c264/289 lr:0.000018 t:18.0s +tttg: c265/289 lr:0.000017 t:18.0s +tttg: c266/289 lr:0.000016 t:18.1s +tttg: c267/289 lr:0.000014 t:18.2s +tttg: c268/289 lr:0.000013 t:18.3s +tttg: c269/289 lr:0.000012 t:18.3s +tttg: c270/289 lr:0.000011 t:18.4s +tttg: c271/289 lr:0.000010 t:18.5s +tttg: c272/289 lr:0.000009 t:18.5s +tttg: c273/289 lr:0.000008 t:18.6s +tttg: c274/289 lr:0.000007 t:18.7s +tttg: c275/289 lr:0.000006 t:18.7s +tttg: c276/289 lr:0.000005 t:18.8s +tttg: c277/289 lr:0.000004 t:18.9s +tttg: c278/289 lr:0.000004 t:18.9s +tttg: c279/289 lr:0.000003 t:19.0s +tttg: c280/289 lr:0.000002 t:19.1s +tttg: c281/289 lr:0.000002 t:19.1s +tttg: c282/289 lr:0.000001 t:19.2s +tttg: c283/289 lr:0.000001 t:19.3s +tttg: c284/289 lr:0.000001 t:19.3s +tttg: c285/289 lr:0.000000 t:19.4s +tttg: c286/289 lr:0.000000 t:19.5s +tttg: c287/289 lr:0.000000 t:19.6s +tttg: c288/289 lr:0.000000 t:19.6s +ttpr: phase:3/3 t:304.3s +ttp: b728/782 bl:2.3652 bb:1.0828 rl:2.2780 rb:1.0664 dl:2306-2324 gd:1 +ttp: b726/782 bl:2.3362 bb:1.0391 rl:2.2817 rb:1.0646 dl:2254-2276 gd:1 +ttp: b716/782 bl:2.2543 bb:1.0417 rl:2.2802 rb:1.0633 dl:2054-2069 gd:1 +ttp: b707/782 bl:2.3574 bb:1.0476 rl:2.2839 rb:1.0625 dl:1910-1923 gd:1 +ttp: b697/782 bl:2.3273 bb:1.0326 rl:2.2858 rb:1.0612 dl:1790-1803 gd:1 +ttp: b692/782 bl:2.2947 bb:1.0301 rl:2.2862 rb:1.0599 dl:1737-1746 gd:1 +ttp: b682/782 bl:2.3448 bb:1.0581 rl:2.2883 rb:1.0598 dl:1638-1646 gd:1 +ttp: b674/782 bl:2.4074 bb:1.0903 rl:2.2923 rb:1.0609 dl:1571-1578 gd:1 +ttp: b670/782 bl:2.3505 bb:1.0696 rl:2.2942 rb:1.0612 dl:1537-1544 gd:1 +ttp: b656/782 bl:2.3300 bb:1.1115 rl:2.2953 rb:1.0626 dl:1439-1445 gd:1 +ttp: b648/782 bl:2.2839 bb:1.0078 rl:2.2949 rb:1.0610 
dl:1387-1392 gd:1 +ttp: b647/782 bl:2.2781 bb:1.0339 rl:2.2945 rb:1.0603 dl:1382-1387 gd:1 +ttp: b638/782 bl:2.3468 bb:1.0692 rl:2.2958 rb:1.0605 dl:1325-1331 gd:1 +ttp: b627/782 bl:2.3785 bb:1.0709 rl:2.2977 rb:1.0607 dl:1266-1271 gd:1 +ttp: b619/782 bl:2.3254 bb:1.0605 rl:2.2983 rb:1.0607 dl:1221-1226 gd:1 +ttp: b609/782 bl:2.2724 bb:1.0181 rl:2.2978 rb:1.0598 dl:1172-1177 gd:1 +ttp: b602/782 bl:2.3796 bb:1.0496 rl:2.2994 rb:1.0596 dl:1141-1146 gd:1 +ttp: b598/782 bl:2.3564 bb:1.0658 rl:2.3005 rb:1.0597 dl:1124-1129 gd:1 +ttp: b587/782 bl:2.4053 bb:1.0674 rl:2.3023 rb:1.0599 dl:1077-1081 gd:1 +ttp: b579/782 bl:2.3443 bb:1.0361 rl:2.3031 rb:1.0595 dl:1044-1048 gd:1 +ttp: b573/782 bl:2.3675 bb:1.0672 rl:2.3041 rb:1.0596 dl:1021-1025 gd:1 +ttp: b564/782 bl:2.2867 bb:1.0175 rl:2.3038 rb:1.0589 dl:990-993 gd:1 +ttp: b556/782 bl:2.3773 bb:1.0688 rl:2.3049 rb:1.0591 dl:961-965 gd:1 +ttp: b546/782 bl:2.3295 bb:1.0357 rl:2.3053 rb:1.0587 dl:930-934 gd:1 +ttp: b539/782 bl:2.3388 bb:1.0368 rl:2.3057 rb:1.0584 dl:909-912 gd:1 +ttp: b536/782 bl:2.3243 bb:1.0467 rl:2.3060 rb:1.0583 dl:899-902 gd:1 +ttp: b527/782 bl:2.3454 bb:1.0294 rl:2.3065 rb:1.0579 dl:872-875 gd:1 +ttp: b519/782 bl:2.2964 bb:1.0418 rl:2.3064 rb:1.0577 dl:850-852 gd:1 +ttp: b512/782 bl:2.3030 bb:1.0636 rl:2.3063 rb:1.0577 dl:829-832 gd:1 +ttp: b504/782 bl:2.3207 bb:1.0353 rl:2.3065 rb:1.0575 dl:807-809 gd:1 +ttp: b496/782 bl:2.4205 bb:1.0479 rl:2.3078 rb:1.0574 dl:785-788 gd:1 +ttp: b489/782 bl:2.3888 bb:1.0747 rl:2.3086 rb:1.0576 dl:769-771 gd:1 +ttp: b482/782 bl:2.3266 bb:1.0460 rl:2.3088 rb:1.0574 dl:752-754 gd:1 +ttp: b474/782 bl:2.3406 bb:1.0717 rl:2.3091 rb:1.0576 dl:733-735 gd:1 +ttp: b466/782 bl:2.3875 bb:1.0293 rl:2.3099 rb:1.0573 dl:714-717 gd:1 +ttp: b458/782 bl:2.2041 bb:1.0222 rl:2.3089 rb:1.0570 dl:697-700 gd:1 +ttp: b450/782 bl:2.3571 bb:1.0332 rl:2.3093 rb:1.0567 dl:680-682 gd:1 +ttp: b428/782 bl:2.3049 bb:1.0503 rl:2.3093 rb:1.0567 dl:636-638 gd:1 +ttp: b420/782 bl:2.3597 bb:1.0534 rl:2.3097 rb:1.0567 dl:620-622 gd:1 +ttp: b412/782 bl:2.3329 bb:1.0460 rl:2.3099 rb:1.0566 dl:605-607 gd:1 +ttp: b404/782 bl:2.3656 bb:1.0594 rl:2.3103 rb:1.0566 dl:590-592 gd:1 +ttp: b397/782 bl:2.3518 bb:1.0430 rl:2.3106 rb:1.0565 dl:577-579 gd:1 +ttp: b388/782 bl:2.3094 bb:1.0415 rl:2.3106 rb:1.0564 dl:561-562 gd:1 +ttp: b380/782 bl:2.3565 bb:1.0867 rl:2.3109 rb:1.0566 dl:547-549 gd:1 +ttp: b372/782 bl:2.3305 bb:1.0468 rl:2.3110 rb:1.0565 dl:533-535 gd:1 +ttp: b364/782 bl:2.3490 bb:1.0621 rl:2.3113 rb:1.0566 dl:521-522 gd:1 +ttp: b356/782 bl:2.3413 bb:1.0543 rl:2.3115 rb:1.0565 dl:506-508 gd:1 +ttp: b348/782 bl:2.3588 bb:1.0580 rl:2.3117 rb:1.0566 dl:494-495 gd:1 +ttp: b340/782 bl:2.4575 bb:1.0803 rl:2.3126 rb:1.0567 dl:482-483 gd:1 +ttp: b332/782 bl:2.3072 bb:1.0444 rl:2.3126 rb:1.0566 dl:469-471 gd:1 +ttp: b324/782 bl:2.3182 bb:1.0838 rl:2.3126 rb:1.0568 dl:458-459 gd:1 +ttp: b316/782 bl:2.3683 bb:1.0804 rl:2.3129 rb:1.0569 dl:445-446 gd:1 +ttp: b308/782 bl:2.4097 bb:1.0929 rl:2.3134 rb:1.0571 dl:433-435 gd:1 +ttp: b299/782 bl:2.3326 bb:1.1077 rl:2.3135 rb:1.0573 dl:420-421 gd:1 +ttp: b291/782 bl:2.2654 bb:1.0129 rl:2.3133 rb:1.0571 dl:407-409 gd:1 +ttp: b283/782 bl:2.3702 bb:1.1270 rl:2.3135 rb:1.0574 dl:396-398 gd:1 +ttp: b274/782 bl:2.3021 bb:1.0701 rl:2.3135 rb:1.0575 dl:384-385 gd:1 +ttp: b266/782 bl:2.3742 bb:1.1046 rl:2.3137 rb:1.0577 dl:374-375 gd:1 +ttp: b258/782 bl:2.4397 bb:1.0947 rl:2.3143 rb:1.0578 dl:364-365 gd:1 +ttp: b250/782 bl:2.3184 bb:1.0749 rl:2.3143 rb:1.0579 dl:354-355 gd:1 +ttp: b242/782 bl:2.3767 bb:1.1002 
rl:2.3145 rb:1.0581 dl:344-345 gd:1 +ttp: b232/782 bl:2.3078 bb:1.0877 rl:2.3145 rb:1.0582 dl:331-333 gd:1 +ttp: b223/782 bl:2.3291 bb:1.1245 rl:2.3146 rb:1.0584 dl:321-322 gd:1 +ttp: b215/782 bl:2.3919 bb:1.0964 rl:2.3148 rb:1.0585 dl:312-313 gd:1 +ttp: b207/782 bl:2.3487 bb:1.1287 rl:2.3149 rb:1.0588 dl:303-304 gd:1 +ttp: b199/782 bl:2.4358 bb:1.1460 rl:2.3153 rb:1.0591 dl:295-296 gd:1 +ttp: b191/782 bl:2.4133 bb:1.0979 rl:2.3157 rb:1.0592 dl:285-286 gd:1 +ttp: b183/782 bl:2.3270 bb:1.0717 rl:2.3157 rb:1.0592 dl:277-278 gd:1 +ttp: b175/782 bl:2.3892 bb:1.1545 rl:2.3159 rb:1.0595 dl:269-270 gd:1 +ttp: b168/782 bl:2.4579 bb:1.1891 rl:2.3163 rb:1.0598 dl:263-263 gd:1 +ttp: b160/782 bl:2.3851 bb:1.1138 rl:2.3165 rb:1.0600 dl:255-255 gd:1 +ttp: b151/782 bl:2.4622 bb:1.1382 rl:2.3169 rb:1.0602 dl:246-247 gd:1 +ttp: b142/782 bl:2.3797 bb:1.1077 rl:2.3171 rb:1.0603 dl:237-238 gd:1 +ttp: b134/782 bl:2.4262 bb:1.1377 rl:2.3174 rb:1.0605 dl:230-231 gd:1 +ttp: b126/782 bl:2.3935 bb:1.1402 rl:2.3175 rb:1.0607 dl:222-223 gd:1 +ttp: b118/782 bl:2.4628 bb:1.1241 rl:2.3179 rb:1.0608 dl:215-216 gd:1 +ttp: b109/782 bl:2.4960 bb:1.1895 rl:2.3183 rb:1.0611 dl:207-208 gd:1 +ttp: b103/782 bl:2.4622 bb:1.1852 rl:2.3186 rb:1.0614 dl:202-202 gd:1 +ttp: b94/782 bl:2.5631 bb:1.2111 rl:2.3191 rb:1.0617 dl:193-194 gd:1 +ttp: b81/782 bl:2.4744 bb:1.1230 rl:2.3194 rb:1.0618 dl:182-183 gd:1 +ttp: b74/782 bl:2.4824 bb:1.1520 rl:2.3197 rb:1.0620 dl:175-176 gd:1 +ttp: b66/782 bl:2.6397 bb:1.2353 rl:2.3203 rb:1.0623 dl:169-169 gd:1 +ttp: b57/782 bl:2.4635 bb:1.1601 rl:2.3206 rb:1.0625 dl:160-161 gd:1 +ttp: b48/782 bl:2.5261 bb:1.2179 rl:2.3209 rb:1.0627 dl:151-152 gd:1 +ttp: b39/782 bl:2.4423 bb:1.1822 rl:2.3211 rb:1.0629 dl:142-143 gd:1 +ttp: b32/782 bl:2.6105 bb:1.2172 rl:2.3215 rb:1.0631 dl:135-136 gd:1 +ttp: b23/782 bl:2.5967 bb:1.2196 rl:2.3219 rb:1.0633 dl:126-127 gd:1 +ttp: b15/782 bl:2.6562 bb:1.2336 rl:2.3223 rb:1.0635 dl:115-117 gd:1 +ttp: b5/782 bl:2.7223 bb:1.2386 rl:2.3227 rb:1.0637 dl:96-99 gd:1 +quantized_ttt_phased val_loss:2.32262534 val_bpb:1.06134915 eval_time:395545ms +total_eval_time:395.5s +=== Done: SEED=1234 === diff --git a/records/track_10min_16mb/2026-04-29_AWQ_lite_mixedprecision_GPTQ/train_seed314.log b/records/track_10min_16mb/2026-04-29_AWQ_lite_mixedprecision_GPTQ/train_seed314.log new file mode 100644 index 0000000000..9b5c27486f --- /dev/null +++ b/records/track_10min_16mb/2026-04-29_AWQ_lite_mixedprecision_GPTQ/train_seed314.log @@ -0,0 +1,951 @@ +=== Starting training: SEED=314 RUN_ID=awq_seed314_20260429_052651 === +=== Artifact dir: /workspace/pargolf-work/runs/awq_seed314_20260429_052651 === +=== NPROC=8 === +W0429 05:26:52.995000 181313 torch/distributed/run.py:803] +W0429 05:26:52.995000 181313 torch/distributed/run.py:803] ***************************************** +W0429 05:26:52.995000 181313 torch/distributed/run.py:803] Setting OMP_NUM_THREADS environment variable for each process to be 1 in default, to avoid your system being overloaded, please further tune the variable for optimal performance in your application as needed.
+W0429 05:26:52.995000 181313 torch/distributed/run.py:803] ***************************************** +Hyperparameters: + adam_eps: 1e-08 + adam_wd: 0.02 + artifact_dir: /workspace/pargolf-work/runs/awq_seed314_20260429_052651 + attn_clip_sigmas: 13.0 + attn_out_gate_enabled: False + attn_out_gate_src: proj + awq_lite_bits: 8 + awq_lite_enabled: True + awq_lite_group_size: 64 + awq_lite_group_top_k: 1 + beta1: 0.9 + beta2: 0.99 + caseops_enabled: True + compressor: pergroup + data_dir: ./data/ + datasets_dir: /workspace/pargolf-work/data/datasets/fineweb10B_sp8192_lossless_caps_caseops_v1_reserved/datasets/fineweb10B_sp8192_lossless_caps_caseops_v1_reserved + distributed: True + ema_decay: 0.9965 + embed_bits: 7 + embed_clip_sigmas: 14.0 + embed_lr: 0.6 + embed_wd: 0.085 + enable_looping_at: 0.35 + eval_seq_len: 2048 + eval_stride: 64 + fused_ce_enabled: True + gate_window: 12 + gated_attn_enabled: False + gated_attn_init_std: 0.01 + gated_attn_quant_gate: True + global_ttt_batch_seqs: 32 + global_ttt_chunk_tokens: 32768 + global_ttt_epochs: 1 + global_ttt_grad_clip: 1.0 + global_ttt_lr: 0.001 + global_ttt_momentum: 0.9 + global_ttt_respect_doc_boundaries: True + global_ttt_warmup_chunks: 0 + global_ttt_warmup_start_lr: 0.0 + gptq_calibration_batches: 16 + gptq_reserve_seconds: 0.5 + grad_accum_steps: 1 + grad_clip_norm: 0.3 + is_main_process: True + iterations: 20000 + ln_scale: True + local_rank: 0 + logfile: /workspace/pargolf-work/runs/awq_seed314_20260429_052651/awq_seed314_20260429_052651.txt + logit_softcap: 30.0 + loop_end: 5 + loop_start: 3 + lqer_asym_enabled: True + lqer_asym_group: 64 + lqer_enabled: True + lqer_factor_bits: 4 + lqer_gain_select: False + lqer_rank: 4 + lqer_scope: all + lqer_top_k: 3 + matrix_bits: 6 + matrix_clip_sigmas: 12.85 + matrix_lr: 0.026 + max_wallclock_seconds: 600.0 + min_lr: 0.1 + mlp_clip_sigmas: 11.5 + mlp_mult: 4.0 + model_dim: 512 + model_path: /workspace/pargolf-work/runs/awq_seed314_20260429_052651/final_model.pt + muon_backend_steps: 5 + muon_momentum: 0.97 + muon_momentum_warmup_start: 0.92 + muon_momentum_warmup_steps: 1500 + muon_row_normalize: True + muon_wd: 0.095 + num_heads: 8 + num_kv_heads: 4 + num_layers: 11 + num_loops: 2 + parallel_final_lane: mean + parallel_start_layer: 8 + phased_ttt_num_phases: 3 + phased_ttt_prefix_docs: 2500 + qk_gain_init: 5.0 + quantized_model_path: /workspace/pargolf-work/runs/awq_seed314_20260429_052651/final_model.int6.ptz + rank: 0 + rope_base: 10000.0 + rope_dims: 16 + rope_train_seq_len: 2048 + rope_yarn: False + run_id: awq_seed314_20260429_052651 + scalar_lr: 0.02 + seed: 314 + skip_gates_enabled: True + smear_gate_enabled: True + sparse_attn_gate_enabled: True + sparse_attn_gate_init_std: 0.0 + sparse_attn_gate_scale: 0.5 + tie_embeddings: True + tied_embed_init_std: 0.005 + tied_embed_lr: 0.03 + tokenizer_path: /workspace/pargolf/records/track_10min_16mb/2026-04-27_SP8192_LQER_SparseGate_BOSSmearFix_9HpStack_1.0611/tokenizers/fineweb_8192_bpe_lossless_caps_caseops_v1_reserved.model + train_batch_tokens: 786432 + train_files: /workspace/pargolf-work/data/datasets/fineweb10B_sp8192_lossless_caps_caseops_v1_reserved/datasets/fineweb10B_sp8192_lossless_caps_caseops_v1_reserved/fineweb_train_*.bin + train_log_every: 500 + train_seq_len: 2048 + ttt_batch_size: 64 + ttt_beta1: 0.0 + ttt_beta2: 0.99 + ttt_chunk_size: 48 + ttt_enabled: True + ttt_eval_batches: + ttt_eval_seq_len: 2048 + ttt_grad_steps: 1 + ttt_k_lora: True + ttt_lora_lr: 0.0001 + ttt_lora_rank: 80 + ttt_mlp_lora: True + ttt_o_lora: True + 
ttt_optimizer: adam + ttt_weight_decay: 0.5 + val_batch_tokens: 524288 + val_bytes_files: /workspace/pargolf-work/data/datasets/fineweb10B_sp8192_lossless_caps_caseops_v1_reserved/datasets/fineweb10B_sp8192_lossless_caps_caseops_v1_reserved/fineweb_val_bytes_*.bin + val_doc_fraction: 1.0 + val_files: /workspace/pargolf-work/data/datasets/fineweb10B_sp8192_lossless_caps_caseops_v1_reserved/datasets/fineweb10B_sp8192_lossless_caps_caseops_v1_reserved/fineweb_val_*.bin + val_loss_every: 4000 + vocab_size: 8192 + warmdown_frac: 0.85 + warmup_steps: 20 + world_size: 8 + xsa_last_n: 11 +train_shards: 80 +val_tokens: 47851520 +model_params:35945671 +gptq:reserving 0s, effective=599500ms +warmup_cu_buckets:64,128,192,256 iters_each:3 +warmup_step: 1/20 +warmup_step: 2/20 +warmup_step: 3/20 +warmup_step: 4/20 +warmup_step: 5/20 +warmup_step: 6/20 +warmup_step: 10/20 +warmup_step: 20/20 +loop_warmup:enabled encoder:[0, 1, 2, 3, 4, 5, 3, 4] decoder:[5, 3, 4, 5, 6, 7, 8, 9, 10] +loop_warmup_step: 1/20 +loop_warmup_step: 2/20 +loop_warmup_step: 3/20 +loop_warmup_step: 4/20 +loop_warmup_step: 5/20 +loop_warmup_step: 6/20 +loop_warmup_step: 10/20 +loop_warmup_step: 20/20 +0/20000 val_loss: 8.9980 val_bpb: 4.1115 +1/20000 train_loss: 9.0022 train_time: 0.0m tok/s: 17511015 +2/20000 train_loss: 12.6318 train_time: 0.0m tok/s: 11153491 +3/20000 train_loss: 10.3697 train_time: 0.0m tok/s: 9879286 +4/20000 train_loss: 9.1871 train_time: 0.0m tok/s: 9333995 +5/20000 train_loss: 7.9130 train_time: 0.0m tok/s: 9041301 +500/20000 train_loss: 2.6141 train_time: 0.8m tok/s: 8313359 +1000/20000 train_loss: 2.7425 train_time: 1.6m tok/s: 8306581 +1500/20000 train_loss: 2.7117 train_time: 2.4m tok/s: 8292448 +2000/20000 train_loss: 2.6242 train_time: 3.2m tok/s: 8294631 +layer_loop:enabled step:2212 frac:0.350 encoder:[0, 1, 2, 3, 4, 5, 3, 4] decoder:[5, 3, 4, 5, 6, 7, 8, 9, 10] +2500/20000 train_loss: 2.4674 train_time: 4.2m tok/s: 7868142 +3000/20000 train_loss: 2.4906 train_time: 5.4m tok/s: 7328089 +3500/20000 train_loss: 2.6190 train_time: 6.5m tok/s: 7029380 +4000/20000 train_loss: 2.3655 train_time: 7.7m tok/s: 6804880 +4000/20000 val_loss: 2.4283 val_bpb: 1.1095 +4500/20000 train_loss: 2.5321 train_time: 8.9m tok/s: 6653369 +4986/20000 val_loss: 2.3549 val_bpb: 1.0760 +stopping_early: wallclock_cap train_time: 599550ms step: 4986/20000 +peak memory allocated: 41707 MiB reserved: 47048 MiB +ema:applying EMA weights +diagnostic pre-quantization post-ema val_loss:2.32934097 val_bpb:1.06434881 eval_time:7976ms +Serialized model: 135417533 bytes +Code size (uncompressed): 168543 bytes +Code size (compressed): 33787 bytes +GPTQ:collecting Hessians from calibration data... +GPTQ:collected 67 Hessians in 4.0s +Quantized weights: + gate_int8_row: blocks.attn.attn_gate_w + gptq (int6): blocks.attn.c_k.weight, blocks.attn.c_q.weight, blocks.attn.c_v.weight, blocks.attn.proj.weight, blocks.mlp.fc.weight, blocks.mlp.proj.weight + gptq (int6)+lqer_asym: blocks.mlp.fc.weight + gptq (int7)+awqgrpint8+lqer_asym: tok_emb.weight + passthrough (float16): blocks.attn.q_gain, blocks.attn_scale, blocks.mlp_scale, blocks.resid_mix, parallel_post_lambdas, parallel_resid_lambdas, skip_gates, skip_weights, smear_gate.weight, smear_lambda +Serialize: per-group lrzip compression... +Serialize: per-group compression done in 117.9s +Serialized model quantized+pergroup: 15942682 bytes +Total submission size quantized+pergroup: 15976469 bytes +Deserialize: per-group lrzip decompression... 
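The "Quantized weights" map above pairs each tensor family with its codec: GPTQ int6 for the attention/MLP matrices, int7 plus AWQ per-group int8 scaling for the tied embedding, and asymmetric LQER low-rank corrections on top, with group size 64 per the lqer_asym_group / awq_lite_group_size hyperparameters. Below is a minimal sketch of asymmetric per-group affine quantization; it uses plain round-to-nearest where the actual pipeline applies Hessian-compensated GPTQ rounding, and quantize_group_asym is a hypothetical helper, not a function from train_gpt.py:

    import torch

    def quantize_group_asym(w: torch.Tensor, bits: int = 6, group: int = 64):
        # Illustrative round-to-nearest asymmetric per-group quantizer.
        # Assumes w.numel() is a multiple of `group`. The record's real path
        # uses GPTQ (error feedback against calibration Hessians) plus an
        # LQER low-rank residual; this only shows the storage format:
        # one (scale, zero) pair per group of 64 weights.
        wg = w.reshape(-1, group)
        lo = wg.min(dim=1, keepdim=True).values   # per-group zero point
        hi = wg.max(dim=1, keepdim=True).values
        qmax = 2 ** bits - 1                      # 63 distinct int6 codes
        scale = (hi - lo).clamp_min(1e-12) / qmax
        q = torch.round((wg - lo) / scale).clamp_(0, qmax).to(torch.uint8)
        dequant = (q.float() * scale + lo).reshape(w.shape)
        return q, scale, lo, dequant

The per-group codes, scales, and zeros are what the "per-group lrzip compression" step then packs into the ~15.94 MB serialized artifact reported above.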
+Deserialize: decompression done in 19.3s +diagnostic quantized val_loss:2.34773220 val_bpb:1.07275234 eval_time:12677ms +Deserialize: per-group lrzip decompression... +Deserialize: decompression done in 19.0s +ttt_lora:warming up compile (random tokens, no val data) +ttt_lora:compile warmup done (90.3s) + +beginning TTT eval timer +ttt_phased: total_docs:50000 prefix_docs:2500 suffix_docs:47500 num_phases:3 boundaries:[833, 1666, 2500] +ttp: b777/782 bl:2.3105 bb:1.0828 rl:2.3105 rb:1.0828 dl:8452-9229 gd:0 +ttp: b772/782 bl:2.3255 bb:1.0960 rl:2.3165 rb:1.0881 dl:5762-6095 gd:0 +ttp: b767/782 bl:2.2678 bb:1.0731 rl:2.3046 rb:1.0844 dl:4681-4858 gd:0 +ttp: b762/782 bl:2.3513 bb:1.0888 rl:2.3127 rb:1.0852 dl:4032-4142 gd:0 +ttpp: phase:1/3 pd:1296 gd:833 t:169.0s +tttg: c1/131 lr:0.001000 t:0.4s +tttg: c2/131 lr:0.001000 t:0.5s +tttg: c3/131 lr:0.000999 t:0.5s +tttg: c4/131 lr:0.000999 t:0.6s +tttg: c5/131 lr:0.000998 t:0.7s +tttg: c6/131 lr:0.000996 t:0.7s +tttg: c7/131 lr:0.000995 t:0.8s +tttg: c8/131 lr:0.000993 t:0.9s +tttg: c9/131 lr:0.000991 t:0.9s +tttg: c10/131 lr:0.000988 t:1.0s +tttg: c11/131 lr:0.000985 t:1.1s +tttg: c12/131 lr:0.000982 t:1.1s +tttg: c13/131 lr:0.000979 t:1.2s +tttg: c14/131 lr:0.000976 t:1.2s +tttg: c15/131 lr:0.000972 t:1.3s +tttg: c16/131 lr:0.000968 t:1.4s +tttg: c17/131 lr:0.000963 t:1.4s +tttg: c18/131 lr:0.000958 t:1.5s +tttg: c19/131 lr:0.000953 t:1.6s +tttg: c20/131 lr:0.000948 t:1.6s +tttg: c21/131 lr:0.000943 t:1.7s +tttg: c22/131 lr:0.000937 t:1.8s +tttg: c23/131 lr:0.000931 t:1.8s +tttg: c24/131 lr:0.000925 t:1.9s +tttg: c25/131 lr:0.000918 t:2.0s +tttg: c26/131 lr:0.000911 t:2.0s +tttg: c27/131 lr:0.000905 t:2.1s +tttg: c28/131 lr:0.000897 t:2.2s +tttg: c29/131 lr:0.000890 t:2.2s +tttg: c30/131 lr:0.000882 t:2.3s +tttg: c31/131 lr:0.000874 t:2.3s +tttg: c32/131 lr:0.000866 t:2.4s +tttg: c33/131 lr:0.000858 t:2.5s +tttg: c34/131 lr:0.000849 t:2.5s +tttg: c35/131 lr:0.000841 t:2.6s +tttg: c36/131 lr:0.000832 t:2.7s +tttg: c37/131 lr:0.000822 t:2.7s +tttg: c38/131 lr:0.000813 t:2.8s +tttg: c39/131 lr:0.000804 t:2.9s +tttg: c40/131 lr:0.000794 t:2.9s +tttg: c41/131 lr:0.000784 t:3.0s +tttg: c42/131 lr:0.000774 t:3.1s +tttg: c43/131 lr:0.000764 t:3.1s +tttg: c44/131 lr:0.000753 t:3.2s +tttg: c45/131 lr:0.000743 t:3.3s +tttg: c46/131 lr:0.000732 t:3.3s +tttg: c47/131 lr:0.000722 t:3.4s +tttg: c48/131 lr:0.000711 t:3.5s +tttg: c49/131 lr:0.000700 t:3.5s +tttg: c50/131 lr:0.000689 t:3.6s +tttg: c51/131 lr:0.000677 t:3.7s +tttg: c52/131 lr:0.000666 t:3.7s +tttg: c53/131 lr:0.000655 t:3.8s +tttg: c54/131 lr:0.000643 t:3.8s +tttg: c55/131 lr:0.000631 t:3.9s +tttg: c56/131 lr:0.000620 t:4.0s +tttg: c57/131 lr:0.000608 t:4.0s +tttg: c58/131 lr:0.000596 t:4.1s +tttg: c59/131 lr:0.000584 t:4.2s +tttg: c60/131 lr:0.000572 t:4.2s +tttg: c61/131 lr:0.000560 t:4.3s +tttg: c62/131 lr:0.000548 t:4.4s +tttg: c63/131 lr:0.000536 t:4.4s +tttg: c64/131 lr:0.000524 t:4.5s +tttg: c65/131 lr:0.000512 t:4.6s +tttg: c66/131 lr:0.000500 t:4.6s +tttg: c67/131 lr:0.000488 t:4.7s +tttg: c68/131 lr:0.000476 t:4.8s +tttg: c69/131 lr:0.000464 t:4.8s +tttg: c70/131 lr:0.000452 t:4.9s +tttg: c71/131 lr:0.000440 t:4.9s +tttg: c72/131 lr:0.000428 t:5.0s +tttg: c73/131 lr:0.000416 t:5.1s +tttg: c74/131 lr:0.000404 t:5.1s +tttg: c75/131 lr:0.000392 t:5.2s +tttg: c76/131 lr:0.000380 t:5.3s +tttg: c77/131 lr:0.000369 t:5.3s +tttg: c78/131 lr:0.000357 t:5.4s +tttg: c79/131 lr:0.000345 t:5.5s +tttg: c80/131 lr:0.000334 t:5.5s +tttg: c81/131 lr:0.000323 t:5.6s +tttg: c82/131 lr:0.000311 t:5.7s 
+tttg: c83/131 lr:0.000300 t:5.7s +tttg: c84/131 lr:0.000289 t:5.8s +tttg: c85/131 lr:0.000278 t:5.8s +tttg: c86/131 lr:0.000268 t:5.9s +tttg: c87/131 lr:0.000257 t:6.0s +tttg: c88/131 lr:0.000247 t:6.0s +tttg: c89/131 lr:0.000236 t:6.1s +tttg: c90/131 lr:0.000226 t:6.2s +tttg: c91/131 lr:0.000216 t:6.2s +tttg: c92/131 lr:0.000206 t:6.3s +tttg: c93/131 lr:0.000196 t:6.4s +tttg: c94/131 lr:0.000187 t:6.4s +tttg: c95/131 lr:0.000178 t:6.5s +tttg: c96/131 lr:0.000168 t:6.6s +tttg: c97/131 lr:0.000159 t:6.6s +tttg: c98/131 lr:0.000151 t:6.7s +tttg: c99/131 lr:0.000142 t:6.8s +tttg: c100/131 lr:0.000134 t:6.8s +tttg: c101/131 lr:0.000126 t:6.9s +tttg: c102/131 lr:0.000118 t:6.9s +tttg: c103/131 lr:0.000110 t:7.0s +tttg: c104/131 lr:0.000103 t:7.1s +tttg: c105/131 lr:0.000095 t:7.1s +tttg: c106/131 lr:0.000089 t:7.2s +tttg: c107/131 lr:0.000082 t:7.3s +tttg: c108/131 lr:0.000075 t:7.3s +tttg: c109/131 lr:0.000069 t:7.4s +tttg: c110/131 lr:0.000063 t:7.5s +tttg: c111/131 lr:0.000057 t:7.5s +tttg: c112/131 lr:0.000052 t:7.6s +tttg: c113/131 lr:0.000047 t:7.7s +tttg: c114/131 lr:0.000042 t:7.7s +tttg: c115/131 lr:0.000037 t:7.8s +tttg: c116/131 lr:0.000032 t:7.9s +tttg: c117/131 lr:0.000028 t:7.9s +tttg: c118/131 lr:0.000024 t:8.0s +tttg: c119/131 lr:0.000021 t:8.0s +tttg: c120/131 lr:0.000018 t:8.1s +tttg: c121/131 lr:0.000015 t:8.2s +tttg: c122/131 lr:0.000012 t:8.2s +tttg: c123/131 lr:0.000009 t:8.3s +tttg: c124/131 lr:0.000007 t:8.4s +tttg: c125/131 lr:0.000005 t:8.4s +tttg: c126/131 lr:0.000004 t:8.5s +tttg: c127/131 lr:0.000002 t:8.6s +tttg: c128/131 lr:0.000001 t:8.6s +tttg: c129/131 lr:0.000001 t:8.7s +tttg: c130/131 lr:0.000000 t:8.8s +ttpr: phase:1/3 t:179.3s +ttp: b754/782 bl:2.2853 bb:1.0571 rl:2.3093 rb:1.0817 dl:3345-3397 gd:0 +ttp: b752/782 bl:2.3282 bb:1.0702 rl:2.3113 rb:1.0804 dl:3222-3283 gd:0 +ttpp: phase:2/3 pd:2128 gd:1666 t:245.5s +tttg: c1/219 lr:0.001000 t:0.1s +tttg: c2/219 lr:0.001000 t:0.1s +tttg: c3/219 lr:0.001000 t:0.2s +tttg: c4/219 lr:0.001000 t:0.3s +tttg: c5/219 lr:0.000999 t:0.3s +tttg: c6/219 lr:0.000999 t:0.4s +tttg: c7/219 lr:0.000998 t:0.5s +tttg: c8/219 lr:0.000997 t:0.5s +tttg: c9/219 lr:0.000997 t:0.6s +tttg: c10/219 lr:0.000996 t:0.7s +tttg: c11/219 lr:0.000995 t:0.7s +tttg: c12/219 lr:0.000994 t:0.8s +tttg: c13/219 lr:0.000993 t:0.9s +tttg: c14/219 lr:0.000991 t:0.9s +tttg: c15/219 lr:0.000990 t:1.0s +tttg: c16/219 lr:0.000988 t:1.0s +tttg: c17/219 lr:0.000987 t:1.1s +tttg: c18/219 lr:0.000985 t:1.2s +tttg: c19/219 lr:0.000983 t:1.3s +tttg: c20/219 lr:0.000981 t:1.3s +tttg: c21/219 lr:0.000979 t:1.4s +tttg: c22/219 lr:0.000977 t:1.4s +tttg: c23/219 lr:0.000975 t:1.5s +tttg: c24/219 lr:0.000973 t:1.6s +tttg: c25/219 lr:0.000970 t:1.6s +tttg: c26/219 lr:0.000968 t:1.7s +tttg: c27/219 lr:0.000965 t:1.8s +tttg: c28/219 lr:0.000963 t:1.8s +tttg: c29/219 lr:0.000960 t:1.9s +tttg: c30/219 lr:0.000957 t:2.0s +tttg: c31/219 lr:0.000954 t:2.0s +tttg: c32/219 lr:0.000951 t:2.1s +tttg: c33/219 lr:0.000948 t:2.2s +tttg: c34/219 lr:0.000945 t:2.2s +tttg: c35/219 lr:0.000941 t:2.3s +tttg: c36/219 lr:0.000938 t:2.4s +tttg: c37/219 lr:0.000934 t:2.4s +tttg: c38/219 lr:0.000931 t:2.5s +tttg: c39/219 lr:0.000927 t:2.6s +tttg: c40/219 lr:0.000923 t:2.6s +tttg: c41/219 lr:0.000919 t:2.7s +tttg: c42/219 lr:0.000915 t:2.7s +tttg: c43/219 lr:0.000911 t:2.8s +tttg: c44/219 lr:0.000907 t:2.9s +tttg: c45/219 lr:0.000903 t:2.9s +tttg: c46/219 lr:0.000898 t:3.0s +tttg: c47/219 lr:0.000894 t:3.1s +tttg: c48/219 lr:0.000890 t:3.1s +tttg: c49/219 lr:0.000885 t:3.2s +tttg: c50/219 
lr:0.000880 t:3.3s +tttg: c51/219 lr:0.000876 t:3.3s +tttg: c52/219 lr:0.000871 t:3.4s +tttg: c53/219 lr:0.000866 t:3.5s +tttg: c54/219 lr:0.000861 t:3.5s +tttg: c55/219 lr:0.000856 t:3.6s +tttg: c56/219 lr:0.000851 t:3.7s +tttg: c57/219 lr:0.000846 t:3.7s +tttg: c58/219 lr:0.000841 t:3.8s +tttg: c59/219 lr:0.000835 t:3.9s +tttg: c60/219 lr:0.000830 t:3.9s +tttg: c61/219 lr:0.000824 t:4.0s +tttg: c62/219 lr:0.000819 t:4.0s +tttg: c63/219 lr:0.000813 t:4.1s +tttg: c64/219 lr:0.000808 t:4.2s +tttg: c65/219 lr:0.000802 t:4.2s +tttg: c66/219 lr:0.000796 t:4.3s +tttg: c67/219 lr:0.000790 t:4.4s +tttg: c68/219 lr:0.000784 t:4.4s +tttg: c69/219 lr:0.000779 t:4.5s +tttg: c70/219 lr:0.000773 t:4.6s +tttg: c71/219 lr:0.000766 t:4.6s +tttg: c72/219 lr:0.000760 t:4.7s +tttg: c73/219 lr:0.000754 t:4.8s +tttg: c74/219 lr:0.000748 t:4.8s +tttg: c75/219 lr:0.000742 t:4.9s +tttg: c76/219 lr:0.000735 t:5.0s +tttg: c77/219 lr:0.000729 t:5.0s +tttg: c78/219 lr:0.000722 t:5.1s +tttg: c79/219 lr:0.000716 t:5.2s +tttg: c80/219 lr:0.000709 t:5.2s +tttg: c81/219 lr:0.000703 t:5.3s +tttg: c82/219 lr:0.000696 t:5.4s +tttg: c83/219 lr:0.000690 t:5.4s +tttg: c84/219 lr:0.000683 t:5.5s +tttg: c85/219 lr:0.000676 t:5.6s +tttg: c86/219 lr:0.000670 t:5.6s +tttg: c87/219 lr:0.000663 t:5.7s +tttg: c88/219 lr:0.000656 t:5.7s +tttg: c89/219 lr:0.000649 t:5.8s +tttg: c90/219 lr:0.000642 t:5.9s +tttg: c91/219 lr:0.000635 t:5.9s +tttg: c92/219 lr:0.000628 t:6.0s +tttg: c93/219 lr:0.000621 t:6.1s +tttg: c94/219 lr:0.000614 t:6.1s +tttg: c95/219 lr:0.000607 t:6.2s +tttg: c96/219 lr:0.000600 t:6.3s +tttg: c97/219 lr:0.000593 t:6.3s +tttg: c98/219 lr:0.000586 t:6.4s +tttg: c99/219 lr:0.000579 t:6.5s +tttg: c100/219 lr:0.000572 t:6.5s +tttg: c101/219 lr:0.000565 t:6.6s +tttg: c102/219 lr:0.000558 t:6.7s +tttg: c103/219 lr:0.000550 t:6.7s +tttg: c104/219 lr:0.000543 t:6.8s +tttg: c105/219 lr:0.000536 t:6.9s +tttg: c106/219 lr:0.000529 t:6.9s +tttg: c107/219 lr:0.000522 t:7.0s +tttg: c108/219 lr:0.000514 t:7.1s +tttg: c109/219 lr:0.000507 t:7.1s +tttg: c110/219 lr:0.000500 t:7.2s +tttg: c111/219 lr:0.000493 t:7.3s +tttg: c112/219 lr:0.000486 t:7.3s +tttg: c113/219 lr:0.000478 t:7.4s +tttg: c114/219 lr:0.000471 t:7.4s +tttg: c115/219 lr:0.000464 t:7.5s +tttg: c116/219 lr:0.000457 t:7.6s +tttg: c117/219 lr:0.000450 t:7.6s +tttg: c118/219 lr:0.000442 t:7.7s +tttg: c119/219 lr:0.000435 t:7.8s +tttg: c120/219 lr:0.000428 t:7.8s +tttg: c121/219 lr:0.000421 t:7.9s +tttg: c122/219 lr:0.000414 t:8.0s +tttg: c123/219 lr:0.000407 t:8.0s +tttg: c124/219 lr:0.000400 t:8.1s +tttg: c125/219 lr:0.000393 t:8.2s +tttg: c126/219 lr:0.000386 t:8.2s +tttg: c127/219 lr:0.000379 t:8.3s +tttg: c128/219 lr:0.000372 t:8.4s +tttg: c129/219 lr:0.000365 t:8.4s +tttg: c130/219 lr:0.000358 t:8.5s +tttg: c131/219 lr:0.000351 t:8.6s +tttg: c132/219 lr:0.000344 t:8.6s +tttg: c133/219 lr:0.000337 t:8.7s +tttg: c134/219 lr:0.000330 t:8.8s +tttg: c135/219 lr:0.000324 t:8.8s +tttg: c136/219 lr:0.000317 t:8.9s +tttg: c137/219 lr:0.000310 t:8.9s +tttg: c138/219 lr:0.000304 t:9.0s +tttg: c139/219 lr:0.000297 t:9.1s +tttg: c140/219 lr:0.000291 t:9.1s +tttg: c141/219 lr:0.000284 t:9.2s +tttg: c142/219 lr:0.000278 t:9.3s +tttg: c143/219 lr:0.000271 t:9.3s +tttg: c144/219 lr:0.000265 t:9.4s +tttg: c145/219 lr:0.000258 t:9.5s +tttg: c146/219 lr:0.000252 t:9.5s +tttg: c147/219 lr:0.000246 t:9.6s +tttg: c148/219 lr:0.000240 t:9.7s +tttg: c149/219 lr:0.000234 t:9.7s +tttg: c150/219 lr:0.000227 t:9.8s +tttg: c151/219 lr:0.000221 t:9.9s +tttg: c152/219 lr:0.000216 t:9.9s +tttg: 
c153/219 lr:0.000210 t:10.0s +tttg: c154/219 lr:0.000204 t:10.1s +tttg: c155/219 lr:0.000198 t:10.1s +tttg: c156/219 lr:0.000192 t:10.2s +tttg: c157/219 lr:0.000187 t:10.3s +tttg: c158/219 lr:0.000181 t:10.3s +tttg: c159/219 lr:0.000176 t:10.4s +tttg: c160/219 lr:0.000170 t:10.5s +tttg: c161/219 lr:0.000165 t:10.5s +tttg: c162/219 lr:0.000159 t:10.6s +tttg: c163/219 lr:0.000154 t:10.6s +tttg: c164/219 lr:0.000149 t:10.7s +tttg: c165/219 lr:0.000144 t:10.8s +tttg: c166/219 lr:0.000139 t:10.8s +tttg: c167/219 lr:0.000134 t:10.9s +tttg: c168/219 lr:0.000129 t:11.0s +tttg: c169/219 lr:0.000124 t:11.0s +tttg: c170/219 lr:0.000120 t:11.1s +tttg: c171/219 lr:0.000115 t:11.2s +tttg: c172/219 lr:0.000110 t:11.2s +tttg: c173/219 lr:0.000106 t:11.3s +tttg: c174/219 lr:0.000102 t:11.4s +tttg: c175/219 lr:0.000097 t:11.4s +tttg: c176/219 lr:0.000093 t:11.5s +tttg: c177/219 lr:0.000089 t:11.6s +tttg: c178/219 lr:0.000085 t:11.6s +tttg: c179/219 lr:0.000081 t:11.7s +tttg: c180/219 lr:0.000077 t:11.8s +tttg: c181/219 lr:0.000073 t:11.8s +tttg: c182/219 lr:0.000069 t:11.9s +tttg: c183/219 lr:0.000066 t:11.9s +tttg: c184/219 lr:0.000062 t:12.0s +tttg: c185/219 lr:0.000059 t:12.1s +tttg: c186/219 lr:0.000055 t:12.1s +tttg: c187/219 lr:0.000052 t:12.2s +tttg: c188/219 lr:0.000049 t:12.3s +tttg: c189/219 lr:0.000046 t:12.3s +tttg: c190/219 lr:0.000043 t:12.4s +tttg: c191/219 lr:0.000040 t:12.5s +tttg: c192/219 lr:0.000037 t:12.5s +tttg: c193/219 lr:0.000035 t:12.6s +tttg: c194/219 lr:0.000032 t:12.7s +tttg: c195/219 lr:0.000030 t:12.7s +tttg: c196/219 lr:0.000027 t:12.8s +tttg: c197/219 lr:0.000025 t:12.9s +tttg: c198/219 lr:0.000023 t:12.9s +tttg: c199/219 lr:0.000021 t:13.0s +tttg: c200/219 lr:0.000019 t:13.1s +tttg: c201/219 lr:0.000017 t:13.1s +tttg: c202/219 lr:0.000015 t:13.2s +tttg: c203/219 lr:0.000013 t:13.3s +tttg: c204/219 lr:0.000012 t:13.3s +tttg: c205/219 lr:0.000010 t:13.4s +tttg: c206/219 lr:0.000009 t:13.5s +tttg: c207/219 lr:0.000007 t:13.5s +tttg: c208/219 lr:0.000006 t:13.6s +tttg: c209/219 lr:0.000005 t:13.6s +tttg: c210/219 lr:0.000004 t:13.7s +tttg: c211/219 lr:0.000003 t:13.8s +tttg: c212/219 lr:0.000003 t:13.8s +tttg: c213/219 lr:0.000002 t:13.9s +tttg: c214/219 lr:0.000001 t:14.0s +tttg: c215/219 lr:0.000001 t:14.0s +tttg: c216/219 lr:0.000000 t:14.1s +tttg: c217/219 lr:0.000000 t:14.2s +tttg: c218/219 lr:0.000000 t:14.2s +ttpr: phase:2/3 t:261.3s +ttp: b741/782 bl:2.3211 bb:1.0409 rl:2.3121 rb:1.0770 dl:2686-2730 gd:0 +ttp: b740/782 bl:2.2613 bb:1.0381 rl:2.3083 rb:1.0741 dl:2653-2686 gd:0 +ttpp: phase:3/3 pd:2960 gd:2500 t:276.4s +tttg: c1/289 lr:0.001000 t:0.1s +tttg: c2/289 lr:0.001000 t:0.1s +tttg: c3/289 lr:0.001000 t:0.2s +tttg: c4/289 lr:0.001000 t:0.3s +tttg: c5/289 lr:0.001000 t:0.3s +tttg: c6/289 lr:0.000999 t:0.4s +tttg: c7/289 lr:0.000999 t:0.5s +tttg: c8/289 lr:0.000999 t:0.5s +tttg: c9/289 lr:0.000998 t:0.6s +tttg: c10/289 lr:0.000998 t:0.7s +tttg: c11/289 lr:0.000997 t:0.7s +tttg: c12/289 lr:0.000996 t:0.8s +tttg: c13/289 lr:0.000996 t:0.8s +tttg: c14/289 lr:0.000995 t:0.9s +tttg: c15/289 lr:0.000994 t:1.0s +tttg: c16/289 lr:0.000993 t:1.0s +tttg: c17/289 lr:0.000992 t:1.1s +tttg: c18/289 lr:0.000991 t:1.2s +tttg: c19/289 lr:0.000990 t:1.2s +tttg: c20/289 lr:0.000989 t:1.3s +tttg: c21/289 lr:0.000988 t:1.4s +tttg: c22/289 lr:0.000987 t:1.4s +tttg: c23/289 lr:0.000986 t:1.5s +tttg: c24/289 lr:0.000984 t:1.6s +tttg: c25/289 lr:0.000983 t:1.6s +tttg: c26/289 lr:0.000982 t:1.7s +tttg: c27/289 lr:0.000980 t:1.8s +tttg: c28/289 lr:0.000978 t:1.8s +tttg: c29/289 lr:0.000977 
t:1.9s +tttg: c30/289 lr:0.000975 t:2.0s +tttg: c31/289 lr:0.000973 t:2.0s +tttg: c32/289 lr:0.000972 t:2.1s +tttg: c33/289 lr:0.000970 t:2.1s +tttg: c34/289 lr:0.000968 t:2.2s +tttg: c35/289 lr:0.000966 t:2.3s +tttg: c36/289 lr:0.000964 t:2.3s +tttg: c37/289 lr:0.000962 t:2.4s +tttg: c38/289 lr:0.000960 t:2.5s +tttg: c39/289 lr:0.000958 t:2.5s +tttg: c40/289 lr:0.000955 t:2.6s +tttg: c41/289 lr:0.000953 t:2.7s +tttg: c42/289 lr:0.000951 t:2.7s +tttg: c43/289 lr:0.000948 t:2.8s +tttg: c44/289 lr:0.000946 t:2.9s +tttg: c45/289 lr:0.000944 t:2.9s +tttg: c46/289 lr:0.000941 t:3.0s +tttg: c47/289 lr:0.000938 t:3.1s +tttg: c48/289 lr:0.000936 t:3.1s +tttg: c49/289 lr:0.000933 t:3.2s +tttg: c50/289 lr:0.000930 t:3.3s +tttg: c51/289 lr:0.000927 t:3.3s +tttg: c52/289 lr:0.000925 t:3.4s +tttg: c53/289 lr:0.000922 t:3.5s +tttg: c54/289 lr:0.000919 t:3.5s +tttg: c55/289 lr:0.000916 t:3.6s +tttg: c56/289 lr:0.000913 t:3.7s +tttg: c57/289 lr:0.000910 t:3.7s +tttg: c58/289 lr:0.000906 t:3.8s +tttg: c59/289 lr:0.000903 t:3.8s +tttg: c60/289 lr:0.000900 t:3.9s +tttg: c61/289 lr:0.000897 t:4.0s +tttg: c62/289 lr:0.000893 t:4.0s +tttg: c63/289 lr:0.000890 t:4.1s +tttg: c64/289 lr:0.000887 t:4.2s +tttg: c65/289 lr:0.000883 t:4.2s +tttg: c66/289 lr:0.000879 t:4.3s +tttg: c67/289 lr:0.000876 t:4.4s +tttg: c68/289 lr:0.000872 t:4.4s +tttg: c69/289 lr:0.000869 t:4.5s +tttg: c70/289 lr:0.000865 t:4.6s +tttg: c71/289 lr:0.000861 t:4.6s +tttg: c72/289 lr:0.000857 t:4.7s +tttg: c73/289 lr:0.000854 t:4.8s +tttg: c74/289 lr:0.000850 t:4.8s +tttg: c75/289 lr:0.000846 t:4.9s +tttg: c76/289 lr:0.000842 t:5.0s +tttg: c77/289 lr:0.000838 t:5.0s +tttg: c78/289 lr:0.000834 t:5.1s +tttg: c79/289 lr:0.000830 t:5.2s +tttg: c80/289 lr:0.000826 t:5.2s +tttg: c81/289 lr:0.000821 t:5.3s +tttg: c82/289 lr:0.000817 t:5.4s +tttg: c83/289 lr:0.000813 t:5.4s +tttg: c84/289 lr:0.000809 t:5.5s +tttg: c85/289 lr:0.000804 t:5.6s +tttg: c86/289 lr:0.000800 t:5.6s +tttg: c87/289 lr:0.000796 t:5.7s +tttg: c88/289 lr:0.000791 t:5.7s +tttg: c89/289 lr:0.000787 t:5.8s +tttg: c90/289 lr:0.000782 t:5.9s +tttg: c91/289 lr:0.000778 t:5.9s +tttg: c92/289 lr:0.000773 t:6.0s +tttg: c93/289 lr:0.000769 t:6.1s +tttg: c94/289 lr:0.000764 t:6.1s +tttg: c95/289 lr:0.000759 t:6.2s +tttg: c96/289 lr:0.000755 t:6.3s +tttg: c97/289 lr:0.000750 t:6.3s +tttg: c98/289 lr:0.000745 t:6.4s +tttg: c99/289 lr:0.000740 t:6.5s +tttg: c100/289 lr:0.000736 t:6.5s +tttg: c101/289 lr:0.000731 t:6.6s +tttg: c102/289 lr:0.000726 t:6.7s +tttg: c103/289 lr:0.000721 t:6.7s +tttg: c104/289 lr:0.000716 t:6.8s +tttg: c105/289 lr:0.000711 t:6.9s +tttg: c106/289 lr:0.000706 t:6.9s +tttg: c107/289 lr:0.000701 t:7.0s +tttg: c108/289 lr:0.000696 t:7.1s +tttg: c109/289 lr:0.000691 t:7.1s +tttg: c110/289 lr:0.000686 t:7.2s +tttg: c111/289 lr:0.000681 t:7.2s +tttg: c112/289 lr:0.000676 t:7.3s +tttg: c113/289 lr:0.000671 t:7.4s +tttg: c114/289 lr:0.000666 t:7.4s +tttg: c115/289 lr:0.000661 t:7.5s +tttg: c116/289 lr:0.000656 t:7.6s +tttg: c117/289 lr:0.000650 t:7.6s +tttg: c118/289 lr:0.000645 t:7.7s +tttg: c119/289 lr:0.000640 t:7.8s +tttg: c120/289 lr:0.000635 t:7.8s +tttg: c121/289 lr:0.000629 t:7.9s +tttg: c122/289 lr:0.000624 t:8.0s +tttg: c123/289 lr:0.000619 t:8.0s +tttg: c124/289 lr:0.000614 t:8.1s +tttg: c125/289 lr:0.000608 t:8.2s +tttg: c126/289 lr:0.000603 t:8.2s +tttg: c127/289 lr:0.000598 t:8.3s +tttg: c128/289 lr:0.000592 t:8.3s +tttg: c129/289 lr:0.000587 t:8.4s +tttg: c130/289 lr:0.000581 t:8.5s +tttg: c131/289 lr:0.000576 t:8.5s +tttg: c132/289 lr:0.000571 t:8.6s +tttg: 
c133/289 lr:0.000565 t:8.7s +tttg: c134/289 lr:0.000560 t:8.7s +tttg: c135/289 lr:0.000554 t:8.8s +tttg: c136/289 lr:0.000549 t:8.9s +tttg: c137/289 lr:0.000544 t:8.9s +tttg: c138/289 lr:0.000538 t:9.0s +tttg: c139/289 lr:0.000533 t:9.1s +tttg: c140/289 lr:0.000527 t:9.1s +tttg: c141/289 lr:0.000522 t:9.2s +tttg: c142/289 lr:0.000516 t:9.3s +tttg: c143/289 lr:0.000511 t:9.3s +tttg: c144/289 lr:0.000505 t:9.4s +tttg: c145/289 lr:0.000500 t:9.4s +tttg: c146/289 lr:0.000495 t:9.5s +tttg: c147/289 lr:0.000489 t:9.6s +tttg: c148/289 lr:0.000484 t:9.6s +tttg: c149/289 lr:0.000478 t:9.7s +tttg: c150/289 lr:0.000473 t:9.8s +tttg: c151/289 lr:0.000467 t:9.8s +tttg: c152/289 lr:0.000462 t:9.9s +tttg: c153/289 lr:0.000456 t:10.0s +tttg: c154/289 lr:0.000451 t:10.0s +tttg: c155/289 lr:0.000446 t:10.1s +tttg: c156/289 lr:0.000440 t:10.2s +tttg: c157/289 lr:0.000435 t:10.2s +tttg: c158/289 lr:0.000429 t:10.3s +tttg: c159/289 lr:0.000424 t:10.4s +tttg: c160/289 lr:0.000419 t:10.4s +tttg: c161/289 lr:0.000413 t:10.5s +tttg: c162/289 lr:0.000408 t:10.6s +tttg: c163/289 lr:0.000402 t:10.6s +tttg: c164/289 lr:0.000397 t:10.7s +tttg: c165/289 lr:0.000392 t:10.8s +tttg: c166/289 lr:0.000386 t:10.8s +tttg: c167/289 lr:0.000381 t:10.9s +tttg: c168/289 lr:0.000376 t:10.9s +tttg: c169/289 lr:0.000371 t:11.0s +tttg: c170/289 lr:0.000365 t:11.1s +tttg: c171/289 lr:0.000360 t:11.1s +tttg: c172/289 lr:0.000355 t:11.2s +tttg: c173/289 lr:0.000350 t:11.3s +tttg: c174/289 lr:0.000344 t:11.3s +tttg: c175/289 lr:0.000339 t:11.4s +tttg: c176/289 lr:0.000334 t:11.5s +tttg: c177/289 lr:0.000329 t:11.5s +tttg: c178/289 lr:0.000324 t:11.6s +tttg: c179/289 lr:0.000319 t:11.7s +tttg: c180/289 lr:0.000314 t:11.7s +tttg: c181/289 lr:0.000309 t:11.8s +tttg: c182/289 lr:0.000304 t:11.9s +tttg: c183/289 lr:0.000299 t:11.9s +tttg: c184/289 lr:0.000294 t:12.0s +tttg: c185/289 lr:0.000289 t:12.0s +tttg: c186/289 lr:0.000284 t:12.1s +tttg: c187/289 lr:0.000279 t:12.2s +tttg: c188/289 lr:0.000274 t:12.2s +tttg: c189/289 lr:0.000269 t:12.3s +tttg: c190/289 lr:0.000264 t:12.4s +tttg: c191/289 lr:0.000260 t:12.4s +tttg: c192/289 lr:0.000255 t:12.5s +tttg: c193/289 lr:0.000250 t:12.6s +tttg: c194/289 lr:0.000245 t:12.6s +tttg: c195/289 lr:0.000241 t:12.7s +tttg: c196/289 lr:0.000236 t:12.8s +tttg: c197/289 lr:0.000231 t:12.8s +tttg: c198/289 lr:0.000227 t:12.9s +tttg: c199/289 lr:0.000222 t:13.0s +tttg: c200/289 lr:0.000218 t:13.0s +tttg: c201/289 lr:0.000213 t:13.1s +tttg: c202/289 lr:0.000209 t:13.1s +tttg: c203/289 lr:0.000204 t:13.2s +tttg: c204/289 lr:0.000200 t:13.3s +tttg: c205/289 lr:0.000196 t:13.3s +tttg: c206/289 lr:0.000191 t:13.4s +tttg: c207/289 lr:0.000187 t:13.5s +tttg: c208/289 lr:0.000183 t:13.5s +tttg: c209/289 lr:0.000179 t:13.6s +tttg: c210/289 lr:0.000174 t:13.7s +tttg: c211/289 lr:0.000170 t:13.7s +tttg: c212/289 lr:0.000166 t:13.8s +tttg: c213/289 lr:0.000162 t:13.9s +tttg: c214/289 lr:0.000158 t:13.9s +tttg: c215/289 lr:0.000154 t:14.0s +tttg: c216/289 lr:0.000150 t:14.1s +tttg: c217/289 lr:0.000146 t:14.1s +tttg: c218/289 lr:0.000143 t:14.2s +tttg: c219/289 lr:0.000139 t:14.3s +tttg: c220/289 lr:0.000135 t:14.3s +tttg: c221/289 lr:0.000131 t:14.4s +tttg: c222/289 lr:0.000128 t:14.4s +tttg: c223/289 lr:0.000124 t:14.5s +tttg: c224/289 lr:0.000121 t:14.6s +tttg: c225/289 lr:0.000117 t:14.6s +tttg: c226/289 lr:0.000113 t:14.7s +tttg: c227/289 lr:0.000110 t:14.8s +tttg: c228/289 lr:0.000107 t:14.8s +tttg: c229/289 lr:0.000103 t:14.9s +tttg: c230/289 lr:0.000100 t:15.0s +tttg: c231/289 lr:0.000097 t:15.0s +tttg: c232/289 
lr:0.000094 t:15.1s +tttg: c233/289 lr:0.000090 t:15.2s +tttg: c234/289 lr:0.000087 t:15.2s +tttg: c235/289 lr:0.000084 t:15.3s +tttg: c236/289 lr:0.000081 t:15.4s +tttg: c237/289 lr:0.000078 t:15.4s +tttg: c238/289 lr:0.000075 t:15.5s +tttg: c239/289 lr:0.000073 t:15.6s +tttg: c240/289 lr:0.000070 t:15.6s +tttg: c241/289 lr:0.000067 t:15.7s +tttg: c242/289 lr:0.000064 t:15.7s +tttg: c243/289 lr:0.000062 t:15.8s +tttg: c244/289 lr:0.000059 t:15.9s +tttg: c245/289 lr:0.000056 t:15.9s +tttg: c246/289 lr:0.000054 t:16.0s +tttg: c247/289 lr:0.000052 t:16.1s +tttg: c248/289 lr:0.000049 t:16.1s +tttg: c249/289 lr:0.000047 t:16.2s +tttg: c250/289 lr:0.000045 t:16.3s +tttg: c251/289 lr:0.000042 t:16.3s +tttg: c252/289 lr:0.000040 t:16.4s +tttg: c253/289 lr:0.000038 t:16.5s +tttg: c254/289 lr:0.000036 t:16.5s +tttg: c255/289 lr:0.000034 t:16.6s +tttg: c256/289 lr:0.000032 t:16.7s +tttg: c257/289 lr:0.000030 t:16.7s +tttg: c258/289 lr:0.000028 t:16.8s +tttg: c259/289 lr:0.000027 t:16.8s +tttg: c260/289 lr:0.000025 t:16.9s +tttg: c261/289 lr:0.000023 t:17.0s +tttg: c262/289 lr:0.000022 t:17.0s +tttg: c263/289 lr:0.000020 t:17.1s +tttg: c264/289 lr:0.000018 t:17.2s +tttg: c265/289 lr:0.000017 t:17.2s +tttg: c266/289 lr:0.000016 t:17.3s +tttg: c267/289 lr:0.000014 t:17.4s +tttg: c268/289 lr:0.000013 t:17.4s +tttg: c269/289 lr:0.000012 t:17.5s +tttg: c270/289 lr:0.000011 t:17.6s +tttg: c271/289 lr:0.000010 t:17.6s +tttg: c272/289 lr:0.000009 t:17.7s +tttg: c273/289 lr:0.000008 t:17.8s +tttg: c274/289 lr:0.000007 t:17.8s +tttg: c275/289 lr:0.000006 t:17.9s +tttg: c276/289 lr:0.000005 t:18.0s +tttg: c277/289 lr:0.000004 t:18.0s +tttg: c278/289 lr:0.000004 t:18.1s +tttg: c279/289 lr:0.000003 t:18.1s +tttg: c280/289 lr:0.000002 t:18.2s +tttg: c281/289 lr:0.000002 t:18.3s +tttg: c282/289 lr:0.000001 t:18.3s +tttg: c283/289 lr:0.000001 t:18.4s +tttg: c284/289 lr:0.000001 t:18.5s +tttg: c285/289 lr:0.000000 t:18.5s +tttg: c286/289 lr:0.000000 t:18.6s +tttg: c287/289 lr:0.000000 t:18.7s +tttg: c288/289 lr:0.000000 t:18.7s +ttpr: phase:3/3 t:296.6s +ttp: b728/782 bl:2.3606 bb:1.0807 rl:2.3115 rb:1.0745 dl:2306-2324 gd:1 +ttp: b726/782 bl:2.3326 bb:1.0375 rl:2.3127 rb:1.0723 dl:2254-2276 gd:1 +ttp: b717/782 bl:2.2580 bb:1.0339 rl:2.3100 rb:1.0704 dl:2070-2088 gd:1 +ttp: b706/782 bl:2.4012 bb:1.0739 rl:2.3139 rb:1.0705 dl:1898-1910 gd:1 +ttp: b703/782 bl:2.3401 bb:1.0294 rl:2.3150 rb:1.0688 dl:1859-1872 gd:1 +ttp: b692/782 bl:2.2899 bb:1.0280 rl:2.3141 rb:1.0673 dl:1737-1746 gd:1 +ttp: b682/782 bl:2.3397 bb:1.0558 rl:2.3149 rb:1.0669 dl:1638-1646 gd:1 +ttp: b674/782 bl:2.4033 bb:1.0885 rl:2.3177 rb:1.0676 dl:1571-1578 gd:1 +ttp: b671/782 bl:2.3048 bb:1.0455 rl:2.3173 rb:1.0669 dl:1544-1552 gd:1 +ttp: b656/782 bl:2.3281 bb:1.1106 rl:2.3176 rb:1.0680 dl:1439-1445 gd:1 +ttp: b648/782 bl:2.2805 bb:1.0063 rl:2.3166 rb:1.0664 dl:1387-1392 gd:1 +ttp: b647/782 bl:2.2755 bb:1.0327 rl:2.3156 rb:1.0656 dl:1382-1387 gd:1 +ttp: b638/782 bl:2.3405 bb:1.0664 rl:2.3162 rb:1.0656 dl:1325-1331 gd:1 +ttp: b629/782 bl:2.3489 bb:1.0108 rl:2.3169 rb:1.0643 dl:1276-1280 gd:1 +ttp: b621/782 bl:2.2958 bb:1.0484 rl:2.3165 rb:1.0640 dl:1231-1237 gd:1 +ttp: b612/782 bl:2.2368 bb:1.0134 rl:2.3149 rb:1.0630 dl:1186-1190 gd:1 +ttp: b605/782 bl:2.2439 bb:1.0233 rl:2.3136 rb:1.0623 dl:1154-1159 gd:1 +ttp: b593/782 bl:2.2896 bb:1.0107 rl:2.3132 rb:1.0614 dl:1103-1107 gd:1 +ttp: b584/782 bl:2.2947 bb:1.0375 rl:2.3129 rb:1.0610 dl:1064-1069 gd:1 +ttp: b577/782 bl:2.2899 bb:1.0307 rl:2.3126 rb:1.0605 dl:1037-1041 gd:1 +ttp: b571/782 bl:2.2957 
bb:1.0042 rl:2.3123 rb:1.0596 dl:1014-1017 gd:1 +ttp: b563/782 bl:2.2658 bb:1.0182 rl:2.3116 rb:1.0590 dl:987-990 gd:1 +ttp: b555/782 bl:2.3192 bb:1.0234 rl:2.3117 rb:1.0585 dl:959-961 gd:1 +ttp: b552/782 bl:2.2736 bb:1.0185 rl:2.3112 rb:1.0579 dl:949-952 gd:1 +ttp: b543/782 bl:2.3310 bb:1.0553 rl:2.3115 rb:1.0579 dl:921-924 gd:1 +ttp: b532/782 bl:2.3892 bb:1.0670 rl:2.3124 rb:1.0580 dl:887-889 gd:1 +ttp: b522/782 bl:2.3063 bb:1.0343 rl:2.3124 rb:1.0577 dl:858-860 gd:1 +ttp: b517/782 bl:2.3466 bb:1.0240 rl:2.3128 rb:1.0573 dl:843-846 gd:1 +ttp: b509/782 bl:2.3592 bb:1.0358 rl:2.3133 rb:1.0571 dl:820-823 gd:1 +ttp: b501/782 bl:2.3798 bb:1.0513 rl:2.3140 rb:1.0570 dl:799-802 gd:1 +ttp: b482/782 bl:2.3266 bb:1.0460 rl:2.3141 rb:1.0569 dl:752-754 gd:1 +ttp: b474/782 bl:2.3411 bb:1.0719 rl:2.3144 rb:1.0570 dl:733-735 gd:1 +ttp: b466/782 bl:2.3837 bb:1.0277 rl:2.3150 rb:1.0567 dl:714-717 gd:1 +ttp: b460/782 bl:2.2468 bb:1.0511 rl:2.3144 rb:1.0567 dl:701-703 gd:1 +ttp: b451/782 bl:2.4033 bb:1.0875 rl:2.3152 rb:1.0570 dl:682-685 gd:1 +ttp: b444/782 bl:2.3092 bb:1.0639 rl:2.3151 rb:1.0570 dl:668-670 gd:1 +ttp: b437/782 bl:2.2901 bb:1.0537 rl:2.3149 rb:1.0570 dl:653-655 gd:1 +ttp: b429/782 bl:2.2417 bb:1.0224 rl:2.3143 rb:1.0567 dl:638-640 gd:1 +ttp: b420/782 bl:2.3591 bb:1.0531 rl:2.3147 rb:1.0567 dl:620-622 gd:1 +ttp: b414/782 bl:2.2000 bb:1.0072 rl:2.3138 rb:1.0563 dl:609-611 gd:1 +ttp: b406/782 bl:2.3147 bb:1.0659 rl:2.3138 rb:1.0564 dl:593-595 gd:1 +ttp: b398/782 bl:2.2466 bb:1.0032 rl:2.3133 rb:1.0560 dl:579-581 gd:1 +ttp: b389/782 bl:2.2845 bb:1.0820 rl:2.3132 rb:1.0562 dl:563-564 gd:1 +ttp: b382/782 bl:2.2943 bb:1.0840 rl:2.3130 rb:1.0564 dl:550-552 gd:1 +ttp: b375/782 bl:2.4036 bb:1.0720 rl:2.3136 rb:1.0565 dl:538-540 gd:1 +ttp: b367/782 bl:2.2905 bb:1.0809 rl:2.3135 rb:1.0566 dl:525-527 gd:1 +ttp: b360/782 bl:2.3028 bb:1.0773 rl:2.3134 rb:1.0567 dl:513-515 gd:1 +ttp: b352/782 bl:2.4202 bb:1.0952 rl:2.3140 rb:1.0570 dl:499-501 gd:1 +ttp: b344/782 bl:2.3865 bb:1.0635 rl:2.3144 rb:1.0570 dl:488-489 gd:1 +ttp: b336/782 bl:2.4077 bb:1.0850 rl:2.3149 rb:1.0571 dl:476-477 gd:1 +ttp: b329/782 bl:2.2821 bb:1.0813 rl:2.3148 rb:1.0573 dl:465-466 gd:1 +ttp: b321/782 bl:2.3504 bb:1.0730 rl:2.3149 rb:1.0573 dl:453-455 gd:1 +ttp: b312/782 bl:2.3117 bb:1.0530 rl:2.3149 rb:1.0573 dl:439-440 gd:1 +ttp: b304/782 bl:2.3322 bb:1.0697 rl:2.3150 rb:1.0574 dl:427-429 gd:1 +ttp: b297/782 bl:2.4001 bb:1.0845 rl:2.3154 rb:1.0575 dl:417-418 gd:1 +ttp: b289/782 bl:2.3218 bb:1.0798 rl:2.3154 rb:1.0576 dl:405-406 gd:1 +ttp: b281/782 bl:2.2966 bb:1.0887 rl:2.3154 rb:1.0577 dl:394-395 gd:1 +ttp: b273/782 bl:2.3353 bb:1.0765 rl:2.3154 rb:1.0578 dl:383-384 gd:1 +ttp: b267/782 bl:2.4144 bb:1.1411 rl:2.3158 rb:1.0582 dl:375-376 gd:1 +ttp: b257/782 bl:2.4429 bb:1.1112 rl:2.3163 rb:1.0584 dl:362-364 gd:1 +ttp: b248/782 bl:2.4542 bb:1.1845 rl:2.3169 rb:1.0588 dl:351-352 gd:1 +ttp: b240/782 bl:2.3083 bb:1.0595 rl:2.3168 rb:1.0588 dl:341-342 gd:1 +ttp: b231/782 bl:2.3041 bb:1.0824 rl:2.3168 rb:1.0589 dl:330-331 gd:1 +ttp: b223/782 bl:2.3302 bb:1.1250 rl:2.3168 rb:1.0591 dl:321-322 gd:1 +ttp: b215/782 bl:2.3893 bb:1.0952 rl:2.3171 rb:1.0592 dl:312-313 gd:1 +ttp: b209/782 bl:2.4126 bb:1.1285 rl:2.3174 rb:1.0595 dl:305-306 gd:1 +ttp: b201/782 bl:2.2992 bb:1.0968 rl:2.3173 rb:1.0596 dl:297-298 gd:1 +ttp: b193/782 bl:2.3502 bb:1.1270 rl:2.3174 rb:1.0598 dl:288-289 gd:1 +ttp: b187/782 bl:2.4523 bb:1.1332 rl:2.3178 rb:1.0600 dl:281-282 gd:1 +ttp: b179/782 bl:2.3667 bb:1.1283 rl:2.3180 rb:1.0602 dl:273-274 gd:1 +ttp: b169/782 bl:2.3768 
bb:1.1171 rl:2.3181 rb:1.0603 dl:263-264 gd:1 +ttp: b161/782 bl:2.3561 bb:1.1341 rl:2.3183 rb:1.0605 dl:256-256 gd:1 +ttp: b153/782 bl:2.2630 bb:1.0468 rl:2.3181 rb:1.0605 dl:248-249 gd:1 +ttp: b145/782 bl:2.5282 bb:1.1686 rl:2.3186 rb:1.0608 dl:240-241 gd:1 +ttp: b139/782 bl:2.4284 bb:1.1313 rl:2.3189 rb:1.0609 dl:234-235 gd:1 +ttp: b131/782 bl:2.3913 bb:1.1546 rl:2.3191 rb:1.0611 dl:227-228 gd:1 +ttp: b121/782 bl:2.4273 bb:1.1078 rl:2.3193 rb:1.0612 dl:218-219 gd:1 +ttp: b113/782 bl:2.5532 bb:1.1352 rl:2.3198 rb:1.0614 dl:210-211 gd:1 +ttp: b104/782 bl:2.4982 bb:1.1794 rl:2.3202 rb:1.0616 dl:202-203 gd:1 +ttp: b94/782 bl:2.5565 bb:1.2080 rl:2.3207 rb:1.0619 dl:193-194 gd:1 +ttp: b86/782 bl:2.4547 bb:1.1325 rl:2.3209 rb:1.0621 dl:186-187 gd:1 +ttp: b78/782 bl:2.5403 bb:1.1893 rl:2.3213 rb:1.0623 dl:179-180 gd:1 +ttp: b71/782 bl:2.4561 bb:1.1738 rl:2.3216 rb:1.0625 dl:173-173 gd:1 +ttp: b63/782 bl:2.5243 bb:1.2041 rl:2.3219 rb:1.0627 dl:166-166 gd:1 +ttp: b54/782 bl:2.4737 bb:1.2134 rl:2.3222 rb:1.0629 dl:157-158 gd:1 +ttp: b46/782 bl:2.5455 bb:1.2154 rl:2.3225 rb:1.0632 dl:149-150 gd:1 +ttp: b38/782 bl:2.6083 bb:1.1963 rl:2.3229 rb:1.0633 dl:141-142 gd:1 +ttp: b30/782 bl:2.5904 bb:1.2630 rl:2.3233 rb:1.0636 dl:133-134 gd:1 +ttp: b22/782 bl:2.5695 bb:1.2028 rl:2.3236 rb:1.0638 dl:124-126 gd:1 +ttp: b14/782 bl:2.5889 bb:1.1818 rl:2.3239 rb:1.0639 dl:114-115 gd:1 +ttp: b6/782 bl:2.7171 bb:1.2114 rl:2.3243 rb:1.0641 dl:99-101 gd:1 +quantized_ttt_phased val_loss:2.31977393 val_bpb:1.06004616 eval_time:395770ms +total_eval_time:395.8s +=== Done: SEED=314 === diff --git a/records/track_10min_16mb/2026-04-29_AWQ_lite_mixedprecision_GPTQ/train_seed42.log b/records/track_10min_16mb/2026-04-29_AWQ_lite_mixedprecision_GPTQ/train_seed42.log new file mode 100644 index 0000000000..8415915fc6 --- /dev/null +++ b/records/track_10min_16mb/2026-04-29_AWQ_lite_mixedprecision_GPTQ/train_seed42.log @@ -0,0 +1,950 @@ +=== Starting training: SEED=42 RUN_ID=awq_seed42_20260429_050241 === +=== Artifact dir: /workspace/pargolf-work/runs/awq_seed42_20260429_050241 === +=== NPROC=8 === +W0429 05:02:42.809000 90934 torch/distributed/run.py:803] +W0429 05:02:42.809000 90934 torch/distributed/run.py:803] ***************************************** +W0429 05:02:42.809000 90934 torch/distributed/run.py:803] Setting OMP_NUM_THREADS environment variable for each process to be 1 in default, to avoid your system being overloaded, please further tune the variable for optimal performance in your application as needed.
+W0429 05:02:42.809000 90934 torch/distributed/run.py:803] ***************************************** +Hyperparameters: + adam_eps: 1e-08 + adam_wd: 0.02 + artifact_dir: /workspace/pargolf-work/runs/awq_seed42_20260429_050241 + attn_clip_sigmas: 13.0 + attn_out_gate_enabled: False + attn_out_gate_src: proj + awq_lite_bits: 8 + awq_lite_enabled: True + awq_lite_group_size: 64 + awq_lite_group_top_k: 1 + beta1: 0.9 + beta2: 0.99 + caseops_enabled: True + compressor: pergroup + data_dir: ./data/ + datasets_dir: /workspace/pargolf-work/data/datasets/fineweb10B_sp8192_lossless_caps_caseops_v1_reserved/datasets/fineweb10B_sp8192_lossless_caps_caseops_v1_reserved + distributed: True + ema_decay: 0.9965 + embed_bits: 7 + embed_clip_sigmas: 14.0 + embed_lr: 0.6 + embed_wd: 0.085 + enable_looping_at: 0.35 + eval_seq_len: 2048 + eval_stride: 64 + fused_ce_enabled: True + gate_window: 12 + gated_attn_enabled: False + gated_attn_init_std: 0.01 + gated_attn_quant_gate: True + global_ttt_batch_seqs: 32 + global_ttt_chunk_tokens: 32768 + global_ttt_epochs: 1 + global_ttt_grad_clip: 1.0 + global_ttt_lr: 0.001 + global_ttt_momentum: 0.9 + global_ttt_respect_doc_boundaries: True + global_ttt_warmup_chunks: 0 + global_ttt_warmup_start_lr: 0.0 + gptq_calibration_batches: 16 + gptq_reserve_seconds: 0.5 + grad_accum_steps: 1 + grad_clip_norm: 0.3 + is_main_process: True + iterations: 20000 + ln_scale: True + local_rank: 0 + logfile: /workspace/pargolf-work/runs/awq_seed42_20260429_050241/awq_seed42_20260429_050241.txt + logit_softcap: 30.0 + loop_end: 5 + loop_start: 3 + lqer_asym_enabled: True + lqer_asym_group: 64 + lqer_enabled: True + lqer_factor_bits: 4 + lqer_gain_select: False + lqer_rank: 4 + lqer_scope: all + lqer_top_k: 3 + matrix_bits: 6 + matrix_clip_sigmas: 12.85 + matrix_lr: 0.026 + max_wallclock_seconds: 600.0 + min_lr: 0.1 + mlp_clip_sigmas: 11.5 + mlp_mult: 4.0 + model_dim: 512 + model_path: /workspace/pargolf-work/runs/awq_seed42_20260429_050241/final_model.pt + muon_backend_steps: 5 + muon_momentum: 0.97 + muon_momentum_warmup_start: 0.92 + muon_momentum_warmup_steps: 1500 + muon_row_normalize: True + muon_wd: 0.095 + num_heads: 8 + num_kv_heads: 4 + num_layers: 11 + num_loops: 2 + parallel_final_lane: mean + parallel_start_layer: 8 + phased_ttt_num_phases: 3 + phased_ttt_prefix_docs: 2500 + qk_gain_init: 5.0 + quantized_model_path: /workspace/pargolf-work/runs/awq_seed42_20260429_050241/final_model.int6.ptz + rank: 0 + rope_base: 10000.0 + rope_dims: 16 + rope_train_seq_len: 2048 + rope_yarn: False + run_id: awq_seed42_20260429_050241 + scalar_lr: 0.02 + seed: 42 + skip_gates_enabled: True + smear_gate_enabled: True + sparse_attn_gate_enabled: True + sparse_attn_gate_init_std: 0.0 + sparse_attn_gate_scale: 0.5 + tie_embeddings: True + tied_embed_init_std: 0.005 + tied_embed_lr: 0.03 + tokenizer_path: /workspace/pargolf/records/track_10min_16mb/2026-04-27_SP8192_LQER_SparseGate_BOSSmearFix_9HpStack_1.0611/tokenizers/fineweb_8192_bpe_lossless_caps_caseops_v1_reserved.model + train_batch_tokens: 786432 + train_files: /workspace/pargolf-work/data/datasets/fineweb10B_sp8192_lossless_caps_caseops_v1_reserved/datasets/fineweb10B_sp8192_lossless_caps_caseops_v1_reserved/fineweb_train_*.bin + train_log_every: 500 + train_seq_len: 2048 + ttt_batch_size: 64 + ttt_beta1: 0.0 + ttt_beta2: 0.99 + ttt_chunk_size: 48 + ttt_enabled: True + ttt_eval_batches: + ttt_eval_seq_len: 2048 + ttt_grad_steps: 1 + ttt_k_lora: True + ttt_lora_lr: 0.0001 + ttt_lora_rank: 80 + ttt_mlp_lora: True + ttt_o_lora: True + 
ttt_optimizer: adam + ttt_weight_decay: 0.5 + val_batch_tokens: 524288 + val_bytes_files: /workspace/pargolf-work/data/datasets/fineweb10B_sp8192_lossless_caps_caseops_v1_reserved/datasets/fineweb10B_sp8192_lossless_caps_caseops_v1_reserved/fineweb_val_bytes_*.bin + val_doc_fraction: 1.0 + val_files: /workspace/pargolf-work/data/datasets/fineweb10B_sp8192_lossless_caps_caseops_v1_reserved/datasets/fineweb10B_sp8192_lossless_caps_caseops_v1_reserved/fineweb_val_*.bin + val_loss_every: 4000 + vocab_size: 8192 + warmdown_frac: 0.85 + warmup_steps: 20 + world_size: 8 + xsa_last_n: 11 +train_shards: 80 +val_tokens: 47851520 +model_params:35945671 +gptq:reserving 0s, effective=599500ms +warmup_cu_buckets:64,128,192,256 iters_each:3 +warmup_step: 1/20 +warmup_step: 2/20 +warmup_step: 3/20 +warmup_step: 4/20 +warmup_step: 5/20 +warmup_step: 6/20 +warmup_step: 10/20 +warmup_step: 20/20 +loop_warmup:enabled encoder:[0, 1, 2, 3, 4, 5, 3, 4] decoder:[5, 3, 4, 5, 6, 7, 8, 9, 10] +loop_warmup_step: 1/20 +loop_warmup_step: 2/20 +loop_warmup_step: 3/20 +loop_warmup_step: 4/20 +loop_warmup_step: 5/20 +loop_warmup_step: 6/20 +loop_warmup_step: 10/20 +loop_warmup_step: 20/20 +0/20000 val_loss: 9.0076 val_bpb: 4.1159 +1/20000 train_loss: 9.0109 train_time: 0.0m tok/s: 18061930 +2/20000 train_loss: 12.6608 train_time: 0.0m tok/s: 11392388 +3/20000 train_loss: 10.3312 train_time: 0.0m tok/s: 9969099 +4/20000 train_loss: 9.1770 train_time: 0.0m tok/s: 9361768 +5/20000 train_loss: 7.9874 train_time: 0.0m tok/s: 9058402 +500/20000 train_loss: 2.6266 train_time: 0.8m tok/s: 8346539 +1000/20000 train_loss: 2.7466 train_time: 1.6m tok/s: 8318344 +1500/20000 train_loss: 2.7187 train_time: 2.4m tok/s: 8309959 +2000/20000 train_loss: 2.6271 train_time: 3.2m tok/s: 8305645 +layer_loop:enabled step:2215 frac:0.350 encoder:[0, 1, 2, 3, 4, 5, 3, 4] decoder:[5, 3, 4, 5, 6, 7, 8, 9, 10] +2500/20000 train_loss: 2.4717 train_time: 4.2m tok/s: 7880578 +3000/20000 train_loss: 2.4940 train_time: 5.4m tok/s: 7336864 +3500/20000 train_loss: 2.6240 train_time: 6.5m tok/s: 7036254 +4000/20000 train_loss: 2.3701 train_time: 7.7m tok/s: 6811025 +4000/20000 val_loss: 2.4309 val_bpb: 1.1108 +4500/20000 train_loss: 2.5361 train_time: 8.9m tok/s: 6659009 +4989/20000 val_loss: 2.3567 val_bpb: 1.0768 +stopping_early: wallclock_cap train_time: 599599ms step: 4989/20000 +peak memory allocated: 41707 MiB reserved: 47048 MiB +ema:applying EMA weights +diagnostic pre-quantization post-ema val_loss:2.33105143 val_bpb:1.06513037 eval_time:8185ms +Serialized model: 135417533 bytes +Code size (uncompressed): 168543 bytes +Code size (compressed): 33787 bytes +GPTQ:collecting Hessians from calibration data... +GPTQ:collected 67 Hessians in 4.0s +Quantized weights: + gate_int8_row: blocks.attn.attn_gate_w + gptq (int6): blocks.attn.c_k.weight, blocks.attn.c_q.weight, blocks.attn.c_v.weight, blocks.attn.proj.weight, blocks.mlp.fc.weight, blocks.mlp.proj.weight + gptq (int6)+lqer_asym: blocks.mlp.fc.weight + gptq (int7)+awqgrpint8+lqer_asym: tok_emb.weight + passthrough (float16): blocks.attn.q_gain, blocks.attn_scale, blocks.mlp_scale, blocks.resid_mix, parallel_post_lambdas, parallel_resid_lambdas, skip_gates, skip_weights, smear_gate.weight, smear_lambda +Serialize: per-group lrzip compression... +Serialize: per-group compression done in 110.0s +Serialized model quantized+pergroup: 15944716 bytes +Total submission size quantized+pergroup: 15978503 bytes +Deserialize: per-group lrzip decompression... 
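In the hyperparameters above, global_ttt_lr: 0.001 is the peak of a per-phase schedule: each "tttg: cN/M lr:..." trace below decays from 0.001 to 0 across that phase's M chunks, and the printed values are consistent with a plain cosine curve (e.g. c110/219 gives 0.000500 at the midpoint). A minimal sketch that reproduces the trace; ttt_chunk_lr is a hypothetical name, not a function from train_gpt.py, and the real scheduler may differ in details (warmup is configured off here via global_ttt_warmup_chunks: 0):

    import math

    def ttt_chunk_lr(c: int, num_chunks: int, peak: float = 1e-3) -> float:
        # Cosine decay from `peak` to 0 across a TTT phase's chunks.
        # Matches the logged values, e.g. ttt_chunk_lr(16, 131) ~= 0.000968
        # and ttt_chunk_lr(110, 219) ~= 0.000500.
        t = (c - 1) / max(num_chunks - 1, 1)
        return peak * 0.5 * (1.0 + math.cos(math.pi * t))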
+Deserialize: decompression done in 19.8s
+diagnostic quantized val_loss:2.34961250 val_bpb:1.07361151 eval_time:12561ms
+Deserialize: per-group lrzip decompression...
+Deserialize: decompression done in 19.4s
+ttt_lora:warming up compile (random tokens, no val data)
+ttt_lora:compile warmup done (91.7s)
+
+beginning TTT eval timer
+ttt_phased: total_docs:50000 prefix_docs:2500 suffix_docs:47500 num_phases:3 boundaries:[833, 1666, 2500]
+ttp: b776/782 bl:2.2579 bb:1.0705 rl:2.2579 rb:1.0705 dl:7534-8350 gd:0
+ttp: b773/782 bl:2.1991 bb:1.0356 rl:2.2318 rb:1.0550 dl:6104-6447 gd:0
+ttp: b769/782 bl:2.3288 bb:1.0841 rl:2.2578 rb:1.0629 dl:5097-5309 gd:0
+ttp: b763/782 bl:2.4204 bb:1.0998 rl:2.2868 rb:1.0697 dl:4142-4283 gd:0
+ttpp: phase:1/3 pd:1296 gd:833 t:168.3s
+tttg: c1/131 lr:0.001000 t:0.4s
+tttg: c2/131 lr:0.001000 t:0.5s
+tttg: c3/131 lr:0.000999 t:0.6s
[... 124 tttg lines (c4/131 through c127/131) elided: cosine lr decay continues from 0.000999 down to 0.000002, t:0.6s-8.7s ...]
+tttg: c128/131 lr:0.000001 t:8.7s
+tttg: c129/131 lr:0.000001 t:8.8s
+tttg: c130/131 lr:0.000000 t:8.9s
+ttpr: phase:1/3 t:178.7s
+ttp: b754/782 bl:2.2878 bb:1.0582 rl:2.2869 rb:1.0682 dl:3345-3397 gd:0
+ttp: b750/782 bl:2.3855 bb:1.0719 rl:2.2972 rb:1.0686 dl:3090-3149 gd:0
+ttpp: phase:2/3 pd:2128 gd:1666 t:244.4s
+tttg: c1/219 lr:0.001000 t:0.1s
+tttg: c2/219 lr:0.001000 t:0.1s
+tttg: c3/219 lr:0.001000 t:0.2s
[... 212 tttg lines (c4/219 through c215/219) elided: cosine lr decay continues from 0.001000 down to 0.000001, t:0.3s-14.1s ...]
+tttg: c216/219 lr:0.000000 t:14.1s
+tttg: c217/219 lr:0.000000 t:14.2s
+tttg: c218/219 lr:0.000000 t:14.3s
+ttpr: phase:2/3 t:260.2s
+ttp: b741/782 bl:2.3222 bb:1.0414 rl:2.2992 rb:1.0663 dl:2686-2730 gd:0
+ttp: b740/782 bl:2.2612 bb:1.0380 rl:2.2964 rb:1.0641 dl:2653-2686 gd:0
+ttpp: phase:3/3 pd:2960 gd:2500 t:275.3s
+tttg: c1/289 lr:0.001000 t:0.1s
+tttg: c2/289 lr:0.001000 t:0.1s
+tttg: c3/289 lr:0.001000 t:0.2s
[... 282 tttg lines (c4/289 through c285/289) elided: cosine lr decay continues from 0.001000 down to 0.000000, t:0.3s-18.6s ...]
+tttg: c286/289 lr:0.000000 t:18.6s
+tttg: c287/289 lr:0.000000 t:18.7s
+tttg: c288/289 lr:0.000000 t:18.8s
+ttpr: phase:3/3 t:295.6s
+ttp: b728/782 bl:2.3646 bb:1.0825 rl:2.3005 rb:1.0653 dl:2306-2324 gd:1
+ttp: b726/782 bl:2.3332 bb:1.0378 rl:2.3024 rb:1.0637 dl:2254-2276 gd:1
+ttp: b718/782 bl:2.2929 bb:1.0291 rl:2.3019 rb:1.0619 dl:2089-2106 gd:1
+ttp: b708/782 bl:2.3094 bb:1.0330 rl:2.3022 rb:1.0606 dl:1924-1937 gd:1
+ttp: b698/782 bl:2.2507 bb:1.0302 rl:2.3002 rb:1.0594 dl:1803-1814 gd:1
+ttp: b694/782 bl:2.3095 bb:1.0561 rl:2.3006 rb:1.0593 dl:1758-1769 gd:1
+ttp: b683/782 bl:2.2743 bb:1.0587 rl:2.2997 rb:1.0592 dl:1646-1657 gd:1
+ttp: b676/782 bl:2.3351 bb:1.0503 rl:2.3008 rb:1.0590 dl:1586-1595 gd:1
+ttp: b664/782 bl:2.3405 bb:1.0272 rl:2.3019 rb:1.0580 dl:1493-1499 gd:1
+ttp: b656/782 bl:2.3266 bb:1.1099 rl:2.3026 rb:1.0593 dl:1439-1445 gd:1
+ttp: b649/782 bl:2.2869 bb:1.0168 rl:2.3022 rb:1.0582 dl:1392-1398 gd:1
+ttp: b640/782 bl:2.3097 bb:1.0522 rl:2.3024 rb:1.0581 dl:1337-1343 gd:1
+ttp: b639/782 bl:2.3156 bb:1.0341 rl:2.3027 rb:1.0575 dl:1331-1337 gd:1
+ttp: b630/782 bl:2.3257 bb:1.0404 rl:2.3032 rb:1.0571 dl:1280-1285 gd:1
+ttp: b622/782 bl:2.2625 bb:1.0335 rl:2.3023 rb:1.0567 dl:1237-1243 gd:1
+ttp: b612/782 bl:2.2380 bb:1.0139 rl:2.3011 rb:1.0558 dl:1186-1190 gd:1
+ttp: b605/782 bl:2.2518 bb:1.0269 rl:2.3002 rb:1.0553 dl:1154-1159 gd:1
+ttp: b593/782 bl:2.2939 bb:1.0126 rl:2.3001 rb:1.0545 dl:1103-1107 gd:1
+ttp: b584/782 bl:2.3000 bb:1.0399 rl:2.3001 rb:1.0543 dl:1064-1069 gd:1
+ttp: b576/782 bl:2.3787 bb:1.0941 rl:2.3013 rb:1.0549 dl:1033-1037 gd:1
+ttp: b570/782 bl:2.3526 bb:1.0538 rl:2.3021 rb:1.0549 dl:1010-1014 gd:1
+ttp: b561/782 bl:2.2486 bb:1.0143 rl:2.3013 rb:1.0543 dl:979-983 gd:1
+ttp: b554/782 bl:2.4295 bb:1.0937 rl:2.3031 rb:1.0548 dl:955-959 gd:1
+ttp: b550/782 bl:2.3654 bb:1.0583 rl:2.3039 rb:1.0549 dl:943-946 gd:1
+ttp: b542/782 bl:2.3289 bb:1.0400 rl:2.3043 rb:1.0547 dl:918-921 gd:1
+ttp: b533/782 bl:2.3751 bb:1.0685 rl:2.3051 rb:1.0549 dl:890-892 gd:1
+ttp: b525/782 bl:2.3540 bb:1.0202 rl:2.3057 rb:1.0544 dl:866-869 gd:1
+ttp: b515/782 bl:2.3456 bb:1.0445 rl:2.3062 rb:1.0543 dl:838-841 gd:1
+ttp: b507/782 bl:2.2990 bb:1.0294 rl:2.3061 rb:1.0540 dl:814-817 gd:1
+ttp: b502/782 bl:2.3205 bb:1.0283 rl:2.3062 rb:1.0537 dl:802-804 gd:1
+ttp: b495/782 bl:2.3096 bb:1.0317 rl:2.3063 rb:1.0535 dl:783-785 gd:1
+ttp: b488/782 bl:2.2943 bb:1.0096 rl:2.3062 rb:1.0531 dl:766-769 gd:1
+ttp: b483/782 bl:2.2585 bb:1.0304 rl:2.3057 rb:1.0528 dl:754-756 gd:1
+ttp: b475/782 bl:2.3657 bb:1.0557 rl:2.3063 rb:1.0529 dl:735-737 gd:1
+ttp: b467/782 bl:2.3490 bb:1.0528 rl:2.3067 rb:1.0529 dl:717-719 gd:1
+ttp: b442/782 bl:2.2637 bb:1.0331 rl:2.3063 rb:1.0527 dl:664-666 gd:1
+ttp: b435/782 bl:2.3146 bb:1.0223 rl:2.3064 rb:1.0524 dl:648-651 gd:1
+ttp: b428/782 bl:2.3059 bb:1.0507 rl:2.3064 rb:1.0524 dl:636-638 gd:1
+ttp: b421/782 bl:2.2927 bb:1.0038 rl:2.3063 rb:1.0520 dl:622-624 gd:1
+ttp: b415/782 bl:2.2854 bb:1.0586 rl:2.3061 rb:1.0521 dl:611-613 gd:1
+ttp: b406/782 bl:2.3052 bb:1.0615 rl:2.3061 rb:1.0522 dl:593-595 gd:1
+ttp: b398/782 bl:2.2450 bb:1.0025 rl:2.3057 rb:1.0518 dl:579-581 gd:1
+ttp: b390/782 bl:2.3508 bb:1.0591 rl:2.3060 rb:1.0519 dl:564-566 gd:1
+ttp: b382/782 bl:2.2946 bb:1.0841 rl:2.3059 rb:1.0521 dl:550-552 gd:1
+ttp: b374/782 bl:2.3055 bb:1.0394 rl:2.3059 rb:1.0520 dl:537-538 gd:1
+ttp: b366/782 bl:2.3324 bb:1.0685 rl:2.3061 rb:1.0521 dl:524-525 gd:1
+ttp: b358/782 bl:2.4081 bb:1.0807 rl:2.3067 rb:1.0523 dl:510-512 gd:1
+ttp: b350/782 bl:2.3254 bb:1.0568 rl:2.3068 rb:1.0523 dl:497-498 gd:1
+ttp: b341/782 bl:2.2983 bb:1.0765 rl:2.3067 rb:1.0524 dl:483-485 gd:1
+ttp: b332/782 bl:2.3008 bb:1.0415 rl:2.3067 rb:1.0524 dl:469-471 gd:1
+ttp: b325/782 bl:2.3480 bb:1.0800 rl:2.3069 rb:1.0525 dl:459-461 gd:1
+ttp: b318/782 bl:2.3393 bb:1.0691 rl:2.3071 rb:1.0526 dl:448-450 gd:1
+ttp: b311/782 bl:2.3483 bb:1.0824 rl:2.3073 rb:1.0527 dl:438-439 gd:1
+ttp: b303/782 bl:2.3933 bb:1.0916 rl:2.3077 rb:1.0529 dl:426-427 gd:1
+ttp: b295/782 bl:2.2652 bb:1.0627 rl:2.3075 rb:1.0530 dl:414-415 gd:1
+ttp: b287/782 bl:2.4058 bb:1.0961 rl:2.3079 rb:1.0531 dl:402-403 gd:1
+ttp: b279/782 bl:2.3198 bb:1.0961 rl:2.3080 rb:1.0533 dl:391-392 gd:1
+ttp: b271/782 bl:2.3787 bb:1.1267 rl:2.3083 rb:1.0536 dl:380-382 gd:1
+ttp: b263/782 bl:2.3945 bb:1.0832 rl:2.3086 rb:1.0537 dl:370-371 gd:1
+ttp: b255/782 bl:2.3686 bb:1.0924 rl:2.3089 rb:1.0539 dl:360-361 gd:1
+ttp: b247/782 bl:2.3481 bb:1.0930 rl:2.3090 rb:1.0540 dl:350-351 gd:1
+ttp: b239/782 bl:2.3827 bb:1.1064 rl:2.3093 rb:1.0542 dl:340-341 gd:1
+ttp: b231/782 bl:2.3032 bb:1.0820 rl:2.3093 rb:1.0543 dl:330-331 gd:1
+ttp: b223/782 bl:2.3331 bb:1.1264 rl:2.3093 rb:1.0546 dl:321-322 gd:1
+ttp: b215/782 bl:2.3911 bb:1.0960 rl:2.3096 rb:1.0547 dl:312-313 gd:1
+ttp: b207/782 bl:2.3590 bb:1.1337 rl:2.3098 rb:1.0549 dl:303-304 gd:1
+ttp: b199/782 bl:2.4295 bb:1.1431 rl:2.3102 rb:1.0552 dl:295-296 gd:1
+ttp: b191/782 bl:2.4232 bb:1.1024 rl:2.3105 rb:1.0554 dl:285-286 gd:1
+ttp: b183/782 bl:2.3185 bb:1.0678 rl:2.3105 rb:1.0554 dl:277-278 gd:1
+ttp: b175/782 bl:2.3931 bb:1.1563 rl:2.3108 rb:1.0557 dl:269-270 gd:1
+ttp: b166/782 bl:2.4733 bb:1.1056 rl:2.3112 rb:1.0558 dl:260-262 gd:1
+ttp: b158/782 bl:2.3438 bb:1.1082 rl:2.3113 rb:1.0559 dl:253-254 gd:1
+ttp: b150/782 bl:2.3297 bb:1.1062 rl:2.3113 rb:1.0561 dl:245-246 gd:1
+ttp: b142/782 bl:2.3805 bb:1.1081 rl:2.3115 rb:1.0562 dl:237-238 gd:1
+ttp: b133/782 bl:2.3743 bb:1.1389 rl:2.3117 rb:1.0564 dl:229-230 gd:1
+ttp: b125/782 bl:2.4792 bb:1.1423 rl:2.3120 rb:1.0566 dl:222-222 gd:1
+ttp: b115/782 bl:2.4610 bb:1.1647 rl:2.3124 rb:1.0568 dl:212-213 gd:1
+ttp: b107/782 bl:2.4470 bb:1.1719 rl:2.3127 rb:1.0570 dl:205-206 gd:1
+ttp: b98/782 bl:2.5967 bb:1.2184 rl:2.3132 rb:1.0574 dl:197-198 gd:1
+ttp: b89/782 bl:2.4945 bb:1.1527 rl:2.3136 rb:1.0575 dl:189-190 gd:1
+ttp: b82/782 bl:2.4833 bb:1.1820 rl:2.3139 rb:1.0578 dl:183-183 gd:1
+ttp: b73/782 bl:2.5372 bb:1.2453 rl:2.3143 rb:1.0581 dl:174-175 gd:1
+ttp: b64/782 bl:2.5231 bb:1.1508 rl:2.3147 rb:1.0582 dl:166-167 gd:1
+ttp: b56/782 bl:2.5337 bb:1.2145 rl:2.3150 rb:1.0585 dl:159-160 gd:1
+ttp: b48/782 bl:2.5281 bb:1.2189 rl:2.3154 rb:1.0587 dl:151-152 gd:1
+ttp: b40/782 bl:2.4877 bb:1.1524 rl:2.3156 rb:1.0589 dl:143-144 gd:1
+ttp: b33/782 bl:2.5933 bb:1.2220 rl:2.3160 rb:1.0591 dl:136-137 gd:1
+ttp: b24/782 bl:2.4569 bb:1.1588 rl:2.3162 rb:1.0592 dl:127-128 gd:1
+ttp: b16/782 bl:2.6281 bb:1.2593 rl:2.3165 rb:1.0594 dl:117-118 gd:1
+ttp: b8/782 bl:2.7817 bb:1.2912 rl:2.3170 rb:1.0597 dl:103-105 gd:1
+quantized_ttt_phased val_loss:2.32225638 val_bpb:1.06118055 eval_time:392807ms
+total_eval_time:392.8s
+=== Done: SEED=42 ===
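
Note on the schedule visible in the log above: the per-chunk learning rates in the `tttg:` lines and the phase boundaries in the `ttt_phased:` line are both consistent with simple closed forms. The sketch below reproduces the logged values; it is inferred from the log, not taken from train_gpt.py, and the names `ttt_chunk_lr` / `phase_boundaries` are hypothetical.

    import math

    def ttt_chunk_lr(chunk: int, num_chunks: int, max_lr: float = 1e-3) -> float:
        # Cosine decay from max_lr at c1 to ~0 at the last logged chunk.
        # Inferred from the log, e.g. c33/131 -> 0.000858, c110/219 -> 0.000500.
        progress = (chunk - 1) / (num_chunks - 1)  # 0.0 at c1, 1.0 at the end
        return 0.5 * max_lr * (1.0 + math.cos(math.pi * progress))

    def phase_boundaries(prefix_docs: int, num_phases: int) -> list[int]:
        # Cumulative equal splits of the prefix docs;
        # phase_boundaries(2500, 3) -> [833, 1666, 2500], matching the log.
        return [prefix_docs * (i + 1) // num_phases for i in range(num_phases)]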