Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
112 changes: 112 additions & 0 deletions .github/scripts/run_smoke.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,112 @@
"""
Run the workspace smoke test suite.

Reads `smoke_tests.txt` from the workspace root and `config/build/env_vars.yaml`
for per-script env var overrides, then runs each listed script with the
appropriate environment. Continues through failures and exits non-zero
if any script failed.

Mirrors the logic of the `/smoke-test` skill so CI and local runs stay
in sync.
"""

from __future__ import annotations

import os
import subprocess
import sys
import time
from pathlib import Path

import yaml


# Workspace root: this script lives at .github/scripts/, so two parents up.
WORKSPACE = Path(__file__).resolve().parents[2]
# Manifest of smoke-test scripts: one path per line, relative to scripts/;
# blank lines and lines starting with "#" are ignored.
SMOKE_FILE = WORKSPACE / "smoke_tests.txt"
# Optional YAML with env-var defaults and per-script overrides (may be absent).
ENV_VARS_FILE = WORKSPACE / "config" / "build" / "env_vars.yaml"
# Every entry in smoke_tests.txt is resolved relative to this directory.
SCRIPTS_DIR = WORKSPACE / "scripts"


def load_smoke_scripts() -> list[str]:
    """Return script paths listed in smoke_tests.txt, skipping blanks and # comments."""
    stripped = (raw.strip() for raw in SMOKE_FILE.read_text().splitlines())
    return [entry for entry in stripped if entry and not entry.startswith("#")]


def load_env_config() -> dict:
    """Parse env_vars.yaml when present; otherwise an empty defaults/overrides config."""
    if ENV_VARS_FILE.exists():
        # safe_load returns None for an empty file — normalize to {}.
        parsed = yaml.safe_load(ENV_VARS_FILE.read_text())
        return parsed if parsed else {}
    return {"defaults": {}, "overrides": []}


def pattern_matches(pattern: str, script_path: str) -> bool:
    """Return True if *pattern* selects *script_path*.

    A pattern containing a slash is a substring match on the path; a bare
    name must equal the script filename's stem exactly.
    """
    if "/" not in pattern:
        return Path(script_path).stem == pattern
    return pattern in script_path


def build_env(script_rel: str, cfg: dict) -> dict:
    """Compose the environment for one script.

    Starts from the current process environment, layers on the config's
    global defaults, then applies every override rule whose pattern matches
    *script_rel* (unsetting keys before setting new ones).
    """
    env = dict(os.environ)
    for key, value in (cfg.get("defaults") or {}).items():
        env[key] = str(value)
    for rule in cfg.get("overrides") or []:
        if not pattern_matches(rule["pattern"], script_rel):
            continue
        for name in rule.get("unset", []):
            env.pop(name, None)
        for name, value in (rule.get("set") or {}).items():
            env[name] = str(value)
    return env


def run_one(script_rel: str, cfg: dict) -> tuple[str, int, float, str]:
    """Execute one smoke script; return (name, returncode, seconds, combined output)."""
    script_env = build_env(script_rel, cfg)
    target = SCRIPTS_DIR / script_rel
    start = time.time()
    # Run from the workspace root so the script's relative paths resolve.
    proc = subprocess.run(
        [sys.executable, str(target)],
        cwd=str(WORKSPACE),
        env=script_env,
        capture_output=True,
        text=True,
    )
    duration = time.time() - start
    combined = proc.stdout + proc.stderr
    return script_rel, proc.returncode, duration, combined


def main() -> int:
    """Run the full smoke suite; return 0 if every script passed, 1 otherwise."""
    if not SMOKE_FILE.exists():
        print(f"ERROR: no smoke_tests.txt at {SMOKE_FILE}", file=sys.stderr)
        return 1

    scripts = load_smoke_scripts()
    if not scripts:
        print("No smoke test scripts listed.")
        return 0

    cfg = load_env_config()
    print(f"Running {len(scripts)} smoke test script(s) from {SMOKE_FILE.name}\n")

    failures: list[tuple[str, int, str]] = []
    for rel in scripts:
        # ::group:: / ::endgroup:: fold each script's log in GitHub Actions.
        print(f"::group::{rel}")
        name, rc, elapsed, output = run_one(rel, cfg)
        print(output, end="")
        if rc == 0:
            status = "PASS"
        else:
            status = f"FAIL (exit {rc})"
        print(f"\n[{status}] {name} — {elapsed:.1f}s")
        print("::endgroup::")
        if rc != 0:
            failures.append((name, rc, output))

    total = len(scripts)
    print(f"\n=== Smoke test summary: {total - len(failures)}/{total} passed ===")
    for name, rc, _ in failures:
        print(f" FAIL {name} (exit {rc})")
    return 1 if failures else 0


if __name__ == "__main__":
    # Propagate the suite result (0 = all passed, 1 = any failure) to CI.
    sys.exit(main())
100 changes: 100 additions & 0 deletions .github/workflows/smoke_tests.yml
Original file line number Diff line number Diff line change
@@ -0,0 +1,100 @@
name: Smoke Tests

on: [push, pull_request]

jobs:
  smoke:
    runs-on: ubuntu-latest
    strategy:
      # Run every matrix entry to completion even if one version fails.
      fail-fast: false
      matrix:
        python-version: ['3.12', '3.13']
    steps:
      # Check out each PyAuto library side by side so matching feature
      # branches can be substituted and the packages installed from source.
      - name: Checkout PyAutoConf
        uses: actions/checkout@v4
        with:
          repository: PyAutoLabs/PyAutoConf
          path: PyAutoConf
      - name: Checkout PyAutoFit
        uses: actions/checkout@v4
        with:
          repository: PyAutoLabs/PyAutoFit
          path: PyAutoFit
      - name: Checkout PyAutoArray
        uses: actions/checkout@v4
        with:
          repository: PyAutoLabs/PyAutoArray
          path: PyAutoArray
      - name: Checkout PyAutoGalaxy
        uses: actions/checkout@v4
        with:
          repository: PyAutoLabs/PyAutoGalaxy
          path: PyAutoGalaxy
      - name: Checkout PyAutoLens
        uses: actions/checkout@v4
        with:
          repository: PyAutoLabs/PyAutoLens
          path: PyAutoLens
      # No `repository:` here — this checks out the current (HowToLens) repo.
      - name: Checkout HowToLens
        uses: actions/checkout@v4
        with:
          path: workspace
      - name: Set up Python ${{ matrix.python-version }}
        uses: actions/setup-python@v5
        with:
          python-version: ${{ matrix.python-version }}
      # Resolve the triggering branch: GITHUB_HEAD_REF for PRs, else the
      # pushed ref with the refs/heads/ prefix stripped.
      - name: Extract branch name
        id: extract_branch
        shell: bash
        run: |
          cd workspace
          echo "branch=${GITHUB_HEAD_REF:-${GITHUB_REF#refs/heads/}}" >> "$GITHUB_OUTPUT"
      # If a sibling library has a branch with the same name, test against
      # it; otherwise each library stays on its default branch.
      - name: Match library branches
        shell: bash
        run: |
          BRANCH="${{ steps.extract_branch.outputs.branch }}"
          for PKG in PyAutoConf PyAutoFit PyAutoArray PyAutoGalaxy PyAutoLens; do
            pushd "$PKG"
            if [[ -n "$(git ls-remote --heads origin "$BRANCH")" ]]; then
              echo "Branch $BRANCH exists in $PKG — checking out"
              git fetch origin "$BRANCH"
              git checkout "$BRANCH"
            else
              echo "Branch $BRANCH not in $PKG — staying on main"
            fi
            popd
          done
      - name: Install dependencies
        run: |
          pip install --upgrade pip setuptools wheel
          pip install pyyaml
          # Only the 3.12 leg installs the [optional] extras; 3.13 installs
          # numba separately instead.
          if [ "${{ matrix.python-version }}" = "3.12" ]; then
            pip install ./PyAutoConf ./PyAutoFit ./PyAutoArray ./PyAutoGalaxy ./PyAutoLens
            pip install "./PyAutoArray[optional]" "./PyAutoGalaxy[optional]" "./PyAutoLens[optional]"
          else
            pip install ./PyAutoConf ./PyAutoFit ./PyAutoArray ./PyAutoGalaxy ./PyAutoLens
            pip install numba
          fi
          pip install tensorflow-probability==0.25.0
      # numba and matplotlib need writable cache dirs on the runner.
      - name: Prepare cache dirs
        run: |
          mkdir -p /tmp/numba_cache /tmp/matplotlib
      - name: Run smoke tests
        env:
          JAX_ENABLE_X64: "True"
          NUMBA_CACHE_DIR: /tmp/numba_cache
          MPLCONFIGDIR: /tmp/matplotlib
        run: |
          cd workspace
          python .github/scripts/run_smoke.py
      - name: Slack notify on failure
        if: ${{ failure() }}
        uses: slackapi/[email protected]
        env:
          SLACK_WEBHOOK_URL: ${{ secrets.SLACK_WEBHOOK_URL }}
        # NOTE(review): with SLACK_WEBHOOK_URL the action posts via webhook;
        # channel-id applies to bot-token mode and may be ignored — confirm.
        with:
          channel-id: C03S98FEDK2
          payload: |
            {
              "text": "${{ github.repository }}/${{ github.ref_name }} smoke tests (Python ${{ matrix.python-version }}) ${{ job.status }}\n${{ github.server_url }}/${{ github.repository }}/actions/runs/${{ github.run_id }}"
            }
23 changes: 23 additions & 0 deletions .github/workflows/url_check.yml
Original file line number Diff line number Diff line change
@@ -0,0 +1,23 @@
name: URL Check

on:
  push:
    branches: [main]
  pull_request:

jobs:
  url_check:
    runs-on: ubuntu-latest
    steps:
      # Check out this repo under repo/ so the checker can scan it.
      - name: Checkout repo
        uses: actions/checkout@v4
        with:
          path: repo
      # The URL-checking script lives in the shared PyAutoBuild tooling repo.
      - name: Checkout PyAutoBuild
        uses: actions/checkout@v4
        with:
          repository: PyAutoLabs/PyAutoBuild
          ref: main
          path: PyAutoBuild
      - name: Run url_check.sh
        run: bash PyAutoBuild/autobuild/url_check.sh repo
12 changes: 12 additions & 0 deletions .gitignore
Original file line number Diff line number Diff line change
@@ -0,0 +1,12 @@
# IDE and log files
root.log
.idea/

# Python bytecode and generated tutorial images
__pycache__/
*.pyc
**/images/

# Artifacts written at runtime by tutorial / simulator / test scripts
output/
dataset/
notebooks/plot/
test_report.md
test_results/
31 changes: 31 additions & 0 deletions CITATIONS.rst
Original file line number Diff line number Diff line change
@@ -0,0 +1,31 @@
.. _references:

Citations & References
======================

The bibtex entries for **PyAutoLens** and its affiliated software packages can be found
`here <https://github.com/Jammy2211/PyAutoLens/blob/main/files/citations.bib>`_, with example text for citing **PyAutoLens**
in `.tex format here <https://github.com/Jammy2211/PyAutoLens/blob/main/files/citations.tex>`_ and
`.md format here <https://github.com/Jammy2211/PyAutoLens/blob/main/files/citations.md>`_. As shown in the examples, we
would greatly appreciate it if you mention **PyAutoLens** by name and include a link to our GitHub page!

**PyAutoLens** is published in the `Journal of Open Source Software <https://joss.theoj.org/papers/10.21105/joss.02825#>`_ and its
entry in the above .bib file is under the citation key ``pyautolens``. Please also cite the MNRAS AutoLens
papers (https://academic.oup.com/mnras/article/452/3/2940/1749640 and https://academic.oup.com/mnras/article-abstract/478/4/4738/5001434?redirectedFrom=fulltext) which are included
under the citation keys ``Nightingale2015`` and ``Nightingale2018``.

You should also specify the non-linear search(es) you use in your analysis (e.g. Dynesty, Emcee, etc) in
the main body of text, and delete as appropriate any packages your analysis did not use. The citations.bib file includes
the citation key for all of these projects.

If you use decomposed mass models (e.g. stellar mass models like an ``Sersic`` or dark matter models like
an ``NFW``) please cite the following paper https://arxiv.org/abs/2106.11464 under
citation key ``Oguri2021``. Our deflection angle calculations are based on this method.

If you specifically use a decomposed mass model with the ``gNFW`` please cite the following paper https://academic.oup.com/mnras/article/488/1/1387/5526256 under
citation key ``Anowar2019``.

The citations.bib file above also includes my work on `using strong lensing to study galaxy structure
<https://ui.adsabs.harvard.edu/abs/2019MNRAS.489.2049N/abstract>`_. If you're feeling kind, please go ahead and stick
a citation in your introduction using \citep{Nightingale2019} or [@Nightingale2019] ;).

82 changes: 82 additions & 0 deletions CLAUDE.md
Original file line number Diff line number Diff line change
@@ -0,0 +1,82 @@
# HowToLens

This is the **HowToLens** tutorial lecture series for `PyAutoLens`, a Python library for strong gravitational lens modeling. Tutorials teach new users what strong lensing is and how to model it from first principles.

## Repository Structure

- `scripts/` — Runnable Python tutorial scripts
- `chapter_1_introduction/` — Grids, profiles, galaxies, ray-tracing, data, fitting
- `chapter_2_lens_modeling/` — Non-linear searches, Bayesian inference, lens modeling
- `chapter_3_search_chaining/` — Search chaining, prior passing, automated pipelines
- `chapter_4_pixelizations/` — Pixelized source reconstruction, inversions, regularization
- `chapter_optional/` — Alternative non-linear searches and advanced topics
- `simulator/` — Simulator scripts that generate the tutorial datasets at runtime
- `notebooks/` — Jupyter notebook versions of scripts (generated from `scripts/`, do not edit directly)
- `config/` — `PyAutoLens` configuration YAML files
- `dataset/` — Empty in the repo; tutorial datasets are written here at runtime by the simulator scripts
- `output/` — Model-fit results (generated at runtime, not committed)

## Running Scripts

Scripts are run from the repository root so relative paths to `dataset/` and `output/` resolve correctly:

```bash
python scripts/chapter_1_introduction/tutorial_1_grids_and_galaxies.py
```

Tutorials in chapters 1 and 2 that need a dataset invoke the relevant script in `scripts/simulator/` via `subprocess` if the dataset folder does not already exist — there is no manual simulate-then-run step.

**Integration testing / fast mode**: set `PYAUTO_TEST_MODE=1` to skip non-linear search sampling:

```bash
PYAUTO_TEST_MODE=1 python scripts/chapter_2_lens_modeling/tutorial_1_non_linear_search.py
```

**Fast smoke tests**: combine test mode with the skip flags:

```bash
PYAUTO_TEST_MODE=2 PYAUTO_SKIP_FIT_OUTPUT=1 PYAUTO_SKIP_VISUALIZATION=1 PYAUTO_SKIP_CHECKS=1 PYAUTO_FAST_PLOTS=1 python scripts/chapter_1_introduction/tutorial_7_fitting.py
```

Note: `PYAUTO_SMALL_DATASETS` is deliberately **not** used in HowToLens. Tutorials assume the full-resolution simulated datasets that the simulator scripts produce.

**Codex / sandboxed runs**: set writable cache directories so `numba` and `matplotlib` do not fail on unwritable home paths:

```bash
NUMBA_CACHE_DIR=/tmp/numba_cache MPLCONFIGDIR=/tmp/matplotlib python scripts/chapter_1_introduction/tutorial_1_grids_and_galaxies.py
```

## Core API Patterns

Imports used throughout the tutorials:

```python
import autofit as af
import autolens as al
import autolens.plot as aplt
```

## Notebooks vs Scripts

Notebooks in `notebooks/` are generated from the `.py` files in `scripts/` using `generate.py` from the `PyAutoBuild` repo. **Always edit the `.py` scripts**, never the notebooks directly. The `# %%` marker alternates between code and markdown cells.

### Building Notebooks

Run from the workspace root:

```bash
PYTHONPATH=../PyAutoBuild/autobuild python3 ../PyAutoBuild/autobuild/generate.py howtolens
```

The `howtolens` project target in `PyAutoBuild/autobuild/config.yaml` is what drives this.

## Relationship to autolens_workspace

HowToLens is the teaching companion to `autolens_workspace`. Many tutorials (particularly in chapters 2–4) point users to `autolens_workspace` scripts (e.g. `scripts/imaging/modeling.py`, `scripts/guides/...`) as the next destination after the relevant concept has been introduced. Those cross-references use absolute paths like `autolens_workspace/scripts/...` and refer to the separate `autolens_workspace` repository — not to anything inside HowToLens.

## Related Repos

- **PyAutoLens** source: `../PyAutoLens`
- **PyAutoGalaxy** source: `../PyAutoGalaxy`
- **autolens_workspace**: `../autolens_workspace` — main user-facing workspace
- **PyAutoBuild**: `../PyAutoBuild` — notebook generation and CI/CD tooling
Loading
Loading