diff --git a/.github/workflows/style.yml b/.github/workflows/style.yml
index 7996e4a536..91327e977e 100644
--- a/.github/workflows/style.yml
+++ b/.github/workflows/style.yml
@@ -14,6 +14,8 @@ jobs:
with:
python-version: '3.13'
+ - name: Install dependencies
+ run: pip install -r requirements.txt
+
- name: Run RST Formatter
- run: |
- bin/format_rst_file.py --check *.rst
+ run: ./bin/format_rst_file.py $(git ls-files '*.rst')
diff --git a/.gitmodules b/.gitmodules
index 234b02abaa..8f74f1180a 100644
--- a/.gitmodules
+++ b/.gitmodules
@@ -2,3 +2,7 @@
path = _spack_root
url = https://github.com/spack/spack.git
branch = releases/v1.0
+[submodule "_spack_packages"]
+ path = _spack_packages
+ url = https://github.com/spack/spack-packages
+ branch = releases/v2025.07
\ No newline at end of file
diff --git a/.readthedocs.yaml b/.readthedocs.yaml
index f5111a7a74..991b776201 100644
--- a/.readthedocs.yaml
+++ b/.readthedocs.yaml
@@ -15,6 +15,9 @@ build:
# golang: "1.20"
apt_packages:
- graphviz
+ jobs:
+ post_checkout:
+ - git fetch --unshallow || true # get accurate "Last updated on" info
# Build documentation in the "docs/" directory with Sphinx
sphinx:
@@ -22,7 +25,7 @@ sphinx:
# You can configure Sphinx to use a different builder, for instance use the dirhtml builder for simpler URLs
# builder: "dirhtml"
# Fail on all warnings to avoid broken references
- # fail_on_warning: true
+ fail_on_warning: true
# We need to include the _spack_root submodule to get the logo image
submodules:
diff --git a/_spack_packages b/_spack_packages
new file mode 160000
index 0000000000..4ff5737035
--- /dev/null
+++ b/_spack_packages
@@ -0,0 +1 @@
+Subproject commit 4ff57370356abebe3d38613bd27a04b3577b9d90
diff --git a/_templates/base.html b/_templates/base.html
index 2badf3be1d..d288e285ec 100644
--- a/_templates/base.html
+++ b/_templates/base.html
@@ -2,12 +2,12 @@
{%- block extrahead %}
-
+
{%- if READTHEDOCS %}
diff --git a/bin/format_rst_file.py b/bin/format_rst_file.py
index 86a91a9efd..af8ea8fa6d 100755
--- a/bin/format_rst_file.py
+++ b/bin/format_rst_file.py
@@ -1,683 +1,205 @@
#!/usr/bin/env python3
-"""
-RST File Sentence Formatter
+# Copyright Spack Project Developers. See COPYRIGHT file for details.
+#
+# SPDX-License-Identifier: (Apache-2.0 OR MIT)
-A tool to reformat RST files by joining sentences split across multiple lines
-and ensuring each sentence is on its own line, while preserving RST structure.
-"""
+"""This script formats reStructuredText files to ensure one sentence per line and no trailing
+whitespace. It exits with a non-zero status if any files were modified."""
-import argparse
+import difflib
+import io
+import json
+import os
import re
+import subprocess
import sys
-from pathlib import Path
-from typing import List, Tuple, Optional
-
-
-class RSTProcessor:
- """Processes RST files to format sentences properly."""
+from typing import List
+
+import black
+from docutils import nodes
+from docutils.core import publish_doctree
+from docutils.parsers.rst import Directive, directives
+from ruamel.yaml import YAML
+
+END_OF_SENTENCE = re.compile(
+ r"""
+(
+ (?:
+ (?<!\be\.g)(?<!\bi\.e)(?<!\betc) # do not split after common abbreviations
+ [.!?] # end-of-sentence punctuation
+ )
+ ["')\]]* # optionally followed by closing quotes, parentheses, or brackets
+)
+\s+ # whitespace that is replaced by a line break
+(?=[A-Z0-9"'(\[`]) # lookahead for the start of the next sentence
+""",
+ re.VERBOSE,
+)
+
+# Tolerant docutils settings, so Sphinx-specific roles and directives do not abort parsing.
+DOCUTILS_SETTING = {"report_level": 5, "halt_level": 5}
+
+
+class SphinxCodeBlock(Directive):
+ """Minimal stand-in for Sphinx's ``code-block`` directive so docutils can parse it."""
+
+ optional_arguments = 1
+ has_content = True
+
+ def run(self) -> List[nodes.Node]:
+ # Produce a literal block with block.attributes["language"] set.
+ language = self.arguments[0] if self.arguments else "python"
+ literal = nodes.literal_block("\n".join(self.content), "\n".join(self.content))
+ literal["language"] = language
+ return [literal]
+
+
+directives.register_directive("code-block", SphinxCodeBlock)
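+# Registering the stand-in gives parsed literal blocks the ``language`` attribute
+# that _format_code_blocks relies on.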
+
+
+class ParagraphInfo:
+ lineno: int
+ end_lineno: int
+ src: str
+ lines: List[str]
+
+ def __init__(self, line: int, src: str) -> None:
+ self.lineno = line
+ self.src = src
+ self.lines = src.splitlines()
+ self.end_lineno = line + len(self.lines) - 1
+
+
+def _is_node_in_table(node: nodes.Node) -> bool:
+ """Check if a node is inside a table by walking up the parent chain."""
+ while node.parent:
+ node = node.parent
+ if isinstance(node, nodes.table):
+ return True
+ return False
- def process_file(self, file_path: Path) -> bool:
- """
- Process a single RST file.
- Args:
- file_path: Path to the RST file to process
+def _format_code_blocks(document: nodes.document, path: str) -> None:
+ """Try to parse and format Python, YAML, and JSON code blocks. This does *not* update the
+ sources, but merely warns. That's because not all code examples are meant to be valid."""
+ for code_block in document.findall(nodes.literal_block):
+ language = code_block.attributes.get("language", "")
+ if language not in ("python", "yaml", "json"):
+ continue
+ original = code_block.astext()
+ line = code_block.line if code_block.line else 0
- Returns:
- True if file was changed, False if already formatted correctly
- """
try:
- # Read the original file
- content = self._read_file(file_path)
- if content is None:
- return False
-
- # Process the content
- processed_content = self._process_rst_content(content)
-
- # Check if content changed
- if content == processed_content:
- return False # No changes needed
-
- # Write back to the same file
- if self._write_file(file_path, processed_content):
- return True # File was changed
+ if language == "python":
+ formatted = black.format_str(original, mode=black.FileMode(line_length=99))
+ elif language == "yaml":
+ yaml = YAML(pure=True)
+ yaml.width = 10000 # do not wrap lines
+ yaml.preserve_quotes = True # do not force particular quotes
+ buf = io.BytesIO()
+ yaml.dump(yaml.load(original), buf)
+ formatted = buf.getvalue().decode("utf-8")
+ elif language == "json":
+ formatted = json.dumps(json.loads(original), indent=2)
else:
- return False
-
- except Exception as e:
- print(f"error: cannot format {file_path}: {e}", file=sys.stderr)
- return False
-
- def _read_file(self, file_path: Path) -> Optional[str]:
- """Read file content safely."""
- try:
- with open(file_path, "r", encoding="utf-8") as f:
- return f.read()
- except Exception as e:
- print(f"error: cannot read {file_path}: {e}", file=sys.stderr)
- return None
-
- def _write_file(self, file_path: Path, content: str) -> bool:
- """Write file content safely."""
- try:
- with open(file_path, "w", encoding="utf-8") as f:
- f.write(content)
- return True
+ assert False
except Exception as e:
- print(f"error: cannot write {file_path}: {e}", file=sys.stderr)
- return False
-
- def _process_rst_content(self, content: str) -> str:
- """
- Process RST content to format sentences properly.
-
- Args:
- content: Raw RST content
-
- Returns:
- Processed RST content
- """
- lines = content.split("\n")
- result_lines = []
- in_license_header = False
- in_directive_block = False
- directive_indent = 0
-
- # Check if we start with a license header
- if lines and self._is_license_header_start(lines[0]):
- in_license_header = True
-
- i = 0
- while i < len(lines):
- line = lines[i]
-
- # Handle license header
- if in_license_header:
- result_lines.append(line)
- if self._is_license_header_end(line, lines, i):
- in_license_header = False
- i += 1
- continue
-
- # Handle RST lists (process entire list at once)
- if self._is_list_item(line):
- list_lines, next_i = self._collect_list(lines, i)
- result_lines.extend(list_lines)
- i = next_i
- continue
-
- # Handle RST tables (process entire table at once)
- if self._is_table_line(line):
- table_lines, next_i = self._collect_table(lines, i)
- result_lines.extend(table_lines)
- i = next_i
- continue
-
- # Handle RST directive blocks (including code blocks)
- if self._is_rst_directive_start(line):
- in_directive_block = True
- directive_indent = self._get_indent_level(line)
- result_lines.append(line)
- i += 1
- continue
-
- if in_directive_block:
- if self._is_directive_block_end(line, directive_indent):
- in_directive_block = False
- else:
- result_lines.append(line)
- i += 1
- continue
-
- # Process regular content (only when not in special blocks)
- if not in_license_header and not in_directive_block:
- paragraph_lines, next_i = self._collect_paragraph(lines, i)
- processed_lines = self._process_paragraph(paragraph_lines)
- result_lines.extend(processed_lines)
- i = next_i
- else:
- result_lines.append(line)
- i += 1
-
- return "\n".join(result_lines)
-
- def _is_license_header_start(self, line: str) -> bool:
- """Check if line starts a license header."""
- return line.strip().startswith("..") and "Copyright" in line
-
- def _is_license_header_end(self, line: str, lines: List[str], index: int) -> bool:
- """Check if license header ends."""
- return (
- line.strip() == ""
- and index + 1 < len(lines)
- and not lines[index + 1].strip().startswith("..")
+ print(
+ f"{path}:{line}: formatting failed: {e}: {original!r}", flush=True, file=sys.stderr
+ )
+ continue
+ if formatted == original:
+ continue
+ diff = "\n".join(
+ difflib.unified_diff(
+ original.splitlines(),
+ formatted.splitlines(),
+ lineterm="",
+ fromfile=f"{path}:{line} (original)",
+ tofile=f"{path}:{line} (suggested, NOT required)",
+ )
)
-
- def _is_list_item(self, line: str) -> bool:
- """Check if line is an RST list item."""
- stripped = line.strip()
- if not stripped:
- return False
-
- # RST list patterns
- list_patterns = [
- r"^\s*\*\s+", # Bullet list: * item
- r"^\s*\+\s+", # Bullet list: + item
- r"^\s*-\s+", # Bullet list: - item
- r"^\s*\d+\.\s+", # Numbered list: 1. item
- r"^\s*#\.\s+", # Auto-numbered list: #. item
- r"^\s*\([a-zA-Z0-9]+\)\s+", # Parenthesized list: (a) item
- r"^\s*[a-zA-Z]\.\s+", # Letter list: a. item
- r"^\s*[IVX]+\.\s+", # Roman numeral list: I. item
+ if diff:
+ print(diff, flush=True, file=sys.stderr)
+
+
+def _format_paragraphs(document: nodes.document, path: str, src_lines: List[str]) -> bool:
+ """Format paragraphs in the document. Returns True if ``src_lines`` was modified."""
+
+ paragraphs = [
+ ParagraphInfo(line=p.line, src=p.rawsource)
+ for p in document.findall(nodes.paragraph)
+ if p.line is not None and p.rawsource and not _is_node_in_table(p)
+ ]
+
+ # Work from bottom to top to avoid messing up line numbers
+ paragraphs.sort(key=lambda p: p.lineno, reverse=True)
+ modified = False
+
+ for p in paragraphs:
+ # docutils does not give us the column offset, so we'll find it ourselves.
+ col_offset = src_lines[p.lineno - 1].rfind(p.lines[0])
+ assert col_offset >= 0, f"{path}:{p.lineno}: rst parsing error."
+ prefix = lambda i: " " * col_offset if i > 0 else src_lines[p.lineno - 1][:col_offset]
+
+ # Defensive check to ensure the source paragraph matches the docutils paragraph
+ for i, line in enumerate(p.lines):
+ line_lhs = f"{prefix(i)}{line}"
+ line_rhs = src_lines[p.lineno - 1 + i].rstrip() # docutils trims trailing whitespace
+ assert line_lhs == line_rhs, f"{path}:{p.lineno + i}: rst parsing error."
+
+ # Replace current newlines with whitespace, and then split sentences.
+ new_paragraph_src = END_OF_SENTENCE.sub(r"\1\n", p.src.replace("\n", " "))
+ new_paragraph_lines = [
+ f"{prefix(i)}{line.lstrip()}" for i, line in enumerate(new_paragraph_src.splitlines())
]
- return any(re.match(pattern, line) for pattern in list_patterns)
-
- def _collect_list(self, lines: List[str], start_idx: int) -> Tuple[List[str], int]:
- """
- Collect all lines that are part of an RST list.
-
- Args:
- lines: All lines in the document
- start_idx: Starting index
-
- Returns:
- Tuple of (list_lines, next_index)
- """
- list_lines = []
- i = start_idx
- base_indent = self._get_indent_level(lines[start_idx])
-
- while i < len(lines):
- line = lines[i]
-
- # If it's a list item at the same or deeper indentation, include it
- if self._is_list_item(line):
- current_indent = self._get_indent_level(line)
- if current_indent >= base_indent:
- list_lines.append(line)
- i += 1
- continue
- else:
- # List item at shallower indentation, end current list
- break
-
- # If it's an empty line, check if the list continues
- if not line.strip():
- # Look ahead to see if list continues
- if i + 1 < len(lines):
- next_line = lines[i + 1]
- if (
- self._is_list_item(next_line)
- and self._get_indent_level(next_line) >= base_indent
- ):
- list_lines.append(line) # Include the empty line
- i += 1
- continue
- elif (
- next_line.strip()
- and self._get_indent_level(next_line) > base_indent
- ):
- # Continuation of list item content
- list_lines.append(line)
- i += 1
- continue
- # Empty line and no more list content, end list
- break
-
- # If it's indented content (continuation of list item), include it
- current_indent = self._get_indent_level(line)
- if line.strip() and current_indent > base_indent:
- list_lines.append(line)
- i += 1
- continue
-
- # If it's not a list item, not empty, and not indented continuation, end list
- break
-
- return list_lines, i
-
- def _is_table_line(self, line: str) -> bool:
- """Check if line is part of an RST table."""
- stripped = line.strip()
- if not stripped:
- return False
-
- # Grid table patterns
- # Lines made of =, -, +, and spaces (table borders)
- if re.match(r"^[=\-+\s]+$", stripped) and len(stripped) > 3:
- return True
-
- # Simple table patterns (lines with multiple spaces that could be column separators)
- # But be more conservative - look for patterns that are clearly tabular
- if " " in stripped and not stripped.startswith(".."):
- # Check if it looks like a table row (has multiple column-like segments)
- segments = [s.strip() for s in stripped.split(" ") if s.strip()]
- if len(segments) >= 2:
- return True
+ if new_paragraph_lines != src_lines[p.lineno - 1 : p.end_lineno]:
+ modified = True
+ src_lines[p.lineno - 1 : p.end_lineno] = new_paragraph_lines
- return False
+ return modified
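+
+
+# Illustration only (assuming the END_OF_SENTENCE pattern reconstructed above):
+#
+# >>> END_OF_SENTENCE.sub(r"\1\n", "One sentence. Another one.")
+# 'One sentence.\nAnother one.'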
- def _collect_table(self, lines: List[str], start_idx: int) -> Tuple[List[str], int]:
- """
- Collect all lines that are part of an RST table.
-
- Args:
- lines: All lines in the document
- start_idx: Starting index
-
- Returns:
- Tuple of (table_lines, next_index)
- """
- table_lines = []
- i = start_idx
-
- # Collect all consecutive table-related lines
- while i < len(lines):
- line = lines[i]
-
- # If it's a table line, include it
- if self._is_table_line(line):
- table_lines.append(line)
- i += 1
- continue
-
- # If it's an empty line, check if the next line is also a table line
- if not line.strip():
- # Look ahead to see if table continues
- if i + 1 < len(lines) and self._is_table_line(lines[i + 1]):
- table_lines.append(line) # Include the empty line
- i += 1
- continue
- else:
- # Empty line and no more table content, end table
- break
-
- # If it's not a table line and not empty, end table
- break
-
- return table_lines, i
-
- def _is_rst_directive_start(self, line: str) -> bool:
- """Check if line starts an RST directive that has indented content."""
- # Match any RST directive pattern (allowing hyphens in directive names)
- return bool(re.match(r"^\s*\.\.\s+[\w-]+::", line))
-
- def _is_directive_block_end(self, line: str, directive_indent: int) -> bool:
- """Check if directive block ends."""
- if not line.strip():
- return False
- current_indent = self._get_indent_level(line)
- return current_indent <= directive_indent
-
- def _get_indent_level(self, line: str) -> int:
- """Get the indentation level of a line."""
- return len(line) - len(line.lstrip())
-
- def _collect_paragraph(
- self, lines: List[str], start_idx: int
- ) -> Tuple[List[str], int]:
- """
- Collect lines that form a paragraph.
-
- Args:
- lines: All lines in the document
- start_idx: Starting index
-
- Returns:
- Tuple of (paragraph_lines, next_index)
- """
- if start_idx >= len(lines):
- return [], start_idx
-
- current_line = lines[start_idx]
-
- # Handle special lines
- if self._is_special_line(current_line) or not current_line.strip():
- return [current_line], start_idx + 1
-
- # Collect continuation lines
- paragraph_lines = [current_line]
- base_indent = self._get_indent_level(current_line)
-
- i = start_idx + 1
- while i < len(lines):
- line = lines[i]
-
- # Stop conditions
- if (
- not line.strip()
- or self._is_special_line(line)
- or self._is_table_line(line) # Stop at table lines
- or self._is_list_item(line) # Stop at list items
- or abs(self._get_indent_level(line) - base_indent) > 2
- ):
- break
-
- paragraph_lines.append(line)
- i += 1
-
- return paragraph_lines, i
-
- def _is_special_line(self, line: str) -> bool:
- """Check if a line is a special RST construct."""
- stripped = line.strip()
-
- patterns = [
- r"^\.\.", # RST directives
- r'^[=\-~^"#*+<>]{3,}$', # RST headers
- r"^:", # RST fields
- r"^\s*\.\.\s+_", # RST targets
- ]
- return any(re.match(pattern, line) for pattern in patterns)
-
- def _process_paragraph(self, paragraph_lines: List[str]) -> List[str]:
- """
- Process a paragraph by joining and splitting sentences.
-
- Args:
- paragraph_lines: Lines that form a paragraph
-
- Returns:
- Processed lines with proper sentence formatting
- """
- if not paragraph_lines or len(paragraph_lines) == 1:
- return paragraph_lines
-
- if all(self._is_special_line(line) for line in paragraph_lines):
- return paragraph_lines
-
- # Join all lines
- joined_text = self._join_paragraph_lines(paragraph_lines)
-
- if not joined_text:
- return paragraph_lines
-
- # Split into sentences
- sentences = self._split_into_sentences(joined_text)
-
- # Return sentences with no leading whitespace
- if len(sentences) > 1:
- return [sentence for sentence in sentences if sentence.strip()]
- else:
- return [joined_text]
-
- def _join_paragraph_lines(self, lines: List[str]) -> str:
- """Join paragraph lines into a single text block."""
- joined_text = ""
- for line in lines:
- text = line.strip()
- if text:
- if joined_text and not joined_text.endswith(" "):
- joined_text += " "
- joined_text += text
- return joined_text
-
- def _split_into_sentences(self, text: str) -> List[str]:
- """Split text into sentences at sentence boundaries."""
- # Pattern for sentence endings
- sentence_pattern = r"([.!?]+)(\s+)(?=[A-Z]|\s*$)"
- parts = re.split(sentence_pattern, text)
-
- if len(parts) <= 1:
- return [text]
-
- sentences = []
- current_sentence = ""
-
- i = 0
- while i < len(parts):
- if i + 2 < len(parts) and re.match(r"[.!?]+", parts[i + 1]):
- current_sentence += parts[i] + parts[i + 1]
- sentences.append(current_sentence.strip())
- current_sentence = ""
- i += 3
- else:
- current_sentence += parts[i]
- i += 1
-
- if current_sentence.strip():
- sentences.append(current_sentence.strip())
-
- return [s for s in sentences if s.strip()]
-
-
-def is_rst_file(file_path: Path) -> bool:
- """
- Check if a file is a reStructuredText file.
-
- Args:
- file_path: Path to check
-
- Returns:
- True if file appears to be RST, False otherwise
- """
- # Check file extension first
- rst_extensions = {".rst", ".rest", ".restx", ".rtxt"}
- if file_path.suffix.lower() in rst_extensions:
- return True
-
- # For files without RST extensions, check content
- try:
- with open(file_path, "r", encoding="utf-8") as f:
- # Read first few lines to check for RST patterns
- lines = []
- for _ in range(20): # Check first 20 lines
- try:
- line = next(f)
- lines.append(line)
- except StopIteration:
- break
-
- content = "".join(lines)
-
- # Look for common RST patterns
- rst_patterns = [
- r"^\.\. ", # RST directives
- r'^[=\-~^"#*+<>]{3,}$', # RST headers/underlines
- r"^\.\. _", # RST targets
- r"^\.\. \|", # RST substitutions
- r"::\s*$", # RST literal blocks
- r"^\.\. code-block::", # Code blocks
- r"^\.\. literalinclude::", # Literal includes
- r"^\.\. note::", # Admonitions
- r"^\.\. warning::", # Admonitions
- r"^\.\. image::", # Images
- r"^\.\. figure::", # Figures
- ]
+def reformat_rst_file(path: str) -> bool:
+ """Reformat a reStructuredText file "in-place". Returns True if modified, False otherwise."""
+ with open(path, "r", encoding="utf-8") as f:
+ src = f.read()
- # Count RST-specific patterns
- rst_indicators = 0
- for line in lines:
- line = line.strip()
- for pattern in rst_patterns:
- if re.match(pattern, line, re.MULTILINE):
- rst_indicators += 1
- break
+ src_lines = src.splitlines()
+ document: nodes.document = publish_doctree(src, settings_overrides=DOCUTILS_SETTING)
- # If we found multiple RST indicators, consider it an RST file
- return rst_indicators >= 2
+ _format_code_blocks(document, path)
- except (UnicodeDecodeError, IOError):
+ if not _format_paragraphs(document, path, src_lines):
return False
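+ # Write to a temporary file first and rename it into place, so an interrupted
+ # run cannot leave a truncated file behind.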
+ with open(f"{path}.tmp", "w", encoding="utf-8") as f:
+ f.write("\n".join(src_lines))
+ f.write("\n")
+ os.rename(f"{path}.tmp", path)
+ print(f"Fixed reStructuredText formatting: {path}", flush=True)
+ return True
-def filter_rst_files(file_paths: List[Path]) -> Tuple[List[Path], List[Path]]:
- """
- Filter files to only include RST files.
-
- Args:
- file_paths: List of file paths to filter
-
- Returns:
- Tuple of (rst_files, skipped_files)
- """
- rst_files = []
- skipped_files = []
-
- for file_path in file_paths:
- if not file_path.exists():
- print(
- f"error: cannot read {file_path}: No such file or directory",
- file=sys.stderr,
- )
- continue
-
- if not file_path.is_file():
- print(f"error: cannot read {file_path}: Not a file", file=sys.stderr)
- continue
-
- if is_rst_file(file_path):
- rst_files.append(file_path)
- else:
- skipped_files.append(file_path)
-
- return rst_files, skipped_files
-
-
-def main() -> int:
- """Main entry point."""
- parser = argparse.ArgumentParser(
- description="Reformat RST files by ensuring each sentence is on its own line",
- formatter_class=argparse.RawDescriptionHelpFormatter,
- epilog="""
-Examples:
- %(prog)s file1.rst file2.rst file3.rst
- %(prog)s *.rst
- %(prog)s docs/*.rst
- %(prog)s docs/ # Will find all RST files in directory
- """,
- )
-
- parser.add_argument(
- "files", nargs="+", type=Path, help="RST files or directories to process"
- )
-
- parser.add_argument(
- "--check",
- action="store_true",
- help="Don't write the files back, just return the status. "
- "Return code 0 means nothing would change. "
- "Return code 1 means some files would be reformatted.",
- )
-
- parser.add_argument(
- "--verbose", action="store_true", help="Show files that are skipped"
- )
-
- parser.add_argument("--version", action="version", version="%(prog)s 1.0.0")
-
- args = parser.parse_args()
-
- # Expand directories to find RST files
- all_files = []
- for path in args.files:
- if path.is_dir():
- # Find all potential RST files in directory
- for ext in [".rst", ".rest", ".restx", ".rtxt"]:
- all_files.extend(path.glob(f"**/*{ext}"))
- # Also check files without extensions that might be RST
- for file_path in path.rglob("*"):
- if file_path.is_file() and not file_path.suffix:
- all_files.append(file_path)
- else:
- all_files.append(path)
-
- # Filter to only RST files
- rst_files, skipped_files = filter_rst_files(all_files)
-
- # Show skipped files if verbose
- if args.verbose and skipped_files:
- for file_path in skipped_files:
- print(f"skipped: {file_path} (not a reStructuredText file)")
-
- if not rst_files:
- if skipped_files:
- print("No reStructuredText files found to format")
- return 0
-
- # Process RST files
- processor = RSTProcessor()
- changed_files = []
- unchanged_files = []
- error_files = []
-
- for file_path in rst_files:
- try:
- # Read the original file
- content = processor._read_file(file_path)
- if content is None:
- error_files.append(file_path)
- continue
-
- # Process the content
- processed_content = processor._process_rst_content(content)
-
- # Check if content changed
- if content == processed_content:
- unchanged_files.append(file_path)
- else:
- changed_files.append(file_path)
- if not args.check:
- # Write back to the same file
- if not processor._write_file(file_path, processed_content):
- error_files.append(file_path)
- changed_files.remove(file_path)
-
- except Exception as e:
- print(f"error: cannot format {file_path}: {e}", file=sys.stderr)
- error_files.append(file_path)
-
- # Report results in black style
- total_files = len(rst_files)
-
- if args.check:
- if changed_files:
- print(
- f"would reformat {len(changed_files)} file{'s' if len(changed_files) != 1 else ''}"
- )
- for file_path in changed_files:
- print(f"would reformat {file_path}")
- return 1
- else:
- if total_files == 1:
- print(f"{total_files} file left unchanged")
- else:
- print(f"{total_files} files left unchanged")
- return 0
- else:
- # Normal mode output
- if changed_files:
- for file_path in changed_files:
- print(f"reformatted {file_path}")
-
- if unchanged_files and not changed_files:
- # Only show "left unchanged" if no files were changed
- if len(unchanged_files) == 1:
- print(f"{len(unchanged_files)} file left unchanged")
- else:
- print(f"{len(unchanged_files)} files left unchanged")
- elif unchanged_files and changed_files:
- # Show summary when both changed and unchanged files exist
- changed_count = len(changed_files)
- unchanged_count = len(unchanged_files)
-
- parts = []
- if changed_count:
- parts.append(
- f"{changed_count} file{'s' if changed_count != 1 else ''} reformatted"
- )
- if unchanged_count:
- parts.append(
- f"{unchanged_count} file{'s' if unchanged_count != 1 else ''} left unchanged"
- )
-
- print(", ".join(parts))
- return 1 if error_files else 0
+def main(*files: str) -> None:
+ modified = False
+ for f in files:
+ modified |= reformat_rst_file(f)
+ if modified:
+ subprocess.run(["git", "--no-pager", "diff", "--color=always", "--", *files])
+ sys.exit(1 if modified else 0)
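+
+
+# Example invocation (the same form the style workflow above uses):
+#
+# ./bin/format_rst_file.py $(git ls-files '*.rst')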
if __name__ == "__main__":
- sys.exit(main())
+ main(*sys.argv[1:])
diff --git a/common/setup.rst b/common/setup.rst
index 9ec2a17b97..72a131c8fc 100644
--- a/common/setup.rst
+++ b/common/setup.rst
@@ -12,9 +12,5 @@
spack mirror add --unsigned tutorial /mirror
spack compiler find
- See the :ref:`basics-tutorial` for full details on setup. For more help, join us in the ``#tutorial`` channel on Slack -- get an invitation at `slack.spack.io <https://slack.spack.io>`_
-
- .. warning::
-
- The ``spack tutorial -y`` command is intended for use in a container or VM.
- Use with care in other environments since it replaces some configuration files in order to establish suitable settings for the tutorial.
+ See the :ref:`basics-tutorial` for full details on setup.
+ For more help, join us in the ``#tutorial`` channel on Slack -- get an invitation at `slack.spack.io <https://slack.spack.io>`_
diff --git a/conf.py b/conf.py
index 4dfb2e85f8..f7c6b77412 100644
--- a/conf.py
+++ b/conf.py
@@ -1,7 +1,6 @@
# Copyright Spack Project Developers. See COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
-
# flake8: noqa
# -*- coding: utf-8 -*-
#
@@ -15,14 +14,15 @@
#
# All configuration values have a default; values that are commented out
# serve to show the default.
-import sys
-import os
-from sphinx.domains.python import PythonDomain
+import os
+import sys
+from pygments.formatters.html import HtmlFormatter
from pygments.lexer import RegexLexer, default
from pygments.token import *
-
+from sphinx.domains.python import PythonDomain
+from sphinx.highlighting import PygmentsBridge
# -- Spack customizations -----------------------------------------------------
# Add the Spack bin directory to the path so that we can use its output in docs.
@@ -35,6 +35,25 @@
sys.path.insert(0, os.path.abspath("_spack_root/lib/spack/"))
+
+class NoWhitespaceHtmlFormatter(HtmlFormatter):
+ """HTML formatter that suppresses redundant span elements for Text.Whitespace tokens."""
+
+ def _get_css_classes(self, ttype):
+ # For Text.Whitespace return an empty string, which avoids
+ # elements from being generated.
+ return "" if ttype is Text.Whitespace else super()._get_css_classes(ttype)
+
+
+class CustomPygmentsBridge(PygmentsBridge):
+ def get_formatter(self, **options):
+ return NoWhitespaceHtmlFormatter(**options)
+
+
+# Use custom HTML formatter to avoid redundant <span> elements.
+# See https://github.com/pygments/pygments/issues/1905#issuecomment-3170486995.
+PygmentsBridge.html_formatter = NoWhitespaceHtmlFormatter
+
# Enable todo items
todo_include_todos = True
@@ -112,12 +131,10 @@ class SpecLexer(RegexLexer):
# Disable duplicate cross-reference warnings.
#
class PatchedPythonDomain(PythonDomain):
def resolve_xref(self, env, fromdocname, builder, typ, target, node, contnode):
if "refspecific" in node:
del node["refspecific"]
- return super(PatchedPythonDomain, self).resolve_xref(
- env, fromdocname, builder, typ, target, node, contnode
- )
+ return super().resolve_xref(env, fromdocname, builder, typ, target, node, contnode)
def setup(sphinx):
@@ -127,7 +144,7 @@ def setup(sphinx):
# -- General configuration -----------------------------------------------------
# If your documentation needs a minimal Sphinx version, state it here.
-needs_sphinx = "1.8"
+needs_sphinx = "3.4"
# Add any Sphinx extension module names here, as strings. They can be extensions
# coming with Sphinx (named 'sphinx.ext.*') or your custom ones.
@@ -135,6 +152,8 @@ def setup(sphinx):
"sphinx.ext.graphviz",
"sphinx.ext.todo",
"sphinx_copybutton",
+ "sphinx_last_updated_by_git",
+ "sphinx_sitemap",
]
# Set default graphviz options
@@ -287,7 +306,7 @@ def setup(sphinx):
# html_show_sourcelink = True
# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
-# html_show_sphinx = False
+html_show_sphinx = False
# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
# html_show_copyright = True
@@ -300,19 +319,27 @@ def setup(sphinx):
# This is the file name suffix for HTML files (e.g. ".xhtml").
# html_file_suffix = None
+# Base URL for the documentation, used to generate canonical URLs for better indexing
+html_baseurl = "https://spack-tutorial.readthedocs.io/en/latest/"
+
# Output file base name for HTML help builder.
htmlhelp_basename = "Spackdoc"
+# Sitemap settings
+sitemap_show_lastmod = True
+sitemap_url_scheme = "{link}"
+sitemap_excludes = ["search.html"]
+
# -- Options for LaTeX output --------------------------------------------------
latex_elements = {
# The paper size ('letterpaper' or 'a4paper').
- #'papersize': 'letterpaper',
+ # 'papersize': 'letterpaper',
# The font size ('10pt', '11pt' or '12pt').
- #'pointsize': '10pt',
+ # 'pointsize': '10pt',
# Additional stuff for the LaTeX preamble.
- #'preamble': '',
+ # 'preamble': '',
}
# Grouping the document tree into LaTeX files. List of tuples
diff --git a/index.rst b/index.rst
index 9cb55cb102..d9eb620429 100644
--- a/index.rst
+++ b/index.rst
@@ -51,7 +51,8 @@ You can invoke
$ docker pull ghcr.io/spack/tutorial:hpcic25
$ docker run -it ghcr.io/spack/tutorial:hpcic25
-to start using the container. You should now be ready to run through our demo scripts:
+to start using the container.
+You should now be ready to run through our demo scripts:
#. :ref:`basics-tutorial`
#. :ref:`environments-tutorial`
diff --git a/requirements.txt b/requirements.txt
index 9be78f5a15..6c1839d80f 100644
--- a/requirements.txt
+++ b/requirements.txt
@@ -4,7 +4,10 @@ sphinx==8.2.3
sphinxcontrib-programoutput==0.18
sphinx-copybutton==0.5.2
sphinx_design==0.6.1
-furo==2025.7.19
+sphinx-last-updated-by-git==0.3.8
+sphinx-sitemap==2.8.0
+# fork of furo with a few changes that are not merged upstream
+git+https://github.com/haampie/furo.git@c1d161cb9e04b481ec3331db981aacfa132ecaa6#egg=furo
python-levenshtein==0.27.1
docutils==0.21.2
pygments==2.19.2
@@ -14,3 +17,4 @@ isort==6.0.1
black==25.1.0
flake8==7.3.0
mypy==1.17.1
+ruamel.yaml==0.18.15
diff --git a/tutorial_advanced_packaging.rst b/tutorial_advanced_packaging.rst
index b6540136bf..ce84748a6f 100644
--- a/tutorial_advanced_packaging.rst
+++ b/tutorial_advanced_packaging.rst
@@ -22,8 +22,7 @@ Setup for the Tutorial
.. note::
- We do not recommend doing this section of the tutorial in a
- production Spack instance.
+ We do not recommend doing this section of the tutorial in a production Spack instance.
The tutorial uses custom package definitions with missing sections that will be filled in during the tutorial.
These package definitions are stored in a separate package repository, which can be enabled with:
@@ -53,11 +52,10 @@ Now, you are ready to set your preferred ``EDITOR`` and continue with the rest o
.. note::
- Several of these packages depend on an MPI implementation. You can use
- OpenMPI if you install it from scratch, but this is slow (>10 min.).
- A binary cache of MPICH may be provided, in which case you can force
- the package to use it and install quickly. All tutorial examples with
- packages that depend on MPICH include the spec syntax for building with it.
+ Several of these packages depend on an MPI implementation.
+ You can use OpenMPI if you install it from scratch, but this is slow (>10 min.).
+ A binary cache of MPICH may be provided, in which case you can force the package to use it and install quickly.
+ All tutorial examples with packages that depend on MPICH include the spec syntax for building with it.
.. _adv_pkg_tutorial_start:
@@ -75,21 +73,17 @@ Set environment variables in dependent packages at build-time
Dependencies can set environment variables that are required when their dependents build.
For example, when a package depends on a python extension like py-numpy, Spack's ``python`` package will add it to ``PYTHONPATH`` so it is available at build time; this is required because the default setup that Spack does is not sufficient for python to import modules.
-To provide environment setup for a dependent, a package can implement the
-:py:func:`setup_dependent_build_environment
-` and/or :py:func:`setup_dependent_run_environment ` functions.
+To provide environment setup for a dependent, a package can implement the :py:func:`setup_dependent_build_environment ` and/or :py:func:`setup_dependent_run_environment ` functions.
These functions take as a parameter a :py:class:`EnvironmentModifications ` object, which includes convenience methods to update the environment.
For example, an MPI implementation can set ``MPICC`` for build-time use for packages that depend on it:
.. code-block:: python
def setup_dependent_build_environment(self, env, dependent_spec):
- env.set('MPICC', join_path(self.prefix.bin, 'mpicc'))
+ env.set("MPICC", join_path(self.prefix.bin, "mpicc"))
In this case packages that depend on ``mpi`` will have ``MPICC`` defined in their environment when they build.
-This section is focused on modifying the build-time environment represented by ``env``, but it's worth noting that modifications to the run-time environment, made through the
-:py:func:`setup_dependent_run_environment
-` function's ``env`` parameter, are included in Spack's automatically-generated module files.
+This section is focused on modifying the build-time environment represented by ``env``, but it's worth noting that modifications to the run-time environment, made through the :py:func:`setup_dependent_run_environment ` function's ``env`` parameter, are included in Spack's automatically-generated module files.
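+
+As a minimal sketch (hypothetical code, mirroring the build-time example above rather than any real package), the run-time counterpart has the same shape:
+
+.. code-block:: python
+
+   def setup_dependent_run_environment(self, env, dependent_spec):
+       # Make this package's executables available to dependents at run time.
+       env.prepend_path("PATH", self.prefix.bin)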
We can practice by editing the ``mpich`` package to set the ``MPICC`` environment variable in the build-time environment of dependent packages.
@@ -102,16 +96,16 @@ Once you're finished, the method should look like this:
.. code-block:: python
def setup_dependent_environment(self, spack_env, run_env, dependent_spec):
- spack_env.set('MPICC', join_path(self.prefix.bin, 'mpicc'))
- spack_env.set('MPICXX', join_path(self.prefix.bin, 'mpic++'))
- spack_env.set('MPIF77', join_path(self.prefix.bin, 'mpif77'))
- spack_env.set('MPIF90', join_path(self.prefix.bin, 'mpif90'))
+ spack_env.set("MPICC", join_path(self.prefix.bin, "mpicc"))
+ spack_env.set("MPICXX", join_path(self.prefix.bin, "mpic++"))
+ spack_env.set("MPIF77", join_path(self.prefix.bin, "mpif77"))
+ spack_env.set("MPIF90", join_path(self.prefix.bin, "mpif90"))
- spack_env.set('MPICH_CC', spack_cc)
- spack_env.set('MPICH_CXX', spack_cxx)
- spack_env.set('MPICH_F77', spack_f77)
- spack_env.set('MPICH_F90', spack_fc)
- spack_env.set('MPICH_FC', spack_fc)
+ spack_env.set("MPICH_CC", spack_cc)
+ spack_env.set("MPICH_CXX", spack_cxx)
+ spack_env.set("MPICH_F77", spack_f77)
+ spack_env.set("MPICH_F90", spack_fc)
+ spack_env.set("MPICH_FC", spack_fc)
At this point we can, for instance, install ``netlib-scalapack`` with ``mpich``:
@@ -136,32 +130,29 @@ and double check the environment logs to verify that every variable was set to t
Set environment variables in your own package
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
-Packages can modify their own build-time environment by implementing the
-:py:func:`setup_build_environment
- pkg-config is already installed in /usr/local/opt/spack/linux-ubuntu16.04-x86_64/gcc-5.4.0/pkg-config-0.29.2-ae2hwm7q57byfbxtymts55xppqwk7ecj
- ...
- ==> superlu is already installed in /usr/local/opt/spack/linux-ubuntu16.04-x86_64/gcc-5.4.0/superlu-5.2.1-q2mbtw2wo4kpzis2e2n227ip2fquxrno
- ==> Installing armadillo
- ==> Using cached archive: /usr/local/var/spack/cache/armadillo/armadillo-8.100.1.tar.xz
- ==> Staging archive: /usr/local/var/spack/stage/armadillo-8.100.1-n2eojtazxbku6g4l5izucwwgnpwz77r4/armadillo-8.100.1.tar.xz
- ==> Created stage in /usr/local/var/spack/stage/armadillo-8.100.1-n2eojtazxbku6g4l5izucwwgnpwz77r4
- ==> Applied patch undef_linux.patch
- ==> Building armadillo [CMakePackage]
- ==> Executing phase: 'cmake'
- ==> Executing phase: 'build'
- ==> Executing phase: 'install'
- ==> Successfully installed armadillo
- Fetch: 0.01s. Build: 3.96s. Total: 3.98s.
- [+] /usr/local/opt/spack/linux-ubuntu16.04-x86_64/gcc-5.4.0/armadillo-8.100.1-n2eojtazxbku6g4l5izucwwgnpwz77r4
+ root@advanced-packaging-tutorial:/# spack install armadillo ^openblas ^mpich
+ ==> pkg-config is already installed in /usr/local/opt/spack/linux-ubuntu16.04-x86_64/gcc-5.4.0/pkg-config-0.29.2-ae2hwm7q57byfbxtymts55xppqwk7ecj
+ ...
+ ==> superlu is already installed in /usr/local/opt/spack/linux-ubuntu16.04-x86_64/gcc-5.4.0/superlu-5.2.1-q2mbtw2wo4kpzis2e2n227ip2fquxrno
+ ==> Installing armadillo
+ ==> Using cached archive: /usr/local/var/spack/cache/armadillo/armadillo-8.100.1.tar.xz
+ ==> Staging archive: /usr/local/var/spack/stage/armadillo-8.100.1-n2eojtazxbku6g4l5izucwwgnpwz77r4/armadillo-8.100.1.tar.xz
+ ==> Created stage in /usr/local/var/spack/stage/armadillo-8.100.1-n2eojtazxbku6g4l5izucwwgnpwz77r4
+ ==> Applied patch undef_linux.patch
+ ==> Building armadillo [CMakePackage]
+ ==> Executing phase: 'cmake'
+ ==> Executing phase: 'build'
+ ==> Executing phase: 'install'
+ ==> Successfully installed armadillo
+ Fetch: 0.01s. Build: 3.96s. Total: 3.98s.
+ [+] /usr/local/opt/spack/linux-ubuntu16.04-x86_64/gcc-5.4.0/armadillo-8.100.1-n2eojtazxbku6g4l5izucwwgnpwz77r4
Hopefully the installation went fine and the code we added expanded to the right list of semicolon-separated libraries (you are encouraged to open ``armadillo``'s build logs to double-check).
@@ -293,61 +283,57 @@ Packages which don't follow this naming scheme must implement this function them
.. code-block:: python
- @property
- def libs(self):
- shared = "+shared" in self.spec
- return find_libraries(
- "libopencv_*", root=self.prefix, shared=shared, recursive=True
- )
+ @property
+ def libs(self):
+ shared = "+shared" in self.spec
+ return find_libraries("libopencv_*", root=self.prefix, shared=shared, recursive=True)
This issue is common for packages which implement an interface (i.e. virtual package providers in Spack).
If we try to build another version of ``armadillo`` tied to ``netlib-lapack`` (``armadillo ^netlib-lapack ^mpich``) we'll notice that this time the installation won't complete:
.. code-block:: console
- root@advanced-packaging-tutorial:/# spack install armadillo ^netlib-lapack ^mpich
- ==> pkg-config is already installed in /usr/local/opt/spack/linux-ubuntu16.04-x86_64/gcc-5.4.0/pkg-config-0.29.2-ae2hwm7q57byfbxtymts55xppqwk7ecj
- ...
- ==> openmpi is already installed in /usr/local/opt/spack/linux-ubuntu16.04-x86_64/gcc-5.4.0/openmpi-3.0.0-yo5qkfvumpmgmvlbalqcadu46j5bd52f
- ==> Installing arpack-ng
- ==> Using cached archive: /usr/local/var/spack/cache/arpack-ng/arpack-ng-3.5.0.tar.gz
- ==> Already staged arpack-ng-3.5.0-bloz7cqirpdxj33pg7uj32zs5likz2un in /usr/local/var/spack/stage/arpack-ng-3.5.0-bloz7cqirpdxj33pg7uj32zs5likz2un
- ==> No patches needed for arpack-ng
- ==> Building arpack-ng [Package]
- ==> Executing phase: 'install'
- ==> Error: RuntimeError: Unable to recursively locate netlib-lapack libraries in /usr/local/opt/spack/linux-ubuntu16.04-x86_64/gcc-5.4.0/netlib-lapack-3.6.1-jjfe23wgt7nkjnp2adeklhseg3ftpx6z
- RuntimeError: RuntimeError: Unable to recursively locate netlib-lapack libraries in /usr/local/opt/spack/linux-ubuntu16.04-x86_64/gcc-5.4.0/netlib-lapack-3.6.1-jjfe23wgt7nkjnp2adeklhseg3ftpx6z
-
- /usr/local/var/spack/repos/builtin/packages/arpack-ng/package.py:105, in install:
- 5 options.append('-DCMAKE_INSTALL_NAME_DIR:PATH=%s/lib' % prefix)
- 6
- 7 # Make sure we use Spack's blas/lapack:
- >> 8 lapack_libs = spec['lapack'].libs.joined(';')
- 9 blas_libs = spec['blas'].libs.joined(';')
- 10
- 11 options.extend([
-
- See build log for details:
- /usr/local/var/spack/stage/arpack-ng-3.5.0-bloz7cqirpdxj33pg7uj32zs5likz2un/arpack-ng-3.5.0/spack-build-out.txt
+ root@advanced-packaging-tutorial:/# spack install armadillo ^netlib-lapack ^mpich
+ ==> pkg-config is already installed in /usr/local/opt/spack/linux-ubuntu16.04-x86_64/gcc-5.4.0/pkg-config-0.29.2-ae2hwm7q57byfbxtymts55xppqwk7ecj
+ ...
+ ==> openmpi is already installed in /usr/local/opt/spack/linux-ubuntu16.04-x86_64/gcc-5.4.0/openmpi-3.0.0-yo5qkfvumpmgmvlbalqcadu46j5bd52f
+ ==> Installing arpack-ng
+ ==> Using cached archive: /usr/local/var/spack/cache/arpack-ng/arpack-ng-3.5.0.tar.gz
+ ==> Already staged arpack-ng-3.5.0-bloz7cqirpdxj33pg7uj32zs5likz2un in /usr/local/var/spack/stage/arpack-ng-3.5.0-bloz7cqirpdxj33pg7uj32zs5likz2un
+ ==> No patches needed for arpack-ng
+ ==> Building arpack-ng [Package]
+ ==> Executing phase: 'install'
+ ==> Error: RuntimeError: Unable to recursively locate netlib-lapack libraries in /usr/local/opt/spack/linux-ubuntu16.04-x86_64/gcc-5.4.0/netlib-lapack-3.6.1-jjfe23wgt7nkjnp2adeklhseg3ftpx6z
+ RuntimeError: RuntimeError: Unable to recursively locate netlib-lapack libraries in /usr/local/opt/spack/linux-ubuntu16.04-x86_64/gcc-5.4.0/netlib-lapack-3.6.1-jjfe23wgt7nkjnp2adeklhseg3ftpx6z
+
+ /usr/local/var/spack/repos/builtin/packages/arpack-ng/package.py:105, in install:
+ 5 options.append('-DCMAKE_INSTALL_NAME_DIR:PATH=%s/lib' % prefix)
+ 6
+ 7 # Make sure we use Spack's blas/lapack:
+ >> 8 lapack_libs = spec['lapack'].libs.joined(';')
+ 9 blas_libs = spec['blas'].libs.joined(';')
+ 10
+ 11 options.extend([
+
+ See build log for details:
+ /usr/local/var/spack/stage/arpack-ng-3.5.0-bloz7cqirpdxj33pg7uj32zs5likz2un/arpack-ng-3.5.0/spack-build-out.txt
Unlike ``openblas`` which provides a library named ``libopenblas.so``, ``netlib-lapack`` provides ``liblapack.so``, so it needs to implement customized library search logic.
Let's edit it:
.. code-block:: console
- root@advanced-packaging-tutorial:/# spack edit netlib-lapack
+ root@advanced-packaging-tutorial:/# spack edit netlib-lapack
and follow the instructions in the ``# TUTORIAL:`` comment as before.
What we need to implement is:
.. code-block:: python
- @property
- def lapack_libs(self):
- shared = True if '+shared' in self.spec else False
- return find_libraries(
- 'liblapack', root=self.prefix, shared=shared, recursive=True
- )
+ @property
+ def lapack_libs(self):
+ shared = True if "+shared" in self.spec else False
+ return find_libraries("liblapack", root=self.prefix, shared=shared, recursive=True)
i.e., a property that returns the correct list of libraries for the LAPACK interface.
@@ -382,23 +368,22 @@ Build tools also usually provide a set of executables that can be used when anot
Spack gives you the opportunity to monkey-patch dependent modules and attach attributes to them.
This helps make the packager's experience as similar as possible to what would have been the manual installation of the same package.
-An example here is the ``automake`` package, which overrides
-:py:func:`setup_dependent_package `:
+An example here is the ``automake`` package, which overrides :py:func:`setup_dependent_package `:
.. code-block:: python
- def setup_dependent_package(self, module, dependent_spec):
- # Automake is very likely to be a build dependency,
- # so we add the tools it provides to the dependent module
- executables = ['aclocal', 'automake']
- for name in executables:
- setattr(module, name, self._make_executable(name))
+ def setup_dependent_package(self, module, dependent_spec):
+ # Automake is very likely to be a build dependency,
+ # so we add the tools it provides to the dependent module
+ executables = ["aclocal", "automake"]
+ for name in executables:
+ setattr(module, name, self._make_executable(name))
so that every other package that depends on it can use directly ``aclocal`` and ``automake`` with the usual function call syntax of :py:class:`Executable `:
.. code-block:: python
- aclocal('--force')
+ aclocal("--force")
^^^^^^^^^^^^^^^^^^^^^^^
Extra query parameters
@@ -440,15 +425,13 @@ Clearly the implementation in the ``hdf5`` package is not complete, and we need
If you followed the instructions correctly, the code added to the ``lib`` property should be similar to:
.. code-block:: python
- :emphasize-lines: 1
-
- query_parameters = self.spec.last_query.extra_parameters
- key = tuple(sorted(query_parameters))
- libraries = query2libraries[key]
- shared = '+shared' in self.spec
- return find_libraries(
- libraries, root=self.prefix, shared=shared, recurse=True
- )
+ :emphasize-lines: 1
+
+ query_parameters = self.spec.last_query.extra_parameters
+ key = tuple(sorted(query_parameters))
+ libraries = query2libraries[key]
+ shared = "+shared" in self.spec
+ return find_libraries(libraries, root=self.prefix, shared=shared, recurse=True)
where we highlighted the line retrieving the extra parameters.
Now we can successfully complete the installation of ``netcdf ^mpich``:
diff --git a/tutorial_basics.rst b/tutorial_basics.rst
index 79b026bc78..8ca2bb205f 100644
--- a/tutorial_basics.rst
+++ b/tutorial_basics.rst
@@ -205,9 +205,8 @@ Assuming we're happy with that configuration, we will now install it.
:language: spec
Spack packages can also have build options, called variants.
-Boolean variants can be specified using the ``+`` (enable) and ``~`` or ``-``
-(disable) sigils. There are two sigils for "disable" to avoid conflicts
-with shell parsing in different situations.
+Boolean variants can be specified using the ``+`` (enable) and ``~`` or ``-`` (disable) sigils.
+There are two sigils for "disable" to avoid conflicts with shell parsing in different situations.
Variants (boolean or otherwise) can also be specified using the same syntax as compiler flags.
Here we can install HDF5 without MPI support.
@@ -216,7 +215,8 @@ Here we can install HDF5 without MPI support.
We might also want to install HDF5 with a different MPI implementation.
While ``mpi`` itself is a virtual package representing an interface, other packages can depend on such abstract interfaces.
-Spack handles these through "virtual dependencies." A package, such as HDF5, can depend on the ``mpi`` virtual package (the interface).
+Spack handles these through "virtual dependencies."
+A package, such as HDF5, can depend on the ``mpi`` virtual package (the interface).
Actual MPI implementation packages (like ``openmpi``, ``mpich``, ``mvapich2``, etc.) provide the MPI interface.
Any of these providers can be requested to satisfy an MPI dependency.
For example, we can build HDF5 with MPI support provided by MPICH by specifying a dependency on ``mpich`` (e.g., ``hdf5 ^mpich``).
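+
+As a rough sketch (hypothetical ``package.py`` fragments, not the tutorial's real packages), the relationship looks like this:
+
+.. code-block:: python
+
+   class Hdf5(CMakePackage):
+       # Depend on the abstract mpi interface, not on a concrete implementation.
+       depends_on("mpi", when="+mpi")
+
+   class Mpich(AutotoolsPackage):
+       # Declare that this package provides (implements) the mpi interface.
+       provides("mpi")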
@@ -244,10 +244,8 @@ This is also why we didn't care to specify which virtuals ``gcc`` and ``clang``
.. note::
- It is frequently sufficient to specify ``%gcc`` even for packages
- that use multiple languages, because Spack prefers to minimize the
- number of packages needed for a build. Later on we will discuss
- more complex compiler requests, and how and when they are useful.
+ It is frequently sufficient to specify ``%gcc`` even for packages that use multiple languages, because Spack prefers to minimize the number of packages needed for a build.
+ Later on we will discuss more complex compiler requests, and how and when they are useful.
We'll do a quick check in on what we have installed so far.
@@ -341,10 +339,10 @@ The ``--all`` (or ``-a``) flag can be used to uninstall all packages matching an
Advanced ``spack find`` Usage
-----------------------------
-We will go over some additional uses for the ``spack find`` command not already covered in the :ref:`basics-tutorial-install` and
-:ref:`basics-tutorial-uninstall` sections.
+We will go over some additional uses for the ``spack find`` command not already covered in the :ref:`basics-tutorial-install` and :ref:`basics-tutorial-uninstall` sections.
-The ``spack find`` command can accept what we call "anonymous specs." These are expressions in spec syntax that do not contain a package name.
+The ``spack find`` command can accept what we call "anonymous specs."
+These are expressions in spec syntax that do not contain a package name.
For example, ``spack find ^mpich`` will return every installed package that depends on MPICH, and ``spack find cflags="-O3"`` will return every package which was built with ``cflags="-O3"``.
.. literalinclude:: outputs/basics/find-dep-mpich.out
@@ -384,10 +382,11 @@ Because this compiler is a newer version than the external compilers Spack knows
We will discuss changing these defaults in a later section.
We can check that this compiler is preferred by looking at the install plan for a package that isn't being reused from binary.
-.. literalinclude:: outputs/basics/spec-zziplib
+.. literalinclude:: outputs/basics/spec-zziplib.out
:language: spec
-For the test of the tutorial we will sometimes use this new compiler, and sometimes we want to demonstrate things without it. For now, we will uninstall it to avoid using it in the next section.
+For the rest of the tutorial we will sometimes use this new compiler, and sometimes we want to demonstrate things without it.
+For now, we will uninstall it to avoid using it in the next section.
.. literalinclude:: outputs/basics/compiler-uninstall.out
:language: spec
diff --git a/tutorial_binary_cache.rst b/tutorial_binary_cache.rst
index e7be55b6ec..7a8d2b26df 100644
--- a/tutorial_binary_cache.rst
+++ b/tutorial_binary_cache.rst
@@ -73,12 +73,12 @@ Your ``spack.yaml`` file should now contain the following:
specs:
- julia
mirrors:
- my-mirror:
- url: oci://ghcr.io//buildcache--
- access_pair:
- id:
- secret_variable: MY_OCI_TOKEN
- signed: false
+ my-mirror:
+ url: oci://ghcr.io//buildcache--
+ access_pair:
+ id:
+ secret_variable: MY_OCI_TOKEN
+ signed: false
Let's push ``julia`` and its dependencies to the build cache
@@ -136,12 +136,12 @@ The easiest way to do this is to override the ``mirrors`` config section in the
specs:
- julia
mirrors:: # <- note the double colon
- my-mirror:
- url: oci://ghcr.io//buildcache--
- access_pair:
- id:
- secret_variable: MY_OCI_TOKEN
- signed: false
+ my-mirror:
+ url: oci://ghcr.io//buildcache--
+ access_pair:
+ id:
+ secret_variable: MY_OCI_TOKEN
+ signed: false
An "overwrite install" should be enough to show that the build cache is used (output will vary based on your specific configuration):
@@ -248,8 +248,7 @@ Let's add a simple text editor like ``vim`` to our previous environment next to
.. note::
- You may want to change ``mirrors::`` to ``mirrors:`` in the ``spack.yaml`` file to avoid
- a source build of ``vim`` --- but a source build should be quick.
+ You may want to change ``mirrors::`` to ``mirrors:`` in the ``spack.yaml`` file to avoid a source build of ``vim`` --- but a source build should be quick.
.. code-block:: spec
@@ -291,8 +290,10 @@ For those familiar with ``Dockerfile`` syntax, it would structurally look like t
This approach is still valid, and the ``spack containerize`` command continues to exist, but it has a few downsides:
-* When ``RUN spack -e /root/env install`` fails, ``docker`` will not cache the layer, meaning that all dependencies that did install successfully are lost. Troubleshooting the build typically means starting from scratch either within a ``docker run`` session or on the host system.
-* In certain CI environments, it is not possible to use ``docker build`` directly. For example, the CI script itself may already run in a Docker container, and running ``docker build`` *safely* inside a container (Docker-in-Docker) is tricky.
+* When ``RUN spack -e /root/env install`` fails, ``docker`` will not cache the layer, meaning that all dependencies that did install successfully are lost.
+ Troubleshooting the build typically means starting from scratch either within a ``docker run`` session or on the host system.
+* In certain CI environments, it is not possible to use ``docker build`` directly.
+ For example, the CI script itself may already run in a Docker container, and running ``docker build`` *safely* inside a container (Docker-in-Docker) is tricky.
The takeaway is that Spack decouples the steps that ``docker build`` combines: build isolation, running the build, and creating an image.
You can run ``spack install`` on your host machine or in a container, and run ``spack buildcache push`` separately to create an image.
diff --git a/tutorial_buildsystems.rst b/tutorial_buildsystems.rst
index 5210895723..85d72d47c3 100644
--- a/tutorial_buildsystems.rst
+++ b/tutorial_buildsystems.rst
@@ -90,11 +90,11 @@ Let's take a quick look at some of the internals of the ``Autotools`` class:
This will open the ``AutotoolsPackage`` file in your text editor.
.. note::
- The examples showing code for these classes are abridged to avoid having
- long examples. We only show what is relevant to the packager.
+ The examples showing code for these classes are abridged to avoid having long examples.
+ We only show what is relevant to the packager.
-.. literalinclude:: _spack_root/lib/spack/spack/build_systems/autotools.py
+.. literalinclude:: _spack_packages/repos/spack_repo/builtin/build_systems/autotools.py
:emphasize-lines: 2,4,28-37
:lines: 138-158,589-617
:linenos:
@@ -178,7 +178,7 @@ Let's also take a look inside the ``MakefilePackage`` class:
Take note of the following:
-.. literalinclude:: _spack_root/lib/spack/spack/build_systems/makefile.py
+.. literalinclude:: _spack_packages/repos/spack_repo/builtin/build_systems/makefile.py
:language: python
:emphasize-lines: 60,64,69
:lines: 40-111
@@ -293,23 +293,23 @@ In our ``esmf`` example we set two environment variables in our ``edit()`` metho
def edit(self, spec, prefix):
for var in os.environ:
- if var.startswith('ESMF_'):
+ if var.startswith("ESMF_"):
os.environ.pop(var)
# More code ...
- if self.compiler.name == 'gcc':
- os.environ['ESMF_COMPILER'] = 'gfortran'
- elif self.compiler.name == 'intel':
- os.environ['ESMF_COMPILER'] = 'intel'
- elif self.compiler.name == 'clang':
- os.environ['ESMF_COMPILER'] = 'gfortranclang'
- elif self.compiler.name == 'nag':
- os.environ['ESMF_COMPILER'] = 'nag'
- elif self.compiler.name == 'pgi':
- os.environ['ESMF_COMPILER'] = 'pgi'
+ if self.compiler.name == "gcc":
+ os.environ["ESMF_COMPILER"] = "gfortran"
+ elif self.compiler.name == "intel":
+ os.environ["ESMF_COMPILER"] = "intel"
+ elif self.compiler.name == "clang":
+ os.environ["ESMF_COMPILER"] = "gfortranclang"
+ elif self.compiler.name == "nag":
+ os.environ["ESMF_COMPILER"] = "nag"
+ elif self.compiler.name == "pgi":
+ os.environ["ESMF_COMPILER"] = "pgi"
else:
- msg = "The compiler you are building with, "
+ msg = "The compiler you are building with, "
msg += "'{0}', is not supported by ESMF."
raise InstallError(msg.format(self.compiler.name))
@@ -327,85 +327,79 @@ Let's look at an example of this in the ``elk`` package:
.. code-block:: python
- def edit(self, spec, prefix):
- # Dictionary of configuration options
- config = {
- 'MAKE': 'make',
- 'AR': 'ar'
- }
-
- # Compiler-specific flags
- flags = ''
- if self.compiler.name == 'intel':
- flags = '-O3 -ip -unroll -no-prec-div'
- elif self.compiler.name == 'gcc':
- flags = '-O3 -ffast-math -funroll-loops'
- elif self.compiler.name == 'pgi':
- flags = '-O3 -lpthread'
- elif self.compiler.name == 'g95':
- flags = '-O3 -fno-second-underscore'
- elif self.compiler.name == 'nag':
- flags = '-O4 -kind=byte -dusty -dcfuns'
- elif self.compiler.name == 'xl':
- flags = '-O3'
- config['F90_OPTS'] = flags
- config['F77_OPTS'] = flags
-
- # BLAS/LAPACK support
- # Note: BLAS/LAPACK must be compiled with OpenMP support
- # if the +openmp variant is chosen
- blas = 'blas.a'
- lapack = 'lapack.a'
- if '+blas' in spec:
- blas = spec['blas'].libs.joined()
- if '+lapack' in spec:
- lapack = spec['lapack'].libs.joined()
- # lapack must come before blas
- config['LIB_LPK'] = ' '.join([lapack, blas])
-
- # FFT support
- if '+fft' in spec:
- config['LIB_FFT'] = join_path(spec['fftw'].prefix.lib,
- 'libfftw3.so')
- config['SRC_FFT'] = 'zfftifc_fftw.f90'
- else:
- config['LIB_FFT'] = 'fftlib.a'
- config['SRC_FFT'] = 'zfftifc.f90'
-
- # MPI support
- if '+mpi' in spec:
- config['F90'] = spec['mpi'].mpifc
- config['F77'] = spec['mpi'].mpif77
- else:
- config['F90'] = spack_fc
- config['F77'] = spack_f77
- config['SRC_MPI'] = 'mpi_stub.f90'
-
- # OpenMP support
- if '+openmp' in spec:
- config['F90_OPTS'] += ' ' + self.compiler.openmp_flag
- config['F77_OPTS'] += ' ' + self.compiler.openmp_flag
- else:
- config['SRC_OMP'] = 'omp_stub.f90'
-
- # Libxc support
- if '+libxc' in spec:
- config['LIB_libxc'] = ' '.join([
- join_path(spec['libxc'].prefix.lib, 'libxcf90.so'),
- join_path(spec['libxc'].prefix.lib, 'libxc.so')
- ])
- config['SRC_libxc'] = ' '.join([
- 'libxc_funcs.f90',
- 'libxc.f90',
- 'libxcifc.f90'
- ])
- else:
- config['SRC_libxc'] = 'libxcifc_stub.f90'
-
- # Write configuration options to include file
- with open('make.inc', 'w') as inc:
- for key in config:
- inc.write('{0} = {1}\n'.format(key, config[key]))
+ def edit(self, spec, prefix):
+ # Dictionary of configuration options
+ config = {"MAKE": "make", "AR": "ar"}
+
+ # Compiler-specific flags
+ flags = ""
+ if self.compiler.name == "intel":
+ flags = "-O3 -ip -unroll -no-prec-div"
+ elif self.compiler.name == "gcc":
+ flags = "-O3 -ffast-math -funroll-loops"
+ elif self.compiler.name == "pgi":
+ flags = "-O3 -lpthread"
+ elif self.compiler.name == "g95":
+ flags = "-O3 -fno-second-underscore"
+ elif self.compiler.name == "nag":
+ flags = "-O4 -kind=byte -dusty -dcfuns"
+ elif self.compiler.name == "xl":
+ flags = "-O3"
+ config["F90_OPTS"] = flags
+ config["F77_OPTS"] = flags
+
+ # BLAS/LAPACK support
+ # Note: BLAS/LAPACK must be compiled with OpenMP support
+ # if the +openmp variant is chosen
+ blas = "blas.a"
+ lapack = "lapack.a"
+ if "+blas" in spec:
+ blas = spec["blas"].libs.joined()
+ if "+lapack" in spec:
+ lapack = spec["lapack"].libs.joined()
+ # lapack must come before blas
+ config["LIB_LPK"] = " ".join([lapack, blas])
+
+ # FFT support
+ if "+fft" in spec:
+ config["LIB_FFT"] = join_path(spec["fftw"].prefix.lib, "libfftw3.so")
+ config["SRC_FFT"] = "zfftifc_fftw.f90"
+ else:
+ config["LIB_FFT"] = "fftlib.a"
+ config["SRC_FFT"] = "zfftifc.f90"
+
+ # MPI support
+ if "+mpi" in spec:
+ config["F90"] = spec["mpi"].mpifc
+ config["F77"] = spec["mpi"].mpif77
+ else:
+ config["F90"] = spack_fc
+ config["F77"] = spack_f77
+ config["SRC_MPI"] = "mpi_stub.f90"
+
+ # OpenMP support
+ if "+openmp" in spec:
+ config["F90_OPTS"] += " " + self.compiler.openmp_flag
+ config["F77_OPTS"] += " " + self.compiler.openmp_flag
+ else:
+ config["SRC_OMP"] = "omp_stub.f90"
+
+ # Libxc support
+ if "+libxc" in spec:
+ config["LIB_libxc"] = " ".join(
+ [
+ join_path(spec["libxc"].prefix.lib, "libxcf90.so"),
+ join_path(spec["libxc"].prefix.lib, "libxc.so"),
+ ]
+ )
+ config["SRC_libxc"] = " ".join(["libxc_funcs.f90", "libxc.f90", "libxcifc.f90"])
+ else:
+ config["SRC_libxc"] = "libxcifc_stub.f90"
+
+ # Write configuration options to include file
+ with open("make.inc", "w") as inc:
+ for key in config:
+ inc.write("{0} = {1}\n".format(key, config[key]))
``config`` is just a Python dictionary that we populate with key-value pairs.
By the end of the ``edit()`` method, we write the contents of our dictionary to the ``make.inc`` file, which the package's ``Makefile`` then includes.
@@ -445,7 +439,7 @@ Let's look at these defaults in the ``CMakePackage`` class in the ``_std_args()`
$ spack edit --build-system cmake
-.. literalinclude:: _spack_root/lib/spack/spack/build_systems/cmake.py
+.. literalinclude:: _spack_packages/repos/spack_repo/builtin/build_systems/cmake.py
:language: python
:lines: 167-300
:emphasize-lines: 87,96
@@ -570,7 +564,7 @@ In the ``install()`` method, we have to manually install our targets so we overr
def install(self, spec, prefix):
mkdir(prefix.bin)
src = "bin/sniffles-core-{0}".format(spec.version.dotted)
- binaries = ['sniffles', 'sniffles-debug']
+ binaries = ["sniffles", "sniffles-debug"]
for b in binaries:
install(join_path(src, b), join_path(prefix.bin, b))
@@ -629,7 +623,7 @@ Dependencies are usually listed in ``setup.py``.
You can find the dependencies by searching for the ``install_requires`` keyword in that file.
Here it is for ``Pandas``:
-.. code-block:: python
+.. code-block:: text
# ... code
if sys.version_info[0] >= 3:
diff --git a/tutorial_configuration.rst b/tutorial_configuration.rst
index f48aa5d5e0..77b1e7115c 100644
--- a/tutorial_configuration.rst
+++ b/tutorial_configuration.rst
@@ -138,7 +138,7 @@ For example, look at high-level config:
$ spack config blame config
-.. code-block:: yaml
+.. code-block:: text
--- config:
/etc/spack/config.yaml:2 suppress_gpg_warnings: True
@@ -157,7 +157,7 @@ We can see overrides in action with:
$ spack config add config:aliases::{}
$ spack config blame config
-.. code-block:: yaml
+.. code-block:: text
--- config:
/home/spack/.spack/config.yaml:2 aliases: {}
@@ -359,7 +359,7 @@ First, we will look at the default ``packages.yaml`` file.
$ spack config --scope=defaults:base edit packages
-.. literalinclude:: _spack_root/etc/spack/defaults/packages.yaml
+.. literalinclude:: _spack_root/etc/spack/defaults/base/packages.yaml
:language: yaml
:emphasize-lines: 51
@@ -386,12 +386,7 @@ When you have an activated environment, you can edit the associated configuratio
.. warning::
- You will get exactly the same effects if you make these changes
- without using an environment, but you must delete the
- associated ``packages.yaml`` file after the config tutorial or
- the commands you run in later tutorial sections will not
- produce the same output (because they weren't run with the
- configuration changes made here)
+   You will get exactly the same effects if you make these changes without using an environment, but you must delete the associated ``packages.yaml`` file after the config tutorial or the commands you run in later tutorial sections will not produce the same output (because they weren't run with the configuration changes made here).
.. code-block:: yaml
@@ -596,9 +591,7 @@ At this point we want to discard the configuration changes we made in this tutor
.. warning::
- If you do not deactivate the ``config-env`` environment, then
- specs will be concretized differently in later tutorial sections
- and your results will not match.
+ If you do not deactivate the ``config-env`` environment, then specs will be concretized differently in later tutorial sections and your results will not match.
-----------------
@@ -627,17 +620,14 @@ If you have a fast scratch file system, you can run builds from this file system
config:
build_stage:
- - /scratch/$user/spack-stage
+ - /scratch/$user/spack-stage
.. note::
- It is important to distinguish the build stage directory from other
- directories in your scratch space to ensure ``spack clean`` does not
- inadvertently remove unrelated files. This can be accomplished by
- including a combination of ``spack`` and or ``stage`` in each path
- as shown in the default settings and documented examples. See
- `Basic Settings `_ for details.
+ It is important to distinguish the build stage directory from other directories in your scratch space to ensure ``spack clean`` does not inadvertently remove unrelated files.
+   This can be accomplished by including a combination of ``spack`` and/or ``stage`` in each path as shown in the default settings and documented examples.
+ See `Basic Settings `_ for details.
On systems with compilers that absolutely *require* environment variables like ``LD_LIBRARY_PATH``, it is possible to prevent Spack from cleaning the build environment with the ``dirty`` setting:
@@ -704,9 +694,9 @@ Obviously, if you want to build everything in serial for whatever reason, you wo
Last, we'll unset ``concretizer:reuse:false`` since we'll want to enable concretizer reuse for the rest of this tutorial.
-.. code-block:: yaml
+.. code-block:: console
- $ spack config rm concretizer:reuse
+ $ spack config rm concretizer:reuse
.. warning::
diff --git a/tutorial_developer_workflows.rst b/tutorial_developer_workflows.rst
index 7b0d592422..817da7e2fa 100644
--- a/tutorial_developer_workflows.rst
+++ b/tutorial_developer_workflows.rst
@@ -21,7 +21,8 @@ Installing from local source
The ``spack install`` command, as you know, fetches source code from a mirror or the internet before building and installing your package.
As developers, we want to build from local source, which we will constantly change, build, and test.
-Let's imagine, for a second, we're working on ``scr``. ``scr`` is a library used to implement scalable checkpointing in application codes.
+Let's imagine, for a second, we're working on ``scr``.
+``scr`` is a library used to implement scalable checkpointing in application codes.
It supports writing/reading checkpoints quickly and efficiently using MPI and high-bandwidth file I/O.
We'd like to test changes to ``scr`` within an actual application, so we'll test with ``macsio``, a proxy application written to mimic typical HPC I/O workloads.
We've chosen ``scr`` and ``macsio`` because together they are quick to build.
@@ -152,20 +153,13 @@ You can change the location of this source directory by modifying the ``path:``
There are a few gotchas with the ``spack develop`` command:
-* You often specify the package version manually when specifying a
- package as a dev package. Spack needs to know the version of the dev
- package so it can supply the correct flags for the package's build
- system. If a version is not supplied, then Spack will take the maximum version
- defined in the package where `infinity versions `_ like ``develop`` and ``main``
- have a higher value than the numeric versions.
+* You often specify the package version manually when specifying a package as a dev package.
+ Spack needs to know the version of the dev package so it can supply the correct flags for the package's build system.
+  If a version is not supplied, then Spack will take the maximum version defined in the package, where `infinity versions `_ like ``develop`` and ``main`` rank higher than the numeric versions (see the sketch after this list).
* You should ensure a spec for the package you are developing appears in the DAG of at least one of the roots of the environment with the same version that you are developing.
- ``spack add `` with the matching version you want to develop is a way to ensure
- the develop spec is satisfied in the ``spack.yaml`` environments file. This is because
- develop specs are not concretization constraints but rather criteria for adding
- the ``dev_path=`` variant to existing spec.
-* You'll need to re-concretize the environment so that the version
- number and the ``dev_path=`` attributes are properly added to the
- cached spec in ``spack.lock``.
+ ``spack add `` with the matching version you want to develop is a way to ensure the develop spec is satisfied in the ``spack.yaml`` environments file.
+  This is because develop specs are not concretization constraints but rather criteria for adding the ``dev_path=`` variant to an existing spec.
+* You'll need to re-concretize the environment so that the version number and the ``dev_path=`` attributes are properly added to the cached spec in ``spack.lock``.
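+As an aside, the version ordering behind the first gotcha is easy to check interactively; the snippet below is only an illustrative sketch (run it via ``spack python``; the numeric version is arbitrary):
+.. code-block:: python
+   # Illustrative: "infinity" versions such as develop and main compare
+   # higher than any numeric version.
+   from spack.version import Version
+   print(Version("develop") > Version("999.9"))  # True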
.. literalinclude:: outputs/dev/develop-conc.out
:language: console
@@ -207,15 +201,9 @@ Taking advantage of iterative builds with Spack requires cooperation from your b
When Spack performs a rebuild on a development package, it reruns all the build stages for your package without cleaning the source and build directories to a pristine state.
If your build system can take advantage of the previously compiled object files then you'll end up with an iterative build.
-- If your package just uses make, you also should get iterative builds
- for free when running ``spack develop``.
-- If your package uses CMake with the typical ``cmake`` / ``build`` /
- ``install`` build stages, you'll get iterative builds for free with
- Spack because CMake doesn’t modify the filetime on the
- ``CMakeCache.txt`` file if your cmake flags haven't changed.
-- If your package uses autoconf, then rerunning the typical
- ``autoreconf`` stage typically modifies the filetime of
- ``config.h``, which can trigger a cascade of rebuilding.
+- If your package just uses make, you also should get iterative builds for free when running ``spack develop``.
+- If your package uses CMake with the typical ``cmake`` / ``build`` / ``install`` build stages, you'll get iterative builds for free with Spack because CMake doesn't modify the timestamp of the ``CMakeCache.txt`` file if your CMake flags haven't changed.
+- If your package uses autoconf, then rerunning the typical ``autoreconf`` stage typically modifies the filetime of ``config.h``, which can trigger a cascade of rebuilding.
Multiple packages can also be marked as develop.
If we were co-developing ``macsio``, we could run
diff --git a/tutorial_environments.rst b/tutorial_environments.rst
index 03d0d5fc12..265b70c285 100644
--- a/tutorial_environments.rst
+++ b/tutorial_environments.rst
@@ -95,8 +95,7 @@ To see all of our environments we've created so far we can run ``spack env list`
.. note::
- Once we activate an environment it will show up highlighted in
- green in the list of environments.
+ Once we activate an environment it will show up highlighted in green in the list of environments.
Now let's **activate** our environment by running the ``spack env activate`` command or the ``spacktivate`` alias:
@@ -104,9 +103,8 @@ Now let's **activate** our environment by running the ``spack env activate`` com
:language: console
.. note::
- If we use the ``-p`` option for ``spack env activate``, Spack
- will prepend the environment name to our shell prompt. This is a handy
- way to be reminded if and which environment you are in.
+ If we use the ``-p`` option for ``spack env activate``, Spack will prepend the environment name to our shell prompt.
+   This is a handy way to see whether you are in an environment, and which one.
Once we activate an environment, ``spack find`` will only show what is in the current environment.
For example, because we just created this environment the output below doesn't show any installed packages.
@@ -115,9 +113,7 @@ For example, because we just created this environment the output below doesn't s
:language: console
.. note::
- Although Spack doesn't show all installed software packages when
- in an active environment, Spack will reuse packages across
- environments to save disk space and reduce build times.
+   Although Spack doesn't show all installed software packages when in an active environment, it will reuse packages across environments to save disk space and reduce build times.
Additionally the output now tells us that we're in the ``myproject`` environment, so there is no need to panic when we no longer see our previously installed packages.
It also states that there are **no** *root specs*.
@@ -147,7 +143,8 @@ Let's try the usual install commands we learned earlier:
.. literalinclude:: outputs/environments/env-fail-install-1.out
:language: spec
-Environments are special in that we must *add* specs to the an environment before we can install them. This additional step helps prevent us from accidentally modifying a shared environment when installing new software.
+Environments are special in that we must *add* specs to an environment before we can install them.
+This additional step helps prevent us from accidentally modifying a shared environment when installing new software.
``spack add`` allows us to queue up several specs to be installed together.
Let's try it:
@@ -155,7 +152,8 @@ Let's try it:
.. literalinclude:: outputs/environments/env-add-1.out
:language: spec
-Now, ``tcl`` and ``trilinos`` have been registered as **root specs** in our environment. **Root specs** are packages that we've explicitly requested to be installed in an environment.
+Now, ``tcl`` and ``trilinos`` have been registered as **root specs** in our environment.
+**Root specs** are packages that we've explicitly requested to be installed in an environment.
They're called **"roots"** because they sit at the top of the dependency graph when Spack installs these packages, with their respective dependency packages sitting below them.
@@ -166,7 +164,8 @@ Now, let's install:
We can see that Spack reused existing installations of ``tcl`` and the dependencies of ``trilinos`` that were already present on the system, rather than rebuilding them from scratch.
-Additionally, the environment's view was automatically updated to include the installations. This means all the software in this environment has been added to our PATH, making the installed packages readily accessible from the command line while we have the environment activated.
+Additionally, the environment's view was automatically updated to include the installations.
+This means all the software in this environment has been added to our PATH, making the installed packages readily accessible from the command line while we have the environment activated.
Let's now confirm the contents of the environment using ``spack find``:
@@ -191,7 +190,8 @@ When you activate an environment with ``spack env activate``, Spack automaticall
This means that executables, libraries, and other files from your environment's packages become immediately accessible from your command line, just as if they were installed system-wide.
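+A quick, hedged way to see this mechanism is to inspect ``PATH`` right after activation (the path in the comment is an assumption for a default view layout):
+.. code-block:: python
+   # Illustrative: after `spack env activate`, the environment's view bin/
+   # directory should sit at the front of PATH.
+   import os
+   print(os.environ["PATH"].split(os.pathsep)[0])
+   # e.g. ~/spack/var/spack/environments/myproject/.spack-env/view/bin (assumed)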
-Let's explore how views work using the ``tcl`` package we just installed in our ``myproject`` environment. The Tcl package includes a shell-like application called ``tclsh``.
+Let's explore how views work using the ``tcl`` package we just installed in our ``myproject`` environment.
+The Tcl package includes a shell-like application called ``tclsh``.
To see the path to ``tclsh`` let's use the ``which`` command:
@@ -240,7 +240,8 @@ Now let's attempt to uninstall ``trilinos`` from ``myproject2`` and examine what
:language: spec
-Notice that ``trilinos`` won't be uninstalled because it's still referenced in ``myproject``. This safety feature prevents accidental removal of packages that other environments depend on.
+Notice that ``trilinos`` won't be uninstalled because it's still referenced in ``myproject``.
+This safety feature prevents accidental removal of packages that other environments depend on.
Instead, if we want to remove ``trilinos`` from the ``myproject2`` environment (without affecting it in other environments), we need to use ``spack remove``:
@@ -261,8 +262,7 @@ We can see that ``myproject`` still has ``trilinos`` as a root spec.
.. note::
- You can also uninstall a package and remove it from the environment
- in one go with ``spack uninstall --remove trilinos``.
+ You can also uninstall a package and remove it from the environment in one go with ``spack uninstall --remove trilinos``.
-----------------------
The ``spack.yaml`` file
@@ -284,23 +284,16 @@ The output shows the special ``spack.yaml`` configuration file that Spack uses t
There are several important parts of this file:
* ``specs:`` The list of package specs to install in the environment.
-* ``view:`` Controls whether the environment generates a *view* (the
- directory tree with symlinks to installed packages we discussed earlier).
-* ``concretizer:unify:`` Determines how package specs in the environment are
- concretized together to reduce duplicated dependencies when possible.
+* ``view:`` Controls whether the environment generates a *view* (the directory tree with symlinks to installed packages we discussed earlier).
+* ``concretizer:unify:`` Determines how package specs in the environment are concretized together to reduce duplicated dependencies when possible.
The ``specs`` list should look familiar --- these are the package specs we've been modifying previously with ``spack add`` and ``spack install``.
The ``concretizer:unify:true`` setting controls how Spack resolves dependencies across package specs in an environment:
-* ``true`` (default): specs are concretized *together*, ensuring
- there is only one version of each package in the environment.
-* ``false``: specs are concretized *independently* from each other,
- potentially allowing multiple versions of the package to appear in the
- environment twice.
-* ``when_possible``: A middle ground --- Spack attempts to unify dependencies
- as possible but will backoff to allow duplicates when root specs require
- incompatible versions of dependencies.
+* ``true`` (default): specs are concretized *together*, ensuring there is only one version of each package in the environment.
+* ``false``: specs are concretized *independently* of each other, potentially allowing multiple versions of the same package to appear in the environment.
+* ``when_possible``: A middle ground --- Spack attempts to unify dependencies when possible but will back off and allow duplicates when root specs require incompatible versions of dependencies (see the sketch below).
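+Since ``spack.yaml`` is plain YAML, these settings are easy to inspect programmatically; the following is only an illustrative sketch (it assumes a ``spack.yaml`` in the current directory, and the fallback defaults shown are assumptions):
+.. code-block:: python
+   # Illustrative: peek at the environment settings discussed above.
+   from ruamel.yaml import YAML
+   yaml = YAML()
+   with open("spack.yaml") as f:
+       env = yaml.load(f)["spack"]
+   print(env.get("specs", []))                           # root specs
+   print(env.get("view", True))                          # view generation
+   print(env.get("concretizer", {}).get("unify", True))  # unify mode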
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
Editing environment configuration
@@ -308,8 +301,7 @@ Editing environment configuration
.. note::
- Before proceeding, make sure your ``EDITOR`` environment variable
- is set to the path of your preferred text editor.
+ Before proceeding, make sure your ``EDITOR`` environment variable is set to the path of your preferred text editor.
Let's edit ``spack.yaml`` to *require* ``mpich`` as our ``mpi`` provider using ``spack config edit``.
@@ -336,9 +328,8 @@ Change it to include the ``packages:mpi:require`` entry below:
.. note::
- We introduce this here to show you how environment configuration
- can affect concretization. Configuration options are covered in much
- more detail in the :ref:`configuration tutorial `.
+ We introduce this here to show you how environment configuration can affect concretization.
+ Configuration options are covered in much more detail in the :ref:`configuration tutorial `.
We've only scratched the surface here by requiring a specific ``mpi`` provider for packages depending on ``mpi``.
@@ -368,7 +359,8 @@ All the specs are now concrete **and** ready to be installed with ``mpich`` as t
Creating an environment incrementally
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
-We can also add and install specs to an environment incrementally. For example:
+We can also add and install specs to an environment incrementally.
+For example:
.. code-block:: spec
@@ -394,12 +386,9 @@ The solution is to re-concretize the environment as a whole, which causes ``pyth
.. note::
There are other advantages to concretizing and installing an environment all at once:
- * If you have a number of specs that can be installed together,
- adding them first and installing them together enables them to
- share dependencies and reduces total installation time.
+ * If you have a number of specs that can be installed together, adding them first and installing them together enables them to share dependencies and reduces total installation time.
- * You can launch all builds in parallel by taking advantage of Spack's
- `install-level build parallelism `_.
+ * You can launch all builds in parallel by taking advantage of Spack's `install-level build parallelism `_.
------------------------
Building in environments
@@ -473,7 +462,7 @@ Note that the reported version *does* match that of our installation.
Reproducing builds
------------------
-Spack environments provide users with *virtual environments* similar to `Python venv `_ and `Conda environments `_).
+Spack environments provide users with *virtual environments* similar to `Python venv `_ and `Conda environments `__.
The goal is to ensure packages in one environment are kept separate from those of another.
These environments can be managed by Spack or independent.
In either case, their environment files can be used to reproduce builds by other users and on other machines.
@@ -498,9 +487,7 @@ We will cover their reuse later.
.. note::
- Both environment files can be versioned in repositories, shared, and
- used to install the same set of software by different users and on
- other machines.
+ Both environment files can be versioned in repositories, shared, and used to install the same set of software by different users and on other machines.
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
Managed versus independent environments
@@ -632,10 +619,7 @@ Notice that ``spack add`` added the package to our active environment and it app
.. note::
- You'll need to run ``spack concretize`` and ``spack install`` to install added packages
- in your environment because ``spack add`` only adds it to the
- configuration and ``spack install`` only automatically concretizes the first time an
- environment is used.
+   You'll need to run ``spack concretize`` and ``spack install`` to install added packages in your environment because ``spack add`` only adds them to the configuration and ``spack install`` only automatically concretizes the first time an environment is used.
Now use ``spack remove`` to remove the spec from the configuration:
@@ -646,8 +630,7 @@ and we see that the spec *was* removed from the spec list of our environment.
.. note::
- You can also edit the ``spack.yaml`` file directly instead of
- using the ``spack add`` and ``spack remove`` commands.
+ You can also edit the ``spack.yaml`` file directly instead of using the ``spack add`` and ``spack remove`` commands.
^^^^^^^^^^^^^^^^^^^^^^^^
Reviewing ``spack.lock``
@@ -725,8 +708,7 @@ Since we created the environment from our ``spack.lock`` file, not only do we ge
.. note::
- Use of ``spack.lock`` to reproduce a build (currently) requires you
- to be on the same type of machine.
+ Use of ``spack.lock`` to reproduce a build (currently) requires you to be on the same type of machine.
--------
Clean-up
@@ -753,38 +735,22 @@ For more information, take a look at the Spack resources below.
Setting up and building environments
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
-* `Environments `_:
- reference docs
-* `Configuration tutorial
- `_:
- for customizing your environment
-* `Spack stacks tutorial
- `_:
- for configuring combinatorial environments (e.g., same packages across a
- list of compilers)
-* `Install-level parallel builds
- `_:
- for how to launch ``spack install`` to build your environment in parallel
+* `Environments `_: reference docs
+* `Configuration tutorial `_: for customizing your environment
+* `Spack stacks tutorial `_: for configuring combinatorial environments (e.g., same packages across a list of compilers)
+* `Install-level parallel builds `_: for how to launch ``spack install`` to build your environment in parallel
^^^^^^^^^^^^^^^^^^^^^^^
Using environments
^^^^^^^^^^^^^^^^^^^^^^^
-* `Developer workflows
- `_:
- for developing code in an environment
-* `GitLab CI pipelines with Spack environments
- `_:
- for using environments to generate CI pipelines
-* `Container Images `_:
- for creating containers from environments
-* `Spack stacks tutorial
- `_:
- for managing large deployments of software
+* `Developer workflows `_: for developing code in an environment
+* `GitLab CI pipelines with Spack environments `_: for using environments to generate CI pipelines
+* `Container Images `_: for creating containers from environments
+* `Spack stacks tutorial `_: for managing large deployments of software
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
Finding examples of environments
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
-* `Spack Stack Catalog `_: for
- discovering environments that you can explore on GitHub
+* `Spack Stack Catalog `_: for discovering environments that you can explore on GitHub
diff --git a/tutorial_modules.rst b/tutorial_modules.rst
index aa163ed57d..4703c81e54 100644
--- a/tutorial_modules.rst
+++ b/tutorial_modules.rst
@@ -209,9 +209,8 @@ Spack comes with `Jinja2 `_, an external templ
Modules vs ``spack load``
^^^^^^^^^^^^^^^^^^^^^^^^^
-You may have noticed that we used ``spack load`` in the
-:ref:`module_file_tutorial_prerequisites` section above. This is a
-built-in mechanism of Spack's -- it's designed so that users on a cluster or a laptop can quickly get a package into their path, and it understands Spack's spec syntax.
+You may have noticed that we used ``spack load`` in the :ref:`module_file_tutorial_prerequisites` section above.
+This is a built-in mechanism of Spack's -- it's designed so that users on a cluster or a laptop can quickly get a package into their path, and it understands Spack's spec syntax.
It does *not* require modules, as Spack needs to work regardless of whether modules are set up on the system.
As you might expect, you can see what is loaded via ``spack load`` using ``spack find``:
@@ -302,7 +301,7 @@ To do this you should add the ``exclude`` keyword to ``${SPACK_ROOT}/etc/spack/m
default:
tcl:
exclude:
- - '%gcc@11'
+ - '%gcc@11'
all:
filter:
exclude_env_vars:
@@ -332,9 +331,9 @@ To specify exceptions to the ``exclude`` rules you can use ``include``:
default:
tcl:
include:
- - gcc
+ - gcc
exclude:
- - '%gcc@11'
+ - '%gcc@11'
all:
filter:
exclude_env_vars:
@@ -343,7 +342,8 @@ To specify exceptions to the ``exclude`` rules you can use ``include``:
- "FC"
- "F77"
-``include`` rules always have precedence over ``exclude`` rules. If you regenerate the modules again:
+``include`` rules always have precedence over ``exclude`` rules.
+If you regenerate the modules again:
.. literalinclude:: outputs/modules/tcl-refresh-3.out
:language: console
@@ -364,9 +364,9 @@ In this case you only need to add the following line:
tcl:
exclude_implicits: true
include:
- - gcc
+ - gcc
exclude:
- - '%gcc@11'
+ - '%gcc@11'
all:
filter:
exclude_env_vars:
@@ -392,9 +392,9 @@ To reduce the length of the hash or remove it altogether you can use the ``hash_
tcl:
hash_length: 0
include:
- - gcc
+ - gcc
exclude:
- - '%gcc@11'
+ - '%gcc@11'
all:
filter:
exclude_env_vars:
@@ -411,9 +411,7 @@ If you try to regenerate the module files now you will get an error:
.. note::
We try to check for errors up front!
- In Spack we check for errors upfront whenever possible, so don't worry
- about your module files: as a name clash was detected nothing has been
- changed on disk.
+   In Spack we check for errors upfront whenever possible, so don't worry about your module files: since a name clash was detected, nothing has been changed on disk.
The problem here is that without the hashes the four different flavors of ``netlib-scalapack`` map to the same module file name.
We can change how the names are formatted to differentiate them:
@@ -426,9 +424,9 @@ We can change how the names are formatted to differentiate them:
tcl:
hash_length: 0
include:
- - gcc
+ - gcc
exclude:
- - '%gcc@11'
+ - '%gcc@11'
all:
conflict:
- '{name}'
@@ -439,9 +437,9 @@ We can change how the names are formatted to differentiate them:
- "FC"
- "F77"
projections:
- all: '{name}/{version}-{compiler.name}-{compiler.version}'
- netlib-scalapack: '{name}/{version}-{compiler.name}-{compiler.version}-{^lapack.name}-{^mpi.name}'
- ^python^lapack: '{name}/{version}-{compiler.name}-{compiler.version}-{^lapack.name}'
+ all: '{name}/{version}-{compiler.name}-{compiler.version}'
+ netlib-scalapack: '{name}/{version}-{compiler.name}-{compiler.version}-{^lapack.name}-{^mpi.name}'
+ ^python^lapack: '{name}/{version}-{compiler.name}-{compiler.version}-{^lapack.name}'
As you can see, it is possible to specify rules that apply only to a restricted set of packages using `anonymous specs `_ like ``^python^lapack``.
Here we declare a conflict between any two modules with the same name, so they cannot be loaded together.
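+To get a feel for what makes ``^python^lapack`` "anonymous," here is an illustrative check (run via ``spack python``; sketched against the current API, which may change):
+.. code-block:: python
+   # Illustrative: an anonymous spec carries constraints but no package name,
+   # so it can match any package satisfying those constraints.
+   import spack.spec
+   anon = spack.spec.Spec("^python^lapack")
+   print(anon.name)  # None, i.e., anonymous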
@@ -455,8 +453,7 @@ This allows us to match specs by their dependencies, and format them based on th
:language: console
.. note::
- The ``conflict`` directive is Tcl-specific and can't be used in the
- ``lmod`` section of the configuration file.
+ The ``conflict`` directive is Tcl-specific and can't be used in the ``lmod`` section of the configuration file.
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
Add custom environment modifications
@@ -474,9 +471,9 @@ You can achieve this with Spack by adding an ``environment`` directive to the co
hash_length: 0
naming_scheme: '{name}/{version}-{compiler.name}-{compiler.version}'
include:
- - gcc
+ - gcc
exclude:
- - '%gcc@11'
+ - '%gcc@11'
all:
conflict:
- '{name}'
@@ -490,19 +487,16 @@ You can achieve this with Spack by adding an ``environment`` directive to the co
set:
'{name}_ROOT': '{prefix}'
projections:
- all: '{name}/{version}-{compiler.name}-{compiler.version}'
- netlib-scalapack: '{name}/{version}-{compiler.name}-{compiler.version}-{^lapack.name}-{^mpi.name}'
- ^python^lapack: '{name}/{version}-{compiler.name}-{compiler.version}-{^lapack.name}'
+ all: '{name}/{version}-{compiler.name}-{compiler.version}'
+ netlib-scalapack: '{name}/{version}-{compiler.name}-{compiler.version}-{^lapack.name}-{^mpi.name}'
+ ^python^lapack: '{name}/{version}-{compiler.name}-{compiler.version}-{^lapack.name}'
Under the hood Spack uses the :meth:`~spack.spec.Spec.format` API to substitute tokens in either environment variable names or values.
There are two caveats though:
-- The set of allowed tokens in variable names is restricted to
- ``name``, ``version``, ``compiler``, ``compiler.name``,
- ``compiler.version``, ``architecture``
-- Any token expanded in a variable name is made uppercase, but other than that
- case sensitivity is preserved
+- The set of allowed tokens in variable names is restricted to ``name``, ``version``, ``compiler``, ``compiler.name``, ``compiler.version``, and ``architecture``.
+- Any token expanded in a variable name is made uppercase, but other than that case sensitivity is preserved (sketched below).
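+The uppercasing rule in particular is easy to misread, so here is a small self-contained sketch of the behavior just described (a re-implementation for illustration, not Spack's actual code):
+.. code-block:: python
+   # Illustrative re-implementation of the variable-name expansion rules.
+   import re
+   ALLOWED = {"name", "version", "compiler", "compiler.name",
+              "compiler.version", "architecture"}
+   def expand_variable_name(template, values):
+       """Expand {token}s in a variable *name*; expanded tokens are uppercased."""
+       def repl(match):
+           token = match.group(1)
+           if token not in ALLOWED:
+               raise ValueError(f"token '{token}' not allowed in variable names")
+           return str(values[token]).upper()
+       return re.sub(r"\{([^}]+)\}", repl, template)
+   print(expand_variable_name("{name}_ROOT", {"name": "openmpi"}))  # OPENMPI_ROOT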
Regenerating the module files results in something like:
@@ -547,9 +541,9 @@ You can for instance apply modifications to the ``openmpi`` module as follows:
SLURM_MPI_TYPE: pmi2
OMPI_MCA_btl_openib_warn_default_gid_prefix: '0'
projections:
- all: '{name}/{version}-{compiler.name}-{compiler.version}'
- netlib-scalapack: '{name}/{version}-{compiler.name}-{compiler.version}-{^lapack.name}-{^mpi.name}'
- ^python^lapack: '{name}/{version}-{compiler.name}-{compiler.version}-{^lapack.name}'
+ all: '{name}/{version}-{compiler.name}-{compiler.version}'
+ netlib-scalapack: '{name}/{version}-{compiler.name}-{compiler.version}-{^lapack.name}-{^mpi.name}'
+ ^python^lapack: '{name}/{version}-{compiler.name}-{compiler.version}-{^lapack.name}'
This time we will be more selective and regenerate only the ``openmpi`` module file:
@@ -600,11 +594,11 @@ You can, for instance, generate python modules that load their dependencies by a
SLURM_MPI_TYPE: pmi2
OMPI_MCA_btl_openib_warn_default_gid_prefix: '0'
projections:
- all: '{name}/{version}-{compiler.name}-{compiler.version}'
- netlib-scalapack: '{name}/{version}-{compiler.name}-{compiler.version}-{^lapack.name}-{^mpi.name}'
- ^python^lapack: '{name}/{version}-{compiler.name}-{compiler.version}-{^lapack.name}'
+ all: '{name}/{version}-{compiler.name}-{compiler.version}'
+ netlib-scalapack: '{name}/{version}-{compiler.name}-{compiler.version}-{^lapack.name}-{^mpi.name}'
+ ^python^lapack: '{name}/{version}-{compiler.name}-{compiler.version}-{^lapack.name}'
^python:
- autoload: direct
+ autoload: direct
and regenerating the module files for every package that depends on ``python``:
@@ -663,7 +657,7 @@ After these modifications your configuration file should look like:
modules:
default:
enable::
- - lmod
+ - lmod
lmod:
core_compilers:
- 'gcc@11'
@@ -689,16 +683,13 @@ After these modifications your configuration file should look like:
SLURM_MPI_TYPE: pmi2
OMPI_MCA_btl_openib_warn_default_gid_prefix: '0'
projections:
- all: '{name}/{version}'
- ^lapack: '{name}/{version}-{^lapack.name}'
+ all: '{name}/{version}'
+ ^lapack: '{name}/{version}-{^lapack.name}'
.. note::
Double colon in configuration files
- The double colon after ``enable`` is intentional, and it serves the
- purpose of overriding the default list of enabled generators so
- that only ``lmod`` will be active (see `Overriding entire sections `_ for more
- details).
+   The double colon after ``enable`` is intentional: it overrides the default list of enabled generators so that only ``lmod`` will be active (see `Overriding entire sections `_ for more details).
The directive ``core_compilers`` accepts a list of compilers.
Everything built using these compilers will create a module in the ``Core`` part of the hierarchy, which is the entry point for hierarchical module files.
@@ -808,7 +799,7 @@ Coming back to our example, let's add ``lapack`` to the hierarchy and remove the
SLURM_MPI_TYPE: pmi2
OMPI_MCA_btl_openib_warn_default_gid_prefix: '0'
projections:
- all: '{name}/{version}'
+ all: '{name}/{version}'
After module files have been regenerated as usual:
@@ -848,7 +839,8 @@ In the case of hierarchical module files it's:
:lines: 1-6
The statements within double curly brackets ``{{ ... }}`` denote `expressions `_ that will be evaluated and substituted at module generation time.
-The rest of the file is then divided into `blocks `_ that can be overridden or extended by users, if need be. `Control structures `_ , delimited by ``{% ... %}``, are also permitted in the template language:
+The rest of the file is then divided into `blocks `_ that can be overridden or extended by users, if need be.
+`Control structures `_, delimited by ``{% ... %}``, are also permitted in the template language:
.. literalinclude:: _spack_root/share/spack/templates/modules/modulefile.lua
:language: jinja
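+To experiment with these constructs outside of Spack, plain Jinja2 behaves the same way; a minimal example (the template text below is invented for illustration):
+.. code-block:: python
+   # Illustrative: rendering a Jinja expression inside a block.
+   import jinja2
+   template = jinja2.Template(
+       "{% block footer %}setenv('{{ name }}_ROOT', '{{ prefix }}'){% endblock %}"
+   )
+   print(template.render(name="OPENMPI", prefix="/opt/spack/openmpi"))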
@@ -876,7 +868,7 @@ Let's create the file ``${SPACK_ROOT}/etc/spack/config.yaml`` with the content:
config:
template_dirs:
- - $HOME/.spack/templates
+ - $HOME/.spack/templates
This tells Spack to also search another location when looking for template files.
Next, we need to create our custom template extension in the folder listed above:
@@ -894,7 +886,8 @@ Next, we need to create our custom template extension in the folder listed above
end
{% endblock %}
-Let's name this file ``group-restricted.lua``. The line:
+Let's name this file ``group-restricted.lua``.
+The line:
.. code-block:: jinja
@@ -924,25 +917,25 @@ For the sake of illustration let's assume it's ``netlib-scalapack``:
modules:
enable::
- - lmod
+ - lmod
lmod:
core_compilers:
- - 'gcc@11'
+ - 'gcc@11'
hierarchy:
- - mpi
- - lapack
+ - mpi
+ - lapack
hash_length: 0
include:
- - gcc
+ - gcc
exclude:
- - '%gcc@11'
- - readline
+ - '%gcc@11'
+ - readline
all:
filter:
exclude_env_vars:
- - "C_INCLUDE_PATH"
- - "CPLUS_INCLUDE_PATH"
- - "LIBRARY_PATH"
+ - "C_INCLUDE_PATH"
+ - "CPLUS_INCLUDE_PATH"
+ - "LIBRARY_PATH"
environment:
set:
'{name}_ROOT': '{prefix}'
diff --git a/tutorial_packaging.rst b/tutorial_packaging.rst
index 1ccbe68f81..c4de67eb3a 100644
--- a/tutorial_packaging.rst
+++ b/tutorial_packaging.rst
@@ -167,8 +167,7 @@ Bring ``tutorial-mpileaks``' ``package.py`` file back up in your editor with the
Let's make the following changes:
* remove the boilerplate comments between and including the dashed lines at the top;
-* replace the first ``FIXME`` comment with a description of ``mpileaks``
- in the docstring;
+* replace the first ``FIXME`` comment with a description of ``mpileaks`` in the docstring;
* replace the ``homepage`` property with the correct link;
* uncomment the ``maintainers`` directive and replace the placeholder with your GitHub user name; and
* replace the ``license`` of the project with the correct name and the placeholder with your GitHub user name.
@@ -271,8 +270,7 @@ Let's check that dependencies are effectively built when we try to install ``tut
.. note::
- This command may take a while to run and may produce more output if
- you don't already have an MPI installed or configured in Spack.
+ This command may take a while to run and may produce more output if you don't already have an MPI installed or configured in Spack.
While Spack was unable to install our package, we do see that it identified and built all of our dependencies.
It found that:
@@ -342,7 +340,8 @@ Now let's ensure the environment is properly set up using the ``spack build-env`
$ spack build-env tutorial-mpileaks bash
-This command spawned a new shell containing the same environment that Spack used to build the ``tutorial-mpileaks`` package. (Feel free to substitute your favorite shell for ``bash``.)
+This command spawned a new shell containing the same environment that Spack used to build the ``tutorial-mpileaks`` package.
+(Feel free to substitute your favorite shell for ``bash``.)
.. note::
@@ -387,9 +386,8 @@ Specifying Configure Arguments
We now know which options we need to pass to ``configure``, but how do we know where to find the installation paths for the package's dependencies from within the ``package.py`` file?
Fortunately, we can query the package's concrete ``Spec`` instance.
-The ``self.spec`` property holds the package's directed acyclic graph
-(DAG) of its dependencies. Each dependency's ``Spec``, accessed by name,
-has a ``prefix`` property containing its installation path.
+The ``self.spec`` property holds the package's directed acyclic graph (DAG) of its dependencies.
+Each dependency's ``Spec``, accessed by name, has a ``prefix`` property containing its installation path.
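+For illustration only, a dependency prefix lookup inside a package might look like the following sketch (the package and flag names are placeholders, not the tutorial's actual arguments):
+.. code-block:: python
+   # Hypothetical sketch: querying dependency prefixes via self.spec.
+   def configure_args(self):
+       spec = self.spec
+       return [
+           f"--with-mpi={spec['mpi'].prefix}",
+           f"--with-zlib={spec['zlib'].prefix}",
+       ]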
So let's add the `configuration arguments `_ for specifying the paths to the two concrete dependencies in the ``configure_args`` method of our package.
@@ -505,7 +503,8 @@ Since these are `build-time tests `_:
- for supporting unique configuration options needed to locate libraries.
-* `Modifying a Package's Build Environment
- `_:
- for customizing package and dependency build and run environments.
+* `Retrieving Library Information `_: for supporting unique configuration options needed to locate libraries.
+* `Modifying a Package's Build Environment `_: for customizing package and dependency build and run environments.
~~~~~~~~~~~~~~~~~~~~~~~
Testing an installation
~~~~~~~~~~~~~~~~~~~~~~~
-* `Build-time tests
- `_:
- for sanity checks and pre-/post- ``build`` and or ``install`` phase tests.
-* `Stand-alone tests
- `_:
- for tests that can run against any installed Spack package.
+* `Build-time tests `_: for sanity checks and pre-/post- ``build`` and/or ``install`` phase tests.
+* `Stand-alone tests `_: for tests that can run against any installed Spack package.
~~~~~~~~~~~~~~~~~~~~~~~~~
Using other build systems
~~~~~~~~~~~~~~~~~~~~~~~~~
-* `Build Systems
- `_:
- for the full list of built-in build systems.
-* `Spack Package Build Systems tutorial
- `_:
- for tutorials on common build systems.
-* `Multiple Build Systems
- `_:
- for a reference on writing packages with multiple build systems.
-* `Package Class Architecture
- `_:
- for more insight on the inner workings of ``Package`` and ``Builder`` classes.
-* `The GDAL Package
- `_:
- for an example of a complex package that extends Python while supporting two build systems.
+* `Build Systems `_: for the full list of built-in build systems.
+* `Spack Package Build Systems tutorial `_: for tutorials on common build systems.
+* `Multiple Build Systems `_: for a reference on writing packages with multiple build systems.
+* `Package Class Architecture `_: for more insight on the inner workings of ``Package`` and ``Builder`` classes.
+* `The GDAL Package `_: for an example of a complex package that extends Python while supporting two build systems.
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Making a package externally detectable
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
-* `Making a package externally discoverable
- `_:
- for making a package discoverable using the ``spack external find`` command.
+* `Making a package externally discoverable `_: for making a package discoverable using the ``spack external find`` command.
diff --git a/tutorial_scripting.rst b/tutorial_scripting.rst
index 85420ca806..c476e5a0de 100644
--- a/tutorial_scripting.rst
+++ b/tutorial_scripting.rst
@@ -197,7 +197,8 @@ Exit the editor and add execute permissions to the script before running it as f
:emphasize-lines: 1-2
If we're lucky, it ran successfully, but there's no guarantee this will work for every system.
-Some systems only support a single argument on the shebang line (see `here `_). ``spack-python``, which is a wrapper script for ``spack python``, solves this issue.
+Some systems only support a single argument on the shebang line (see `here `_).
+``spack-python``, which is a wrapper script for ``spack python``, solves this issue.
Bring up the file in the editor again and change the ``env`` argument to ``spack-python`` as follows:
diff --git a/tutorial_stacks.rst b/tutorial_stacks.rst
index 387ba8e445..5ad921b3cf 100644
--- a/tutorial_stacks.rst
+++ b/tutorial_stacks.rst
@@ -25,8 +25,7 @@ We'll consider how the software we install might be consumed by our users, and s
.. note::
- Before we start this hands-on, make sure the ``EDITOR`` environment variable is set to your
- preferred editor, for instance:
+ Before we start this hands-on, make sure the ``EDITOR`` environment variable is set to your preferred editor, for instance:
.. code-block:: console
@@ -149,10 +148,10 @@ Matrices will expand to the cross-product of their rows, so this matrix:
.. code-block:: yaml
- matrix:
- - ["netlib-scalapack"]
- - ["^openmpi", "^mpich"]
- - ["^openblas", "^netlib-lapack"]
- - ["%gcc@12"]
+ - ["netlib-scalapack"]
+ - ["^openmpi", "^mpich"]
+ - ["^openblas", "^netlib-lapack"]
+ - ["%gcc@12"]
is equivalent to this list of specs:
@@ -210,7 +209,8 @@ Concretize the environment and install the specs again:
.. literalinclude:: outputs/stacks/concretize-3.out
:language: console
-At this point, the environment contains only ``py-scipy ^openblas``. Verify it:
+At this point, the environment contains only ``py-scipy ^openblas``.
+Verify it:
.. literalinclude:: outputs/stacks/concretize-4.out
:language: spec
@@ -276,7 +276,8 @@ Later you can move this mirror to e.g. an air-gapped machine and:
$ spack mirror add
-to be able to re-build the specs from sources. Alternatively, to create a buildcache you can:
+to be able to rebuild the specs from sources.
+Alternatively, to create a buildcache you can:
.. code-block:: console
@@ -327,7 +328,8 @@ In the configuration above we created two views, named ``default`` and ``full``.
The ``default`` view consists of all the packages that are compiled with ``gcc@12``, but do not depend on either ``mpich`` or ``netlib-lapack``.
As we can see, we can both *include* and *exclude* specs using constraints.
-The ``full`` view contains a more complex projection, so to put each spec into an appropriate subdirectory, according to the first constraint that the spec matches. ``all`` is the default projection, and has always the lowest priority, independent of the order in which it appears.
+The ``full`` view contains a more complex projection, which puts each spec into an appropriate subdirectory according to the first constraint that the spec matches.
+``all`` is the default projection and always has the lowest priority, independent of the order in which it appears.
To avoid confusion, we advise always keeping it last in projections.
Concretize to regenerate the views, and check their structure: