diff --git a/.flake8 b/.flake8
deleted file mode 100644
index 470cf8b0..00000000
--- a/.flake8
+++ /dev/null
@@ -1,12 +0,0 @@
-[flake8]
-max-line-length = 100
-
-## IGNORES
-
-# E127: flake8 reporting incorrect continuation line indent errors
-# on multi-line and multi-level indents
-
-# W503, W504: flake8 reports this as incorrect, and scripts/format_code
-# changes code to it, so let format_code win.
-
-ignore = E127,W503,W504
\ No newline at end of file
diff --git a/.isort.cfg b/.isort.cfg
deleted file mode 100644
index f238bf7e..00000000
--- a/.isort.cfg
+++ /dev/null
@@ -1,2 +0,0 @@
-[settings]
-profile = black
diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml
index 0c06577f..cec6e04d 100644
--- a/.pre-commit-config.yaml
+++ b/.pre-commit-config.yaml
@@ -2,12 +2,16 @@
# Please run `pre-commit run --all-files` when adding or changing entries.
repos:
+ - repo: https://github.com/charliermarsh/ruff-pre-commit
+ rev: v0.0.258
+ hooks:
+ - id: ruff
- repo: https://github.com/psf/black
- rev: 22.12.0
+ rev: 23.1.0
hooks:
- id: black
- repo: https://github.com/codespell-project/codespell
- rev: v2.2.2
+ rev: v2.2.4
hooks:
- id: codespell
args: [--ignore-words=.codespellignore]
@@ -16,12 +20,8 @@ repos:
rev: v1.1.1
hooks:
- id: doc8
- - repo: https://github.com/PyCQA/flake8
- rev: 6.0.0
- hooks:
- - id: flake8
- repo: https://github.com/pre-commit/mirrors-mypy
- rev: v0.991
+ rev: v1.1.1
hooks:
- id: mypy
# TODO lint test and scripts too
@@ -35,8 +35,3 @@ repos:
- pyproj
- pystac
- types-requests
- - repo: https://github.com/pycqa/isort
- rev: 5.12.0
- hooks:
- - id: isort
- name: isort (python)
diff --git a/.readthedocs.environment.yml b/.readthedocs.environment.yml
index 2a1e0ae0..cd67cb03 100644
--- a/.readthedocs.environment.yml
+++ b/.readthedocs.environment.yml
@@ -10,7 +10,7 @@ dependencies:
- black
- codespell
- coverage
- - flake8
+ - ruff
- ipython
- jupyter
- lxml-stubs
diff --git a/docs/api.rst b/docs/api.rst
index 5130aa4c..ad051335 100644
--- a/docs/api.rst
+++ b/docs/api.rst
@@ -92,6 +92,12 @@ Raster footprint generation
.. automodule:: stactools.core.utils.raster_footprint
:members:
+Geometry
+~~~~~~~~
+
+.. automodule:: stactools.core.geometry
+ :members:
+
Testing
-------
diff --git a/pyproject.toml b/pyproject.toml
index 9787c3bd..fd65b944 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -1,3 +1,6 @@
[build-system]
requires = ["setuptools", "wheel"]
build-backend = "setuptools.build_meta"
+
+[tool.ruff]
+line-length = 88
diff --git a/requirements-dev.txt b/requirements-dev.txt
index fce96fea..7bf7d4f1 100644
--- a/requirements-dev.txt
+++ b/requirements-dev.txt
@@ -1,6 +1,5 @@
black
codespell
-flake8
importlib-metadata
ipython
jupyter
@@ -13,6 +12,7 @@ pydata-sphinx-theme
pylint
pytest
pytest-cov
+ruff
sphinx < 6
sphinx-autobuild
sphinx-click
diff --git a/scripts/check_minimum_requirements b/scripts/check_minimum_requirements
index 5464700b..74dc92f6 100755
--- a/scripts/check_minimum_requirements
+++ b/scripts/check_minimum_requirements
@@ -24,7 +24,7 @@ for package_requirement in package_requirements:
if package_requirement.marker is not None:
continue
min_requirement = min_requirements[package_requirement.name]
- for (package_specifier, min_specifier) in zip(
+ for package_specifier, min_specifier in zip(
package_requirement.specifier, min_requirement.specifier
):
if (
diff --git a/scripts/format b/scripts/format
index 0616c682..6b605b10 100755
--- a/scripts/format
+++ b/scripts/format
@@ -18,6 +18,5 @@ if [ "${BASH_SOURCE[0]}" = "${0}" ]; then
usage
else
pre-commit run black --all-files
- pre-commit run isort --all-files
fi
fi
diff --git a/scripts/lint b/scripts/lint
index 16a2aa67..82cb119e 100755
--- a/scripts/lint
+++ b/scripts/lint
@@ -19,7 +19,7 @@ if [ "${BASH_SOURCE[0]}" = "${0}" ]; then
else
pre-commit run codespell --all-files
pre-commit run doc8 --all-files
- pre-commit run flake8 --all-files
+ pre-commit run ruff --all-files
pre-commit run mypy --all-files
fi
fi
diff --git a/src/stactools/cli/commands/copy.py b/src/stactools/cli/commands/copy.py
index 111ed767..00957f2e 100644
--- a/src/stactools/cli/commands/copy.py
+++ b/src/stactools/cli/commands/copy.py
@@ -86,10 +86,10 @@ def copy_command(
copy_assets: bool,
publish_location: Optional[str],
) -> None:
- """Copy a STAC Catalog or Collection at SRC to the directory
- at DST.
+ """Copy a STAC Catalog or Collection at SRC to the directory at DST.
- Note: Copying a catalog will upgrade it to the latest version of STAC."""
+ Note: Copying a catalog will upgrade it to the latest version of STAC.
+ """
source_catalog = pystac.read_file(make_absolute_href(src))
if not isinstance(source_catalog, pystac.Catalog):
raise click.BadArgumentUsage(f"{src} is not a STAC Catalog")
diff --git a/src/stactools/cli/commands/summary.py b/src/stactools/cli/commands/summary.py
index 7bb680a7..9d036052 100644
--- a/src/stactools/cli/commands/summary.py
+++ b/src/stactools/cli/commands/summary.py
@@ -42,7 +42,10 @@ def create_summary_command(cli: click.Group) -> click.Command:
"--inplace",
is_flag=True,
default=False,
- help="If updating, update the collection in-place, instead of printing it to stdout.",
+ help=(
+ "If updating, update the collection in-place, "
+ "instead of printing it to stdout."
+ ),
)
def summary_command(
href: str, fields: Optional[str], update: bool, inplace: bool
diff --git a/src/stactools/cli/commands/update_geometry.py b/src/stactools/cli/commands/update_geometry.py
index acfe1402..f5464b42 100644
--- a/src/stactools/cli/commands/update_geometry.py
+++ b/src/stactools/cli/commands/update_geometry.py
@@ -20,7 +20,8 @@ def create_update_geometry_command(cli: Group) -> Command:
help=(
"The names of the assets to try for footprint extraction. "
"The first successful footprint will be used. "
- "If no assets are provided, all assets will be tried until one is successful."
+ "If no assets are provided, all assets will be tried until one is "
+ "successful."
),
)
@click.option(
@@ -41,7 +42,10 @@ def create_update_geometry_command(cli: Group) -> Command:
"-i",
"--densification-distance",
type=float,
- help="The distance interval at which to increase point density within the polygon",
+ help=(
+ "The distance interval at which to increase point density within the "
+ "polygon"
+ ),
)
@click.option(
"-s",
diff --git a/src/stactools/cli/commands/version.py b/src/stactools/cli/commands/version.py
index cf3866d6..735743e0 100644
--- a/src/stactools/cli/commands/version.py
+++ b/src/stactools/cli/commands/version.py
@@ -9,7 +9,7 @@
def create_version_command(cli: Group) -> Command:
@cli.command("version", short_help="Display version info.")
def version_command() -> None:
- """Display version info"""
+ """Display version info."""
echo(f"stactools version {__version__}")
echo(f"PySTAC version {pystac.__version__}")
echo(f"STAC version {get_stac_version()}")
diff --git a/src/stactools/cli/registry.py b/src/stactools/cli/registry.py
index c0a1bbdf..50ffc419 100644
--- a/src/stactools/cli/registry.py
+++ b/src/stactools/cli/registry.py
@@ -21,8 +21,10 @@ def get_create_subcommand_functions(self) -> List[Callable[[Group], Command]]:
def load_plugins(self) -> None:
"""Discover all plugins and register their resources.
+
Import each Python module within the stactools namespace package
- and call the register_plugin function at its root (if it exists).
+ and call the register_plugin function at its root (if it
+ exists).
"""
import importlib
import pkgutil
diff --git a/src/stactools/core/add.py b/src/stactools/core/add.py
index 06069409..7066001c 100644
--- a/src/stactools/core/add.py
+++ b/src/stactools/core/add.py
@@ -46,5 +46,6 @@ def add_item(
do_move_assets(item_copy, copy=False)
else:
raise ValueError(
- f"Cannot add Item {source_item.id} because {target_catalog} does not have a self href."
+ f"Cannot add Item {source_item.id} because {target_catalog} does "
+ "not have a self href."
)
diff --git a/src/stactools/core/add_raster.py b/src/stactools/core/add_raster.py
index 7db1857e..7db7b97c 100644
--- a/src/stactools/core/add_raster.py
+++ b/src/stactools/core/add_raster.py
@@ -43,7 +43,7 @@ def add_raster_to_item(item: Item) -> Item:
def _read_bands(href: str) -> List[RasterBand]:
bands = []
with rasterio.open(href) as dataset:
- for (i, index) in enumerate(dataset.indexes):
+ for i, index in enumerate(dataset.indexes):
data = dataset.read(index, masked=True)
band = RasterBand.create()
band.nodata = dataset.nodatavals[i]
diff --git a/src/stactools/core/copy.py b/src/stactools/core/copy.py
index ec0c0626..41391403 100644
--- a/src/stactools/core/copy.py
+++ b/src/stactools/core/copy.py
@@ -162,7 +162,8 @@ def move_assets(
abs_asset_href = asset.get_absolute_href()
if abs_asset_href is None:
raise ValueError(
- f"Asset {asset.title} HREF is not available for item {item.id}. This operation "
+ f"Asset {asset.title} HREF is not available for item {item.id}. "
+ "This operation "
"requires that the Asset HREFs are available."
)
diff --git a/src/stactools/core/geometry.py b/src/stactools/core/geometry.py
index 47854f97..9f2f1c43 100644
--- a/src/stactools/core/geometry.py
+++ b/src/stactools/core/geometry.py
@@ -7,11 +7,11 @@ def bounding_box(geom: Dict[str, Any]) -> List[float]:
"""Extracts and returns the bounding box of a GeoJSON geometry.
Args:
- geom (dict): A GeoJSON Feature, GeoJSON FeatureCollection, GeoJSON geometry, STAC Item,
- or STAC ItemCollection.
+ geom (dict): A GeoJSON Feature, GeoJSON FeatureCollection, GeoJSON geometry,
+ STAC Item, or STAC ItemCollection.
Returns:
- list: A list of float values containing the bounding box of the GeoJSON geometry in the
- format [min X, min Y, max X, max Y]
+ list: A list of float values containing the bounding box of the GeoJSON
+ geometry in the format [min X, min Y, max X, max Y]
"""
return list(rasterio.features.bounds(geom))
diff --git a/src/stactools/core/io/__init__.py b/src/stactools/core/io/__init__.py
index fd91fb81..28972aba 100644
--- a/src/stactools/core/io/__init__.py
+++ b/src/stactools/core/io/__init__.py
@@ -12,7 +12,8 @@
ReadHrefModifier = Callable[[str], str]
"""Type alias for a function parameter that allows users to manipulate HREFs.
-Used for reading, e.g. appending an Azure SAS Token or translating to a signed URL.
+Used for reading, e.g. appending an Azure SAS Token or translating to a
+signed URL.
"""
@@ -46,21 +47,25 @@ def read_text(
class FsspecStacIO(StacIO):
- """A subclass of :py:class:`pystac.DefaultStacIO` that uses `fsspec
- `_ for reads and writes.
- """
+ """A subclass of :py:class:`pystac.DefaultStacIO` that uses
+ `fsspec `_
+ for reads and writes."""
def read_text(self, source: HREF, *args: Any, **kwargs: Any) -> str:
- """A concrete implementation of :meth:`StacIO.read_text
- `. Converts the ``source`` argument to a string (if it
- is not already) and delegates to :meth:`FsspecStacIO.read_text_from_href` for
- opening and reading the file."""
+ """A concrete implementation of
+ :meth:`StacIO.read_text <pystac.StacIO.read_text>`.
+
+ Converts the ``source`` argument to
+ a string (if it is not already) and delegates to
+ :meth:`FsspecStacIO.read_text_from_href` for opening and reading
+ the file.
+ """
href = str(os.fspath(source))
return self.read_text_from_href(href, **kwargs)
def read_text_from_href(self, href: str, **kwargs: Any) -> str:
- """Reads a file as a utf-8 string using `fsspec
- `_
+ """Reads a file as a utf-8 string using
+ `fsspec `_.
Args:
href (str): The href to read.
@@ -79,10 +84,13 @@ def read_text_from_href(self, href: str, **kwargs: Any) -> str:
raise ValueError(f"Unable to decode data loaded from HREF: {href}")
def write_text(self, dest: HREF, txt: str, *args: Any, **kwargs: Any) -> None:
- """A concrete implementation of :meth:`StacIO.write_text
- `. Converts the ``dest`` argument to a string (if it
- is not already) and delegates to :meth:`FsspecStacIO.write_text_from_href` for
- opening and reading the file."""
+ """A concrete implementation of :meth:`StacIO.write_text `.
+
+ Converts the ``dest`` argument to a
+ string (if it is not already) and delegates to
+ :meth:`FsspecStacIO.write_text_from_href` for opening and
+ reading the file.
+ """ # noqa: E501
href = str(os.fspath(dest))
return self.write_text_to_href(href, txt, **kwargs)
@@ -107,5 +115,6 @@ def write_text_to_href(self, href: str, txt: str, **kwargs: Any) -> None:
def use_fsspec() -> None:
- """Sets the default :py:class:`pystac.StacIO` to :py:class:`FsspecStacIO`."""
+ """Sets the default :py:class:`pystac.StacIO` to
+ :py:class:`FsspecStacIO`."""
StacIO.set_default(FsspecStacIO)
diff --git a/src/stactools/core/io/xml.py b/src/stactools/core/io/xml.py
index 375df7e2..66a2467c 100644
--- a/src/stactools/core/io/xml.py
+++ b/src/stactools/core/io/xml.py
@@ -35,7 +35,8 @@ def find(self, xpath: str) -> Optional["XmlElement"]:
def find_or_throw(
self, xpath: str, get_exception: Callable[[str], Exception]
) -> "XmlElement":
- """Find a child ``XmlElement`` by xpath, or throw an exception if not found.
+ """Find a child ``XmlElement`` by xpath, or throw an exception if not
+ found.
Args:
xpath (str): The xpath to use for search.
@@ -145,7 +146,7 @@ def text(self) -> Optional[str]:
@lru_cache(maxsize=100)
def get_attr(self, attr: str) -> Optional[str]:
- """Returns the value of a given attribute of this element
+ """Returns the value of a given attribute of this element.
Args:
attr (str): The name of the attribute.
@@ -171,7 +172,7 @@ def from_file(
Args:
href (str): The href to read.
- read_href_modifier (Optional[:py:class:`stactools.core.io.ReadHrefModifier`]):
+ read_href_modifier (Optional[:class:`stactools.core.io.ReadHrefModifier`]):
An optional callable that will be used to modify the href.
Defaults to None.
diff --git a/src/stactools/core/projection.py b/src/stactools/core/projection.py
index 2160e4f7..6c3a932a 100644
--- a/src/stactools/core/projection.py
+++ b/src/stactools/core/projection.py
@@ -27,8 +27,8 @@ def reproject_geom(
geom: Dict[str, Any],
precision: Optional[int] = None,
) -> Dict[str, Any]:
- """Reprojects a geometry represented as GeoJSON from the src_crs to the dest
- crs.
+ """Reprojects a geometry represented as GeoJSON from the src_crs to the
+ dest crs.
Args:
src_crs (pyproj.crs.CRS, rasterio.crs.CRS, or str): Projection of input data.
diff --git a/src/stactools/core/utils/__init__.py b/src/stactools/core/utils/__init__.py
index 646ae65d..938e2151 100644
--- a/src/stactools/core/utils/__init__.py
+++ b/src/stactools/core/utils/__init__.py
@@ -73,8 +73,7 @@ def deprecate(from_: str, to: str, version: str) -> None:
@contextmanager
def ignore_not_georeferenced() -> Generator[None, None, None]:
"""Suppress rasterio's warning when opening a dataset that contains no
- georeferencing information.
- """
+ georeferencing information."""
with warnings.catch_warnings():
warnings.simplefilter("ignore", category=NotGeoreferencedWarning)
yield
diff --git a/src/stactools/core/utils/antimeridian.py b/src/stactools/core/utils/antimeridian.py
index 64ee0d9c..047afe05 100644
--- a/src/stactools/core/utils/antimeridian.py
+++ b/src/stactools/core/utils/antimeridian.py
@@ -14,12 +14,12 @@ class Strategy(Enum):
"""Strategy for handling antimeridian-crossing polygons."""
SPLIT = auto()
- """Split the polygon into multiple polygons so none cross the antimeridian."""
+ """Split the polygon into multiple polygons so none cross the
+ antimeridian."""
NORMALIZE = auto()
- """Keep the polygon as one polygon, but extend its values to be greater than
- 180 or less than -180.
- """
+ """Keep the polygon as one polygon, but extend its values to be greater
+ than 180 or less than -180."""
def fix_item(item: Item, strategy: Strategy) -> Item:
@@ -41,7 +41,8 @@ def fix_item(item: Item, strategy: Strategy) -> Item:
multi_polygon = True
else:
raise ValueError(
- f"Can only fix antimeridian issues for Polygons or MultiPolygons, geometry={geometry}"
+ "Can only fix antimeridian issues for Polygons or MultiPolygons, "
+ f"geometry={geometry}"
)
if strategy == Strategy.NORMALIZE:
if multi_polygon:
@@ -73,7 +74,8 @@ def fix_item(item: Item, strategy: Strategy) -> Item:
def split(polygon: Polygon) -> Optional[MultiPolygon]:
- """Splits a single WGS84 polygon into a multipolygon across the antimeridian.
+ """Splits a single WGS84 polygon into a multipolygon across the
+ antimeridian.
If the polygon does not cross the antimeridian, returns None. Only handles
exterior rings (can't handle interior).
@@ -118,7 +120,8 @@ def split(polygon: Polygon) -> Optional[MultiPolygon]:
def split_multipolygon(multi_polygon: MultiPolygon) -> Optional[MultiPolygon]:
- """Splits multiple WGS84 polygons into a multipolygon across the antimeridian.
+ """Splits multiple WGS84 polygons into a multipolygon across the
+ antimeridian.
If none of the contained polygons cross the antimeridian, returns None. Only
handles exterior rings (can't handle interior).
@@ -130,7 +133,8 @@ def split_multipolygon(multi_polygon: MultiPolygon) -> Optional[MultiPolygon]:
Fix this
Args:
- multi_polygon (:py:class:`shapely.geometry.MultiPolygon`): The input multi polygon.
+ multi_polygon (:py:class:`shapely.geometry.MultiPolygon`): The input
+ multi polygon.
Returns:
Optional[:py:class:`shapely.geometry.MultiPolygon`]:
@@ -148,7 +152,8 @@ def split_multipolygon(multi_polygon: MultiPolygon) -> Optional[MultiPolygon]:
def normalize(polygon: Polygon) -> Optional[Polygon]:
- """'Normalizes' a WGS84 lat/lon polygon, or returns None if no changes were made.
+ """'Normalizes' a WGS84 lat/lon polygon, or returns None if no changes were
+ made.
This converts the polygon's x coordinates to all be the same sign, even if
the polygon crosses the antimeridian. E.g.:
@@ -194,7 +199,8 @@ def normalize(polygon: Polygon) -> Optional[Polygon]:
def normalize_multipolygon(multi_polygon: MultiPolygon) -> Optional[MultiPolygon]:
- """'Normalizes' a WGS84 lat/lon multi polygon, or returns None if no changes were made.
+ """'Normalizes' a WGS84 lat/lon multi polygon, or returns None if no
+ changes were made.
For each polygon in the multi-polygon, this converts the x coordinates to
all be the same sign, even if the polygon crosses the antimeridian. Although
@@ -209,10 +215,12 @@ def normalize_multipolygon(multi_polygon: MultiPolygon) -> Optional[MultiPolygon
Fix this
Args:
- multi_polygon (:py:class:`shapely.geometry.MultiPolygon`): The input multi-polygon.
+ multi_polygon (:py:class:`shapely.geometry.MultiPolygon`): The input
+ multi-polygon.
Returns:
- Optional[:py:class:`shapely.geometry.MultiPolygon`]: The normalized multi-polygon.
+ Optional[:py:class:`shapely.geometry.MultiPolygon`]: The normalized
+ multi-polygon.
"""
polygons = list()
changes_made = False
diff --git a/src/stactools/core/utils/convert.py b/src/stactools/core/utils/convert.py
index 2908ce36..2d5911aa 100644
--- a/src/stactools/core/utils/convert.py
+++ b/src/stactools/core/utils/convert.py
@@ -78,11 +78,13 @@ def cogify(
def cogify_subdatasets(
infile: str, outdir: str, subdataset_names: Optional[List[str]] = None
) -> Tuple[List[str], List[str]]:
- """Creates Cloud-Optimized GeoTIFFs for all subdatasets in a multi-dataset raster file.
+ """Creates Cloud-Optimized GeoTIFFs for all subdatasets in a multi-dataset
+ raster file.
- The created files will be named the same as the source file, with a ``_SUBDATASET`` suffix.
- E.g. if the source file is named ``foo.hdf`` and the subdataset is named ``bar``, the output
- COG will be named ``foo_bar.tif``. Only 2D (and not 3D) subdatasets are supported.
+ The created files will be named the same as the source file, with a
+ ``_SUBDATASET`` suffix. E.g. if the source file is named ``foo.hdf`` and
+ the subdataset is named ``bar``, the output COG will be named
+ ``foo_bar.tif``. Only 2D (and not 3D) subdatasets are supported.
Args:
infile (str): The input file containing subdatasets.
diff --git a/src/stactools/core/utils/raster_footprint.py b/src/stactools/core/utils/raster_footprint.py
index b569ff79..5416af73 100644
--- a/src/stactools/core/utils/raster_footprint.py
+++ b/src/stactools/core/utils/raster_footprint.py
@@ -1,4 +1,5 @@
-"""Generate convex hulls of valid raster data for use in STAC Item geometries."""
+"""Generate convex hulls of valid raster data for use in STAC Item
+geometries."""
import logging
import warnings
@@ -28,13 +29,12 @@
def densify_by_factor(
point_list: List[Tuple[float, float]], factor: int
) -> List[Tuple[float, float]]:
- """
- Densifies the number of points in a list of points by a ``factor``. For
+ """Densifies the number of points in a list of points by a ``factor``. For
example, a list of 5 points and a factor of 2 will result in 10 points (one
new point between each original adjacent points).
Derived from code found at
- https://stackoverflow.com/questions/64995977/generating-equidistance-points-along-the-boundary-of-a-polygon-but-cw-ccw # noqa
+ https://stackoverflow.com/questions/64995977/generating-equidistance-points-along-the-boundary-of-a-polygon-but-cw-ccw
Args:
point_list (List[Tuple[float, float]]): The list of points to be
@@ -45,7 +45,7 @@ def densify_by_factor(
Returns:
List[Tuple[float, float]]: A list of the densified points.
- """
+ """ # noqa: E501
points: Any = np.asarray(point_list)
densified_number = len(points) * factor
existing_indices = np.arange(0, densified_number, factor)
@@ -59,15 +59,14 @@ def densify_by_factor(
def densify_by_distance(
point_list: List[Tuple[float, float]], distance: float
) -> List[Tuple[float, float]]:
- """
- Densifies the number of points in a list of points by inserting new
+ """Densifies the number of points in a list of points by inserting new
points at intervals between each set of successive points. For example, if
two successive points in the list are separated by 10 units and a
``distance`` of 2 is provided, 4 new points will be added between the two
original points (one new point every 2 units of ``distance``).
Derived from code found at
- https://stackoverflow.com/questions/64995977/generating-equidistance-points-along-the-boundary-of-a-polygon-but-cw-ccw # noqa
+ https://stackoverflow.com/questions/64995977/generating-equidistance-points-along-the-boundary-of-a-polygon-but-cw-ccw
Args:
point_list (List[Tuple[float, float]]): The list of points to be
@@ -162,8 +161,8 @@ class RasterFootprint:
created along the segment. Higher densities produce higher
fidelity footprints in areas of high projection distortion.
Mutually exclusive with ``densification_factor``.
- simplify_tolerance (Optional[float]): Distance, in degrees, within which
- all locations on the simplified polygon will be to the original
+ simplify_tolerance (Optional[float]): Distance, in degrees, within
+ which all locations on the simplified polygon will be to the original
polygon.
no_data (Optional[Union[int, float]]): The nodata value in
``data_array``. If set to None, this will return a footprint
@@ -177,10 +176,12 @@ class RasterFootprint:
"""2D or 3D array of raster data."""
densification_distance: Optional[float]
- """Optional distance for densifying polygon vertices before reprojection to EPSG 4326."""
+ """Optional distance for densifying polygon vertices before reprojection to
+ EPSG 4326."""
densification_factor: Optional[int]
- """Optional factor for densifying polygon vertices before reprojection to EPSG 4326."""
+ """Optional factor for densifying polygon vertices before reprojection to
+ EPSG 4326."""
no_data: Optional[Union[int, float]]
"""Optional value defining pixels to exclude from the footprint."""
@@ -189,7 +190,8 @@ class RasterFootprint:
"""Number of decimal places in the final footprint coordinates."""
simplify_tolerance: Optional[float]
- """Optional maximum allowable error when simplifying the reprojected polygon."""
+ """Optional maximum allowable error when simplifying the reprojected
+ polygon."""
transform: Affine
"""Transformation matrix from pixel to CRS coordinates."""
@@ -223,9 +225,9 @@ def __init__(
self.no_data = no_data
def footprint(self) -> Optional[Dict[str, Any]]:
- """Produces the footprint surrounding data (not nodata) pixels in
- the source image. If the footprint is unable to be computed, None
- is returned.
+ """Produces the footprint surrounding data (not nodata) pixels in the
+ source image. If the footprint is unable to be computed, None is
+ returned.
Returns:
Optional[Dict[str, Any]]: A GeoJSON dictionary containing the
@@ -247,7 +249,6 @@ def data_mask(self) -> npt.NDArray[np.uint8]:
Returns:
numpy.NDArray[numpy.uint8]: A 2D array containing 0s and 1s for
nodata/data pixels.
-
"""
assert self.data_array.ndim == 3
shape = self.data_array.shape
@@ -292,8 +293,9 @@ def data_extent(self, mask: npt.NDArray[np.uint8]) -> Optional[Polygon]:
return polygon
def densify_polygon(self, polygon: Polygon) -> Polygon:
- """Adds vertices to the footprint polygon in the native CRS using either
- ``self.densification_factor`` or ``self.densification_distance``.
+ """Adds vertices to the footprint polygon in the native CRS using
+ either ``self.densification_factor`` or
+ ``self.densification_distance``.
Args:
polygon (Polygon): Footprint polygon in the native CRS.
@@ -331,8 +333,8 @@ def reproject_polygon(self, polygon: Polygon) -> Polygon:
def simplify_polygon(self, polygon: Polygon) -> Polygon:
"""Reduces the number of polygon vertices such that the simplified
- polygon shape is no further away than the original polygon vertices than
- ``self.simplify_tolerance``.
+ polygon shape is no further away than the original polygon vertices
+ than ``self.simplify_tolerance``.
Args:
polygon (Polygon): Polygon to be simplified.
@@ -497,10 +499,9 @@ def update_geometry_from_asset_footprint(
bands: List[int] = [1],
skip_errors: bool = True,
) -> bool:
- """
- Accepts an Item and an optional list of asset names within that Item, and
- updates the geometry of that Item in-place with the data footprint derived
- from the first of the assets that exists in the Item.
+ """Accepts an Item and an optional list of asset names within that
+ Item, and updates the geometry of that Item in-place with the data
+ footprint derived from the first of the assets that exists in the Item.
See :class:`RasterFootprint` for details on the data footprint
calculation.
@@ -578,11 +579,10 @@ def data_footprints_for_data_assets(
bands: List[int] = [1],
skip_errors: bool = True,
) -> Iterator[Tuple[str, Dict[str, Any]]]:
- """
- Accepts an Item and an optional list of asset names within that Item, and
- produces an iterator over the same asset names (if they exist) and
- dictionaries representing GeoJSON Polygons of the data footprints of the
- assets.
+ """Accepts an Item and an optional list of asset names within that
+ Item, and produces an iterator over the same asset names (if they
+ exist) and dictionaries representing GeoJSON Polygons of the data
+ footprints of the assets.
See :class:`RasterFootprint` for details on the data footprint
calculation.
@@ -872,9 +872,8 @@ def densify_reproject_simplify(
precision: int = DEFAULT_PRECISION,
simplify_tolerance: Optional[float] = None,
) -> Polygon:
- """
- Densifies the input polygon, reprojects it to EPSG 4326, and simplifies the
- resulting polygon.
+ """Densifies the input polygon, reprojects it to EPSG 4326, and simplifies
+ the resulting polygon.
See :class:`RasterFootprint` for details on densification and
simplification.
diff --git a/src/stactools/core/utils/round.py b/src/stactools/core/utils/round.py
index 15807ddb..fea4d8fd 100644
--- a/src/stactools/core/utils/round.py
+++ b/src/stactools/core/utils/round.py
@@ -37,8 +37,8 @@ def round_coordinates(stac_object: S, precision: int = DEFAULT_PRECISION) -> S:
def recursive_round(coordinates: List[Any], precision: int) -> List[Any]:
- """Rounds a list of numbers. The list can contain additional nested lists or
- tuples of numbers.
+ """Rounds a list of numbers. The list can contain additional nested lists
+ or tuples of numbers.
Any tuples encountered will be converted to lists.
diff --git a/src/stactools/testing/cli.py b/src/stactools/testing/cli.py
index d77bbcb8..03d065af 100644
--- a/src/stactools/testing/cli.py
+++ b/src/stactools/testing/cli.py
@@ -1,5 +1,4 @@
-""" CLI for test data maintenance and generation.
-"""
+"""CLI for test data maintenance and generation."""
import logging
import os
import shutil
diff --git a/src/stactools/testing/test_data.py b/src/stactools/testing/test_data.py
index 81cff3c8..6d4add0f 100644
--- a/src/stactools/testing/test_data.py
+++ b/src/stactools/testing/test_data.py
@@ -76,7 +76,8 @@ def get_external_data(self, rel_path: str) -> str:
from an external source.
Args:
- rel_path (str): The key to the external data, as configured in class instantiation.
+ rel_path (str): The key to the external data, as configured in class
+ instantiation.
Returns:
str: The absolute path to the external data file.
diff --git a/tests/cli/commands/test_cases.py b/tests/cli/commands/test_cases.py
index 58c155d3..0d23cb75 100644
--- a/tests/cli/commands/test_cases.py
+++ b/tests/cli/commands/test_cases.py
@@ -168,7 +168,7 @@ def test_case_4():
"""Test case that is based on a local copy of the Tier 1 dataset from
DrivenData's OpenCities AI Challenge.
See: https://www.drivendata.org/competitions/60/building-segmentation-disaster-resilience
- """
+ """ # noqa: E501
return Catalog.from_file(
test_data.get_path("data-files/catalogs/test-case-4/catalog.json")
)
diff --git a/tests/core/utils/test_raster_footprint.py b/tests/core/utils/test_raster_footprint.py
index 33ef3d37..a9aad64e 100644
--- a/tests/core/utils/test_raster_footprint.py
+++ b/tests/core/utils/test_raster_footprint.py
@@ -220,7 +220,6 @@ def test_landsat8() -> None:
def test_nan_as_nodata() -> None:
-
polygon = data_footprint(
test_data.get_path("data-files/raster_footprint/LC08_LST_crop.tif"), # noqa
simplify_tolerance=0.01,
diff --git a/tests/utils.py b/tests/utils.py
index 1adb1995..d5224044 100644
--- a/tests/utils.py
+++ b/tests/utils.py
@@ -7,7 +7,8 @@ def create_temp_copy(src_path: str, tmp_dir: str, target_name: str) -> str:
Args:
src_path (str): path of the file to be copied.
- tmp_dir (TemporaryDirectory): path of the temporary directory where the file will be copied.
+ tmp_dir (TemporaryDirectory): path of the temporary directory where the
+ file will be copied.
target_name (str): name of the file in the target location.
Returns: