
build(pre-commit.ci): pre-commit autoupdate #148

Open
wants to merge 2 commits into base: main
14 changes: 7 additions & 7 deletions .pre-commit-config.yaml
@@ -3,39 +3,39 @@
default_stages: [pre-commit]
repos:
- repo: https://github.com/compilerla/conventional-pre-commit
rev: v3.4.0
rev: v4.2.0
hooks:
- id: conventional-pre-commit
stages: [commit-msg]
- repo: https://github.com/astral-sh/ruff-pre-commit
rev: 'v0.5.6'
rev: 'v0.11.12'
hooks:
- id: ruff
args: [--fix, --exit-non-zero-on-fix, --show-fixes]
# TODO: Switch back to upstream docformatter
# after https://github.com/PyCQA/docformatter/issues/289 is fixed
- repo: https://github.com/PyCQA/docformatter
rev: eb1df34
rev: v1.7.7
# rev: v1.7.5
hooks:
- id: docformatter
additional_dependencies: [tomli]
args: [--in-place, --config, ./pyproject.toml]
- repo: https://github.com/pre-commit/mirrors-mypy
rev: v1.15.0
rev: v1.16.0
hooks:
- id: mypy
additional_dependencies: ['types-requests', 'types-six']
- repo: https://github.com/pdm-project/pdm
rev: 2.22.0
rev: 2.24.2
hooks:
- id: pdm-lock-check
- id: pdm-export
args: ["-o", "requirements.txt", "--without-hashes", "-G", "cli", "-G", "index-generation", "-dG:all"]
files: ^pdm.lock$
stages: [manual]
- repo: https://github.com/kynan/nbstripout
rev: 0.7.1
rev: 0.8.1
hooks:
- id: nbstripout
- repo: https://github.com/pre-commit/pre-commit-hooks
@@ -50,7 +50,7 @@ repos:
args: ['--maxkb=1000']
- id: detect-private-key
- repo: https://github.com/dosisod/refurb
rev: v2.0.0
rev: v2.1.0
hooks:
- id: refurb
args: ["--python-version", "3.9", "--format", "github"]
2 changes: 1 addition & 1 deletion dev/generate_resources_usage_plot.ipynb
@@ -141,7 +141,7 @@
" verbosity_mode=\"silent\",\n",
" debug_times=True,\n",
" )\n",
" output_text = cast(Console, GLOBAL_CONSOLE).export_text()\n",
" output_text = cast(\"Console\", GLOBAL_CONSOLE).export_text()\n",
" search_text = \"Steps times: \"\n",
" times = json.loads(output_text[(len(search_text) - 1) :])\n",
" return path, times\n",
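Most of the Python churn in this PR follows one mechanical pattern, visible here and in several files below: `cast(Console, ...)` becomes `cast("Console", ...)`. Quoting the target means the name only has to resolve for the type checker, so the import can be dropped at runtime or moved under `TYPE_CHECKING`; this is likely what the bumped ruff release now enforces (the TC006 / runtime-cast-value rule), though the exact trigger is not shown in this diff. A minimal sketch of the idea, assuming the `Console` involved is rich's and not reusing any of this repository's actual code:

```python
from __future__ import annotations

from typing import TYPE_CHECKING, cast

if TYPE_CHECKING:
    # Only needed while type checking; nothing is imported at runtime.
    from rich.console import Console


def exported_text(console_like: object) -> str:
    # cast("Console", ...) never evaluates the name at runtime,
    # so the TYPE_CHECKING-only import above is enough.
    # cast(Console, ...) would raise NameError here instead.
    console = cast("Console", console_like)
    return console.export_text()
```

Calling this with an actual `rich.console.Console(record=True)` instance should simply return whatever output that console has recorded.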
2 changes: 1 addition & 1 deletion docs/gen_cli_docs.py
@@ -59,7 +59,7 @@ def _get_rich_console_new(stderr: bool = False) -> Console:
click_obj = typer.main.get_command(typer_obj)
ctx = typer.Context(command=click_obj, info_name="QuackOSM")
rich_format_help(obj=click_obj, ctx=ctx, markup_mode="rich")
html_text: str = cast(Console, GLOBAL_CONSOLE).export_html(
html_text: str = cast("Console", GLOBAL_CONSOLE).export_html(
inline_styles=True,
code_format='<div class="highlight"><pre><code>{code}</code></pre></div>',
)
2 changes: 1 addition & 1 deletion examples/advanced_examples/custom_sql_filter.ipynb
@@ -155,7 +155,7 @@
"data = qosm.convert_geometry_to_geodataframe(\n",
" geometry_filter=qosm.geocode_to_geometry(\"Greater London\"),\n",
" osm_extract_source=\"Geofabrik\",\n",
" custom_sql_filter=\"\"\"\n",
" custom_sql_filter=r\"\"\"\n",
" list_has_all(map_keys(tags), ['highway', 'name'])\n",
" AND regexp_matches(tags['name'], '^(New|Old)\\s\\w+')\n",
" \"\"\",\n",
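The only change in this notebook is the added `r` prefix: it keeps `\s` and `\w` as literal backslash escapes on their way to DuckDB's `regexp_matches`, instead of relying on Python passing unknown string escapes through, which recent CPython versions report as invalid escape sequences. A short illustration using Python's own `re` module (the input string is made up; the pattern is the notebook's):

```python
import re

plain = "^(New|Old)\\s\\w+"  # in a regular string the backslashes must be doubled
raw = r"^(New|Old)\s\w+"     # a raw string keeps them exactly as written

assert plain == raw  # both hand the same pattern to the regex engine

# Writing "...\s\w+" without the r prefix relies on Python passing unknown
# escapes through unchanged, which newer CPython releases warn about.
print(re.findall(raw, "New Street"))  # ['New']
```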
21 changes: 12 additions & 9 deletions examples/advanced_examples/osm_extracts.ipynb
@@ -43,7 +43,7 @@
"metadata": {},
"outputs": [],
"source": [
"from quackosm.osm_extracts import display_available_extracts, OsmExtractSource"
"from quackosm.osm_extracts import OsmExtractSource, display_available_extracts"
]
},
{
@@ -479,10 +479,13 @@
"from shapely.geometry import shape\n",
"\n",
"from quackosm import convert_geometry_to_geodataframe, geocode_to_geometry\n",
"from quackosm.osm_extracts import (_cover_geometry_with_extracts,\n",
" _get_combined_index, _get_geofabrik_index,\n",
" find_smallest_containing_extracts_total,\n",
" find_smallest_containing_geofabrik_extracts)"
"from quackosm.osm_extracts import (\n",
" _cover_geometry_with_extracts,\n",
" _get_combined_index,\n",
" _get_geofabrik_index,\n",
" find_smallest_containing_extracts_total,\n",
" find_smallest_containing_geofabrik_extracts,\n",
")"
]
},
{
@@ -600,7 +603,7 @@
" fig.tight_layout()\n",
"\n",
" plt.show()\n",
" \n",
"\n",
"def plot_features_with_geometry_filter(features_gdf: gpd.GeoDataFrame, geometry_filter: Polygon) -> None:\n",
" fig = plt.figure()\n",
" ax = fig.subplots()\n",
@@ -611,7 +614,7 @@
" ax.set_axis_off()\n",
"\n",
" cx.add_basemap(ax, source=cx.providers.CartoDB.PositronNoLabels, crs=4326)\n",
" \n",
"\n",
" features_gdf.plot(ax=ax, markersize=1, zorder=1, alpha=0.2)\n",
" features_gdf.boundary.plot(ax=ax, markersize=0, zorder=1, alpha=0.2)\n",
" gpd.GeoSeries([geometry_filter], crs=4326).plot(\n",
@@ -627,10 +630,10 @@
" orange_patch = mpatches.Patch(\n",
" facecolor=(0, 0, 0, 0), edgecolor=\"orange\", hatch=\"///\", linewidth=1.5, label=\"Geometry filter\"\n",
" )\n",
" ax.legend(handles=[blue_patch, orange_patch], loc='lower right')\n",
" ax.legend(handles=[blue_patch, orange_patch], loc=\"lower right\")\n",
"\n",
" fig.tight_layout()\n",
" \n",
"\n",
" plt.show()"
]
},
1 change: 0 additions & 1 deletion examples/advanced_examples/osm_tags_filter.ipynb
@@ -131,7 +131,6 @@
"metadata": {},
"outputs": [],
"source": [
"import urllib.request\n",
"\n",
"from quackosm import convert_pbf_to_geodataframe"
]
6 changes: 3 additions & 3 deletions examples/basic_usage.ipynb
@@ -163,9 +163,9 @@
"metadata": {},
"outputs": [],
"source": [
"import matplotlib.pyplot as plt\n",
"import matplotlib.patches as mpatches\n",
"import geopandas as gpd\n",
"import matplotlib.patches as mpatches\n",
"import matplotlib.pyplot as plt\n",
"\n",
"fig = plt.figure(figsize=(10, 10))\n",
"ax = fig.subplots()\n",
@@ -262,7 +262,7 @@
"outputs": [],
"source": [
"qosm.convert_geometry_to_parquet(\n",
" area, result_file_path='barcelona_osm_output.parquet'\n",
" area, result_file_path=\"barcelona_osm_output.parquet\"\n",
")"
]
},
2 changes: 1 addition & 1 deletion examples/command_line_interface.ipynb
@@ -360,8 +360,8 @@
"metadata": {},
"outputs": [],
"source": [
"import matplotlib.pyplot as plt\n",
"import matplotlib.patches as mpatches\n",
"import matplotlib.pyplot as plt\n",
"\n",
"fig, axs = plt.subplots(2, 3, sharex=True, sharey=True, figsize=(10, 6))\n",
"\n",
2 changes: 1 addition & 1 deletion examples/pbf_file_reader.ipynb
@@ -280,7 +280,7 @@
"reader = PbfFileReader(geometry_filter=None, tags_filter=None)\n",
"liechtenstein_features_gpq = reader.convert_pbf_to_parquet(\n",
" liechtenstein_pbf_file, explode_tags=False\n",
") \n",
")\n",
"liechtenstein_features_gpq"
]
},
14 changes: 7 additions & 7 deletions quackosm/_osm_tags_filters.py
@@ -48,14 +48,14 @@ def merge_osm_tags_filter(
OsmTagsFilter: Merged filters.
"""
if is_expected_type(osm_tags_filter, OsmTagsFilter):
return cast(OsmTagsFilter, osm_tags_filter)
return cast("OsmTagsFilter", osm_tags_filter)
elif is_expected_type(osm_tags_filter, GroupedOsmTagsFilter):
return _merge_grouped_osm_tags_filter(cast(GroupedOsmTagsFilter, osm_tags_filter))
return _merge_grouped_osm_tags_filter(cast("GroupedOsmTagsFilter", osm_tags_filter))
elif is_expected_type(osm_tags_filter, Iterable):
return _merge_multiple_osm_tags_filters(
[
merge_osm_tags_filter(
cast(Union[OsmTagsFilter, GroupedOsmTagsFilter], sub_osm_tags_filter)
cast("Union[OsmTagsFilter, GroupedOsmTagsFilter]", sub_osm_tags_filter)
)
for sub_osm_tags_filter in osm_tags_filter
]
@@ -83,12 +83,12 @@ def check_if_any_osm_tags_filter_value_is_positive(
if is_expected_type(osm_tags_filter, OsmTagsFilter):
return any(
osm_tag_filter_value != False # noqa: E712
for osm_tag_filter_value in cast(OsmTagsFilter, osm_tags_filter).values()
for osm_tag_filter_value in cast("OsmTagsFilter", osm_tags_filter).values()
)
elif is_expected_type(osm_tags_filter, GroupedOsmTagsFilter):
return any(
check_if_any_osm_tags_filter_value_is_positive(osm_tags_filter_group)
for osm_tags_filter_group in cast(GroupedOsmTagsFilter, osm_tags_filter).values()
for osm_tags_filter_group in cast("GroupedOsmTagsFilter", osm_tags_filter).values()
)

raise AttributeError(
@@ -160,7 +160,7 @@ def _merge_multiple_osm_tags_filters(osm_tags_filters: Iterable[OsmTagsFilter])
if isinstance(result[osm_tag_key], bool) and result[osm_tag_key]:
continue

current_values_list = cast(list[str], result[osm_tag_key])
current_values_list = cast("list[str]", result[osm_tag_key])

# Check bool
if osm_tag_value == True: # noqa: E712
@@ -175,4 +175,4 @@ def _merge_multiple_osm_tags_filters(osm_tags_filters: Iterable[OsmTagsFilter])
new_values = [value for value in osm_tag_value if value not in current_values_list]
current_values_list.extend(new_values)

return cast(OsmTagsFilter, result)
return cast("OsmTagsFilter", result)
6 changes: 3 additions & 3 deletions quackosm/_osm_way_polygon_features.py
@@ -26,7 +26,7 @@ def parse_dict_to_config_object(raw_config: dict[str, Any]) -> OsmWayPolygonConf
raise ValueError(f"Wrong type of key: denylist ({type(denylist_tags)})")

return OsmWayPolygonConfig(
all=cast(Iterable[str], all_tags),
allowlist=cast(dict[str, Iterable[str]], allowlist_tags),
denylist=cast(dict[str, Iterable[str]], denylist_tags),
all=cast("Iterable[str]", all_tags),
allowlist=cast("dict[str, Iterable[str]]", allowlist_tags),
denylist=cast("dict[str, Iterable[str]]", denylist_tags),
)
12 changes: 6 additions & 6 deletions quackosm/cli.py
@@ -33,7 +33,7 @@ def _display_osm_extracts_callback(ctx: typer.Context, value: bool) -> None:

param_values = {p.name: p.default for p in ctx.command.params}
param_values.update(ctx.params)
osm_source = cast(str, param_values.get("osm_extract_source"))
osm_source = cast("str", param_values.get("osm_extract_source"))
display_available_extracts(source=osm_source, use_full_names=True, use_pager=True)
raise typer.Exit()

@@ -255,7 +255,7 @@ def convert(self, value, param=None, ctx=None): # type: ignore
"Provided OSM tags filter is not in a required format."
) from None

return cast(Union[OsmTagsFilter, GroupedOsmTagsFilter], parsed_dict)
return cast("Union[OsmTagsFilter, GroupedOsmTagsFilter]", parsed_dict)


class OsmTagsFilterFileParser(OsmTagsFilterJsonParser):
@@ -770,7 +770,7 @@ def main(
from quackosm.functions import convert_pbf_to_parquet

result_path = convert_pbf_to_parquet(
pbf_path=cast(str, pbf_file),
pbf_path=cast("str", pbf_file),
tags_filter=osm_tags_filter or osm_tags_filter_file, # type: ignore
keep_all_tags=keep_all_tags,
geometry_filter=geometry_filter_value,
@@ -794,7 +794,7 @@ def main(
from quackosm.functions import convert_pbf_to_duckdb

result_path = convert_pbf_to_duckdb(
pbf_path=cast(str, pbf_file),
pbf_path=cast("str", pbf_file),
tags_filter=osm_tags_filter or osm_tags_filter_file, # type: ignore
keep_all_tags=keep_all_tags,
geometry_filter=geometry_filter_value,
@@ -820,7 +820,7 @@ def main(

try:
result_path = convert_osm_extract_to_parquet(
osm_extract_query=cast(str, osm_extract_query),
osm_extract_query=cast("str", osm_extract_query),
osm_extract_source=osm_extract_source,
tags_filter=osm_tags_filter or osm_tags_filter_file, # type: ignore
keep_all_tags=keep_all_tags,
@@ -853,7 +853,7 @@ def main(

try:
result_path = convert_osm_extract_to_duckdb(
osm_extract_query=cast(str, osm_extract_query),
osm_extract_query=cast("str", osm_extract_query),
osm_extract_source=osm_extract_source,
tags_filter=osm_tags_filter or osm_tags_filter_file, # type: ignore
keep_all_tags=keep_all_tags,
2 changes: 1 addition & 1 deletion quackosm/osm_extracts/extract.py
@@ -140,7 +140,7 @@
geod = Geod(ellps="WGS84")
poly_area_m2, _ = geod.geometry_area_perimeter(orient(geometry, sign=1))
poly_area_km2 = round(poly_area_m2) / 1_000_000
return cast(float, poly_area_km2)
return cast("float", poly_area_km2)

Check warning on line 143 in quackosm/osm_extracts/extract.py (Codecov / codecov/patch): Added line #L143 was not covered by tests. (A standalone sketch exercising this path follows this file's diff.)


def _get_full_file_name_function(index: "DataFrame") -> Callable[[str], str]:
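Codecov flags the reformatted `return cast("float", poly_area_km2)` line as uncovered. The snippet below is a hypothetical, standalone way to exercise the same geodesic-area computation with pyproj and shapely; the polygon and the asserted bounds are illustrative assumptions, not part of the project's test suite:

```python
from pyproj import Geod
from shapely.geometry import Polygon
from shapely.geometry.polygon import orient

geod = Geod(ellps="WGS84")
# A 1 x 1 degree box at the equator, roughly 111 km by 111 km.
box = Polygon([(0, 0), (1, 0), (1, 1), (0, 1)])
# Positive orientation gives a positive signed geodesic area in square metres.
area_m2, _ = geod.geometry_area_perimeter(orient(box, sign=1.0))
area_km2 = round(area_m2) / 1_000_000
assert 11_000 < area_km2 < 13_000  # ballpark sanity check (~12,300 km2)
print(f"{area_km2:.0f} km2")
```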
24 changes: 12 additions & 12 deletions quackosm/pbf_file_reader.py
@@ -546,8 +546,8 @@
original_geometry_filter = self.geometry_filter

if pbf_extract_geometry is not None:
self.geometry_filter = cast(BaseGeometry, self.geometry_filter).intersection(
cast(BaseGeometry, pbf_extract_geometry)
self.geometry_filter = cast("BaseGeometry", self.geometry_filter).intersection(
cast("BaseGeometry", pbf_extract_geometry)
)

result_file_path = result_file_path or self._generate_result_file_path(
@@ -1098,7 +1098,7 @@
else:
self.expanded_tags_filter = self._expand_osm_tags_filter(elements)
self.merged_tags_filter = merge_osm_tags_filter(
cast(Union[GroupedOsmTagsFilter, OsmTagsFilter], self.expanded_tags_filter)
cast("Union[GroupedOsmTagsFilter, OsmTagsFilter]", self.expanded_tags_filter)
)

converted_osm_parquet_files = self._prefilter_elements_ids(elements, filter_osm_ids)
@@ -1325,7 +1325,7 @@
perimeter = list(geometry.coords)
else:
perimeter = list(geometry.coords)[::-1]
smallest_point = sorted(perimeter)[0]

Check failure on line 1328 in quackosm/pbf_file_reader.py (GitHub Actions / Run pre-commit manual stage, Refurb FURB192): Replace `sorted(perimeter)[0]` with `min(perimeter)`. (A minimal sketch of this suggestion follows this file's diff.)
double_iteration = itertools.chain(perimeter[:-1], perimeter)
for point in double_iteration:
if point == smallest_point:
@@ -1338,7 +1338,7 @@
if isinstance(geometry, Polygon):
oriented_exterior = self._get_oriented_geometry_filter(geometry.exterior)
oriented_interiors = [
cast(BaseGeometry, self._get_oriented_geometry_filter(interior))
cast("BaseGeometry", self._get_oriented_geometry_filter(interior))
for interior in geometry.interiors
]
return Polygon(
@@ -1347,7 +1347,7 @@
)
elif isinstance(geometry, BaseMultipartGeometry):
oriented_geoms = [
cast(BaseGeometry, self._get_oriented_geometry_filter(geom))
cast("BaseGeometry", self._get_oriented_geometry_filter(geom))
for geom in geometry.geoms
]
return geometry.__class__(
@@ -1361,28 +1361,28 @@
) -> Union[GroupedOsmTagsFilter, OsmTagsFilter]:
is_any_key_expandable = False
if is_expected_type(self.tags_filter, GroupedOsmTagsFilter):
grouped_osm_tags_filter = cast(GroupedOsmTagsFilter, self.tags_filter)
grouped_osm_tags_filter = cast("GroupedOsmTagsFilter", self.tags_filter)
is_any_key_expandable = any(
any("*" in key for key in osm_tags_filter.keys())
for osm_tags_filter in grouped_osm_tags_filter.values()
)
else:
osm_tags_filter = cast(OsmTagsFilter, self.tags_filter)
osm_tags_filter = cast("OsmTagsFilter", self.tags_filter)
is_any_key_expandable = any("*" in key for key in osm_tags_filter.keys())

if not is_any_key_expandable:
return cast(Union[GroupedOsmTagsFilter, OsmTagsFilter], self.tags_filter)
return cast("Union[GroupedOsmTagsFilter, OsmTagsFilter]", self.tags_filter)

self.task_progress_tracker.major_step_number = -1
with self.task_progress_tracker.get_spinner("Preparing OSM tags filter"):
if is_expected_type(self.tags_filter, GroupedOsmTagsFilter):
grouped_osm_tags_filter = cast(GroupedOsmTagsFilter, self.tags_filter)
grouped_osm_tags_filter = cast("GroupedOsmTagsFilter", self.tags_filter)
return {
group: self._expand_single_osm_tags_filter(elements, osm_tags_filter)
for group, osm_tags_filter in grouped_osm_tags_filter.items()
}
else:
osm_tags_filter = cast(OsmTagsFilter, self.tags_filter)
osm_tags_filter = cast("OsmTagsFilter", self.tags_filter)
return self._expand_single_osm_tags_filter(elements, osm_tags_filter)

def _expand_single_osm_tags_filter(
@@ -1421,7 +1421,7 @@
value_with_star = value_with_star.replace("**", "*")

value_with_percent = value_with_star.replace("*", "%")
return cast(str, sql_escape(value_with_percent))
return cast("str", sql_escape(value_with_percent))

def _prefilter_elements_ids(
self, elements: "duckdb.DuckDBPyRelation", filter_osm_ids: list[str]
@@ -2758,7 +2758,7 @@
return features_relation

grouped_features_relation: duckdb.DuckDBPyRelation
grouped_tags_filter = cast(GroupedOsmTagsFilter, self.expanded_tags_filter)
grouped_tags_filter = cast("GroupedOsmTagsFilter", self.expanded_tags_filter)

if explode_tags:
case_clauses = []
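The Refurb FURB192 failure above is a lint finding against existing code rather than something introduced by this PR (the fix itself is not part of the diff). A minimal sketch of what the suggested replacement does, with a made-up `perimeter` list standing in for the coordinate sequence in `_get_oriented_geometry_filter`:

```python
perimeter = [(2.0, 1.0), (0.5, 3.0), (0.5, 1.0), (2.0, 3.0)]

smallest_sorted = sorted(perimeter)[0]  # O(n log n), builds a full sorted copy
smallest_min = min(perimeter)           # O(n), same lexicographic tuple comparison

assert smallest_sorted == smallest_min == (0.5, 1.0)
```

Both expressions pick the lexicographically smallest point, which appears to be all the surrounding rotation logic needs, so the swap is purely a simplification.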