diff --git a/ocsmesh/__init__.py b/ocsmesh/__init__.py
index 105b1737..3acf836e 100644
--- a/ocsmesh/__init__.py
+++ b/ocsmesh/__init__.py
@@ -1,40 +1,40 @@
-import pathlib
-from importlib import util
-import tempfile
 import os
-import sys
+import pathlib
 import platform
-
+import sys
+import tempfile
+from importlib import util
 
 try:
     import jigsawpy  # noqa: F401
 except OSError as e:
     pkg = util.find_spec("jigsawpy")
     libjigsaw = {
-        "Windows": "jigsaw.dll",
-        "Linux": "libjigsaw.so",
-        "Darwin": "libjigsaw.dylib"
-        }[platform.system()]
+        "Windows": "jigsaw.dll",
+        "Linux": "libjigsaw.so",
+        "Darwin": "libjigsaw.dylib",
+    }[platform.system()]
     tgt_libpath = pathlib.Path(pkg.origin).parent / "_lib" / libjigsaw
-    pyenv = pathlib.Path("/".join(sys.executable.split('/')[:-2]))
-    src_libpath = pyenv / 'lib' / libjigsaw
+    pyenv = pathlib.Path("/".join(sys.executable.split("/")[:-2]))
+    src_libpath = pyenv / "lib" / libjigsaw
     if not src_libpath.is_file():
         raise e
     os.symlink(src_libpath, tgt_libpath)
 
+from .driver import JigsawDriver
 from .geom import Geom
 from .hfun import Hfun
-from .raster import Raster
-from .driver import JigsawDriver
 from .mesh import Mesh
+from .raster import Raster
 
 if util.find_spec("colored_traceback") is not None:
     import colored_traceback
+
     colored_traceback.add_hook(always=True)
 
-tmpdir = str(pathlib.Path(tempfile.gettempdir()+'/ocsmesh'))+'/'
+tmpdir = str(pathlib.Path(tempfile.gettempdir() + "/ocsmesh")) + "/"
 os.makedirs(tmpdir, exist_ok=True)
 
 __all__ = [
diff --git a/ocsmesh/__main__.py b/ocsmesh/__main__.py
index 30300c0a..52772630 100644
--- a/ocsmesh/__main__.py
+++ b/ocsmesh/__main__.py
@@ -2,19 +2,18 @@
 import argparse
 
-from ocsmesh.ops import combine_geometry, combine_hfun
 from ocsmesh.cli.cli import CmdCli
+from ocsmesh.ops import combine_geometry, combine_hfun
 
 
 class OCSMesh:
-
     def __init__(self, args, ocsmesh_cli):
         self._args = args
         self._cli = ocsmesh_cli
 
     def main(self):
-        if self._args.command == 'geom':
+        if self._args.command == "geom":
             nprocs = self._args.nprocs
 
             if self._args.geom_nprocs:
@@ -34,10 +33,11 @@ def main(self):
                 overlap=self._args.overlap,
                 nprocs=nprocs,
                 out_crs=self._args.output_crs,
-                base_crs=self._args.mesh_crs)
+                base_crs=self._args.mesh_crs,
+            )
             combine_geometry(**arg_dict)
 
-        elif self._args.command == 'hfun':
+        elif self._args.command == "hfun":
             nprocs = self._args.nprocs
 
             if self._args.hfun_nprocs:
@@ -57,10 +57,11 @@ def main(self):
                 chunk_size=self._args.chunk_size,
                 overlap=self._args.overlap,
                 method=self._args.method,
-                nprocs=nprocs)
+                nprocs=nprocs,
+            )
             combine_hfun(**arg_dict)
 
-        elif self._args.command == 'scripts':
+        elif self._args.command == "scripts":
             self._cli.execute(self._args)
 
 
@@ -69,80 +70,98 @@ def create_parser():
     common_parser.add_argument("--log-level", choices=["info", "debug", "warning"])
     common_parser.add_argument(
-        "--nprocs", type=int, help="Number of parallel threads to use when "
-        "computing geom and hfun.")
+        "--nprocs",
+        type=int,
+        help="Number of parallel threads to use when " "computing geom and hfun.",
+    )
     common_parser.add_argument(
-        "--geom-nprocs", type=int, help="Number of processors used when "
-        "computing the geom, overrides --nprocs argument.")
+        "--geom-nprocs",
+        type=int,
+        help="Number of processors used when "
+        "computing the geom, overrides --nprocs argument.",
+    )
     common_parser.add_argument(
-        "--hfun-nprocs", type=int, help="Number of processors used when "
-        "computing the hfun, overrides --nprocs argument.")
+        "--hfun-nprocs",
+        type=int,
+        help="Number of processors used when "
"computing the hfun, overrides --nprocs argument.", + ) common_parser.add_argument( "--chunk-size", - help='Size of square window to be used for processing the raster') + help="Size of square window to be used for processing the raster", + ) common_parser.add_argument( - "--overlap", - help='Size of overlap to be used for between raster windows') + "--overlap", help="Size of overlap to be used for between raster windows" + ) - sub_parse_common = { - 'parents': [common_parser], - 'add_help': False - } + sub_parse_common = {"parents": [common_parser], "add_help": False} parser = argparse.ArgumentParser(**sub_parse_common) - subp = parser.add_subparsers(dest='command') - - geom_parser = subp.add_parser('geom', **sub_parse_common) - geom_subp = geom_parser.add_subparsers(dest='geom_cmd') - geom_bld = geom_subp.add_parser('build', **sub_parse_common) - geom_bld.add_argument('-o', '--output', required=True) - geom_bld.add_argument('-f', '--output-format', default="shapefile") - geom_bld.add_argument('--output-crs', default="EPSG:4326") - geom_bld.add_argument('--mesh', help='Mesh to extract hull from') - geom_bld.add_argument( - '--ignore-mesh-boundary', action='store_true', - help='Flag to ignore mesh boundary for final boundary union') + subp = parser.add_subparsers(dest="command") + + geom_parser = subp.add_parser("geom", **sub_parse_common) + geom_subp = geom_parser.add_subparsers(dest="geom_cmd") + geom_bld = geom_subp.add_parser("build", **sub_parse_common) + geom_bld.add_argument("-o", "--output", required=True) + geom_bld.add_argument("-f", "--output-format", default="shapefile") + geom_bld.add_argument("--output-crs", default="EPSG:4326") + geom_bld.add_argument("--mesh", help="Mesh to extract hull from") geom_bld.add_argument( - '--mesh-crs', help='CRS of the input base mesh (overrides)') + "--ignore-mesh-boundary", + action="store_true", + help="Flag to ignore mesh boundary for final boundary union", + ) + geom_bld.add_argument("--mesh-crs", help="CRS of the input base mesh (overrides)") + geom_bld.add_argument("--zmin", type=float, help="Maximum elevation to consider") + geom_bld.add_argument("--zmax", type=float, help="Maximum elevation to consider") geom_bld.add_argument( - '--zmin', type=float, - help='Maximum elevation to consider') - geom_bld.add_argument( - '--zmax', type=float, - help='Maximum elevation to consider') - geom_bld.add_argument( - 'dem', nargs='+', - help='Digital elevation model list to be used in geometry creation') - - hfun_parser = subp.add_parser('hfun', **sub_parse_common) - hfun_subp = hfun_parser.add_subparsers(dest='hfun_cmd') - hfun_bld = hfun_subp.add_parser('build', **sub_parse_common) - hfun_bld.add_argument('-o', '--output', required=True) - hfun_bld.add_argument('-f', '--output-format', default="2dm") - hfun_bld.add_argument('--mesh', help='Base mesh size function') - hfun_bld.add_argument( - '--hmax', type=float, help='Maximum element size') - hfun_bld.add_argument( - '--hmin', type=float, help='Minimum element size') + "dem", + nargs="+", + help="Digital elevation model list to be used in geometry creation", + ) + + hfun_parser = subp.add_parser("hfun", **sub_parse_common) + hfun_subp = hfun_parser.add_subparsers(dest="hfun_cmd") + hfun_bld = hfun_subp.add_parser("build", **sub_parse_common) + hfun_bld.add_argument("-o", "--output", required=True) + hfun_bld.add_argument("-f", "--output-format", default="2dm") + hfun_bld.add_argument("--mesh", help="Base mesh size function") + hfun_bld.add_argument("--hmax", type=float, help="Maximum element 
size") + hfun_bld.add_argument("--hmin", type=float, help="Minimum element size") hfun_bld.add_argument( - '--contour', action='append', nargs='+', type=float, default=[], + "--contour", + action="append", + nargs="+", + type=float, + default=[], help="Each contour's (level, [expansion, target])" - " to be applied on all size functions in collector") + " to be applied on all size functions in collector", + ) hfun_bld.add_argument( - '--constant', - action='append', nargs=2, type=float, dest='constants', - metavar='CONST_DEFN', default=[], + "--constant", + action="append", + nargs=2, + type=float, + dest="constants", + metavar="CONST_DEFN", + default=[], help="Specify constant mesh size above a given contour level" - " by passing (lower_bound, target_size) for each constant") + " by passing (lower_bound, target_size) for each constant", + ) hfun_bld.add_argument( - '--method', type=str, default='exact', - help='Method used to calculate size function ({exact} |fast)') + "--method", + type=str, + default="exact", + help="Method used to calculate size function ({exact} |fast)", + ) hfun_bld.add_argument( - 'dem', nargs='+', - help='Digital elevation model list to be used in size function creation') + "dem", + nargs="+", + help="Digital elevation model list to be used in size function creation", + ) # Scripts don't use common arguments as they are standalon code - scripts_parser = subp.add_parser('scripts') + scripts_parser = subp.add_parser("scripts") cmd_cli = CmdCli(scripts_parser) return parser, cmd_cli @@ -150,9 +169,9 @@ def create_parser(): def main(): parser, ocsmesh_cli = create_parser() -# logger.init(args.log_level) + # logger.init(args.log_level) OCSMesh(parser.parse_args(), ocsmesh_cli).main() -if __name__ == '__main__': +if __name__ == "__main__": main() diff --git a/ocsmesh/cli/cli.py b/ocsmesh/cli/cli.py index d0350a44..7d1bb6ce 100644 --- a/ocsmesh/cli/cli.py +++ b/ocsmesh/cli/cli.py @@ -1,17 +1,17 @@ import warnings -from ocsmesh.cli.remesh_by_shape_factor import RemeshByShape -from ocsmesh.cli.remesh import RemeshByDEM from ocsmesh.cli.mesh_upgrader import MeshUpgrader +from ocsmesh.cli.remesh import RemeshByDEM +from ocsmesh.cli.remesh_by_shape_factor import RemeshByShape -class CmdCli: +class CmdCli: def __init__(self, parser): # TODO: Later add non experimental CLI through this class self._script_dict = {} - scripts_subp = parser.add_subparsers(dest='scripts_cmd') + scripts_subp = parser.add_subparsers(dest="scripts_cmd") for cls in [RemeshByShape, RemeshByDEM, MeshUpgrader]: item = cls(scripts_subp) self._script_dict[item.script_name] = item @@ -20,6 +20,7 @@ def execute(self, args): warnings.warn( "Scripts CLI is used for experimental new features" - " and is subject to change.") + " and is subject to change." 
+        )
 
         self._script_dict[args.scripts_cmd].run(args)
diff --git a/ocsmesh/cli/mesh_upgrader.py b/ocsmesh/cli/mesh_upgrader.py
index 7f4cbb27..efd4454f 100644
--- a/ocsmesh/cli/mesh_upgrader.py
+++ b/ocsmesh/cli/mesh_upgrader.py
@@ -1,35 +1,32 @@
 #!/bin/env python3
 import gc
-import sys
-import pathlib
 import logging
+import pathlib
+import sys
 
 import geopandas as gpd
-from shapely.geometry import MultiPolygon
-
-from ocsmesh import Raster, Geom, Hfun, JigsawDriver
-from ocsmesh.mesh.mesh import Mesh
+from ocsmesh import Geom, Hfun, JigsawDriver, Raster
+from ocsmesh.features.contour import Contour
 from ocsmesh.geom.shapely import MultiPolygonGeom
 from ocsmesh.hfun.mesh import HfunMesh
-from ocsmesh.features.contour import Contour
+from ocsmesh.mesh.mesh import Mesh
 from ocsmesh.mesh.parsers import sms2dm
 from ocsmesh.utils import msh_t_to_2dm
-
+from shapely.geometry import MultiPolygon
 
 logging.basicConfig(
     stream=sys.stdout,
-    format='%(asctime)s,%(msecs)d %(levelname)-8s [%(filename)s:%(lineno)d] %(message)s',
-    datefmt='%Y-%m-%d:%H:%M:%S'
-    )
-#logging.getLogger().setLevel(logging.DEBUG)
-#logging.getLogger().setLevel(logging.INFO)
+    format="%(asctime)s,%(msecs)d %(levelname)-8s [%(filename)s:%(lineno)d] %(message)s",
+    datefmt="%Y-%m-%d:%H:%M:%S",
+)
+# logging.getLogger().setLevel(logging.DEBUG)
+# logging.getLogger().setLevel(logging.INFO)
 
 
 class MeshUpgrader:
-
     @property
     def script_name(self):
-        return 'mesh_upgrader'
+        return "mesh_upgrader"
 
     def __init__(self, sub_parser):
 
         # e.g
@@ -41,10 +38,10 @@ def __init__(self, sub_parser):
 
         this_parser = sub_parser.add_parser(self.script_name)
 
-        this_parser.add_argument('--basemesh', required=True)
-        this_parser.add_argument('--demlo', nargs='*', required=True)
-        this_parser.add_argument('--demhi', nargs='*', required=True)
-        this_parser.add_argument('--out', required=True)
+        this_parser.add_argument("--basemesh", required=True)
+        this_parser.add_argument("--demlo", nargs="*", required=True)
+        this_parser.add_argument("--demhi", nargs="*", required=True)
+        this_parser.add_argument("--out", required=True)
 
     def run(self, args):
 
@@ -74,16 +71,13 @@ def run(self, args):
             hfun_hirast_list.append(Raster(dem_path))
             interp_rast_list.append(Raster(dem_path))
 
-
         hfun_rast_list = [*hfun_lorast_list, *hfun_hirast_list]
 
-        geom = Geom(
-            geom_rast_list, base_mesh=base_mesh_4_geom,
-            zmax=15, nprocs=4)
+        geom = Geom(geom_rast_list, base_mesh=base_mesh_4_geom, zmax=15, nprocs=4)
 
         hfun = Hfun(
-            hfun_rast_list, base_mesh=base_mesh_4_hfun,
-            hmin=30, hmax=15000, nprocs=4)
+            hfun_rast_list, base_mesh=base_mesh_4_hfun, hmin=30, hmax=15000, nprocs=4
+        )
 
         ## Add contour refinements at 0 separately for GEBCO and NCEI
         ctr1 = Contour(level=0, sources=hfun_hirast_list)
@@ -95,52 +89,46 @@ def run(self, args):
         ## Add constant values from 0 to inf on hi-res rasters
         hfun.add_constant_value(30, 0, source_index=list(range(len(demhi_paths))))
 
-
         # Calculate geom
         geom_mp = geom.get_multipolygon()
 
         # Write to disk
-        gpd.GeoDataFrame(
-            {'geometry': geom_mp},
-            crs="EPSG:4326"
-        ).to_file(str(out_path) + '.geom.shp')
+        gpd.GeoDataFrame({"geometry": geom_mp}, crs="EPSG:4326").to_file(
+            str(out_path) + ".geom.shp"
+        )
         del geom_mp
 
         # Calculate hfun
         hfun_msh_t = hfun.msh_t()
 
         # Write to disk
-        sms2dm.writer(
-            msh_t_to_2dm(hfun_msh_t),
-            str(out_path) + '.hfun.2dm',
-            True)
+        sms2dm.writer(msh_t_to_2dm(hfun_msh_t), str(out_path) + ".hfun.2dm", True)
         del hfun_msh_t
 
-
         # Read back stored values to pass to mesh driver
-        read_gdf = gpd.read_file(str(out_path) + '.geom.shp')
+        read_gdf = gpd.read_file(str(out_path) + ".geom.shp")
         geom_from_disk = MultiPolygonGeom(
-            MultiPolygon(list(read_gdf.geometry)),
-            crs=read_gdf.crs)
+            MultiPolygon(list(read_gdf.geometry)), crs=read_gdf.crs
+        )
 
-        read_hfun = Mesh.open(str(out_path) + '.hfun.2dm', crs="EPSG:4326")
+        read_hfun = Mesh.open(str(out_path) + ".hfun.2dm", crs="EPSG:4326")
         hfun_from_disk = HfunMesh(read_hfun)
 
         jigsaw = JigsawDriver(geom_from_disk, hfun=hfun_from_disk, initial_mesh=None)
         jigsaw.verbosity = 1
 
-        ## Execute mesher (processing of geom and hfun happens here)
+        # Execute mesher (processing of geom and hfun happens here)
         mesh = jigsaw.run()
 
-        ## Free-up memory
+        # Free-up memory
         del read_gdf
         del geom_from_disk
         del read_hfun
         del hfun_from_disk
         gc.collect()
 
-        mesh.write(str(out_path) + '.raw.2dm', format='2dm', overwrite=True)
+        mesh.write(str(out_path) + ".raw.2dm", format="2dm", overwrite=True)
 
-        ## Interpolate DEMs on the mesh
+        # Interpolate DEMs on the mesh
         mesh.interpolate(interp_rast_list, nprocs=4)
 
-        ## Output
-        mesh.write(out_path, format='2dm', overwrite=True)
+        # Output
+        mesh.write(out_path, format="2dm", overwrite=True)
diff --git a/ocsmesh/cli/remesh.py b/ocsmesh/cli/remesh.py
index 3ab6e668..2c5f78de 100644
--- a/ocsmesh/cli/remesh.py
+++ b/ocsmesh/cli/remesh.py
@@ -1,34 +1,32 @@
 #!/usr/bin/env python
-import sys
 import gc
 import logging
-from pathlib import Path
+import sys
 from copy import deepcopy
+from pathlib import Path
 
+import geopandas as gpd
 import jigsawpy
 import numpy as np
-import geopandas as gpd
-from shapely.geometry import Polygon, MultiPolygon
-
-from ocsmesh import Raster, Geom, Hfun, Mesh
-from ocsmesh import utils
+from shapely.geometry import MultiPolygon, Polygon
 
+from ocsmesh import Geom, Hfun, Mesh, Raster, utils
 
 logging.basicConfig(
     stream=sys.stdout,
-    format='%(asctime)s,%(msecs)d %(levelname)-8s [%(filename)s:%(lineno)d] %(message)s',
-    datefmt='%Y-%m-%d:%H:%M:%S'
-    )
-#logging.getLogger().setLevel(logging.DEBUG)
-#logging.getLogger().setLevel(logging.INFO)
+    format="%(asctime)s,%(msecs)d %(levelname)-8s [%(filename)s:%(lineno)d] %(message)s",
+    datefmt="%Y-%m-%d:%H:%M:%S",
+)
+# logging.getLogger().setLevel(logging.DEBUG)
+# logging.getLogger().setLevel(logging.INFO)
 
 _logger = logging.getLogger(__name__)
 
 
-class RemeshByDEM:
-
+class RemeshByDEM:
     @property
     def script_name(self):
-        return 'remesh_by_dem'
+        return "remesh_by_dem"
 
     def __init__(self, sub_parser):
 
@@ -47,45 +45,59 @@ def __init__(self, sub_parser):
 
         this_parser = sub_parser.add_parser(self.script_name)
 
-        this_parser.add_argument('--mesh', required=True, type=Path)
-        this_parser.add_argument('--mesh-crs', default='EPSG:4326')
-
+        this_parser.add_argument("--mesh", required=True, type=Path)
+        this_parser.add_argument("--mesh-crs", default="EPSG:4326")
         this_parser.add_argument(
-            '--contour',
-            action='append', nargs='+', type=float, dest='contours',
-            metavar='CONTOUR_DEFN', default=[],
+            "--contour",
+            action="append",
+            nargs="+",
+            type=float,
+            dest="contours",
+            metavar="CONTOUR_DEFN",
+            default=[],
             help="Each contour's (level, [expansion, target])"
-            " to be applied on all size functions in collector")
+            " to be applied on all size functions in collector",
+        )
         this_parser.add_argument(
-            '--constant',
-            action='append', nargs=2, type=float, dest='constants',
-            metavar='CONST_DEFN', default=[],
+            "--constant",
+            action="append",
+            nargs=2,
+            type=float,
+            dest="constants",
+            metavar="CONST_DEFN",
+            default=[],
             help="Specify constant mesh size above a given contour level"
for each constant") - this_parser.add_argument('--hmin', type=float, default=250) - this_parser.add_argument('--zmax', type=float, default=0) + " by passing (lower_bound, target_size) for each constant", + ) + this_parser.add_argument("--hmin", type=float, default=250) + this_parser.add_argument("--zmax", type=float, default=0) this_parser.add_argument( - '--clip-by-base', action='store_true', - help='Flag to clip input DEMs using base mesh polygon') + "--clip-by-base", + action="store_true", + help="Flag to clip input DEMs using base mesh polygon", + ) - this_parser.add_argument('--geom', type=Path) - this_parser.add_argument('--hfun', type=Path) - this_parser.add_argument('--hfun-crs', default='EPSG:4326') + this_parser.add_argument("--geom", type=Path) + this_parser.add_argument("--hfun", type=Path) + this_parser.add_argument("--hfun-crs", default="EPSG:4326") - this_parser.add_argument('-s', '--sieve', type=float) + this_parser.add_argument("-s", "--sieve", type=float) this_parser.add_argument( - '--interpolate', nargs='+', type=Path, default=[], + "--interpolate", + nargs="+", + type=Path, + default=[], help="To interpolate from depth of DEMs not involved in" - " the remeshing process") + " the remeshing process", + ) - this_parser.add_argument('-o', '--output', type=Path) - this_parser.add_argument('-f', '--output-format', default='2dm') - this_parser.add_argument('-k', '--keep-intermediate', action='store_true') - this_parser.add_argument('--nprocs', type=int, default=-1) - - this_parser.add_argument('dem', nargs='+', type=Path) + this_parser.add_argument("-o", "--output", type=Path) + this_parser.add_argument("-f", "--output-format", default="2dm") + this_parser.add_argument("-k", "--keep-intermediate", action="store_true") + this_parser.add_argument("--nprocs", type=int, default=-1) + this_parser.add_argument("dem", nargs="+", type=Path) @staticmethod def _read_geom_hfun(geom_file, hfun_file, hfun_crs): @@ -109,7 +121,6 @@ def _read_geom_hfun(geom_file, hfun_file, hfun_crs): return geom, hfun - def run(self, args): # Get inputs @@ -141,18 +152,20 @@ def run(self, args): if len(contour) > 3: raise ValueError( "Invalid format for contour specification." - " It should be level [expansion target-size].") + " It should be level [expansion target-size]." + ) level, expansion_rate, target_size = [ - *contour, *[None]*(3-len(contour))] + *contour, + *[None] * (3 - len(contour)), + ] contour_defns.append((level, expansion_rate, target_size)) constant_defns = [] for lower_bound, target_size in constants: constant_defns.append((lower_bound, target_size)) - if out_path is None: - out_path = base_path.parent / ('remeshed.' + out_format) + out_path = base_path.parent / ("remeshed." 
+            out_path = base_path.parent / ("remeshed." + out_format)
         out_path.parent.mkdir(exist_ok=True, parents=True)
 
         nprocs = -1 if nprocs is None else nprocs
@@ -182,8 +195,7 @@ def run(self, args):
         log_calculation = True
 
         # Read geometry and hfun from files if provided
-        if (geom_file and hfun_file
-                and geom_file.is_file() and hfun_file.is_file()):
+        if geom_file and hfun_file and geom_file.is_file() and hfun_file.is_file():
             geom, hfun = self._read_geom_hfun(geom_file, hfun_file, hfun_crs)
             log_calculation = False
 
@@ -199,12 +211,7 @@ def run(self, args):
             else:
                 _logger.info("Union raster data with base mesh")
                 geom_inputs = [deepcopy(init_mesh), *geom_rast_list]
-                geom = Geom(
-                    geom_inputs,
-                    base_mesh=geom_base_mesh,
-                    zmax=zmax,
-                    nprocs=nprocs)
-
+                geom = Geom(geom_inputs, base_mesh=geom_base_mesh, zmax=zmax, nprocs=nprocs)
 
             # NOTE: Instead of passing base mesh to be used as boundary,
             # it is passed as an hfun itself
@@ -216,22 +223,22 @@ def run(self, args):
                 [hfun_base_mesh, *hfun_rast_list],
                 hmin=hmin,
                 hmax=np.max(hfun_base_mesh.msh_t().value),
-                nprocs=nprocs)
+                nprocs=nprocs,
+            )
 
             for level, expansion_rate, target_size in contour_defns:
                 if expansion_rate is None:
                     expansion_rate = 0.1
                 if target_size is None:
                     target_size = hmin
-                _logger.info(f"Adding contour to collector:"
-                             f" {level} {expansion_rate} {target_size}")
-                hfun.add_contour(
-                    level, expansion_rate, target_size)
+                _logger.info(
+                    f"Adding contour to collector:"
+                    f" {level} {expansion_rate} {target_size}"
+                )
+                hfun.add_contour(level, expansion_rate, target_size)
 
             for lower_bound, target_size in constant_defns:
-                hfun.add_constant_value(
-                    value=target_size, lower_bound=lower_bound)
-
+                hfun.add_constant_value(value=target_size, lower_bound=lower_bound)
 
         if write_intermediate:
             _logger.info("Calculating final geometry")
@@ -239,9 +246,8 @@ def run(self, args):
 
             _logger.info("Writing geom to disk")
             gpd.GeoDataFrame(
-                {'geometry': gpd.GeoSeries(poly_geom)},
-                crs=geom.crs
-            ).to_file(str(out_path)+'.geom.shp')
+                {"geometry": gpd.GeoSeries(poly_geom)}, crs=geom.crs
+            ).to_file(str(out_path) + ".geom.shp")
 
             del poly_geom
             gc.collect()
@@ -251,23 +257,22 @@ def run(self, args):
             _logger.info("Writing hfun to disk")
             # This writes in EPSG:4326
             Mesh(jig_hfun).write(
-                str(out_path)+'.hfun.2dm',
-                format='2dm', overwrite=True)
+                str(out_path) + ".hfun.2dm", format="2dm", overwrite=True
+            )
 
             del jig_hfun
             gc.collect()
 
             # Read back from file to avoid recalculation of hfun
             # and geom
             geom, hfun = self._read_geom_hfun(
-                str(out_path) + '.geom.shp',
-                str(out_path) + '.hfun.2dm',
-                "EPSG:4326")
+                str(out_path) + ".geom.shp",
+                str(out_path) + ".hfun.2dm",
+                "EPSG:4326",
+            )
             log_calculation = False
 
         else:
-            raise ValueError(
-                "Input not valid to initialize geom and hfun")
-
+            raise ValueError("Input not valid to initialize geom and hfun")
 
         if log_calculation:
             # NOTE: If intermediate files are written then we calculated
@@ -289,13 +294,11 @@ def run(self, args):
             _logger.info("Projecting initial mesh to be in meters unit")
             utils.msh_t_to_utm(jig_init)
 
-
         # pylint: disable=C0325
         if not (jig_geom.crs == jig_hfun.crs == jig_init.crs):
             raise ValueError(
-                "Converted UTM CRS for geometry, hfun and init mesh"
-                "is not equivalent")
-
+                "Converted UTM CRS for geometry, hfun and init mesh is not equivalent"
+            )
 
         _logger.info("Calculate remeshing region of interest")
         # Prep for Remeshing
@@ -306,15 +309,14 @@ def run(self, args):
 
         _logger.info("Clip mesh by inverse of region of interest")
         fixed_mesh_w_hole = utils.clip_mesh_by_shape(
-            jig_init, region_of_interest, fit_inside=True, inverse=True)
+            jig_init, region_of_interest, fit_inside=True, inverse=True
+        )
 
-        _logger.info(
-            "Get all initial mesh vertices in the region of interest")
-        vert_idx_to_refin = utils.get_verts_in_shape(
-            jig_hfun, region_of_interest)
+        _logger.info("Get all initial mesh vertices in the region of interest")
+        vert_idx_to_refin = utils.get_verts_in_shape(jig_hfun, region_of_interest)
 
-        fixed_mesh_w_hole.point['IDtag'][:] = -1
-        fixed_mesh_w_hole.edge2['IDtag'][:] = -1
+        fixed_mesh_w_hole.point["IDtag"][:] = -1
+        fixed_mesh_w_hole.edge2["IDtag"][:] = -1
 
         refine_opts = jigsawpy.jigsaw_jig_t()
         refine_opts.hfun_scal = "absolute"
@@ -322,8 +324,8 @@ def run(self, args):
         refine_opts.hfun_hmax = np.max(jig_hfun.value)
         refine_opts.mesh_dims = +2
         # Mesh becomes TOO refined on exact boundaries from DEM
-#        refine_opts.mesh_top1 = True
-#        refine_opts.geom_feat = True
+        # refine_opts.mesh_top1 = True
+        # refine_opts.geom_feat = True
 
         jig_remeshed = jigsawpy.jigsaw_msh_t()
         jig_remeshed.ndims = +2
@@ -331,36 +333,31 @@ def run(self, args):
         _logger.info("Remeshing...")
         # Remeshing
         jigsawpy.lib.jigsaw(
-            refine_opts,
-            jig_geom,
-            jig_remeshed,
-            init=fixed_mesh_w_hole,
-            hfun=jig_hfun)
+            refine_opts, jig_geom, jig_remeshed, init=fixed_mesh_w_hole, hfun=jig_hfun
+        )
         jig_remeshed.crs = fixed_mesh_w_hole.crs
         _logger.info("Done")
 
-        if jig_remeshed.tria3['index'].shape[0] == 0:
-            _err = 'ERROR: Jigsaw returned empty mesh.'
+        if jig_remeshed.tria3["index"].shape[0] == 0:
+            _err = "ERROR: Jigsaw returned empty mesh."
             _logger.error(_err)
             raise ValueError(_err)
 
         # TODO: This is irrelevant right now since output file is
         # always is EPSG:4326, enable when APIs for remeshing is added
-#        if out_crs is not None:
-#            utils.reproject(jig_remeshed, out_crs)
+        # if out_crs is not None:
+        #     utils.reproject(jig_remeshed, out_crs)
 
-        _logger.info('Finalizing mesh...')
+        _logger.info("Finalizing mesh...")
         utils.finalize_mesh(jig_remeshed, sieve)
 
         _logger.info("Interpolating depths on mesh...")
         # Interpolation
-        utils.interpolate_euclidean_mesh_to_euclidean_mesh(
-            jig_init, jig_remeshed)
+        utils.interpolate_euclidean_mesh_to_euclidean_mesh(jig_init, jig_remeshed)
         final_mesh = Mesh(jig_remeshed)
         final_mesh.interpolate(interp_rast_list, nprocs=nprocs)
         _logger.info("Done")
 
-
         _logger.info("Writing final mesh to disk...")
         # This writes EPSG:4326 to file, whatever the crs of the object
         final_mesh.write(str(out_path), format=out_format, overwrite=True)
diff --git a/ocsmesh/cli/remesh_by_shape_factor.py b/ocsmesh/cli/remesh_by_shape_factor.py
index e0d9c883..fbd414a9 100644
--- a/ocsmesh/cli/remesh_by_shape_factor.py
+++ b/ocsmesh/cli/remesh_by_shape_factor.py
@@ -1,82 +1,102 @@
 #!/usr/bin/env python
-from pathlib import Path
-from copy import deepcopy
 import logging
 import sys
+from copy import deepcopy
+from pathlib import Path
 
 import geopandas as gpd
+import jigsawpy
 import numpy as np
+from pyproj import Transformer
 from shapely.geometry import MultiPolygon, Polygon
 from shapely.ops import transform
-import jigsawpy
-from pyproj import Transformer
 
-from ocsmesh import Raster, Geom, Hfun, Mesh
-from ocsmesh import utils
+from ocsmesh import Geom, Hfun, Mesh, Raster, utils
 
 logging.basicConfig(
     stream=sys.stdout,
-    format='%(asctime)s,%(msecs)d %(levelname)-8s [%(filename)s:%(lineno)d] %(message)s',
-    datefmt='%Y-%m-%d:%H:%M:%S'
-    )
-#logging.getLogger().setLevel(logging.DEBUG)
-#logging.getLogger().setLevel(logging.INFO)
+    format="%(asctime)s,%(msecs)d %(levelname)-8s [%(filename)s:%(lineno)d] %(message)s",
+    datefmt="%Y-%m-%d:%H:%M:%S",
+)
+# logging.getLogger().setLevel(logging.DEBUG)
+# logging.getLogger().setLevel(logging.INFO)
 
 _logger = logging.getLogger(__name__)
 
 # Enable KML driver
-gpd.io.file.fiona.drvsupport.supported_drivers['KML'] = 'rw'
+gpd.io.file.fiona.drvsupport.supported_drivers["KML"] = "rw"
 
 
-class RemeshByShape:
-
+class RemeshByShape:
    @property
    def script_name(self):
-        return 'remesh_by_shape'
+        return "remesh_by_shape"
 
     def __init__(self, sub_parser):
 
         this_parser = sub_parser.add_parser(self.script_name)
 
-        this_parser.add_argument('--in-crs', default='EPSG:4326')
-        this_parser.add_argument('--shape', type=str)
+        this_parser.add_argument("--in-crs", default="EPSG:4326")
+        this_parser.add_argument("--shape", type=str)
 
-        this_parser.add_argument('-u', '--upstream', action='store_true')
+        this_parser.add_argument("-u", "--upstream", action="store_true")
         this_parser.add_argument(
-            '--cutoff', default=-250, type=float,
-            help="Refinement cutoff depth in meters (positive up)")
-        this_parser.add_argument('--factor', default=2, type=float)
+            "--cutoff",
+            default=-250,
+            type=float,
+            help="Refinement cutoff depth in meters (positive up)",
+        )
+        this_parser.add_argument("--factor", default=2, type=float)
 
         this_parser.add_argument(
-            '--contour',
-            action='append', nargs='+', type=float, dest='contours',
-            metavar='CONTOUR_DEFN', default=[],
+            "--contour",
+            action="append",
+            nargs="+",
+            type=float,
+            dest="contours",
+            metavar="CONTOUR_DEFN",
+            default=[],
             help="Each contour's (level, [expansion, target])"
-            " to be applied on all size functions in collector")
+            " to be applied on all size functions in collector",
+        )
         this_parser.add_argument(
-            '--patch',
-            action='append', nargs=3, type=float, dest='patches',
-            metavar='PATCH_DEFN', default=[],
+            "--patch",
+            action="append",
+            nargs=3,
+            type=float,
+            dest="patches",
+            metavar="PATCH_DEFN",
+            default=[],
             help="Specify patch mesh size above a given contour level"
-            " by passing (lower_bound, expansion, target_size)"
-            " for each patch")
+            " by passing (lower_bound, expansion, target_size)"
+            " for each patch",
+        )
         this_parser.add_argument(
-            '--constant',
-            action='append', nargs=2, type=float, dest='constants',
-            metavar='CONST_DEFN', default=[],
+            "--constant",
+            action="append",
+            nargs=2,
+            type=float,
+            dest="constants",
+            metavar="CONST_DEFN",
+            default=[],
             help="Specify constant mesh size above a given contour level"
-            " by passing (lower_bound, target_size) for each constant")
-        this_parser.add_argument('-s', '--sieve', type=float)
+            " by passing (lower_bound, target_size) for each constant",
+        )
+        this_parser.add_argument("-s", "--sieve", type=float)
         this_parser.add_argument(
-            '--interpolate', nargs='+', type=Path, default=[],
+            "--interpolate",
+            nargs="+",
+            type=Path,
+            default=[],
             help="To interpolate from depth of DEMs not involved in"
-            " the remeshing process")
-
-        this_parser.add_argument('-o', '--output', type=Path)
-        this_parser.add_argument('-f', '--output-format', default='2dm')
-        this_parser.add_argument('--nprocs', type=int, default=-1)
+            " the remeshing process",
+        )
 
-        this_parser.add_argument('mesh', type=Path)
+        this_parser.add_argument("-o", "--output", type=Path)
+        this_parser.add_argument("-f", "--output-format", default="2dm")
+        this_parser.add_argument("--nprocs", type=int, default=-1)
+        this_parser.add_argument("mesh", type=Path)
 
     def run(self, args):
 
@@ -107,9 +127,12 @@ def run(self, args):
             if len(contour) > 3:
                 raise ValueError(
                     "Invalid format for contour specification."
- " It should be level [expansion target-size].") + " It should be level [expansion target-size]." + ) level, expansion_rate, target_size = [ - *contour, *[None]*(3-len(contour))] + *contour, + *[None] * (3 - len(contour)), + ] contour_defns.append((level, expansion_rate, target_size)) patch_defns = [] @@ -120,7 +143,6 @@ def run(self, args): for lower_bound, target_size in constants: constant_defns.append((lower_bound, target_size)) - interp_rast_list = [] for dem in interp: interp_rast_list.append(Raster(dem)) @@ -147,19 +169,19 @@ def run(self, args): mesh_poly = mesh.hull.multipolygon() gdf_mesh_poly = gpd.GeoDataFrame( - geometry=gpd.GeoSeries(mesh_poly), crs=mesh.crs) + geometry=gpd.GeoSeries(mesh_poly), crs=mesh.crs + ) if shape_path: gdf_shape = gpd.read_file(shape_path) gdf_shape = gdf_shape.to_crs(mesh.crs) - gdf_to_refine = gpd.overlay( - gdf_mesh_poly, gdf_shape, how='intersection') + gdf_mesh_poly, gdf_shape, how="intersection" + ) - gdf_diff = gpd.overlay( - gdf_mesh_poly, gdf_shape, how='difference') + gdf_diff = gpd.overlay(gdf_mesh_poly, gdf_shape, how="difference") diff_polys = [] for geom in gdf_diff.geometry: if isinstance(geom, Polygon): @@ -169,19 +191,19 @@ def run(self, args): if refine_upstream: # TODO: Check for multipolygon and single polygon in multi assumption - area_ref = 0.05 * np.sum( - [i.area for i in gdf_to_refine.geometry]) + area_ref = 0.05 * np.sum([i.area for i in gdf_to_refine.geometry]) upstream_polys = [] for ipoly in diff_polys: if ipoly.area < area_ref: upstream_polys.append(ipoly) if upstream_polys: gdf_upstream = gpd.GeoDataFrame( - geometry=gpd.GeoSeries(upstream_polys), - crs=gdf_diff.crs) + geometry=gpd.GeoSeries(upstream_polys), crs=gdf_diff.crs + ) gdf_to_refine = gpd.overlay( - gdf_upstream, gdf_to_refine, how='union') + gdf_upstream, gdf_to_refine, how="union" + ) else: gdf_to_refine = gdf_mesh_poly @@ -190,10 +212,12 @@ def run(self, args): if refine_cutoff is not None: cutoff_mp = mesh.get_multipolygon(zmin=refine_cutoff) cutoff_gdf = gpd.GeoDataFrame( - geometry=gpd.GeoSeries(cutoff_mp), crs=mesh.crs) + geometry=gpd.GeoSeries(cutoff_mp), crs=mesh.crs + ) cutoff_gdf = cutoff_gdf.to_crs(ref_crs) gdf_to_refine = gpd.overlay( - gdf_to_refine, cutoff_gdf, how='intersection') + gdf_to_refine, cutoff_gdf, how="intersection" + ) refine_polys = gdf_to_refine.unary_union @@ -201,27 +225,28 @@ def run(self, args): init_jig = deepcopy(mesh.msh_t) utils.reproject(init_jig, ref_crs) utils.clip_mesh_by_shape( - init_jig, - refine_polys, - fit_inside=True, - inverse=True, - in_place=True) + init_jig, refine_polys, fit_inside=True, inverse=True, in_place=True + ) # Fix elements in the inital mesh that are NOT clipped by refine # polygon - init_jig.vert2['IDtag'][:] = -1 + init_jig.vert2["IDtag"][:] = -1 # Preparing refinement size function vert_in = utils.get_verts_in_shape(initial_hfun_jig, refine_polys) # Reduce hfun by factor in refinement area; modifying in-place refine_hfun_jig = utils.clip_mesh_by_shape( - initial_hfun_jig, refine_polys, fit_inside=False) + initial_hfun_jig, refine_polys, fit_inside=False + ) utils.clip_mesh_by_shape( - initial_hfun_jig, refine_polys, - fit_inside=True, inverse=True, in_place=True) - + initial_hfun_jig, + refine_polys, + fit_inside=True, + inverse=True, + in_place=True, + ) else: # Refine the whole domain by factor @@ -232,8 +257,7 @@ def run(self, args): hfun_refine = Hfun(Mesh(deepcopy(refine_hfun_jig))) - transformer = Transformer.from_crs( - mesh.crs, ref_crs, always_xy=True) + transformer = 
+        transformer = Transformer.from_crs(mesh.crs, ref_crs, always_xy=True)
         for level, expansion_rate, target_size in contour_defns:
             if expansion_rate is None:
                 expansion_rate = 0.1
@@ -244,35 +268,30 @@ def run(self, args):
             refine_ctr = transform(transformer.transform, refine_ctr)
 
             hfun_refine.add_feature(
-                refine_ctr, expansion_rate, target_size,
-                nprocs=nprocs)
+                refine_ctr, expansion_rate, target_size, nprocs=nprocs
+            )
 
         for lower_bound, expansion_rate, target_size in patch_defns:
             refine_mp = mesh.get_multipolygon(zmin=lower_bound)
             refine_mp = transform(transformer.transform, refine_mp)
-            hfun_refine.add_patch(
-                refine_mp, expansion_rate, target_size, nprocs)
+            hfun_refine.add_patch(refine_mp, expansion_rate, target_size, nprocs)
 
         for lower_bound, target_size in constant_defns:
             refine_mp = mesh.get_multipolygon(zmin=lower_bound)
             refine_mp = transform(transformer.transform, refine_mp)
-            hfun_refine.add_patch(
-                refine_mp, None, target_size, nprocs)
+            hfun_refine.add_patch(refine_mp, None, target_size, nprocs)
 
         refine_hfun_jig = hfun_refine.msh_t()
         utils.reproject(refine_hfun_jig, ref_crs)
 
         final_hfun_jig = utils.merge_msh_t(
-            initial_hfun_jig, refine_hfun_jig,
-            out_crs=ref_crs,
-            drop_by_bbox=False)
+            initial_hfun_jig, refine_hfun_jig, out_crs=ref_crs, drop_by_bbox=False
+        )
 
-        if not (geom_jig.crs == ref_crs
-                and (init_jig and init_jig.crs == ref_crs)):
-            raise ValueError(
-                "CRS for geometry, hfun and init mesh is not the same")
+        if not (geom_jig.crs == ref_crs and (init_jig and init_jig.crs == ref_crs)):
+            raise ValueError("CRS for geometry, hfun and init mesh is not the same")
 
         opts = jigsawpy.jigsaw_jig_t()
         opts.hfun_scal = "absolute"
@@ -281,24 +300,21 @@ def run(self, args):
         opts.mesh_dims = +2
 
         remesh_jig = jigsawpy.jigsaw_msh_t()
-        remesh_jig.mshID = 'euclidean-mesh'
+        remesh_jig.mshID = "euclidean-mesh"
         remesh_jig.ndims = 2
         remesh_jig.crs = init_jig.crs
 
         jigsawpy.lib.jigsaw(
-            opts, geom_jig, remesh_jig,
-            init=init_jig,
-            hfun=final_hfun_jig)
+            opts, geom_jig, remesh_jig, init=init_jig, hfun=final_hfun_jig
+        )
 
         utils.finalize_mesh(remesh_jig, sieve)
 
         # Interpolate from inpu mesh and DEM if any
-        utils.interpolate_euclidean_mesh_to_euclidean_mesh(
-            mesh.msh_t, remesh_jig)
+        utils.interpolate_euclidean_mesh_to_euclidean_mesh(mesh.msh_t, remesh_jig)
         final_mesh = Mesh(remesh_jig)
         if interp_rast_list:
             final_mesh.interpolate(interp_rast_list, nprocs=nprocs)
 
         # Write to disk
-        final_mesh.write(
-            str(out_path), format=out_format, overwrite=True)
+        final_mesh.write(str(out_path), format=out_format, overwrite=True)
diff --git a/ocsmesh/cmd.py b/ocsmesh/cmd.py
index e9f89710..d57cb2d8 100644
--- a/ocsmesh/cmd.py
+++ b/ocsmesh/cmd.py
@@ -1,29 +1,28 @@
-import logging
+import hashlib
 import json
-import pathlib
+import logging
 import os
+import pathlib
 from functools import lru_cache
-import hashlib
 from multiprocessing import Pool
 
+import fiona
+import geoalchemy2
 from pyproj import CRS
 from shapely import ops
 from shapely.geometry import MultiPolygon, box
-import fiona
-import geoalchemy2
 
-from ocsmesh import Hfun, Geom, Raster, db
+from ocsmesh import Geom, Hfun, Raster, db
 
 
 class _ConfigManager:
-
     def __init__(self, args, session):
         self._args = args
         self._session = session
         self._certify_config()
 
     def get_geom(self):
-        self._logger.debug('get_geom()')
+        self._logger.debug("get_geom()")
 
         geom_collection = []
 
@@ -33,99 +32,120 @@ def get_geom(self):
 
         geom = None
         for id, geom_opts in self._geom.items():
-            self._logger.debug(f'get_geom(): processsing group id={id}')
+            self._logger.debug(f"get_geom(): processing group id={id}")
group id={id}") zmin = geom_opts.get("zmin") zmax = geom_opts.get("zmax") driver = geom_opts.get("driver", "matplotlib") for raster_path, raster_opts in self._get_raster_by_id(id): self._logger.debug( - f'get_geom(): appending raster {raster_path} for ' - 'parallel processing.') - hash = _geom_identifier( - zmin, zmax, driver, Raster(raster_path).md5) + f"get_geom(): appending raster {raster_path} for " + "parallel processing." + ) + hash = _geom_identifier(zmin, zmax, driver, Raster(raster_path).md5) query = self._session.query(db.GeomCollection).get(hash) if query is None: chunk_size = raster_opts.get("chunk_size") if self._geom_nprocs is not None: if chunk_size == 0: - job_args.append(( - raster_path, raster_opts, zmin, zmax, - geom_opts.get("join_method"), - driver, - chunk_size, - raster_opts.get("overlap", 2))) + job_args.append( + ( + raster_path, + raster_opts, + zmin, + zmax, + geom_opts.get("join_method"), + driver, + chunk_size, + raster_opts.get("overlap", 2), + ) + ) hashes.append(hash) else: geom = _geom_raster_processing_worker( - raster_path, raster_opts, zmin, zmax, + raster_path, + raster_opts, + zmin, + zmax, geom_opts.get("join_method"), - driver, chunk_size, - raster_opts.get("overlap")) + driver, + chunk_size, + raster_opts.get("overlap"), + ) else: geom = _geom_raster_processing_worker( - raster_path, raster_opts, zmin, zmax, + raster_path, + raster_opts, + zmin, + zmax, geom_opts.get("join_method"), - driver, chunk_size, - raster_opts.get("overlap")) + driver, + chunk_size, + raster_opts.get("overlap"), + ) self._save_geom_to_db( - geom, raster_path.name, zmin, zmax, driver, hash) + geom, raster_path.name, zmin, zmax, driver, hash + ) self._session.commit() else: - geom = Geom( - geoalchemy2.shape.to_shape(query.geom), - crs=self._crs) + geom = Geom(geoalchemy2.shape.to_shape(query.geom), crs=self._crs) if geom is not None: geom_collection.append(geom) if self._geom_nprocs is not None: - self._logger.debug( - 'get_geom(): executing parallel geom computations...') + self._logger.debug("get_geom(): executing parallel geom computations...") with Pool(processes=self._geom_nprocs) as pool: res = pool.starmap(_geom_raster_processing_worker, job_args) pool.join() for i, geom in enumerate(res): geom_collection.append(geom) self._save_geom_to_db( - geom, job_args[i][0].name, job_args[i][2], - job_args[i][3], job_args[i][5], hashes[i]) + geom, + job_args[i][0].name, + job_args[i][2], + job_args[i][3], + job_args[i][5], + hashes[i], + ) self._session.commit() del res for feature in self._features: - raise NotImplementedError('features') + raise NotImplementedError("features") mpc = [] for geom in geom_collection: mpc.append(geom.multipolygon) - self._logger.debug('get_geom(): apply unary_union...') + self._logger.debug("get_geom(): apply unary_union...") mp = ops.unary_union(mpc) return Geom(mp, crs=self._crs) def get_hfun(self, geom=None): - self._logger.debug('get_hfun()') + self._logger.debug("get_hfun()") - if 'rasters' in self._hfun: - for id, hfun_raster_opts in self._hfun['rasters'].items(): - self._logger.debug(f'get_hfun(): processsing group id={id}') + if "rasters" in self._hfun: + for id, hfun_raster_opts in self._hfun["rasters"].items(): + self._logger.debug(f"get_hfun(): processsing group id={id}") for raster_path, raster_opts in self._get_raster_by_id(id): self._logger.debug( - f'get_hfun(): appending raster {raster_path} for ' - 'parallel processing.') - raster = Raster(raster_path, crs=raster_opts.get('crs')) + f"get_hfun(): appending raster {raster_path} for " 
+ "parallel processing." + ) + raster = Raster(raster_path, crs=raster_opts.get("crs")) _apply_raster_opts(raster, raster_opts) hfun = Hfun( raster, hmin=self._hfun_hmin, hmax=self._hfun_hmax, - nprocs=self._hfun_nprocs) + nprocs=self._hfun_nprocs, + ) _apply_hfun_raster_opts(hfun, hfun_raster_opts) mesh = hfun.get_mesh(geom=geom) - if 'features' in self._hfun: + if "features" in self._hfun: raise NotImplementedError("config.hfun.features not implemented") return Hfun(mesh, crs=self._crs) @@ -142,27 +162,26 @@ def _certify_config(self): self._logger.debug(" done checking configuration file") def _get_raster_by_id(self, rast_id): - def check_if_uri_is_tile_index(uri): try: - fiona.open(uri, 'r') + fiona.open(uri, "r") return True except fiona.errors.DriverError: return False raster_opts = self._rasters[rast_id] - if 'http' in raster_opts['uri'] or 'ftp' in raster_opts['uri']: + if "http" in raster_opts["uri"] or "ftp" in raster_opts["uri"]: raise NotImplementedError("URI is internet address") - uri = pathlib.Path(os.path.expandvars(raster_opts['uri'])) + uri = pathlib.Path(os.path.expandvars(raster_opts["uri"])) uri = pathlib.Path(self._path).parent / uri if not uri.is_file(): raise FileNotFoundError(f"No file with path: {str(uri.resolve())}") if check_if_uri_is_tile_index(uri): - raise NotImplementedError('URI is a tile index') + raise NotImplementedError("URI is a tile index") chunk_size = raster_opts.get("chunk_size") if chunk_size is None: @@ -177,7 +196,7 @@ def _save_geom_to_db(self, geom, source, zmin, zmax, driver, key): _original_crs = geom.crs if not _original_crs.equals(CRS.from_epsg(4326)): self._logger.debug(f"tranforming from {geom.crs} to EPSG:4326") - geom.transform_to('EPSG:4326') + geom.transform_to("EPSG:4326") self._session.add( db.GeomCollection( geom=geoalchemy2.shape.from_shape(geom.multipolygon), @@ -185,23 +204,26 @@ def _save_geom_to_db(self, geom, source, zmin, zmax, driver, key): zmin=zmin, zmax=zmax, driver=driver, - id=key)) + id=key, + ) + ) if not geom.crs.equals(_original_crs): geom.transform_to(_original_crs) @property @lru_cache(maxsize=None) def _config(self): - with open(self._path, 'r') as f: + with open(self._path, "r") as f: config = json.loads(f.read()) if not isinstance(config, dict): - raise TypeError('config json must be a dictionary.') + raise TypeError("config json must be a dictionary.") - if not any(x in config.keys() for x in ['rasters', 'features']): + if not any(x in config.keys() for x in ["rasters", "features"]): raise KeyError( "Configuration file must contain at least one of 'rasters' " - "or 'features' keys.") + "or 'features' keys." + ) return config @@ -218,7 +240,7 @@ def _crs(self): @lru_cache(maxsize=None) def _rasters(self): - config_rasters = self._config.get('rasters') + config_rasters = self._config.get("rasters") config_rasters = {} if config_rasters is None else config_rasters @@ -226,7 +248,8 @@ def _rasters(self): raise TypeError( "config.rasters must be a dictionary or list of " "dictionaries containing at least one 'id' and one 'uri' " - "key.") + "key." + ) if isinstance(config_rasters, dict): config_rasters = [config_rasters] @@ -239,32 +262,33 @@ def _rasters(self): raise TypeError( "config.rasters must be a dictionary or list of " "dictionaries containing at least one 'id' and one " - "'uri' key.") + "'uri' key." 
+                )
 
-            config_raster_id = config_raster.get('id')
+            config_raster_id = config_raster.get("id")
             if config_raster_id is None:
-                raise KeyError(
-                    "config.rasters entry must contain a unique 'id' key.")
+                raise KeyError("config.rasters entry must contain a unique 'id' key.")
 
             if config_raster_id in _config_rasters:
                 raise KeyError(
                     "'id' entry in config.raster must be unique. "
-                    f"repeated key: {config_raster_id}")
+                    f"repeated key: {config_raster_id}"
+                )
 
-            config_raster_uri = config_raster.get('uri')
+            config_raster_uri = config_raster.get("uri")
             if config_raster_uri is None:
                 raise KeyError(
                     "config.rasters entry must contain a 'uri' key "
-                    "(mutually exclusive).")
+                    "(mutually exclusive)."
+                )
 
-            if 'http' in config_raster_uri or 'ftp' in config_raster_uri:
-                raise NotImplementedError(
-                    "URI provided is an internet address.")
+            if "http" in config_raster_uri or "ftp" in config_raster_uri:
+                raise NotImplementedError("URI provided is an internet address.")
 
             opts = config_rasters[i].copy()
-            opts.pop('id')
+            opts.pop("id")
             _config_rasters.update({config_raster_id: opts})
 
         return _config_rasters
@@ -272,7 +296,7 @@ def _rasters(self):
     @property
     @lru_cache(maxsize=None)
     def _features(self):
-        config_features = self._config.get('features')
+        config_features = self._config.get("features")
         # _features = [] if config_features is None else config_features
         if config_features is not None:
             raise NotImplementedError("config.features is not yet implemented")
@@ -288,35 +312,37 @@ def _geom(self):
         if not isinstance(config_geom, dict):
             raise TypeError(
                 "config.geom must be a dictionary containing 'rasters' or "
-                "'features' keys.")
+                "'features' keys."
+            )
 
-        if not any(
-                x in config_geom.keys() for x in ['rasters', 'features']):
+        if not any(x in config_geom.keys() for x in ["rasters", "features"]):
             raise TypeError(
                 "config.geom must be a dictionary containing 'rasters' or "
-                "'features' keys.")
+                "'features' keys."
+            )
 
-        if 'rasters' in config_geom:
+        if "rasters" in config_geom:
             config_geom_rasters = config_geom["rasters"].copy()
             if not isinstance(config_geom_rasters, (dict, list)):
                 raise TypeError(
                     "geom.rasters must be a dictionary or list of dictionaries"
                     " containing at least one 'id' key that matches an some "
-                    "id on the 'rasters' key.")
+                    "id on the 'rasters' key."
+                )
 
             if isinstance(config_geom_rasters, dict):
                 config_geom_rasters = [config_geom_rasters]
 
             for geom_raster in config_geom_rasters:
-                geom_raster_id = geom_raster.pop('id')
+                geom_raster_id = geom_raster.pop("id")
                 if geom_raster_id in self._rasters:
                     _geom.update({geom_raster_id: geom_raster})
                 else:
                     raise KeyError(
-                        f'No raster with id={geom_raster_id} specified in '
-                        'config.rasters')
+                        f"No raster with id={geom_raster_id} specified in "
+                        "config.rasters"
+                    )
 
-        if 'features' in config_geom:
-            raise NotImplementedError(
-                'config.geom.features not yet implemented')
+        if "features" in config_geom:
+            raise NotImplementedError("config.geom.features not yet implemented")
 
         return _geom
 
@@ -331,10 +357,11 @@ def _hfun(self):
             raise TypeError(
                 "config.geom must be an scalar value (constant size) or a "
                 "dictionary containing either the 'rasters' or 'features' "
-                "keys, or both.")
+                "keys, or both."
+            )
 
         if isinstance(config_hfun, (int, float)):
-            raise NotImplementedError('Constant size funtion.')
+            raise NotImplementedError("Constant size function.")
 
         _config_hfun = {}
         _config_hfun.update({"hmin": config_hfun.get("hmin")})
@@ -345,30 +372,30 @@ def _hfun(self):
             _config_hfun.update({"rasters": {}})
             if not isinstance(_config_hfun_rasters, (list, dict)):
                 raise TypeError(
-                    'config.hfun.rasters must be a dictionary or list of '
+                    "config.hfun.rasters must be a dictionary or list of "
                     'dictionaries that contain an "id" key that matches a '
-                    'key from the config.rasters entry.')
+                    "key from the config.rasters entry."
+                )
             if isinstance(_config_hfun_rasters, dict):
                 _config_hfun_rasters = [_config_hfun_rasters]
 
             for hfun_raster_opts in _config_hfun_rasters:
-                hfun_raster_id = hfun_raster_opts.pop('id')
+                hfun_raster_id = hfun_raster_opts.pop("id")
                 if hfun_raster_id not in self._rasters.keys():
                     raise KeyError(
-                        f'No raster with id={hfun_raster_id} specified in '
-                        'config.rasters')
+                        f"No raster with id={hfun_raster_id} specified in "
+                        "config.rasters"
+                    )
                 contours = hfun_raster_opts.get("contours", [])
                 if isinstance(contours, dict):
                     contours = [contours]
                 hfun_raster_opts.update({"contours": contours})
-                _config_hfun["rasters"].update(
-                    {hfun_raster_id: hfun_raster_opts})
+                _config_hfun["rasters"].update({hfun_raster_id: hfun_raster_opts})
                 # _geom.update({geom_raster_id: geom_raster})
 
-        if 'features' in config_hfun:
-            raise NotImplementedError(
-                'config.hfun.features not yet implemented')
+        if "features" in config_hfun:
            raise NotImplementedError("config.hfun.features not yet implemented")
 
         return _config_hfun
 
@@ -417,15 +444,13 @@ def _hfun_raster_opts(self):
     @property
     @lru_cache(maxsize=None)
     def _logger(self):
-        return logging.getLogger(__name__ + '.' + self.__class__.__name__)
+        return logging.getLogger(__name__ + "." + self.__class__.__name__)
 
 
 def _geom_identifier(zmin, zmax, driver, salt):
     zmin = "" if zmin is None else f"{zmin:G}"
     zmax = "" if zmax is None else f"{zmax:G}"
-    return hashlib.md5(
-        f"{zmin}{zmax}{driver}{salt}".encode('utf-8')
-    ).hexdigest()
+    return hashlib.md5(f"{zmin}{zmax}{driver}{salt}".encode("utf-8")).hexdigest()
 
 
 def _hfun_identifier(config_hfun):
@@ -439,39 +464,37 @@ def _apply_raster_opts(raster, raster_opts):
                 raster.resample(opt)
             else:
                 raster.resample(**opt)
-        if key == 'fill_nodata':
+        if key == "fill_nodata":
             raster.fill_nodata()
-        if key == 'clip':
+        if key == "clip":
             if isinstance(opt, dict):
                 raster.clip(
                     MultiPolygon(
-                        [box(
-                            opt['xmin'],
-                            opt['ymin'],
-                            opt['xmax'],
-                            opt['ymax'])]))
+                        [box(opt["xmin"], opt["ymin"], opt["xmax"], opt["ymax"])]
+                    )
+                )
             else:
                 raise NotImplementedError("clip by geometry")
-        if key == 'chunk_size':
+        if key == "chunk_size":
             raster.chunk_size = opt
-        if key == 'overlap':
+        if key == "overlap":
             raster.overlap = opt
 
 
 def _apply_hfun_raster_opts(hfun, hfun_raster_opts):
     for key, opts in hfun_raster_opts.items():
-        if key == 'contours':
+        if key == "contours":
             for kwargs in opts:
                 hfun.add_contour(**kwargs)
-        if key == 'features':
+        if key == "features":
             for kwargs in opts:
                 hfun.add_features(**kwargs)
-        if key == 'subtidal_flow_limiter':
+        if key == "subtidal_flow_limiter":
             if not isinstance(opts, (bool, dict)):
                 raise TypeError(
-                    "subtidal_flow_limiter options must be "
-                    "a boolean or dict.")
+                    "subtidal_flow_limiter options must be " "a boolean or dict."
+                )
             if opts is True:
                 hfun.add_subtidal_flow_limiter()
             else:
@@ -479,22 +502,22 @@ def _apply_hfun_raster_opts(hfun, hfun_raster_opts):
 
 
 def _geom_raster_processing_worker(
-        raster_path,
-        raster_opts,
-        zmin,
-        zmax,
-        join_method,
-        driver,
-        chunk_size,
-        overlap,
+    raster_path,
+    raster_opts,
+    zmin,
+    zmax,
+    join_method,
+    driver,
+    chunk_size,
+    overlap,
 ):
     raster = Raster(raster_path)
     _apply_raster_opts(raster, raster_opts)
-    geom = Geom(raster.get_multipolygon(
-        zmin=zmin,
-        zmax=zmax),
+    geom = Geom(
+        raster.get_multipolygon(zmin=zmin, zmax=zmax),
         join_method=join_method,
         driver=driver,
-        nprocs=1)
+        nprocs=1,
+    )
 
     return geom
diff --git a/ocsmesh/crs.py b/ocsmesh/crs.py
index 8e093545..e2360831 100644
--- a/ocsmesh/crs.py
+++ b/ocsmesh/crs.py
@@ -2,9 +2,8 @@
 
 class CRS:
-
     def __set__(self, obj, crs):
-        obj.__dict__['crs'] = pyproj.CRS.from_user_input(crs)
+        obj.__dict__["crs"] = pyproj.CRS.from_user_input(crs)
 
     def __get__(self, obj, val):
-        return obj.__dict__['crs']
+        return obj.__dict__["crs"]
diff --git a/ocsmesh/db.py b/ocsmesh/db.py
index 1036d718..f02b79f6 100644
--- a/ocsmesh/db.py
+++ b/ocsmesh/db.py
@@ -1,28 +1,23 @@
 import pathlib
 
-from sqlalchemy.ext.declarative import declarative_base
-from sqlalchemy.event import listen
-from sqlalchemy.sql import select, func
-from sqlalchemy.orm import sessionmaker
-from sqlalchemy import create_engine
-from sqlalchemy import Column, Float, String
 from geoalchemy2 import Geometry
 from geoalchemy2 import Raster as _Raster
-
+from sqlalchemy import Column, Float, String, create_engine
+from sqlalchemy.event import listen
+from sqlalchemy.ext.declarative import declarative_base
+from sqlalchemy.orm import sessionmaker
+from sqlalchemy.sql import func, select
 
 Base = declarative_base()
 
 
 class Geom(Base):
-    __tablename__ = 'geom'
+    __tablename__ = "geom"
     id = Column(String, primary_key=True, nullable=False)
     tag = Column(String)
     geom = Column(
-        Geometry(
-            geometry_type='MULTIPOLYGON',
-            management=True
-        ),
-        nullable=False
-    )
+        Geometry(geometry_type="MULTIPOLYGON", management=True), nullable=False
+    )
 
 
 class GeomCollection(Base):
@@ -33,29 +28,25 @@ class GeomCollection(Base):
     zmax = Column(Float)
     driver = Column(String, nullable=False)
     geom = Column(
-        Geometry(
-            geometry_type='MULTIPOLYGON',
-            management=True
-        ),
-        nullable=False
-    )
+        Geometry(geometry_type="MULTIPOLYGON", management=True), nullable=False
+    )
 
 
 class Hfun(Base):
-    __tablename__ = 'hfun'
+    __tablename__ = "hfun"
     id = Column(String, primary_key=True, nullable=False)
     hfun = Column(
         Geometry(
-            geometry_type='MULTIPOLYGON',
+            geometry_type="MULTIPOLYGON",
             management=True,
             dimension=3,
-            ),
-        nullable=False
-        )
+        ),
+        nullable=False,
+    )
 
 
 class HfunCollection(Base):
-    __tablename__ = 'hfun_collection'
+    __tablename__ = "hfun_collection"
     id = Column(String, primary_key=True, nullable=False)
 
 
@@ -66,14 +57,8 @@ class Raster(Base):
 
 
 class TileIndexRasters(Base):
-    __tablename__ = 'tile_index_rasters'
-    geom = Column(
-        Geometry(
-            'POLYGON',
-            management=True,
-            srid=4326
-        ),
-        nullable=False)
+    __tablename__ = "tile_index_rasters"
+    geom = Column(Geometry("POLYGON", management=True, srid=4326), nullable=False)
     # raster = Column(Raster(srid=4326, spatial_index=False))
     url = Column(String, primary_key=True, nullable=False)
     name = Column(String, nullable=False)
@@ -81,17 +66,16 @@ class TileIndexRasters(Base):
 
 
 def spatialite_session(path, echo=False):
-
     def _engine(path, echo=False):
         path = pathlib.Path(path)
         _new_db = not path.is_file()
-        engine = create_engine(f'sqlite:///{str(path)}', echo=echo)
+        engine = create_engine(f"sqlite:///{str(path)}", echo=echo)
 
         def load_spatialite(dbapi_conn, connection_record):
             dbapi_conn.enable_load_extension(True)
-            dbapi_conn.load_extension('mod_spatialite')
+            dbapi_conn.load_extension("mod_spatialite")
 
-        listen(engine, 'connect', load_spatialite)
+        listen(engine, "connect", load_spatialite)
         if _new_db:
             conn = engine.connect()
             conn.execute(select([func.InitSpatialMetaData()]))
diff --git a/ocsmesh/driver.py b/ocsmesh/driver.py
index cb0d469f..7727ab8b 100644
--- a/ocsmesh/driver.py
+++ b/ocsmesh/driver.py
@@ -1,56 +1,53 @@
 import logging
 from typing import Union
 
-from jigsawpy import jigsaw_msh_t, jigsaw_jig_t
-from jigsawpy import libsaw
 import numpy as np
+from jigsawpy import jigsaw_jig_t, jigsaw_msh_t, libsaw
 from pyproj import CRS
-
 from ocsmesh import utils
-from ocsmesh.mesh import Mesh
-from ocsmesh.hfun import Hfun
-from ocsmesh.hfun.base import BaseHfun
 from ocsmesh.geom import Geom
 from ocsmesh.geom.base import BaseGeom
+from ocsmesh.hfun import Hfun
+from ocsmesh.hfun.base import BaseHfun
+from ocsmesh.mesh import Mesh
 
 _logger = logging.getLogger(__name__)
 
 
 class GeomDescriptor:
-
     def __set__(self, obj, val):
         if not isinstance(val, BaseGeom):
-            raise TypeError(f'Argument geom must be of type {Geom}, '
-                            f'not type {type(val)}.')
-        obj.__dict__['geom'] = val
+            raise TypeError(
+                f"Argument geom must be of type {Geom}, " f"not type {type(val)}."
+            )
+        obj.__dict__["geom"] = val
 
     def __get__(self, obj, val):
-        return obj.__dict__['geom']
+        return obj.__dict__["geom"]
 
 
 class HfunDescriptor:
-
     def __set__(self, obj, val):
         if not isinstance(val, BaseHfun):
-            raise TypeError(f'Argument hfun must be of type {Hfun}, '
-                            f'not type {type(val)}.')
-        obj.__dict__['hfun'] = val
+            raise TypeError(
+                f"Argument hfun must be of type {Hfun}, " f"not type {type(val)}."
+            )
+        obj.__dict__["hfun"] = val
 
     def __get__(self, obj, val):
-        return obj.__dict__['hfun']
+        return obj.__dict__["hfun"]
 
 
 class OptsDescriptor:
-
     def __get__(self, obj, val):
-        opts = obj.__dict__.get('opts')
+        opts = obj.__dict__.get("opts")
         if opts is None:
             opts = jigsaw_jig_t()
             opts.mesh_dims = +2
             opts.optm_tria = True
-            opts.hfun_scal = 'absolute'
-            obj.__dict__['opts'] = opts
+            opts.hfun_scal = "absolute"
+            obj.__dict__["opts"] = opts
         return opts
 
 
@@ -61,12 +58,12 @@ class JigsawDriver:
     _opts = OptsDescriptor()
 
     def __init__(
-            self,
-            geom: Geom,
-            hfun: Hfun,
-            initial_mesh: bool = False,
-            crs: Union[str, CRS] = None,
-            verbosity: int = 0,
+        self,
+        geom: Geom,
+        hfun: Hfun,
+        initial_mesh: bool = False,
+        crs: Union[str, CRS] = None,
+        verbosity: int = 0,
     ):
         """
         geom can be SizeFunction or PlanarStraightLineGraph instance.
@@ -82,7 +79,7 @@ def run(self, sieve=None, quality_metric=1.05):
 
         hfun_msh_t = self.hfun.msh_t()
         output_mesh = jigsaw_msh_t()
-        output_mesh.mshID = 'euclidean-mesh'
+        output_mesh.mshID = "euclidean-mesh"
         output_mesh.ndims = 2
 
         self.opts.hfun_hmin = np.min(hfun_msh_t.value)
@@ -97,32 +94,32 @@ def run(self, sieve=None, quality_metric=1.05):
             utils.reproject(hfun_msh_t, geom_msh_t.crs)
         output_mesh.crs = hfun_msh_t.crs
 
-        _logger.info('Calling libsaw.jigsaw() ...')
+        _logger.info("Calling libsaw.jigsaw() ...")
         libsaw.jigsaw(
             self.opts,
             geom_msh_t,
             output_mesh,
             init=hfun_msh_t if self._init is True else None,
-            hfun=hfun_msh_t
+            hfun=hfun_msh_t,
         )
 
         # post process
-        if output_mesh.tria3['index'].shape[0] == 0:
-            _err = 'ERROR: Jigsaw returned empty mesh.'
+ if output_mesh.tria3["index"].shape[0] == 0: + _err = "ERROR: Jigsaw returned empty mesh." _logger.error(_err) raise Exception(_err) if self._crs is not None: utils.reproject(output_mesh, self._crs) - _logger.info('Finalizing mesh...') + _logger.info("Finalizing mesh...") # Don't need to use ad-hoc fix since Jigsaw tiny element # issue is resolve. In case needed add a flag for remesh # since it's computationally expensive -# if self.opts.hfun_hmin > 0: -# output_mesh = utils.remesh_small_elements( -# self.opts, geom_msh_t, output_mesh, hfun_msh_t) + # if self.opts.hfun_hmin > 0: + # output_mesh = utils.remesh_small_elements( + # self.opts, geom_msh_t, output_mesh, hfun_msh_t) utils.finalize_mesh(output_mesh, sieve) - _logger.info('done!') + _logger.info("done!") return Mesh(output_mesh) diff --git a/ocsmesh/features/channel.py b/ocsmesh/features/channel.py index 97fc8646..5f13a4a6 100644 --- a/ocsmesh/features/channel.py +++ b/ocsmesh/features/channel.py @@ -1,5 +1,4 @@ class Channel: - def __init__(self, level=0, width=1000, tolerance=50, sources=[]): # Even a tolerance of 1 for simplifying polygon for channel @@ -8,8 +7,8 @@ def __init__(self, level=0, width=1000, tolerance=50, sources=[]): # preserve topology self._level = level - self._width = width # and less - self._tolerance = tolerance # to simplify + self._width = width # and less + self._tolerance = tolerance # to simplify self._sources = [] if not isinstance(sources, (list, tuple)): @@ -52,12 +51,12 @@ def _get_contour_from_source(self, source): src_class = type(source).__name__ if src_class == "Raster": - channels = source.get_channels( - self._level, self._width, self._tolerance) + channels = source.get_channels(self._level, self._width, self._tolerance) crs = source.crs elif src_class in ("RasterGeom", "HfunRaster"): channels = source.raster.get_channels( - self._level, self._width, self._tolerance) + self._level, self._width, self._tolerance + ) crs = source.raster.crs else: raise TypeError("") diff --git a/ocsmesh/features/constraint.py b/ocsmesh/features/constraint.py index 74e1ea9a..5cd355dd 100644 --- a/ocsmesh/features/constraint.py +++ b/ocsmesh/features/constraint.py @@ -1,14 +1,14 @@ -from enum import Enum from abc import ABC, abstractmethod +from enum import Enum import numpy as np from scipy.spatial import cKDTree ConstraintValueType = Enum("ConstraintValueType", "MIN MAX") -class Constraint(ABC): - def __init__(self, value_type: str = 'min', rate: float = 0.1): +class Constraint(ABC): + def __init__(self, value_type: str = "min", rate: float = 0.1): # TODO: Put rate in a mixin ? 
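# Aside: an illustrative sketch, not part of this patch, of the "min"/"max"
# value_type semantics these constraints rely on (the comparison operator
# actually returned by Constraint.satisfies is an assumption here):
import numpy as np

def satisfies(kind, checked, value):
    # "min": checked sizes must stay at or above value; "max": at or below.
    return checked >= value if kind == "min" else checked <= value

print(satisfies("min", np.array([2.0, 0.5]), 1.0))  # [ True False]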
self._rate = rate @@ -21,12 +21,10 @@ def __init__(self, value_type: str = 'min', rate: float = 0.1): else: raise ValueError("Invalid input for value type!") - @property def type(self): return type(self) - @property def value_type(self): return self._val_type @@ -34,11 +32,11 @@ def value_type(self): @property def satisfies(self): - ''' + """ The function to compare a value with the constraint value and evaluate wether it satisfies the constraint function's needs to receive values to check as first argument - ''' + """ # pylint: disable=R1705 if self.value_type == ConstraintValueType.MIN: @@ -52,11 +50,11 @@ def satisfies(self): def rate_sign(self): # TODO: Put this method in a mixin - ''' + """ Based on the value-type of the constraints, return a sign indicating whether rate is for expansion or contraction of size outside calculated zone - ''' + """ # pylint: disable=R1705 if self.value_type == ConstraintValueType.MIN: @@ -66,28 +64,27 @@ def rate_sign(self): raise ValueError("Invalid value type for constraint!") - @abstractmethod def apply(self): pass - def _apply_rate(self, ref_values, values, locations, mask): if not np.any(mask): - return values # TODO: COPY? + return values # TODO: COPY? new_values = values.copy().ravel() bound_values = ref_values.copy().ravel() coords = locations.reshape(-1, 2) if self._rate is None: - return values # TODO: COPY? + return values # TODO: COPY? if len(coords) != len(new_values): raise ValueError( "Number of locations and values" - + f" don't match: {len(coords)} vs {len(new_values)}") + + f" don't match: {len(coords)} vs {len(new_values)}" + ) mask_r = mask.copy().ravel() nomask_r = np.logical_not(mask_r) @@ -98,18 +95,18 @@ def _apply_rate(self, ref_values, values, locations, mask): tree = cKDTree(points) near_dists, neighbors = tree.query(xy) temp_values = new_values[mask_r][neighbors] * ( - 1 + near_dists * self._rate * self.rate_sign) + 1 + near_dists * self._rate * self.rate_sign + ) # NOTE: No bounds are applied for rate - mask2 = np.logical_not(self.satisfies( - new_values[nomask_r], temp_values)) + mask2 = np.logical_not(self.satisfies(new_values[nomask_r], temp_values)) # Double indexing copies, we want to modify "new_values" temp_values_2 = new_values[nomask_r] temp_values_2[mask2] = temp_values[mask2] new_values[nomask_r] = temp_values_2 new_values = new_values.reshape(values.shape) - return new_values + return new_values # TODO: @@ -121,17 +118,15 @@ class RateMixin: pass - - class TopoConstConstraint(Constraint): - def __init__( - self, - value, - upper_bound=np.inf, - lower_bound=-np.inf, - value_type: str = 'min', - rate=None): + self, + value, + upper_bound=np.inf, + lower_bound=-np.inf, + value_type: str = "min", + rate=None, + ): super().__init__(value_type, rate) @@ -140,29 +135,26 @@ def __init__( self._val = value - @property def value(self): return self._val - - - @property def topo_bounds(self): return self._lb, self._ub - def apply(self, ref_values, old_values, locations=None): lower_bound, upper_bound = self.topo_bounds new_values = old_values.copy() - mask = ((ref_values > lower_bound) & - (ref_values < upper_bound) & - (np.logical_not(self.satisfies(new_values, self.value)))) + mask = ( + (ref_values > lower_bound) + & (ref_values < upper_bound) + & (np.logical_not(self.satisfies(new_values, self.value))) + ) new_values[mask] = self.value new_values = self._apply_rate(ref_values, new_values, locations, mask) @@ -170,16 +162,15 @@ def apply(self, ref_values, old_values, locations=None): return new_values - class 
TopoFuncConstraint(Constraint): - def __init__( - self, - function=lambda i: i / 2.0, - upper_bound=np.inf, - lower_bound=-np.inf, - value_type: str = 'min', - rate=None): + self, + function=lambda i: i / 2.0, + upper_bound=np.inf, + lower_bound=-np.inf, + value_type: str = "min", + rate=None, + ): super().__init__(value_type, rate) @@ -190,13 +181,11 @@ def __init__( if callable(function): self._func = function - @property def topo_bounds(self): return self._lb, self._ub - def apply(self, ref_values, old_values, locations=None): lower_bound, upper_bound = self.topo_bounds @@ -204,9 +193,11 @@ def apply(self, ref_values, old_values, locations=None): new_values = old_values.copy() temp_values = self._func(ref_values) - mask = ((ref_values > lower_bound) & - (ref_values < upper_bound) & - (np.logical_not(self.satisfies(new_values, temp_values)))) + mask = ( + (ref_values > lower_bound) + & (ref_values < upper_bound) + & (np.logical_not(self.satisfies(new_values, temp_values))) + ) new_values[mask] = temp_values[mask] new_values = self._apply_rate(ref_values, new_values, locations, mask) diff --git a/ocsmesh/features/contour.py b/ocsmesh/features/contour.py index d477077a..68eb7779 100644 --- a/ocsmesh/features/contour.py +++ b/ocsmesh/features/contour.py @@ -1,17 +1,19 @@ -from pathlib import Path from abc import ABC, abstractmethod +from pathlib import Path -class ContourBase(ABC): +class ContourBase(ABC): def __init__(self, sources=[], shapefile=None): if sources and shapefile: raise ValueError( - "Both sources and shapefile cannot be specified at the same time!") + "Both sources and shapefile cannot be specified at the same time!" + ) # Either based on shape or the source-level if shapefile and Path(shapefile).is_file(): raise NotImplementedError( - "Contour based on shapefiles are not supported yet!") + "Contour based on shapefiles are not supported yet!" + ) self._sources = [] if not isinstance(sources, (list, tuple)): @@ -50,11 +52,10 @@ def iter_contours(self): def _get_contour_from_source(self, source): pass -# @abstractmethod + # @abstractmethod def _get_contour_from_shapefile(self, shapefile): # TODO: Support shapefile? 
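# Aside: a minimal, self-contained sketch of the masking pattern used by
# TopoFuncConstraint.apply above; the size function and the open-ended
# elevation band below are assumptions for illustration only:
import numpy as np

def constrain_by_topo(z, h, func=lambda z: np.abs(z) / 2.0,
                      lower=-np.inf, upper=np.inf):
    # Inside the (lower, upper) elevation band, cap sizes at func(z).
    target = func(z)
    mask = (z > lower) & (z < upper) & (h > target)
    out = h.copy()
    out[mask] = target[mask]
    return out

print(constrain_by_topo(np.array([-10.0, -2.0]), np.array([8.0, 8.0])))  # [5. 1.]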
- raise NotImplementedError( - "Contour based on shapefiles are not supported yet!") + raise NotImplementedError("Contour based on shapefiles are not supported yet!") @@ -63,7 +64,6 @@ def level(self): class Contour(ContourBase): - def __init__(self, level=None, sources=[], shapefile=None): super().__init__(sources, shapefile) @@ -88,13 +88,14 @@ def level(self): class FilledContour(ContourBase): - - def __init__(self, - level0=None, - level1=None, - sources=[], - max_contour_defn : Contour = None, - shapefile=None): + def __init__( + self, + level0=None, + level1=None, + sources=[], + max_contour_defn: Contour = None, + shapefile=None, + ) super().__init__(sources, shapefile) if max_contour_defn: @@ -110,9 +111,9 @@ def __init__(self, def _get_contour_from_source(self, source): z_info = {} if self._level0 is not None: - z_info['zmin'] = self._level0 + z_info["zmin"] = self._level0 if self._level1 is not None: - z_info['zmax'] = self._level1 + z_info["zmax"] = self._level1 src_class = type(source).__name__ if src_class == "Raster": diff --git a/ocsmesh/features/patch.py b/ocsmesh/features/patch.py index c2d06508..f7b8118b 100644 --- a/ocsmesh/features/patch.py +++ b/ocsmesh/features/patch.py @@ -5,17 +5,17 @@ from pyproj import CRS from shapely.geometry import MultiPolygon, Polygon -class Patch: - def __init__(self, - shape: Union[None, MultiPolygon, Polygon] = None, - shape_crs: CRS = CRS.from_user_input("EPSG:4326"), - shapefile: Union[None, str, Path] = None - ): +class Patch: + def __init__( + self, + shape: Union[None, MultiPolygon, Polygon] = None, + shape_crs: CRS = CRS.from_user_input("EPSG:4326"), + shapefile: Union[None, str, Path] = None, + ): - if not(shape or shapefile): - raise ValueError( - "No patch input provided") + if not (shape or shapefile): + raise ValueError("No patch input provided") # crs input is only for shape, shapefile needs to provide # its own crs @@ -30,17 +30,15 @@ def __init__(self, elif shape is not None: raise TypeError( - f"Type of shape input must be either {MultiPolygon}" - f" or {Polygon}") + f"Type of shape input must be either {MultiPolygon}" f" or {Polygon}" + ) elif not self._shapefile.is_file(): - raise ValueError( - "Not shape input for patch definition") - + raise ValueError("No shape input for patch definition") def get_multipolygon(self) -> MultiPolygon: - if self._shape: # pylint: disable=R1705 + if self._shape: # pylint: disable=R1705 return self._shape, self._shape_crs elif self._shapefile.is_file(): @@ -56,5 +54,4 @@ def get_multipolygon(self) -> MultiPolygon: return multipolygon, gdf.crs - raise ValueError( - "Error retrieving shape information for patch") + raise ValueError("Error retrieving shape information for patch") diff --git a/ocsmesh/figures.py b/ocsmesh/figures.py index bce1b44e..19a6e0c4 100644 --- a/ocsmesh/figures.py +++ b/ocsmesh/figures.py @@ -1,36 +1,31 @@ +import matplotlib.pyplot as plt import numpy as np from matplotlib import rcParams -from matplotlib.colors import LinearSegmentedColormap -import matplotlib.pyplot as plt -from matplotlib.colors import Normalize +from matplotlib.colors import LinearSegmentedColormap, Normalize def get_topobathy_kwargs(values, vmin, vmax, colors=256): vmin = np.min(values) if vmin is None else vmin vmax = np.max(values) if vmax is None else vmax - if vmax <= 0.: + if vmax <= 0.0: cmap = plt.cm.seismic - col_val = 0.
+ col_val = 0.0 levels = np.linspace(vmin, vmax, colors) else: - wet_count = int(np.floor( - colors*(float((values < 0.).sum()) / float(values.size)))) - col_val = float(wet_count)/colors + wet_count = int( + np.floor(colors * (float((values < 0.0).sum()) / float(values.size))) + ) + col_val = float(wet_count) / colors dry_count = colors - wet_count - colors_undersea = plt.cm.bwr(np.linspace(1., 0., wet_count)) - colors_land = plt.cm.terrain(np.linspace(0.25, 1., dry_count)) + colors_undersea = plt.cm.bwr(np.linspace(1.0, 0.0, wet_count)) + colors_land = plt.cm.terrain(np.linspace(0.25, 1.0, dry_count)) colors = np.vstack((colors_undersea, colors_land)) - cmap = LinearSegmentedColormap.from_list('cut_terrain', colors) + cmap = LinearSegmentedColormap.from_list("cut_terrain", colors) wlevels = np.linspace(vmin, 0.0, wet_count, endpoint=False) dlevels = np.linspace(0.0, vmax, dry_count) levels = np.hstack((wlevels, dlevels)) if vmax > 0: - norm = FixPointNormalize( - sealevel=0.0, - vmax=vmax, - vmin=vmin, - col_val=col_val - ) + norm = FixPointNormalize(sealevel=0.0, vmax=vmax, vmin=vmin, col_val=col_val) else: norm = None return cmap, norm, levels, col_val @@ -56,8 +51,8 @@ class FixPointNormalize(Normalize): This may be useful for a `terrain` map, to set the "sea level" to a color in the blue/turquise range. """ - def __init__(self, vmin=None, vmax=None, sealevel=0, col_val=0.5, - clip=False): + + def __init__(self, vmin=None, vmax=None, sealevel=0, col_val=0.5, clip=False): # sealevel is the fix point of the colormap (in data units) self.sealevel = sealevel # col_val is the color value in the range [0,1] that should represent @@ -74,14 +69,12 @@ def __call__(self, value, clip=None): def figure(f): def decorator(*argv, **kwargs): - axes = get_axes( - kwargs.get('axes', None), - kwargs.get('figsize', None) - ) - kwargs.update({'axes': axes}) + axes = get_axes(kwargs.get("axes", None), kwargs.get("figsize", None)) + kwargs.update({"axes": axes}) f(*argv, **kwargs) - if kwargs.get('show', False): - plt.gca().axis('scaled') + if kwargs.get("show", False): + plt.gca().axis("scaled") plt.show() return axes + return decorator diff --git a/ocsmesh/geom/__init__.py b/ocsmesh/geom/__init__.py index 5f6b3b3b..08c40259 100644 --- a/ocsmesh/geom/__init__.py +++ b/ocsmesh/geom/__init__.py @@ -1,6 +1,4 @@ from ocsmesh.geom.geom import Geom from ocsmesh.geom.raster import RasterGeom -__all__ = [ - "Geom", - "RasterGeom" -] + +__all__ = ["Geom", "RasterGeom"] diff --git a/ocsmesh/geom/base.py b/ocsmesh/geom/base.py index 1e9f6785..bffe2f96 100644 --- a/ocsmesh/geom/base.py +++ b/ocsmesh/geom/base.py @@ -1,18 +1,18 @@ from abc import ABC, abstractmethod from typing import List, Tuple -from jigsawpy import jigsaw_msh_t import numpy as np +from jigsawpy import jigsaw_msh_t from pyproj import CRS, Transformer from shapely import ops from shapely.geometry import MultiPolygon -from ocsmesh.crs import CRS as CRSDescriptor from ocsmesh import utils +from ocsmesh.crs import CRS as CRSDescriptor class BaseGeom(ABC): - '''Abstract base class used to construct ocsmesh "geom" objects. + """Abstract base class used to construct ocsmesh "geom" objects. More concretely, a "geom" object can be visualized as a collection of polygons. In terms of data structures, a collection of polygons can be @@ -26,7 +26,7 @@ class BaseGeom(ABC): Derived classes from :class:`ocsmesh.geom.BaseGeom` expose the concrete implementation of how to compute this hull based on inputs provided by the users. 
- ''' + """ _crs = CRSDescriptor() @@ -35,22 +35,19 @@ def __init__(self, crs): @property def multipolygon(self) -> MultiPolygon: - '''Returns a :class:shapely.geometry.MultiPolygon object representing - the configured geometry.''' + """Returns a :class:shapely.geometry.MultiPolygon object representing + the configured geometry.""" return self.get_multipolygon() def msh_t(self, **kwargs) -> jigsaw_msh_t: - '''Returns a :class:jigsawpy.jigsaw_msh_t object representing the - geometry constrained by the arguments.''' - return multipolygon_to_jigsaw_msh_t( - self.get_multipolygon(**kwargs), - self.crs - ) + """Returns a :class:jigsawpy.jigsaw_msh_t object representing the + geometry constrained by the arguments.""" + return multipolygon_to_jigsaw_msh_t(self.get_multipolygon(**kwargs), self.crs) @abstractmethod def get_multipolygon(self, **kwargs) -> MultiPolygon: - '''Returns a :class:shapely.geometry.MultiPolygon object representing - the geometry constrained by the arguments.''' + """Returns a :class:shapely.geometry.MultiPolygon object representing + the geometry constrained by the arguments.""" raise NotImplementedError @property @@ -58,22 +55,16 @@ def crs(self): return self._crs -def multipolygon_to_jigsaw_msh_t( - multipolygon: MultiPolygon, - crs: CRS -) -> jigsaw_msh_t: - '''Casts shapely.geometry.MultiPolygon to jigsawpy.jigsaw_msh_t''' - utm_crs = utils.estimate_bounds_utm( - multipolygon.bounds, crs) +def multipolygon_to_jigsaw_msh_t(multipolygon: MultiPolygon, crs: CRS) -> jigsaw_msh_t: + """Casts shapely.geometry.MultiPolygon to jigsawpy.jigsaw_msh_t""" + utm_crs = utils.estimate_bounds_utm(multipolygon.bounds, crs) if utm_crs is not None: transformer = Transformer.from_crs(crs, utm_crs, always_xy=True) multipolygon = ops.transform(transformer.transform, multipolygon) vert2: List[Tuple[Tuple[float, float], int]] = [] for polygon in multipolygon: - if np.all( - np.asarray( - polygon.exterior.coords).flatten() == float('inf')): + if np.all(np.asarray(polygon.exterior.coords).flatten() == float("inf")): raise NotImplementedError("ellispoidal-mesh") for x, y in polygon.exterior.coords[:-1]: vert2.append(((x, y), 0)) @@ -87,23 +78,21 @@ def multipolygon_to_jigsaw_msh_t( polygon = [polygon.exterior, *polygon.interiors] for linear_ring in polygon: _edge2 = [] - for i in range(len(linear_ring.coords)-2): - _edge2.append((i, i+1)) + for i in range(len(linear_ring.coords) - 2): + _edge2.append((i, i + 1)) _edge2.append((_edge2[-1][1], _edge2[0][0])) - edge2.extend( - [(e0+len(edge2), e1+len(edge2)) - for e0, e1 in _edge2]) + edge2.extend([(e0 + len(edge2), e1 + len(edge2)) for e0, e1 in _edge2]) # geom geom = jigsaw_msh_t() geom.ndims = +2 - geom.mshID = 'euclidean-mesh' + geom.mshID = "euclidean-mesh" # TODO: Consider ellipsoidal case. 
# geom.mshID = 'euclidean-mesh' if self._ellipsoid is None \ # else 'ellipsoidal-mesh' geom.vert2 = np.asarray(vert2, dtype=jigsaw_msh_t.VERT2_t) geom.edge2 = np.asarray( - [((e0, e1), 0) for e0, e1 in edge2], - dtype=jigsaw_msh_t.EDGE2_t) + [((e0, e1), 0) for e0, e1 in edge2], dtype=jigsaw_msh_t.EDGE2_t + ) geom.crs = crs if utm_crs is not None: geom.crs = utm_crs diff --git a/ocsmesh/geom/collector.py b/ocsmesh/geom/collector.py index 5d360ca0..f3fdc4f7 100644 --- a/ocsmesh/geom/collector.py +++ b/ocsmesh/geom/collector.py @@ -1,31 +1,32 @@ -import os import logging -import warnings +import os import tempfile +import warnings +from multiprocessing import cpu_count from numbers import Number from pathlib import Path -from multiprocessing import cpu_count -from typing import Union, Sequence, Tuple +from typing import Sequence, Tuple, Union import geopandas as gpd from pyproj import CRS, Transformer -from shapely.geometry import MultiPolygon, Polygon from shapely import ops +from shapely.geometry import MultiPolygon, Polygon from shapely.validation import explain_validity -from ocsmesh.mesh import Mesh -from ocsmesh.mesh.base import BaseMesh -from ocsmesh.raster import Raster +from ocsmesh.features.contour import Contour, FilledContour +from ocsmesh.features.patch import Patch from ocsmesh.geom.base import BaseGeom +from ocsmesh.geom.mesh import MeshGeom from ocsmesh.geom.raster import RasterGeom from ocsmesh.geom.shapely import MultiPolygonGeom, PolygonGeom -from ocsmesh.geom.mesh import MeshGeom -from ocsmesh.features.contour import FilledContour, Contour -from ocsmesh.features.patch import Patch +from ocsmesh.mesh import Mesh +from ocsmesh.mesh.base import BaseMesh from ocsmesh.ops import combine_geometry +from ocsmesh.raster import Raster _logger = logging.getLogger(__name__) + class ContourPatchInfoCollector: def __init__(self): self._contour_patch_info = [] @@ -39,22 +40,21 @@ def __iter__(self): class GeomCollector(BaseGeom): - def __init__( - self, - in_list: Sequence[ - Union[str, Raster, RasterGeom, MeshGeom, - MultiPolygonGeom, PolygonGeom]], - base_mesh: Mesh = None, - zmin: float = None, - zmax: float = None, - nprocs: int = None, - chunk_size: int = None, - overlap: int = None, - verbosity: int = 0, - base_shape: Union[Polygon, MultiPolygon] = None, - base_shape_crs: Union[str, CRS] = 'EPSG:4326' - ): + self, + in_list: Sequence[ + Union[str, Raster, RasterGeom, MeshGeom, MultiPolygonGeom, PolygonGeom] + ], + base_mesh: Mesh = None, + zmin: float = None, + zmax: float = None, + nprocs: int = None, + chunk_size: int = None, + overlap: int = None, + verbosity: int = 0, + base_shape: Union[Polygon, MultiPolygon] = None, + base_shape_crs: Union[str, CRS] = "EPSG:4326", + ): # TODO: Like hfun collector and ops, later move the geom # combine functionality here and just call it from ops instead @@ -75,7 +75,6 @@ def __init__( nprocs = -1 if nprocs is None else nprocs nprocs = cpu_count() if nprocs == -1 else nprocs - self._elev_info = dict(zmin=zmin, zmax=zmax) self._nprocs = nprocs self._chunk_size = chunk_size @@ -94,7 +93,7 @@ def __init__( # TODO: CRS considerations -- geom combine doesn't necessarily # return EPSG:4326 (unlike hfun collector msh_t) - self._crs = 'EPSG:4326' + self._crs = "EPSG:4326" for in_item in in_list: # Add supports(ext) to each hfun type? 
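# Aside: a standalone illustration of the ring encoding performed by
# multipolygon_to_jigsaw_msh_t above; each linear ring contributes its
# vertices once (the duplicated closing coordinate is dropped) and an edge
# list that wraps back to the ring's first vertex:
from shapely.geometry import box

ring = box(0, 0, 1, 1).exterior
n = len(ring.coords) - 1  # the last coordinate repeats the first
verts = list(ring.coords[:-1])
edges = [(i, (i + 1) % n) for i in range(n)]
print(verts)  # [(1.0, 0.0), (1.0, 1.0), (0.0, 1.0), (0.0, 0.0)]
print(edges)  # [(0, 1), (1, 2), (2, 3), (3, 0)]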
@@ -107,9 +106,9 @@ def __init__( clip_shape = self._base_shape if not self._base_shape_crs.equals(in_item.crs): transformer = Transformer.from_crs( - self._base_shape_crs, in_item.crs, always_xy=True) - clip_shape = ops.transform( - transformer.transform, clip_shape) + self._base_shape_crs, in_item.crs, always_xy=True + ) + clip_shape = ops.transform(transformer.transform, clip_shape) try: in_item.clip(clip_shape) except ValueError as err: @@ -131,15 +130,17 @@ def __init__( geom = MeshGeom(in_item) elif isinstance(in_item, str): - if in_item.endswith('.tif'): + if in_item.endswith(".tif"): raster = Raster(in_item) if self._base_shape: clip_shape = self._base_shape if not self._base_shape_crs.equals(raster.crs): transformer = Transformer.from_crs( - self._base_shape_crs, raster.crs, always_xy=True) + self._base_shape_crs, raster.crs, always_xy=True + ) clip_shape = ops.transform( - transformer.transform, clip_shape) + transformer.transform, clip_shape + ) try: in_item.clip(clip_shape) except ValueError as err: @@ -157,8 +158,7 @@ def __init__( geom = RasterGeom(raster, **self._elev_info) - elif in_item.endswith( - ('.14', '.grd', '.gr3', '.msh', '.2dm')): + elif in_item.endswith((".14", ".grd", ".gr3", ".msh", ".2dm")): geom = MeshGeom(Mesh.open(in_item)) else: @@ -166,10 +166,9 @@ def __init__( self._geom_list.append(geom) - def get_multipolygon(self, **kwargs) -> MultiPolygon: - '''Returns a :class:shapely.geometry.MultiPolygon object representing - the geometry constrained by the arguments.''' + """Returns a :class:shapely.geometry.MultiPolygon object representing + the geometry constrained by the arguments.""" # For now we don't need to do any calculations here, the # ops will take care of extracting everything. Later the logic @@ -190,22 +189,25 @@ def get_multipolygon(self, **kwargs) -> MultiPolygon: base_multipoly = self._base_shape if not self._base_shape_crs.equals(epsg4326): transformer = Transformer.from_crs( - self._base_shape_crs, epsg4326, always_xy=True) + self._base_shape_crs, epsg4326, always_xy=True + ) base_multipoly = ops.transform( - transformer.transform, base_multipoly) + transformer.transform, base_multipoly + ) elif self._base_mesh: # TODO: Make sure all calcs are in EPSG:4326 base_multipoly = self._base_mesh.hull.multipolygon() - feather_files.append(self._extract_global_boundary( - temp_path, base_multipoly)) - feather_files.extend(self._extract_nonraster_boundary( - temp_path, base_multipoly)) - feather_files.extend(self._extract_features( - temp_path, base_multipoly)) + feather_files.append( + self._extract_global_boundary(temp_path, base_multipoly) + ) + feather_files.extend( + self._extract_nonraster_boundary(temp_path, base_multipoly) + ) + feather_files.extend(self._extract_features(temp_path, base_multipoly)) - gdf = gpd.GeoDataFrame(columns=['geometry'], crs=epsg4326) + gdf = gpd.GeoDataFrame(columns=["geometry"], crs=epsg4326) for f in feather_files: gdf = gdf.append(gpd.read_feather(f)) @@ -215,18 +217,19 @@ def get_multipolygon(self, **kwargs) -> MultiPolygon: elif not isinstance(mp, MultiPolygon): raise ValueError( - "Union of all shapes resulted in invalid geometry" - + " type") + "Union of all shapes resulted in invalid geometry" + " type" + ) return mp - def add_patch(self, - shape: Union[MultiPolygon, Polygon] = None, - level: Union[Tuple[float, float], float] = None, - contour_defn: Union[FilledContour, Contour] = None, - patch_defn: Patch = None, - shapefile: Union[None, str, Path] = None, - ): + def add_patch( + self, + shape: 
Union[MultiPolygon, Polygon] = None, + level: Union[Tuple[float, float], float] = None, + contour_defn: Union[FilledContour, Contour] = None, + patch_defn: Patch = None, + shapefile: Union[None, str, Path] = None, + ): # Always lazy @@ -242,7 +245,8 @@ def add_patch(self, else: raise ValueError( "Level must be specified either by min and max values" - " or by only max value ") + " or by only max value " + ) contour_defn = FilledContour(level1=level) @@ -252,7 +256,8 @@ def add_patch(self, elif not isinstance(contour_defn, FilledContour): raise TypeError( f"Filled contour definition must be of type" - f" {FilledContour} not {type(contour_defn)}!") + f" {FilledContour} not {type(contour_defn)}!" + ) elif level is not None: msg = "Level is ignored since a contour definition is provided!" @@ -268,28 +273,25 @@ def add_patch(self, elif not isinstance(patch_defn, Patch): raise TypeError( - f"Patch definition must be of type {Patch} not" - f" {type(patch_defn)}!") - + f"Patch definition must be of type {Patch} not" f" {type(patch_defn)}!" + ) # If patch defn is None it means the patch applies to # all the sources of the accompanying contour - self._contour_patch_info_coll.add( - contour_defn, patch_defn) - + self._contour_patch_info_coll.add(contour_defn, patch_defn) def _type_chk(self, input_list): - ''' Check the input type for constructor ''' + """Check the input type for constructor""" valid_types = (str, Raster, BaseGeom, BaseMesh) if not all(isinstance(item, valid_types) for item in input_list): raise TypeError( f'Input list items must be of type {", ".join(valid_types)}' - f', or a derived type.') + f", or a derived type." + ) def _get_raster_sources(self): raster_types = (RasterGeom, Raster) - rasters = [ - i for i in self._geom_list if isinstance(i, raster_types)] + rasters = [i for i in self._geom_list if isinstance(i, raster_types)] return rasters def _get_raster_source_files(self): @@ -309,14 +311,12 @@ def _get_raster_files_from_source(self, rasters): def _get_non_raster_sources(self): raster_types = (RasterGeom, Raster) - non_rasters = [ - i for i in self._geom_list if not isinstance(i, raster_types)] + non_rasters = [i for i in self._geom_list if not isinstance(i, raster_types)] return non_rasters def _get_valid_multipolygon( - self, - polygon: Union[Polygon, MultiPolygon] - ) -> MultiPolygon: + self, polygon: Union[Polygon, MultiPolygon] + ) -> MultiPolygon: # TODO: Performance bottleneck for valid checks if not polygon.is_valid: @@ -337,18 +337,25 @@ def _extract_global_boundary(self, out_dir, base_multipoly): out_path = Path(out_dir) - geom_path = out_path / 'global_boundary.feather' + geom_path = out_path / "global_boundary.feather" raster_files = self._get_raster_source_files() - zmin = self._elev_info['zmin'] - zmax = self._elev_info['zmax'] + zmin = self._elev_info["zmin"] + zmax = self._elev_info["zmax"] _logger.info("Extracting global boundaries") combine_geometry( - raster_files, geom_path, "feather", - None, base_multipoly, False, - zmin, zmax, - self._chunk_size, self._overlap, - self._nprocs) + raster_files, + geom_path, + "feather", + None, + base_multipoly, + False, + zmin, + zmax, + self._chunk_size, + self._overlap, + self._nprocs, + ) return geom_path @@ -360,13 +367,11 @@ def _extract_nonraster_boundary(self, out_dir, base_multipoly): feather_files = [] for e, geom in enumerate(non_rasters): - geom_path = out_path / f'nonraster_{os.getpid()}_{e}.feather' + geom_path = out_path / f"nonraster_{os.getpid()}_{e}.feather" crs = geom.crs - multipoly = 
self._get_valid_multipolygon( - geom.get_multipolygon()) - gdf_non_raster = gpd.GeoDataFrame( - {'geometry': multipoly}, crs=crs) + multipoly = self._get_valid_multipolygon(geom.get_multipolygon()) + gdf_non_raster = gpd.GeoDataFrame({"geometry": multipoly}, crs=crs) if crs != CRS.from_user_input("EPSG:4326"): gdf_non_raster = gdf_non_raster.to_crs("EPSG:4326") @@ -389,10 +394,9 @@ def _apply_patch(self, out_dir, base_multipoly): out_path = Path(out_dir) - raster_files = self._get_raster_source_files() - zmin = self._elev_info['zmin'] - zmax = self._elev_info['zmax'] + zmin = self._elev_info["zmin"] + zmax = self._elev_info["zmax"] feather_files = [] for e, (ctr_defn, ptch_defn) in enumerate(self._contour_patch_info_coll): @@ -406,8 +410,7 @@ def _apply_patch(self, out_dir, base_multipoly): patch_raster_files = raster_files if ctr_defn.has_source: patch_rasters = ctr_defn.sources - patch_raster_files = self._get_raster_files_from_source( - patch_rasters) + patch_raster_files = self._get_raster_files_from_source(patch_rasters) # Pass patch shape instead of base mesh # See explanation in add_patch @@ -415,18 +418,24 @@ def _apply_patch(self, out_dir, base_multipoly): combine_poly = base_multipoly if ptch_defn: patch_mp, crs = ptch_defn.get_multipolygon() - gdf_patch = gpd.GeoDataFrame( - {'geometry': patch_mp}, crs=crs) + gdf_patch = gpd.GeoDataFrame({"geometry": patch_mp}, crs=crs) if crs != CRS.from_user_input("EPSG:4326"): gdf_patch = gdf_patch.to_crs("EPSG:4326") combine_poly = MultiPolygon(list(gdf_patch.geometry)) - geom_path = out_path / f'patch_{os.getpid()}_{e}.feather' + geom_path = out_path / f"patch_{os.getpid()}_{e}.feather" combine_geometry( - patch_raster_files, geom_path, "feather", - None, combine_poly, True, - patch_zmin, patch_zmax, - self._chunk_size, self._overlap, - self._nprocs) + patch_raster_files, + geom_path, + "feather", + None, + combine_poly, + True, + patch_zmin, + patch_zmax, + self._chunk_size, + self._overlap, + self._nprocs, + ) if geom_path.is_file(): feather_files.append(geom_path) diff --git a/ocsmesh/geom/geom.py b/ocsmesh/geom/geom.py index f35934db..266980a4 100644 --- a/ocsmesh/geom/geom.py +++ b/ocsmesh/geom/geom.py @@ -1,12 +1,12 @@ -from shapely.geometry import Polygon, MultiPolygon # type: ignore[import] +from shapely.geometry import MultiPolygon, Polygon # type: ignore[import] -from ocsmesh.raster import Raster -from ocsmesh.mesh.base import BaseMesh from ocsmesh.geom.base import BaseGeom -from ocsmesh.geom.raster import RasterGeom -from ocsmesh.geom.mesh import MeshGeom -from ocsmesh.geom.shapely import PolygonGeom, MultiPolygonGeom from ocsmesh.geom.collector import GeomCollector +from ocsmesh.geom.mesh import MeshGeom +from ocsmesh.geom.raster import RasterGeom +from ocsmesh.geom.shapely import MultiPolygonGeom, PolygonGeom +from ocsmesh.mesh.base import BaseMesh +from ocsmesh.raster import Raster class Geom(BaseGeom): @@ -23,7 +23,7 @@ def __new__(cls, geom, **kwargs): Object to use as input to compute the output mesh hull. """ - if isinstance(geom, Raster): # pylint: disable=R1705 + if isinstance(geom, Raster): # pylint: disable=R1705 return RasterGeom(geom, **kwargs) elif isinstance(geom, BaseMesh): @@ -39,8 +39,9 @@ def __new__(cls, geom, **kwargs): return GeomCollector(geom, **kwargs) raise TypeError( - f'Argument geom must be of type {BaseGeom} or a derived type, ' - f'not type {type(geom)}.') + f"Argument geom must be of type {BaseGeom} or a derived type, " + f"not type {type(geom)}." 
+ ) @staticmethod def is_valid_type(geom): diff --git a/ocsmesh/geom/mesh.py b/ocsmesh/geom/mesh.py index 156b4d5c..b2524d98 100644 --- a/ocsmesh/geom/mesh.py +++ b/ocsmesh/geom/mesh.py @@ -1,32 +1,33 @@ import os from typing import Union +from ocsmesh.geom.base import BaseGeom +from ocsmesh.mesh.base import BaseMesh +from ocsmesh.mesh.mesh import Mesh + # from jigsawpy import jigsaw_msh_t # type: ignore[import] # import matplotlib.pyplot as plt # type: ignore[import] # import mpl_toolkits.mplot3d as m3d # type: ignore[import] # import numpy as np # type: ignore[import] # from shapely import ops # type: ignore[import] -from ocsmesh.geom.base import BaseGeom -from ocsmesh.mesh.mesh import Mesh -from ocsmesh.mesh.base import BaseMesh - class MeshDescriptor: - def __set__(self, obj, val: Union[BaseMesh, str, os.PathLike]): if isinstance(val, (str, os.PathLike)): # type: ignore[misc] val = Mesh.open(val) if not isinstance(val, BaseMesh): - raise TypeError(f'Argument mesh must be of type {Mesh}, {str} ' - f'or {os.PathLike}, not type {type(val)}') + raise TypeError( + f"Argument mesh must be of type {Mesh}, {str} " + f"or {os.PathLike}, not type {type(val)}" + ) - obj.__dict__['mesh'] = val + obj.__dict__["mesh"] = val def __get__(self, obj, val): - return obj.__dict__['mesh'] + return obj.__dict__["mesh"] class MeshGeom(BaseGeom): diff --git a/ocsmesh/geom/raster.py b/ocsmesh/geom/raster.py index e774cb41..8376719f 100644 --- a/ocsmesh/geom/raster.py +++ b/ocsmesh/geom/raster.py @@ -9,8 +9,8 @@ class SourceRaster: - '''Descriptor class used for referencing a :class:`ocsmesh.Raster` - object.''' + """Descriptor class used for referencing a :class:`ocsmesh.Raster` + object.""" def __set__(self, obj, val: Union[Raster, str, os.PathLike]): @@ -19,12 +19,12 @@ def __set__(self, obj, val: Union[Raster, str, os.PathLike]): if not isinstance(val, Raster): raise TypeError( - f'Argument raster must be of type {Raster}, ' - f'not type {type(val)}.') - obj.__dict__['source_raster'] = val + f"Argument raster must be of type {Raster}, " f"not type {type(val)}." + ) + obj.__dict__["source_raster"] = val def __get__(self, obj, val): - return obj.__dict__['source_raster'] + return obj.__dict__["source_raster"] class RasterGeom(BaseGeom): @@ -32,10 +32,10 @@ class RasterGeom(BaseGeom): _source_raster = SourceRaster() def __init__( - self, - raster: Union[Raster, str, os.PathLike], - zmin=None, - zmax=None, + self, + raster: Union[Raster, str, os.PathLike], + zmin=None, + zmax=None, ): """ Input parameters @@ -48,7 +48,8 @@ def __init__( self._zmax = zmax def get_multipolygon( # type: ignore[override] - self, zmin: float = None, zmax: float = None) -> MultiPolygon: + self, zmin: float = None, zmax: float = None + ) -> MultiPolygon: """Returns the shapely.geometry.MultiPolygon object that represents the hull of the raster given optional zmin and zmax contraints. """ @@ -60,7 +61,6 @@ def get_multipolygon( # type: ignore[override] return self.raster.get_multipolygon(zmin=zmin, zmax=zmax) - @property def raster(self): return self._source_raster @@ -75,9 +75,9 @@ def make_plot(self, ax=None, show=False): # dd087257c15692dd7d8c8e201d251ab5e66ff67f on main branch for # ellipsoidal ploting routing (removed). 
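# Aside: a self-contained variant of the hull-plotting loop below, using a
# synthetic MultiPolygon (and .geoms, required by newer shapely releases):
import matplotlib.pyplot as plt
from shapely.geometry import MultiPolygon, Polygon

shape = Polygon([(0, 0), (4, 0), (4, 4), (0, 4)],
                holes=[[(1, 1), (2, 1), (2, 2), (1, 2)]])
for polygon in MultiPolygon([shape]).geoms:
    plt.plot(*polygon.exterior.xy, color="k")  # exterior hull in black
    for interior in polygon.interiors:
        plt.plot(*interior.xy, color="r")  # interior holes in red
plt.gca().axis("scaled")
plt.show()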
for polygon in self.multipolygon: - plt.plot(*polygon.exterior.xy, color='k') + plt.plot(*polygon.exterior.xy, color="k") for interior in polygon.interiors: - plt.plot(*interior.xy, color='r') + plt.plot(*interior.xy, color="r") if show: plt.show() diff --git a/ocsmesh/geom/shapely.py b/ocsmesh/geom/shapely.py index 8d083f94..369a6c02 100644 --- a/ocsmesh/geom/shapely.py +++ b/ocsmesh/geom/shapely.py @@ -1,17 +1,16 @@ from typing import Union from pyproj import CRS -from shapely.geometry import Polygon, MultiPolygon +from shapely.geometry import MultiPolygon, Polygon from ocsmesh.geom.base import BaseGeom class ShapelyGeom(BaseGeom): - """ Base class for geoms based on shapely objects """ + """Base class for geoms based on shapely objects""" class PolygonGeom(ShapelyGeom): - def __init__(self, polygon: Polygon, crs: Union[CRS, str]): assert isinstance(polygon, Polygon) self._polygon = polygon @@ -30,7 +29,6 @@ def crs(self): class MultiPolygonGeom(ShapelyGeom): - def __init__(self, multipolygon: MultiPolygon, crs: Union[CRS, str]): assert isinstance(multipolygon, MultiPolygon) self._multipolygon = multipolygon diff --git a/ocsmesh/hfun/base.py b/ocsmesh/hfun/base.py index 95d13d9f..add0aa47 100644 --- a/ocsmesh/hfun/base.py +++ b/ocsmesh/hfun/base.py @@ -4,7 +4,6 @@ class BaseHfun(ABC): - @abstractmethod def msh_t(self) -> jigsaw_msh_t: - '''Abstract method to generate hfun object.''' + """Abstract method to generate hfun object.""" diff --git a/ocsmesh/hfun/collector.py b/ocsmesh/hfun/collector.py index 7185bc03..8e1fd929 100644 --- a/ocsmesh/hfun/collector.py +++ b/ocsmesh/hfun/collector.py @@ -1,40 +1,39 @@ -import os import gc import logging -import warnings +import os import tempfile +import warnings +from copy import copy, deepcopy +from multiprocessing import Pool, cpu_count from pathlib import Path from time import time -from multiprocessing import Pool, cpu_count -from copy import copy, deepcopy -from typing import Union, Sequence, List, Tuple +from typing import List, Sequence, Tuple, Union -import numpy as np import geopandas as gpd -from pyproj import CRS, Transformer -from shapely.geometry import MultiPolygon, Polygon, GeometryCollection, box -from shapely import ops +import numpy as np +import rasterio from jigsawpy import jigsaw_msh_t +from pyproj import CRS, Transformer from rasterio.transform import from_origin -from rasterio.warp import reproject, Resampling -import rasterio +from rasterio.warp import Resampling, reproject +from shapely import ops +from shapely.geometry import GeometryCollection, MultiPolygon, Polygon, box from ocsmesh import utils +from ocsmesh.features.channel import Channel +from ocsmesh.features.constraint import TopoConstConstraint, TopoFuncConstraint +from ocsmesh.features.contour import Contour +from ocsmesh.features.patch import Patch from ocsmesh.hfun.base import BaseHfun -from ocsmesh.hfun.raster import HfunRaster from ocsmesh.hfun.mesh import HfunMesh -from ocsmesh.mesh.mesh import Mesh, EuclideanMesh2D +from ocsmesh.hfun.raster import HfunRaster +from ocsmesh.mesh.mesh import EuclideanMesh2D, Mesh from ocsmesh.raster import Raster, get_iter_windows -from ocsmesh.features.contour import Contour -from ocsmesh.features.patch import Patch -from ocsmesh.features.channel import Channel -from ocsmesh.features.constraint import ( - TopoConstConstraint, TopoFuncConstraint) _logger = logging.getLogger(__name__) -class RefinementContourInfoCollector: +class RefinementContourInfoCollector: def __init__(self): self._contours_info = {} @@ -46,10 +45,7 @@ 
def __iter__(self): yield defn, info - - class RefinementContourCollector: - def __init__(self, contours_info): self._contours_info = contours_info self._container: List[Union[Tuple, None]] = [] @@ -74,17 +70,18 @@ def calculate(self, source_list, out_path): feather_path = out_dir / f"contour_{pid}_{file_counter}.feather" crs_path = out_dir / f"crs_{pid}_{file_counter}.json" gpd.GeoDataFrame( - { 'geometry': [contour], - 'expansion_rate': size_info['expansion_rate'], - 'target_size': size_info['target_size'], + { + "geometry": [contour], + "expansion_rate": size_info["expansion_rate"], + "target_size": size_info["target_size"], }, - crs=crs).to_feather(feather_path) + crs=crs, + ).to_feather(feather_path) gc.collect() - with open(crs_path, 'w') as fp: + with open(crs_path, "w") as fp: fp.write(crs.to_json()) self._container.append((feather_path, crs_path)) - def __iter__(self): for raster_data in self._container: feather_path, crs_path = raster_data @@ -94,26 +91,20 @@ def __iter__(self): yield gdf - - class ConstantValueContourInfoCollector: - def __init__(self): self._contours_info = {} def add(self, src_idx, contour_defn0, contour_defn1, value): srcs = tuple(src_idx) if src_idx is not None else None - self._contours_info[ - (srcs, contour_defn0, contour_defn1)] = value + self._contours_info[(srcs, contour_defn0, contour_defn1)] = value def __iter__(self): for defn, info in self._contours_info.items(): yield defn, info - class RefinementPatchInfoCollector: - def __init__(self): self._patch_info = {} @@ -125,17 +116,14 @@ def __iter__(self): yield defn, info - class FlowLimiterInfoCollector: - def __init__(self): self._flow_lim_info = [] def add(self, src_idx, hmin, hmax, upper_bound, lower_bound): srcs = tuple(src_idx) if src_idx is not None else None - self._flow_lim_info.append( - (src_idx, hmin, hmax, upper_bound, lower_bound)) + self._flow_lim_info.append((src_idx, hmin, hmax, upper_bound, lower_bound)) def __iter__(self): @@ -143,9 +131,7 @@ def __iter__(self): yield src_idx, hmin, hmax, ub, lb - class ChannelRefineInfoCollector: - def __init__(self): self._ch_info_dict = {} @@ -159,7 +145,6 @@ def __iter__(self): class ChannelRefineCollector: - def __init__(self, channels_info): self._channels_info = channels_info self._container: List[Union[Tuple, None]] = [] @@ -184,13 +169,15 @@ def calculate(self, source_list, out_path): feather_path = out_dir / f"channels_{pid}_{file_counter}.feather" crs_path = out_dir / f"crs_{pid}_{file_counter}.json" gpd.GeoDataFrame( - { 'geometry': [channels], - 'expansion_rate': size_info['expansion_rate'], - 'target_size': size_info['target_size'], + { + "geometry": [channels], + "expansion_rate": size_info["expansion_rate"], + "target_size": size_info["target_size"], }, - crs=crs).to_feather(feather_path) + crs=crs, + ).to_feather(feather_path) gc.collect() - with open(crs_path, 'w') as fp: + with open(crs_path, "w") as fp: fp.write(crs.to_json()) self._container.append((feather_path, crs_path)) @@ -204,7 +191,6 @@ def __iter__(self): class ConstraintInfoCollector: - def __init__(self): self._constraints_info = [] @@ -217,23 +203,20 @@ def __iter__(self): yield defn - class HfunCollector(BaseHfun): - def __init__( - self, - in_list: Sequence[ - Union[str, Raster, Mesh, HfunRaster, HfunMesh]], - base_mesh: Mesh = None, - hmin: float = None, - hmax: float = None, - nprocs: int = None, - verbosity: int = 0, - method: str = 'exact', - base_as_hfun: bool = True, - base_shape: Union[Polygon, MultiPolygon] = None, - base_shape_crs: Union[str, CRS] = 
'EPSG:4326' - ): + self, + in_list: Sequence[Union[str, Raster, Mesh, HfunRaster, HfunMesh]], + base_mesh: Mesh = None, + hmin: float = None, + hmax: float = None, + nprocs: int = None, + verbosity: int = 0, + method: str = "exact", + base_as_hfun: bool = True, + base_shape: Union[Polygon, MultiPolygon] = None, + base_shape_crs: Union[str, CRS] = "EPSG:4326", + ): # NOTE: Input Hfuns and their Rasters can get modified @@ -260,8 +243,7 @@ def __init__( self._base_mesh.size_from_mesh() self._contour_info_coll = RefinementContourInfoCollector() - self._contour_coll = RefinementContourCollector( - self._contour_info_coll) + self._contour_coll = RefinementContourCollector(self._contour_info_coll) self._const_val_contour_coll = ConstantValueContourInfoCollector() @@ -270,8 +252,7 @@ def __init__( self._flow_lim_coll = FlowLimiterInfoCollector() self._ch_info_coll = ChannelRefineInfoCollector() - self._channels_coll = ChannelRefineCollector( - self._ch_info_coll) + self._channels_coll = ChannelRefineCollector(self._ch_info_coll) self._constraint_info_coll = ConstraintInfoCollector() @@ -292,9 +273,9 @@ def __init__( clip_shape = self._base_shape if not self._base_shape_crs.equals(in_item.crs): transformer = Transformer.from_crs( - self._base_shape_crs, in_item.crs, always_xy=True) - clip_shape = ops.transform( - transformer.transform, clip_shape) + self._base_shape_crs, in_item.crs, always_xy=True + ) + clip_shape = ops.transform(transformer.transform, clip_shape) try: in_item.clip(clip_shape) except ValueError as err: @@ -316,15 +297,17 @@ def __init__( hfun = HfunMesh(in_item) elif isinstance(in_item, str): - if in_item.endswith('.tif'): + if in_item.endswith(".tif"): raster = Raster(in_item) if self._base_shape: clip_shape = self._base_shape if not self._base_shape_crs.equals(raster.crs): transformer = Transformer.from_crs( - self._base_shape_crs, raster.crs, always_xy=True) + self._base_shape_crs, raster.crs, always_xy=True + ) clip_shape = ops.transform( - transformer.transform, clip_shape) + transformer.transform, clip_shape + ) try: in_item.clip(clip_shape) except ValueError as err: @@ -342,8 +325,7 @@ def __init__( hfun = HfunRaster(raster, **self._size_info) - elif in_item.endswith( - ('.14', '.grd', '.gr3', '.msh', '.2dm')): + elif in_item.endswith((".14", ".grd", ".gr3", ".msh", ".2dm")): mesh = Mesh.open(in_item) hfun = HfunMesh(mesh) @@ -352,20 +334,18 @@ def __init__( self._hfun_list.append(hfun) - def msh_t(self) -> jigsaw_msh_t: composite_hfun = jigsaw_msh_t() - if self._method == 'exact': + if self._method == "exact": self._apply_features() with tempfile.TemporaryDirectory() as temp_dir: hfun_path_list = self._write_hfun_to_disk(temp_dir) composite_hfun = self._get_hfun_composite(hfun_path_list) - - elif self._method == 'fast': + elif self._method == "fast": with tempfile.TemporaryDirectory() as temp_dir: rast = self._create_big_raster(temp_dir) @@ -377,57 +357,58 @@ def msh_t(self) -> jigsaw_msh_t: return composite_hfun - def add_topo_bound_constraint( - self, - value, - upper_bound=np.inf, - lower_bound=-np.inf, - value_type: str = 'min', - rate=0.01, - source_index: Union[List[int], int, None] = None): + self, + value, + upper_bound=np.inf, + lower_bound=-np.inf, + value_type: str = "min", + rate=0.01, + source_index: Union[List[int], int, None] = None, + ): self._applied = False constraint_defn = TopoConstConstraint( - value, upper_bound, lower_bound, value_type, rate) + value, upper_bound, lower_bound, value_type, rate + ) if source_index is not None and not 
isinstance(source_index, (tuple, list)): source_index = [source_index] self._constraint_info_coll.add(source_index, constraint_defn) - def add_topo_func_constraint( - self, - func=lambda i: i / 2.0, - upper_bound=np.inf, - lower_bound=-np.inf, - value_type: str = 'min', - rate=0.01, - source_index: Union[List[int], int, None] = None): + self, + func=lambda i: i / 2.0, + upper_bound=np.inf, + lower_bound=-np.inf, + value_type: str = "min", + rate=0.01, + source_index: Union[List[int], int, None] = None, + ): self._applied = False constraint_defn = TopoFuncConstraint( - func, upper_bound, lower_bound, value_type, rate) + func, upper_bound, lower_bound, value_type, rate + ) if source_index is not None and not isinstance(source_index, (tuple, list)): source_index = [source_index] self._constraint_info_coll.add(source_index, constraint_defn) - def add_contour( - self, - level: Union[List[float], float] = None, - expansion_rate: float = 0.01, - target_size: float = None, - contour_defn: Contour = None, + self, + level: Union[List[float], float] = None, + expansion_rate: float = 0.01, + target_size: float = None, + contour_defn: Contour = None, ): - ''' + """ Contours are defined by contour defn or by raster sources only, but are applied on both raster and mesh hfun - ''' + """ # Always lazy self._applied = False @@ -438,7 +419,6 @@ def add_contour( else: levels.append(level) - contour_defns = [] if contour_defn is None: for lvl in levels: @@ -447,7 +427,8 @@ def add_contour( elif not isinstance(contour_defn, Contour): raise TypeError( f"Contour definition must be of type {Contour} not" - f" {type(contour_defn)}!") + f" {type(contour_defn)}!" + ) elif level is not None: msg = "Level is ignored since a contour definition is provided!" @@ -459,18 +440,18 @@ def add_contour( for ctr_dfn in contour_defns: self._contour_info_coll.add( - ctr_dfn, - expansion_rate=expansion_rate, - target_size=target_size) + ctr_dfn, expansion_rate=expansion_rate, target_size=target_size + ) def add_channel( - self, - level: float = 0, - width: float = 1000, # in meters - target_size: float = 200, - expansion_rate: float = None, - tolerance: Union[None, float] = 50, - channel_defn = None): + self, + level: float = 0, + width: float = 1000, # in meters + target_size: float = 200, + expansion_rate: float = None, + tolerance: Union[None, float] = 50, + channel_defn=None, + ): self._applied = False @@ -482,27 +463,26 @@ def add_channel( # is much faster than 1. The reason is in simplify we don't # preserve topology if channel_defn is None: - channel_defn = Channel( - level=level, width=width, tolerance=tolerance) + channel_defn = Channel(level=level, width=width, tolerance=tolerance) elif not isinstance(channel_defn, Channel): raise TypeError( f"Channel definition must be of type {Channel} not" - f" {type(channel_defn)}!") + f" {type(channel_defn)}!" 
+ ) self._ch_info_coll.add( - channel_defn, - expansion_rate=expansion_rate, - target_size=target_size) - + channel_defn, expansion_rate=expansion_rate, target_size=target_size + ) def add_subtidal_flow_limiter( - self, - hmin=None, - hmax=None, - upper_bound=None, - lower_bound=None, - source_index: Union[List[int], int, None] = None): + self, + hmin=None, + hmax=None, + upper_bound=None, + lower_bound=None, + source_index: Union[List[int], int, None] = None, + ): self._applied = False @@ -516,15 +496,16 @@ def add_subtidal_flow_limiter( hmin=hmin, hmax=hmax, upper_bound=upper_bound, - lower_bound=lower_bound) - + lower_bound=lower_bound, + ) def add_constant_value( - self, value, - lower_bound=None, - upper_bound=None, - source_index: Union[List[int], int, None] =None): - + self, + value, + lower_bound=None, + upper_bound=None, + source_index: Union[List[int], int, None] = None, + ): self._applied = False @@ -538,16 +519,16 @@ def add_constant_value( if source_index is not None and not isinstance(source_index, (tuple, list)): source_index = [source_index] self._const_val_contour_coll.add( - source_index, contour_defn0, contour_defn1, value) - + source_index, contour_defn0, contour_defn1, value + ) def add_patch( - self, - shape: Union[MultiPolygon, Polygon] = None, - patch_defn: Patch = None, - shapefile: Union[None, str, Path] = None, - expansion_rate: float = None, - target_size: float = None, + self, + shape: Union[MultiPolygon, Polygon] = None, + patch_defn: Patch = None, + shapefile: Union[None, str, Path] = None, + expansion_rate: float = None, + target_size: float = None, ): # "shape" should be in 4326 CRS. For shapefile or patch_defn @@ -563,20 +544,19 @@ def add_patch( patch_defn = Patch(shapefile=shapefile) self._refine_patch_info_coll.add( - patch_defn, - expansion_rate=expansion_rate, - target_size=target_size) - + patch_defn, expansion_rate=expansion_rate, target_size=target_size + ) @staticmethod def _type_chk(input_list): - ''' Check the input type for constructor ''' + """Check the input type for constructor""" valid_types = (str, Raster, Mesh, HfunRaster, HfunMesh) if not all(isinstance(item, valid_types) for item in input_list): raise TypeError( - f'Input list items must be of type' + f"Input list items must be of type" f' {", ".join(str(i) for i in valid_types)},' - f' or a derived type.') + f" or a derived type." 
+ ) def _apply_features(self): @@ -590,14 +570,11 @@ def _apply_features(self): self._applied = True - def _apply_constraints(self): - if self._method == 'fast': - raise NotImplementedError( - "This function does not suuport fast hfun method") + if self._method == "fast": + raise NotImplementedError("This function does not support fast hfun method") - raster_hfun_list = [ - i for i in self._hfun_list if isinstance(i, HfunRaster)] + raster_hfun_list = [i for i in self._hfun_list if isinstance(i, HfunRaster)] for in_idx, hfun in enumerate(raster_hfun_list): constraint_list = [] @@ -610,7 +587,6 @@ def _apply_constraints(self): if constraint_list: hfun.apply_constraints(constraint_list) - def _apply_contours(self, apply_to=None): # TODO: Consider CRS before applying to different hfuns @@ -618,11 +594,9 @@ def _apply_contours(self, apply_to=None): # NOTE: for parallelization make sure a single hfun is NOT # passed to multiple processes - raster_hfun_list = [ - i for i in self._hfun_list if isinstance(i, HfunRaster)] + raster_hfun_list = [i for i in self._hfun_list if isinstance(i, HfunRaster)] if apply_to is None: - mesh_hfun_list = [ - i for i in self._hfun_list if isinstance(i, HfunMesh)] + mesh_hfun_list = [i for i in self._hfun_list if isinstance(i, HfunMesh)] if self._base_mesh and self._base_as_hfun: mesh_hfun_list.insert(0, self._base_mesh) apply_to = [*mesh_hfun_list, *raster_hfun_list] @@ -646,32 +620,33 @@ def _apply_contours(self, apply_to=None): if not gdf.crs.equals(hfun.crs): _logger.info("Reprojecting feature...") transformer = Transformer.from_crs( - gdf.crs, hfun.crs, always_xy=True) - shape = ops.transform( - transformer.transform, shape) + gdf.crs, hfun.crs, always_xy=True + ) + shape = ops.transform(transformer.transform, shape) counter = counter + 1 - hfun.add_feature(**{ - 'feature': shape, - 'expansion_rate': row.expansion_rate, - 'target_size': row.target_size, - 'pool': p - }) + hfun.add_feature( + **{ + "feature": shape, + "expansion_rate": row.expansion_rate, + "target_size": row.target_size, + "pool": p, + } + ) p.join() # hfun objects cause issue with pickling # -> cannot be passed to pool -# with Pool(processes=self._nprocs) as p: -# p.starmap( -# _apply_contours_worker, -# [(hfun, self._contour_coll, self._nprocs) -# for hfun in apply_to]) + + # with Pool(processes=self._nprocs) as p: + # p.starmap( + # _apply_contours_worker, + # [(hfun, self._contour_coll, self._nprocs) + # for hfun in apply_to]) def _apply_channels(self, apply_to=None): - raster_hfun_list = [ - i for i in self._hfun_list if isinstance(i, HfunRaster)] + raster_hfun_list = [i for i in self._hfun_list if isinstance(i, HfunRaster)] if apply_to is None: - mesh_hfun_list = [ - i for i in self._hfun_list if isinstance(i, HfunMesh)] + mesh_hfun_list = [i for i in self._hfun_list if isinstance(i, HfunMesh)] if self._base_mesh and self._base_as_hfun: mesh_hfun_list.insert(0, self._base_mesh) apply_to = [*mesh_hfun_list, *raster_hfun_list] @@ -694,67 +669,60 @@ def _apply_channels(self, apply_to=None): if not gdf.crs.equals(hfun.crs): _logger.info("Reprojecting feature...") transformer = Transformer.from_crs( - gdf.crs, hfun.crs, always_xy=True) - shape = ops.transform( - transformer.transform, shape) + gdf.crs, hfun.crs, always_xy=True + ) + shape = ops.transform(transformer.transform, shape) counter = counter + 1 - hfun.add_patch(**{ - 'multipolygon': shape, - 'expansion_rate': row.expansion_rate, - 'target_size': row.target_size, - 'nprocs': self._nprocs - }) - + hfun.add_patch( + **{ +
"multipolygon": shape, + "expansion_rate": row.expansion_rate, + "target_size": row.target_size, + "nprocs": self._nprocs, + } + ) def _apply_flow_limiters(self): - if self._method == 'fast': - raise NotImplementedError( - "This function does not suuport fast hfun method") + if self._method == "fast": + raise NotImplementedError("This function does not suuport fast hfun method") - raster_hfun_list = [ - i for i in self._hfun_list if isinstance(i, HfunRaster)] + raster_hfun_list = [i for i in self._hfun_list if isinstance(i, HfunRaster)] for in_idx, hfun in enumerate(raster_hfun_list): for src_idx, hmin, hmax, zmax, zmin in self._flow_lim_coll: if src_idx is not None and in_idx not in src_idx: continue if hmin is None: - hmin = self._size_info['hmin'] + hmin = self._size_info["hmin"] if hmax is None: - hmax = self._size_info['hmax'] + hmax = self._size_info["hmax"] hfun.add_subtidal_flow_limiter(hmin, hmax, zmax, zmin) - def _apply_const_val(self): - if self._method == 'fast': - raise NotImplementedError( - "This function does not suuport fast hfun method") + if self._method == "fast": + raise NotImplementedError("This function does not suuport fast hfun method") - raster_hfun_list = [ - i for i in self._hfun_list if isinstance(i, HfunRaster)] + raster_hfun_list = [i for i in self._hfun_list if isinstance(i, HfunRaster)] for in_idx, hfun in enumerate(raster_hfun_list): for (src_idx, ctr0, ctr1), const_val in self._const_val_contour_coll: if src_idx is not None and in_idx not in src_idx: continue level0 = None - level1 = None + level1 = None if ctr0 is not None: level0 = ctr0.level if ctr1 is not None: level1 = ctr1.level hfun.add_constant_value(const_val, level0, level1) - def _apply_patch(self, apply_to=None): - raster_hfun_list = [ - i for i in self._hfun_list if isinstance(i, HfunRaster)] + raster_hfun_list = [i for i in self._hfun_list if isinstance(i, HfunRaster)] if apply_to is None: - mesh_hfun_list = [ - i for i in self._hfun_list if isinstance(i, HfunMesh)] + mesh_hfun_list = [i for i in self._hfun_list if isinstance(i, HfunMesh)] if self._base_mesh and self._base_as_hfun: mesh_hfun_list.insert(0, self._base_mesh) apply_to = [*mesh_hfun_list, *raster_hfun_list] @@ -764,14 +732,10 @@ def _apply_patch(self, apply_to=None): for patch_defn, size_info in self._refine_patch_info_coll: shape, crs = patch_defn.get_multipolygon() if hfun.crs != crs: - transformer = Transformer.from_crs( - crs, hfun.crs, always_xy=True) - shape = ops.transform( - transformer.transform, shape) - - hfun.add_patch( - shape, nprocs=self._nprocs, **size_info) + transformer = Transformer.from_crs(crs, hfun.crs, always_xy=True) + shape = ops.transform(transformer.transform, shape) + hfun.add_patch(shape, nprocs=self._nprocs, **size_info) def _write_hfun_to_disk(self, out_path): @@ -805,19 +769,16 @@ def _write_hfun_to_disk(self, out_path): _logger.info("Removing bounds from hfun mesh...") for ibox in bbox_list: hfun_mesh = utils.clip_mesh_by_shape( - hfun_mesh, - ibox, - use_box_only=True, - fit_inside=True, - inverse=True) + hfun_mesh, ibox, use_box_only=True, fit_inside=True, inverse=True + ) if len(hfun_mesh.vert2) == 0: _logger.debug("Hfun ignored due to overlap") continue # Check hfun_mesh.value against hmin & hmax - hmin = self._size_info['hmin'] - hmax = self._size_info['hmax'] + hmin = self._size_info["hmin"] + hmax = self._size_info["hmax"] if hmin: hfun_mesh.value[hfun_mesh.value < hmin] = hmin if hmax: @@ -826,25 +787,23 @@ def _write_hfun_to_disk(self, out_path): mesh = Mesh(hfun_mesh) 
                 bbox_list.append(mesh.get_bbox(crs="EPSG:4326"))

                 file_counter = file_counter + 1
-                _logger.info(f'write mesh {file_counter} to file...')
-                file_path = out_dir / f'hfun_{pid}_{file_counter}.2dm'
-                mesh.write(file_path, format='2dm')
+                _logger.info(f"write mesh {file_counter} to file...")
+                file_path = out_dir / f"hfun_{pid}_{file_counter}.2dm"
+                mesh.write(file_path, format="2dm")
                 path_list.append(file_path)
-                _logger.info('Done writing 2dm file.')
+                _logger.info("Done writing 2dm file.")
                 del mesh
                 gc.collect()
         return path_list

-
-
     def _get_hfun_composite(self, hfun_path_list):
         collection = []
-        _logger.info('Reading 2dm hfun files...')
+        _logger.info("Reading 2dm hfun files...")
         start = time()
         for path in hfun_path_list:
-            collection.append(Mesh.open(path, crs='EPSG:4326'))
-        _logger.info(f'Reading 2dm hfun files took {time()-start}.')
+            collection.append(Mesh.open(path, crs="EPSG:4326"))
+        _logger.info(f"Reading 2dm hfun files took {time()-start}.")

         # NOTE: Overlaps are taken care of in the write stage
@@ -853,24 +812,22 @@ def _get_hfun_composite(self, hfun_path_list):
         value = []
         offset = 0
         for hfun in collection:
-            index.append(hfun.tria3['index'] + offset)
+            index.append(hfun.tria3["index"] + offset)
             coord.append(hfun.coord)
             value.append(hfun.value)
             offset += hfun.coord.shape[0]

         composite_hfun = jigsaw_msh_t()
-        composite_hfun.mshID = 'euclidean-mesh'
+        composite_hfun.mshID = "euclidean-mesh"
         composite_hfun.ndims = 2

         composite_hfun.vert2 = np.array(
-            [(coo, 0) for coo in np.vstack(coord)],
-            dtype=jigsaw_msh_t.VERT2_t)
+            [(coo, 0) for coo in np.vstack(coord)], dtype=jigsaw_msh_t.VERT2_t
+        )
         composite_hfun.tria3 = np.array(
-            [(idx, 0) for idx in np.vstack(index)],
-            dtype=jigsaw_msh_t.TRIA3_t)
-        composite_hfun.value = np.array(
-            np.vstack(value),
-            dtype=jigsaw_msh_t.REALS_t)
+            [(idx, 0) for idx in np.vstack(index)], dtype=jigsaw_msh_t.TRIA3_t
+        )
+        composite_hfun.value = np.array(np.vstack(value), dtype=jigsaw_msh_t.REALS_t)

         composite_hfun.crs = CRS.from_user_input("EPSG:4326")
@@ -882,14 +839,12 @@ def _get_hfun_composite(self, hfun_path_list):

         return composite_hfun

-
     def _create_big_raster(self, out_path):

         out_dir = Path(out_path)
-        out_rast = out_dir / 'big_raster.tif'
+        out_rast = out_dir / "big_raster.tif"

-        rast_hfun_list = [
-            i for i in self._hfun_list if isinstance(i, HfunRaster)]
+        rast_hfun_list = [i for i in self._hfun_list if isinstance(i, HfunRaster)]
         if len(rast_hfun_list) == 0:
             return None
@@ -897,11 +852,9 @@ def _create_big_raster(self, out_path):
         n_cell_lim = 0
         for hfun_in in rast_hfun_list:
             n_cell_lim = max(
-                hfun_in.raster.src.shape[0]
-                * hfun_in.raster.src.shape[1],
-                n_cell_lim)
-            all_bounds.append(
-                hfun_in.get_bbox(crs='EPSG:4326').bounds)
+                hfun_in.raster.src.shape[0] * hfun_in.raster.src.shape[1], n_cell_lim
+            )
+            all_bounds.append(hfun_in.get_bbox(crs="EPSG:4326").bounds)
         # 3 is just an arbitrary tolerance for memory limit calculations
         n_cell_lim = n_cell_lim * self._nprocs / 3
         all_bounds = np.array(all_bounds)
@@ -909,11 +862,9 @@ def _create_big_raster(self, out_path):
         x0, y0 = np.min(all_bounds[:, [0, 1]], axis=0)
         x1, y1 = np.max(all_bounds[:, [2, 3]], axis=0)

-        utm_crs = utils.estimate_bounds_utm(
-            (x0, y0, x1, y1), "EPSG:4326")
+        utm_crs = utils.estimate_bounds_utm((x0, y0, x1, y1), "EPSG:4326")
         assert utm_crs is not None
-        transformer = Transformer.from_crs(
-            'EPSG:4326', utm_crs, always_xy=True)
+        transformer = Transformer.from_crs("EPSG:4326", utm_crs, always_xy=True)

         box_epsg4326 = box(x0, y0, x1, y1)
         poly_utm = ops.transform(transformer.transform, Polygon(box_epsg4326))
@@ -923,7 +874,7 @@ def _create_big_raster(self, out_path):
         for hfun_in in rast_hfun_list:
             bnd1 = hfun_in.get_bbox(crs=utm_crs).bounds
             dim1 = np.max([bnd1[2] - bnd1[0], bnd1[3] - bnd1[1]])
-            bnd2 = hfun_in.get_bbox(crs='EPSG:4326').bounds
+            bnd2 = hfun_in.get_bbox(crs="EPSG:4326").bounds
             dim2 = np.max([bnd2[2] - bnd2[0], bnd2[3] - bnd2[1]])
             ratio = dim1 / dim2
             pixel_size_x = hfun_in.raster.src.transform[0] * ratio
@@ -932,42 +883,38 @@ def _create_big_raster(self, out_path):
             worst_res = np.max([worst_res, pixel_size_x, pixel_size_y])

         # TODO: What if no hmin? -> use smallest raster res!
-        g_hmin = self._size_info['hmin']
+        g_hmin = self._size_info["hmin"]
         res = np.max([g_hmin / 2, worst_res])
-        _logger.info(
-            f"Spatial resolution"
-            f" chosen: {res}, worst: {worst_res}")
+        _logger.info(f"Spatial resolution chosen: {res}, worst: {worst_res}")
         shape0 = int(np.ceil(abs(x1 - x0) / res))
         shape1 = int(np.ceil(abs(y1 - y0) / res))

-        approx = int(np.sqrt(n_cell_lim))
-        window_size = None #default of OCSMesh.raster.Raster
-        mem_lim = 0 # default of rasterio
+        approx = int(np.sqrt(n_cell_lim))
+        window_size = None  # default of OCSMesh.raster.Raster
+        mem_lim = 0  # default of rasterio
         if approx < max(shape0, shape1):
             window_size = np.min([shape0, shape1, approx])
             # Memory limit in MB
             mem_lim = n_cell_lim * np.float32(1).itemsize / 10e6

-
         # NOTE: Upper-left vs lower-left origin
         # (this only works for upper-left)
         transform = from_origin(x0 - res / 2, y1 + res / 2, res, res)
         rast_profile = {
-            'driver': 'GTiff',
-            'dtype': np.float32,
-            'width': shape0,
-            'height': shape1,
-            'crs': utm_crs,
-            'transform': transform,
-            'count': 1,
+            "driver": "GTiff",
+            "dtype": np.float32,
+            "width": shape0,
+            "height": shape1,
+            "crs": utm_crs,
+            "transform": transform,
+            "count": 1,
         }
-        with rasterio.open(str(out_rast), 'w', **rast_profile) as dst:
+        with rasterio.open(str(out_rast), "w", **rast_profile) as dst:
             # For places where a raster DEM is not provided, it's
             # assumed deep ocean for contouring purposes
             if window_size is not None:
-                write_wins = get_iter_windows(
-                    shape0, shape1, chunk_size=window_size)
+                write_wins = get_iter_windows(shape0, shape1, chunk_size=window_size)
                 for win in write_wins:
                     z = np.full((win.width, win.height), -99999, dtype=np.float32)
                     dst.write(z, 1, window=win)
@@ -977,7 +924,6 @@ def _create_big_raster(self, out_path):
                 dst.write(z, 1)
                 del z

-
             # Reproject if needed (for now only needed if constant
             # value levels or subtidal limiters are added)
             for in_idx, hfun in enumerate(rast_hfun_list):
@@ -1003,11 +949,10 @@ def _create_big_raster(self, out_path):
                         source=rasterio.band(hfun.raster.src, 1),
                         destination=rasterio.band(dst, 1),
                         resampling=Resampling.nearest,
-                        init_dest_nodata=False, # To avoid overwrite
+                        init_dest_nodata=False,  # To avoid overwrite
                         num_threads=self._nprocs,
-                        warp_mem_limit=mem_lim)
-
-
+                        warp_mem_limit=mem_lim,
+                    )

         return Raster(out_rast, chunk_size=window_size)
@@ -1021,10 +966,7 @@ def _apply_features_fast(self, big_raster):
             hfun_rast = HfunRaster(big_raster, **self._size_info)
             rast_hfun_list.append(hfun_rast)

-
-
-        mesh_hfun_list = [
-            i for i in self._hfun_list if isinstance(i, HfunMesh)]
+        mesh_hfun_list = [i for i in self._hfun_list if isinstance(i, HfunMesh)]
         if self._base_mesh and self._base_as_hfun:
             mesh_hfun_list.insert(0, self._base_mesh)
@@ -1041,7 +983,6 @@ def _apply_features_fast(self, big_raster):
         if hfun_rast:
             self._apply_constraints_fast(hfun_rast)

-
         return hfun_rast

     def _apply_flow_limiters_fast(self, big_hfun):
@@ -1049,9 +990,9 @@ def _apply_flow_limiters_fast(self, big_hfun):
         for src_idx, hmin, hmax, zmax, zmin in self._flow_lim_coll:
             # TODO: Account for source index
             if hmin is None:
-                hmin = self._size_info['hmin']
+                hmin = self._size_info["hmin"]
             if hmax is None:
-                hmax = self._size_info['hmax']
+                hmax = self._size_info["hmax"]

             # To avoid sharp gradient where no raster is projected
             if zmin is None:
@@ -1066,14 +1007,13 @@ def _apply_const_val_fast(self, big_hfun):
         for (src_idx, ctr0, ctr1), const_val in self._const_val_contour_coll:
             # TODO: Account for source index
             level0 = None
-            level1 = None 
+            level1 = None
             if ctr0 is not None:
                 level0 = ctr0.level
             if ctr1 is not None:
                 level1 = ctr1.level
             big_hfun.add_constant_value(const_val, level0, level1)

-
     def _apply_constraints_fast(self, big_hfun):

         constraint_list = []
@@ -1084,15 +1024,12 @@ def _apply_constraints_fast(self, big_hfun):
         if constraint_list:
             big_hfun.apply_constraints(constraint_list)

-
     def _get_hfun_composite_fast(self, big_hfun):

         # In fast method all DEM hfuns have more priority than all
         # other inputs
-        dem_hfun_list = [
-            i for i in self._hfun_list if isinstance(i, HfunRaster)]
-        nondem_hfun_list = [
-            i for i in self._hfun_list if not isinstance(i, HfunRaster)]
+        dem_hfun_list = [i for i in self._hfun_list if isinstance(i, HfunRaster)]
+        nondem_hfun_list = [i for i in self._hfun_list if not isinstance(i, HfunRaster)]

         epsg4326 = CRS.from_user_input("EPSG:4326")
@@ -1108,8 +1045,7 @@ def _get_hfun_composite_fast(self, big_hfun):
         # Calculate multipoly and clip big hfun
         big_cut_shape = None
         if big_hfun:
-            dem_gdf = gpd.GeoDataFrame(
-                geometry=dem_box_list, crs=epsg4326)
+            dem_gdf = gpd.GeoDataFrame(geometry=dem_box_list, crs=epsg4326)
             big_cut_shape = dem_gdf.unary_union
             big_msh_t = big_hfun.msh_t()
             if hasattr(big_msh_t, "crs"):
@@ -1117,14 +1053,11 @@ def _get_hfun_composite_fast(self, big_hfun):
                     utils.reproject(big_msh_t, epsg4326)

             big_msh_t = utils.clip_mesh_by_shape(
-                big_msh_t,
-                big_cut_shape,
-                use_box_only=False,
-                fit_inside=False)
-
+                big_msh_t, big_cut_shape, use_box_only=False, fit_inside=False
+            )

-            index.append(big_msh_t.tria3['index'] + offset)
-            coord.append(big_msh_t.vert2['coord'])
+            index.append(big_msh_t.tria3["index"] + offset)
+            coord.append(big_msh_t.vert2["coord"])
             value.append(big_msh_t.value)
             offset = offset + coord[-1].shape[0]
@@ -1141,10 +1074,8 @@ def _get_hfun_composite_fast(self, big_hfun):
             nondem_shape = utils.get_mesh_polygons(hfun.mesh.msh_t)
             if not epsg4326.equals(hfun.crs):
-                transformer = Transformer.from_crs(
-                    hfun.crs, epsg4326, always_xy=True)
-                nondem_shape = ops.transform(
-                    transformer.transform, nondem_shape)
+                transformer = Transformer.from_crs(hfun.crs, epsg4326, always_xy=True)
+                nondem_shape = ops.transform(transformer.transform, nondem_shape)

             # In fast method all DEM hfuns have more priority than all
             # other inputs
@@ -1154,7 +1085,8 @@ def _get_hfun_composite_fast(self, big_hfun):
                     big_cut_shape,
                     use_box_only=False,
                     fit_inside=True,
-                    inverse=True)
+                    inverse=True,
+                )

             for ishp in nondem_shape_list:
                 nondem_msh_t = utils.clip_mesh_by_shape(
@@ -1162,33 +1094,32 @@ def _get_hfun_composite_fast(self, big_hfun):
                     ishp,
                     use_box_only=False,
                     fit_inside=True,
-                    inverse=True)
+                    inverse=True,
+                )

             nondem_shape_list.append(nondem_shape)

-            index.append(nondem_msh_t.tria3['index'] + offset)
-            coord.append(nondem_msh_t.vert2['coord'])
+            index.append(nondem_msh_t.tria3["index"] + offset)
+            coord.append(nondem_msh_t.vert2["coord"])
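             # NOTE: the `+ offset` above re-bases each clipped mesh's
             # triangle indices into the concatenated vertex array;
             # `offset` grows below by the number of vertices appended.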
             value.append(nondem_msh_t.value)
             offset += coord[-1].shape[0]

         composite_hfun = jigsaw_msh_t()
-        composite_hfun.mshID = 'euclidean-mesh'
+        composite_hfun.mshID = "euclidean-mesh"
         composite_hfun.ndims = 2

         composite_hfun.vert2 = np.array(
-            [(coord, 0) for coord in np.vstack(coord)],
-            dtype=jigsaw_msh_t.VERT2_t)
+            [(coo, 0) for coo in np.vstack(coord)], dtype=jigsaw_msh_t.VERT2_t
+        )
         composite_hfun.tria3 = np.array(
-            [(index, 0) for index in np.vstack(index)],
-            dtype=jigsaw_msh_t.TRIA3_t)
-        composite_hfun.value = np.array(
-            np.vstack(value),
-            dtype=jigsaw_msh_t.REALS_t)
+            [(idx, 0) for idx in np.vstack(index)], dtype=jigsaw_msh_t.TRIA3_t
+        )
+        composite_hfun.value = np.array(np.vstack(value), dtype=jigsaw_msh_t.REALS_t)

         # TODO: Get user input for whether to force hmin and hmax on
         # final hfun (which includes non-raster and basemesh sizes)
-        hmin = self._size_info['hmin']
-        hmax = self._size_info['hmax']
+        hmin = self._size_info["hmin"]
+        hmax = self._size_info["hmax"]
         if hmin:
             composite_hfun.value[composite_hfun.value < hmin] = hmin
         if hmax:
diff --git a/ocsmesh/hfun/hfun.py b/ocsmesh/hfun/hfun.py
index fc8b3fb5..281b0df5 100644
--- a/ocsmesh/hfun/hfun.py
+++ b/ocsmesh/hfun/hfun.py
@@ -1,7 +1,7 @@
 from ocsmesh.hfun.base import BaseHfun
-from ocsmesh.hfun.raster import HfunRaster
-from ocsmesh.hfun.mesh import HfunMesh
 from ocsmesh.hfun.collector import HfunCollector
+from ocsmesh.hfun.mesh import HfunMesh
+from ocsmesh.hfun.raster import HfunRaster
 from ocsmesh.mesh.mesh import EuclideanMesh2D
 from ocsmesh.raster import Raster
@@ -17,7 +17,7 @@ def __new__(cls, hfun, **kwargs):
             hfun: Object used to define and compute mesh size function.
         """

-        if isinstance(hfun, Raster): # pylint: disable=R1705
+        if isinstance(hfun, Raster):  # pylint: disable=R1705
             return HfunRaster(hfun, **kwargs)

         elif isinstance(hfun, EuclideanMesh2D):
@@ -28,8 +28,9 @@ def __new__(cls, hfun, **kwargs):

         else:
             raise TypeError(
-                f'Argument hfun must be of type {BaseHfun} or a derived type, '
-                f'not type {type(hfun)}.')
+                f"Argument hfun must be of type {BaseHfun} or a derived type, "
+                f"not type {type(hfun)}."
+            )

     @staticmethod
     def is_valid_type(hfun_object):
diff --git a/ocsmesh/hfun/mesh.py b/ocsmesh/hfun/mesh.py
index d2ddeb2c..56b6a822 100644
--- a/ocsmesh/hfun/mesh.py
+++ b/ocsmesh/hfun/mesh.py
@@ -2,25 +2,24 @@
 import logging
 import operator
 from collections import defaultdict
-from typing import Union
-from multiprocessing import cpu_count, Pool
+from multiprocessing import Pool, cpu_count
 from time import time
+from typing import Union

-from scipy.spatial import cKDTree
-from jigsawpy import jigsaw_msh_t
 import numpy as np
+from jigsawpy import jigsaw_msh_t
 from pyproj import Transformer
+from scipy.spatial import cKDTree
 from shapely import ops
-from shapely.geometry import (
-    LineString, MultiLineString, Polygon, MultiPolygon)
+from shapely.geometry import LineString, MultiLineString, MultiPolygon, Polygon

-from ocsmesh.hfun.base import BaseHfun
-from ocsmesh.crs import CRS as CRSDescriptor
 from ocsmesh import utils
-
+from ocsmesh.crs import CRS as CRSDescriptor
+from ocsmesh.hfun.base import BaseHfun

 _logger = logging.getLogger(__name__)

+
 class HfunMesh(BaseHfun):

     _crs = CRSDescriptor()
@@ -33,15 +32,15 @@ def msh_t(self) -> jigsaw_msh_t:
         utm_crs = utils.estimate_mesh_utm(self.mesh.msh_t)
         if utm_crs is not None:
-            transformer = Transformer.from_crs(
-                self.crs, utm_crs, always_xy=True)
+            transformer = Transformer.from_crs(self.crs, utm_crs, always_xy=True)
             # TODO: This modifies the underlying mesh, is this
             # intended?
-            self.mesh.msh_t.vert2['coord'] = np.vstack(
+            self.mesh.msh_t.vert2["coord"] = np.vstack(
                 transformer.transform(
-                    self.mesh.msh_t.vert2['coord'][:, 0],
-                    self.mesh.msh_t.vert2['coord'][:, 1]
-                )).T
+                    self.mesh.msh_t.vert2["coord"][:, 0],
+                    self.mesh.msh_t.vert2["coord"][:, 1],
+                )
+            ).T
             self.mesh.msh_t.crs = utm_crs
             self._crs = utm_crs
@@ -49,28 +48,27 @@ def size_from_mesh(self):

-        '''
+        """
         Get size function values based on the mesh underlying
         this size function. This method overwrites the values
         in underlying msh_t.
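         The size at each vertex is taken to be the average length
         of the edges connected to that vertex.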
         Also note that, for the calculation, coordinates are projected
         to UTM, but the projected coordinates are discarded.
-        '''
+        """

         # Make sure it's in utm so that sizes are in meters
         hfun_msh = self.mesh.msh_t
-        coord = hfun_msh.vert2['coord']
+        coord = hfun_msh.vert2["coord"]

         transformer = None
         utm_crs = utils.estimate_mesh_utm(hfun_msh)
         if utm_crs is not None:
-            _logger.info('Projecting to utm...')
+            _logger.info("Projecting to utm...")

-            transformer = Transformer.from_crs(
-                self.crs, utm_crs, always_xy=True)
+            transformer = Transformer.from_crs(self.crs, utm_crs, always_xy=True)

         # Calculate length of all edges based on acquired coords
-        _logger.info('Getting length of edges...')
+        _logger.info("Getting length of edges...")
         len_dict = utils.calculate_edge_lengths(hfun_msh, transformer)

         # Calculate the mesh size by getting average of lengths
@@ -82,21 +80,23 @@ def size_from_mesh(self):
             for vidx in verts_idx:
                 vert_to_lens[vidx].append(edge_len)

-        _logger.info('Creating size value array for vertices...')
+        _logger.info("Creating size value array for vertices...")
         vert_value = np.array(
-            [np.average(vert_to_lens[i]) if i in vert_to_lens else 0
-             for i in range(coord.shape[0])])
+            [
+                np.average(vert_to_lens[i]) if i in vert_to_lens else 0
+                for i in range(coord.shape[0])
+            ]
+        )

         # NOTE: Modifying values of underlying mesh
         hfun_msh.value = vert_value.reshape(len(vert_value), 1)

-
     def add_patch(
-            self,
-            multipolygon: Union[MultiPolygon, Polygon],
-            expansion_rate: float = None,
-            target_size: float = None,
-            nprocs: int = None
+        self,
+        multipolygon: Union[MultiPolygon, Polygon],
+        expansion_rate: float = None,
+        target_size: float = None,
+        nprocs: int = None,
     ):

         # TODO: Add pool input support like add_feature for performance
@@ -104,8 +104,8 @@ def add_patch(
         # TODO: Support other shapes - call buffer(1) on non polygons(?)
         if not isinstance(multipolygon, (Polygon, MultiPolygon)):
             raise TypeError(
-                f"Wrong type \"{type(multipolygon)}\""
-                f" for multipolygon input.")
+                f'Wrong type "{type(multipolygon)}" for multipolygon input.'
+            )

         if isinstance(multipolygon, Polygon):
             multipolygon = MultiPolygon([multipolygon])
@@ -113,23 +113,23 @@ def add_patch(
         # Check nprocs
         nprocs = -1 if nprocs is None else nprocs
         nprocs = cpu_count() if nprocs == -1 else nprocs
-        _logger.debug(f'Using nprocs={nprocs}')
-
+        _logger.debug(f"Using nprocs={nprocs}")

         # check target size
         target_size = self.hmin if target_size is None else target_size
         if target_size is None:
             # TODO: Is this relevant for mesh type?
-            raise ValueError('Argument target_size must be specified if no '
-                             'global hmin has been set.')
+            raise ValueError(
+                "Argument target_size must be specified if no "
+                "global hmin has been set."
+            )
         if target_size <= 0:
             raise ValueError("Argument target_size must be greater than zero.")

         # For expansion_rate
         if expansion_rate is not None:
             exteriors = [ply.exterior for ply in multipolygon]
-            interiors = [
-                inter for ply in multipolygon for inter in ply.interiors]
+            interiors = [inter for ply in multipolygon for inter in ply.interiors]

             features = MultiLineString([*exteriors, *interiors])
             # pylint: disable=E1123, E1125
@@ -137,13 +137,15 @@ def add_patch(
                 feature=features,
                 expansion_rate=expansion_rate,
                 target_size=target_size,
-                nprocs=nprocs)
+                nprocs=nprocs,
+            )

-        coords = self.mesh.msh_t.vert2['coord']
+        coords = self.mesh.msh_t.vert2["coord"]
         values = self.mesh.msh_t.value

         verts_in = utils.get_verts_in_shape(
-            self.mesh.msh_t, shape=multipolygon, from_box=False)
+            self.mesh.msh_t, shape=multipolygon, from_box=False
+        )

         if len(verts_in):
             # NOTE: Don't continue, otherwise the final
@@ -153,8 +155,8 @@ def add_patch(
         # NOTE: unlike raster self.hmin is based on values of this
         # hfun before applying feature; it is ignored so that
         # the new self.hmin becomes equal to "target" specified
-#        if self.hmin is not None:
-#            values[np.where(values < self.hmin)] = self.hmin
+        # if self.hmin is not None:
+        #     values[np.where(values < self.hmin)] = self.hmin
         if self.hmax is not None:
             values[np.where(values > self.hmax)] = self.hmax
         values = np.minimum(self.mesh.msh_t.value, values)
@@ -164,13 +166,13 @@ def add_patch(
     @utils.add_pool_args
     def add_feature(
-            self,
-            feature: Union[LineString, MultiLineString],
-            expansion_rate: float,
-            target_size: float = None,
-            max_verts=200,
-            *, # kwarg-only comes after this
-            pool: Pool
+        self,
+        feature: Union[LineString, MultiLineString],
+        expansion_rate: float,
+        target_size: float = None,
+        max_verts=200,
+        *,  # kwarg-only comes after this
+        pool: Pool,
     ):
         # TODO: Partition features if they are too "long" which results in an
         # improvement for parallel pool. E.g. if a feature is too long, 1
@@ -178,8 +180,9 @@ def add_feature(

         if not isinstance(feature, (LineString, MultiLineString)):
             raise TypeError(
-                f'Argument feature must be of type {LineString} or '
-                f'{MultiLineString}, not type {type(feature)}.')
+                f"Argument feature must be of type {LineString} or "
+                f"{MultiLineString}, not type {type(feature)}."
+            )

         if isinstance(feature, LineString):
             feature = [feature]
@@ -190,23 +193,25 @@ def add_feature(
         # check target size
         target_size = self.hmin if target_size is None else target_size
         if target_size is None:
-            raise ValueError('Argument target_size must be specified if no '
-                             'global hmin has been set.')
+            raise ValueError(
+                "Argument target_size must be specified if no "
+                "global hmin has been set."
+            )
         if target_size <= 0:
             raise ValueError("Argument target_size must be greater than zero.")

         utm_crs = utils.estimate_mesh_utm(self.mesh.msh_t)

-        _logger.info('Repartitioning features...')
+        _logger.info("Repartitioning features...")
         start = time()
         res = pool.starmap(
             utils.repartition_features,
-            [(linestring, max_verts) for linestring in feature]
-            )
+            [(linestring, max_verts) for linestring in feature],
+        )
         feature = functools.reduce(operator.iconcat, res, [])
-        _logger.info(f'Repartitioning features took {time()-start}.')
+        _logger.info(f"Repartitioning features took {time()-start}.")

-        _logger.info('Resampling features on ...')
+        _logger.info("Resampling features...")
         start = time()

         # We don't want to recreate the same transformation
@@ -215,23 +220,21 @@ def add_feature(
         transformer = None
         if utm_crs is not None:
             start2 = time()
-            transformer = Transformer.from_crs(
-                self.crs, utm_crs, always_xy=True)
-            _logger.info(
-                f"Transform creation took {time() - start2:f}")
+            transformer = Transformer.from_crs(self.crs, utm_crs, always_xy=True)
+            _logger.info(f"Transform creation took {time() - start2:f}")
             start2 = time()
             feature = [
                 ops.transform(transformer.transform, linestring)
-                for linestring in feature]
-            _logger.info(
-                f"Transform apply took {time() - start2:f}")
+                for linestring in feature
+            ]
+            _logger.info(f"Transform apply took {time() - start2:f}")

         transformed_features = pool.starmap(
             utils.transform_linestring,
-            [(linestring, target_size) for linestring in feature]
+            [(linestring, target_size) for linestring in feature],
         )
-        _logger.info(f'Resampling features took {time()-start}.')
-        _logger.info('Concatenating points...')
+        _logger.info(f"Resampling features took {time()-start}.")
+        _logger.info("Concatenating points...")
         start = time()
         points = []
         for geom in transformed_features:
@@ -240,36 +243,37 @@ def add_feature(
             elif isinstance(geom, MultiLineString):
                 for linestring in geom:
                     points.extend(linestring.coords)
-        _logger.info(f'Point concatenation took {time()-start}.')
+        _logger.info(f"Point concatenation took {time()-start}.")

-        _logger.info('Generating KDTree...')
+        _logger.info("Generating KDTree...")
         start = time()
         tree = cKDTree(np.array(points))
-        _logger.info(f'Generating KDTree took {time()-start}.')
+        _logger.info(f"Generating KDTree took {time()-start}.")

         # We call msh_t() so that it also takes care of utm
         # transformation
-        xy = self.msh_t().vert2['coord']
+        xy = self.msh_t().vert2["coord"]

-        _logger.info(f'transforming points took {time()-start}.')
-        _logger.info('querying kdtree...')
+        _logger.info(f"Transforming points took {time()-start}.")
+        _logger.info("Querying KDTree...")
         start = time()
         if self.hmax:
             r = (self.hmax - target_size) / (expansion_rate * target_size)
             near_dists, neighbors = tree.query(
-                xy, workers=pool._processes, distance_upper_bound=r)
+                xy, workers=pool._processes, distance_upper_bound=r
+            )
             distances = r * np.ones(len(xy))
             mask = np.logical_not(np.isinf(near_dists))
             distances[mask] = near_dists[mask]
         else:
             distances, _ = tree.query(xy, workers=pool._processes)
-        _logger.info(f'querying kdtree took {time()-start}.')
-        values = expansion_rate*target_size*distances + target_size
+        _logger.info(f"Querying KDTree took {time()-start}.")
+        values = expansion_rate * target_size * distances + target_size

         # NOTE: unlike raster self.hmin is based on values of this
         # hfun before applying feature; it is ignored so that
         # the new self.hmin becomes equal to "target" specified
-#        if self.hmin is not None:
-#            values[np.where(values < self.hmin)] = self.hmin
+        # if self.hmin is not None:
+        #     values[np.where(values < self.hmin)] = self.hmin
         if self.hmax is not None:
             values[np.where(values > self.hmax)] = self.hmax
         values = np.minimum(self.mesh.msh_t.value.ravel(), values)
diff --git a/ocsmesh/hfun/raster.py b/ocsmesh/hfun/raster.py
index fc8f8a2e..34fcfe04 100644
--- a/ocsmesh/hfun/raster.py
+++ b/ocsmesh/hfun/raster.py
@@ -1,35 +1,37 @@
 import functools
 import gc
 import logging
-from multiprocessing import cpu_count, Pool
 import operator
 import tempfile
-from time import time
-from typing import Union, List
-from contextlib import ExitStack
 import warnings
+from contextlib import ExitStack
+from multiprocessing import Pool, cpu_count
+from time import time
+from typing import List, Union

-from jigsawpy import jigsaw_msh_t, jigsaw_jig_t
-from jigsawpy import libsaw
 import numpy as np
-from pyproj import CRS, Transformer
 import rasterio
+from jigsawpy import jigsaw_jig_t, jigsaw_msh_t, libsaw
+from pyproj import CRS, Transformer
 from scipy.spatial import cKDTree
 from shapely import ops
 from shapely.geometry import (
-    LineString, MultiLineString, box, GeometryCollection,
-    Polygon, MultiPolygon)
+    GeometryCollection,
+    LineString,
+    MultiLineString,
+    MultiPolygon,
+    Polygon,
+    box,
+)

+from ocsmesh import utils
+from ocsmesh.features.constraint import TopoConstConstraint, TopoFuncConstraint
+from ocsmesh.geom.shapely import PolygonGeom
 from ocsmesh.hfun.base import BaseHfun
 from ocsmesh.raster import Raster, get_iter_windows
-from ocsmesh.geom.shapely import PolygonGeom
-from ocsmesh.features.constraint import (
-    TopoConstConstraint, TopoFuncConstraint)
-from ocsmesh import utils

-# supress feather warning
-warnings.filterwarnings(
-    'ignore', message='.*initial implementation of Parquet.*')
+# suppress feather warning
+warnings.filterwarnings("ignore", message=".*initial implementation of Parquet.*")

 _logger = logging.getLogger(__name__)
@@ -39,47 +41,47 @@ def wrapped(obj, *args, **kwargs):
         rv = method(obj, *args, **kwargs)
         obj.apply_added_constraints()
         return rv
+
     return wrapped


 class HfunInputRaster:
-
     def __set__(self, obj, raster: Raster):
         if not isinstance(raster, Raster):
-            raise TypeError(f'Argument raster must be of type {Raster}, not '
-                            f'type {type(raster)}.')
+            raise TypeError(
+                f"Argument raster must be of type {Raster}, not "
+                f"type {type(raster)}."
+            )

         # init output raster file
         with ExitStack() as stack:
             src = stack.enter_context(rasterio.open(raster.tmpfile))
             if raster.chunk_size is not None:
                 windows = get_iter_windows(
-                    src.width, src.height, chunk_size=raster.chunk_size)
+                    src.width, src.height, chunk_size=raster.chunk_size
+                )
             else:
-                windows = [rasterio.windows.Window(
-                    0, 0, src.width, src.height)]
+                windows = [rasterio.windows.Window(0, 0, src.width, src.height)]

             meta = src.meta.copy()
-            meta.update({'driver': 'GTiff', 'dtype': np.float32})
-            dst = stack.enter_context(
-                obj.modifying_raster(use_src_meta=False, **meta))
+            meta.update({"driver": "GTiff", "dtype": np.float32})
+            dst = stack.enter_context(obj.modifying_raster(use_src_meta=False, **meta))
             for window in windows:
                 values = src.read(window=window).astype(np.float32)
                 values[:] = np.finfo(np.float32).max
                 dst.write(values, window=window)

-        obj.__dict__['raster'] = raster
+        obj.__dict__["raster"] = raster
         obj._chunk_size = raster.chunk_size
         obj._overlap = raster.overlap

     def __get__(self, obj, val) -> Raster:
-        return obj.__dict__['raster']
+        return obj.__dict__["raster"]


 class FeatureCache:
-
     def __get__(self, obj, val):
-        features = obj.__dict__.get('features')
+        features = obj.__dict__.get("features")
         if features is None:
             features = {}
@@ -89,8 +91,9 @@ class HfunRaster(BaseHfun, Raster):
     _raster = HfunInputRaster()
     _feature_cache = FeatureCache()

-    def __init__(self, raster: Raster, hmin: float = None, hmax: float = None,
-                 verbosity=0):
+    def __init__(
+        self, raster: Raster, hmin: float = None, hmax: float = None, verbosity=0
+    ):
         self._xy_cache = {}

         # NOTE: unlike Raster, HfunRaster has no "path" set
@@ -100,17 +103,18 @@ def __init__(self, raster: Raster, hmin: float = None, hmax: float = None,
         self._verbosity = int(verbosity)
         self._constraints = []

-
-    def msh_t(self, window: rasterio.windows.Window = None,
-              marche: bool = False, verbosity=None) -> jigsaw_msh_t:
-
+    def msh_t(
+        self,
+        window: rasterio.windows.Window = None,
+        marche: bool = False,
+        verbosity=None,
+    ) -> jigsaw_msh_t:
         if window is None:
             iter_windows = list(self.iter_windows())
         else:
             iter_windows = [window]

-
         output_mesh = jigsaw_msh_t()
         output_mesh.ndims = +2
         output_mesh.mshID = "euclidean-mesh"
@@ -122,11 +126,10 @@ def msh_t(self, window: rasterio.windows.Window = None,

             x0, y0, x1, y1 = self.get_window_bounds(win)

-            utm_crs = utils.estimate_bounds_utm(
-                (x0, y0, x1, y1), self.crs)
+            utm_crs = utils.estimate_bounds_utm((x0, y0, x1, y1), self.crs)

             if utm_crs is not None:
-                hfun.mshID = 'euclidean-mesh'
+                hfun.mshID = "euclidean-mesh"
                 # If these 3 objects (vert2, tria3, value) don't fit into
                 # memory, then the raster needs to be chunked. We need to
                 # implement auto-chunking.
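# --- editor's note ---------------------------------------------------------
# The next hunk only re-wraps the vectorized tria3 construction. As a
# reviewer reference, here is a minimal loop-based sketch of the indexing it
# implements: one triangle per raster cell, over a win.width x win.height
# vertex grid numbered row-major (x fastest). `grid_tria3_indices` is an
# illustrative name and is not part of ocsmesh.
#
#     import numpy as np
#     from jigsawpy import jigsaw_msh_t
#
#     def grid_tria3_indices(dim1: int, dim2: int) -> np.ndarray:
#         # Same triangles, in the same order, as tria3.ravel() below.
#         tris = np.empty(((dim1 - 1) * (dim2 - 1), 3),
#                         dtype=jigsaw_msh_t.INDEX_t)
#         k = 0
#         for i in range(dim1 - 1):      # arange(...).reshape(dim1 - 1, 1)
#             for j in range(dim2 - 1):  # helper_ary axis (cumsum(1) - 1)
#                 tris[k, 0] = i + j * dim1              # index[:, :, 0]
#                 tris[k, 1] = (i + 1) + j * dim1        # index[:, :, 1]
#                 tris[k, 2] = (i + 1) + (j + 1) * dim1  # index[:, :, 2]
#                 k += 1
#         return tris
# ---------------------------------------------------------------------------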
@@ -142,109 +145,108 @@ def msh_t(self, window: rasterio.windows.Window = None,
                 right = ygrid[:, 1]
                 del ygrid

-                _logger.info('Building hfun.tria3...')
+                _logger.info("Building hfun.tria3...")
                 dim1 = win.width
                 dim2 = win.height

-                tria3 = np.empty(
-                    ((dim1 - 1), (dim2 - 1)),
-                    dtype=jigsaw_msh_t.TRIA3_t)
+                tria3 = np.empty(((dim1 - 1), (dim2 - 1)), dtype=jigsaw_msh_t.TRIA3_t)
                 index = tria3["index"]
-                helper_ary = np.ones(
-                    ((dim1 - 1), (dim2 - 1)),
-                    dtype=jigsaw_msh_t.INDEX_t).cumsum(1) - 1
+                helper_ary = (
+                    np.ones(
+                        ((dim1 - 1), (dim2 - 1)), dtype=jigsaw_msh_t.INDEX_t
+                    ).cumsum(1)
+                    - 1
+                )
                 index[:, :, 0] = np.arange(
-                    0, dim1 - 1,
-                    dtype=jigsaw_msh_t.INDEX_t).reshape(dim1 - 1, 1)
+                    0, dim1 - 1, dtype=jigsaw_msh_t.INDEX_t
+                ).reshape(dim1 - 1, 1)
                 index[:, :, 0] += (helper_ary + 0) * dim1

                 index[:, :, 1] = np.arange(
-                    1, dim1 - 0,
-                    dtype=jigsaw_msh_t.INDEX_t).reshape(dim1 - 1, 1)
+                    1, dim1 - 0, dtype=jigsaw_msh_t.INDEX_t
+                ).reshape(dim1 - 1, 1)
                 index[:, :, 1] += (helper_ary + 0) * dim1

                 index[:, :, 2] = np.arange(
-                    1, dim1 - 0,
-                    dtype=jigsaw_msh_t.INDEX_t).reshape(dim1 - 1, 1)
+                    1, dim1 - 0, dtype=jigsaw_msh_t.INDEX_t
+                ).reshape(dim1 - 1, 1)
                 index[:, :, 2] += (helper_ary + 1) * dim1

                 hfun.tria3 = tria3.ravel()
                 del tria3, helper_ary
                 gc.collect()
-                _logger.info('Done building hfun.tria3...')
+                _logger.info("Done building hfun.tria3...")

                 # BUILD VERT2_t. this one comes from the memcache array
-                _logger.info('Building hfun.vert2...')
+                _logger.info("Building hfun.vert2...")
                 hfun.vert2 = np.empty(
-                    win.width*win.height,
-                    dtype=jigsaw_msh_t.VERT2_t)
-                hfun.vert2['coord'] = np.array(
-                    self.get_xy_memcache(win, utm_crs))
-                _logger.info('Done building hfun.vert2...')
+                    win.width * win.height, dtype=jigsaw_msh_t.VERT2_t
+                )
+                hfun.vert2["coord"] = np.array(self.get_xy_memcache(win, utm_crs))
+                _logger.info("Done building hfun.vert2...")

                 # Build REALS_t: this one comes from hfun raster
-                _logger.info('Building hfun.value...')
+                _logger.info("Building hfun.value...")
                 hfun.value = np.array(
-                    self.get_values(window=win, band=1).flatten().reshape(
-                        (win.width*win.height, 1)),
-                    dtype=jigsaw_msh_t.REALS_t)
-                _logger.info('Done building hfun.value...')
+                    self.get_values(window=win, band=1)
+                    .flatten()
+                    .reshape((win.width * win.height, 1)),
+                    dtype=jigsaw_msh_t.REALS_t,
+                )
+                _logger.info("Done building hfun.value...")

                 # Build Geom
-                _logger.info('Building initial geom...')
-                transformer = Transformer.from_crs(
-                    self.crs, utm_crs, always_xy=True)
+                _logger.info("Building initial geom...")
+                transformer = Transformer.from_crs(self.crs, utm_crs, always_xy=True)
                 bbox = [
                     *[(x, left[0]) for x in bottom],
                     *[(bottom[-1], y) for y in reversed(right)],
                     *[(x, right[-1]) for x in reversed(top)],
-                    *[(bottom[0], y) for y in reversed(left)]]
+                    *[(bottom[0], y) for y in reversed(left)],
+                ]
                 geom = PolygonGeom(
-                    ops.transform(transformer.transform, Polygon(bbox)),
-                    utm_crs
+                    ops.transform(transformer.transform, Polygon(bbox)), utm_crs
                 ).msh_t()
-                _logger.info('Building initial geom done.')
-                kwargs = {'method': 'nearest'}
+                _logger.info("Building initial geom done.")
+                kwargs = {"method": "nearest"}

             else:
-                _logger.info('Forming initial hmat (euclidean-grid).')
+                _logger.info("Forming initial hmat (euclidean-grid).")
                 start = time()
-                hfun.mshID = 'euclidean-grid'
+                hfun.mshID = "euclidean-grid"
                 hfun.xgrid = np.array(
-                    np.array(self.get_x(window=win)),
-                    dtype=jigsaw_msh_t.REALS_t)
+                    np.array(self.get_x(window=win)), dtype=jigsaw_msh_t.REALS_t
+                )
                 hfun.ygrid = np.array(
-                    np.flip(self.get_y(window=win)),
-                    dtype=jigsaw_msh_t.REALS_t)
+                    np.flip(self.get_y(window=win)), dtype=jigsaw_msh_t.REALS_t
+                )
                 hfun.value = np.array(
                     np.flipud(self.get_values(window=win, band=1)),
-                    dtype=jigsaw_msh_t.REALS_t)
-                kwargs = {'kx': 1, 'ky': 1}  # type: ignore[dict-item]
+                    dtype=jigsaw_msh_t.REALS_t,
+                )
+                kwargs = {"kx": 1, "ky": 1}  # type: ignore[dict-item]
                 geom = PolygonGeom(box(x0, y1, x1, y0), self.crs).msh_t()
-                _logger.info(f'Initial hfun generation took {time()-start}.')
+                _logger.info(f"Initial hfun generation took {time()-start}.")

-            _logger.info('Configuring jigsaw...')
+            _logger.info("Configuring jigsaw...")
             opts = jigsaw_jig_t()

             # additional configuration options
             opts.mesh_dims = +2
-            opts.hfun_scal = 'absolute'
+            opts.hfun_scal = "absolute"
             # no need to optimize for size function generation
             opts.optm_tria = False

-            opts.hfun_hmin = np.min(hfun.value) if self.hmin is None else \
-                self.hmin
-            opts.hfun_hmax = np.max(hfun.value) if self.hmax is None else \
-                self.hmax
-            opts.verbosity = self.verbosity if verbosity is None else \
-                verbosity
+            opts.hfun_hmin = np.min(hfun.value) if self.hmin is None else self.hmin
+            opts.hfun_hmax = np.max(hfun.value) if self.hmax is None else self.hmax
+            opts.verbosity = self.verbosity if verbosity is None else verbosity

             # mesh of hfun window
             window_mesh = jigsaw_msh_t()
-            window_mesh.mshID = 'euclidean-mesh'
+            window_mesh.mshID = "euclidean-mesh"
             window_mesh.ndims = +2

             if marche is True:
@@ -262,53 +264,53 @@ def msh_t(self, window: rasterio.windows.Window = None,
                 window_mesh.crs = utm_crs
                 utils.reproject(window_mesh, self.crs)

-
             # combine with results from previous windows
             output_mesh.tria3 = np.append(
                 output_mesh.tria3,
-                np.array([((idx + len(output_mesh.vert2)), tag)
-                          for idx, tag in window_mesh.tria3],
-                         dtype=jigsaw_msh_t.TRIA3_t),
-                axis=0)
+                np.array(
+                    [
+                        ((idx + len(output_mesh.vert2)), tag)
+                        for idx, tag in window_mesh.tria3
+                    ],
+                    dtype=jigsaw_msh_t.TRIA3_t,
+                ),
+                axis=0,
+            )
             output_mesh.vert2 = np.append(
                 output_mesh.vert2,
-                np.array(list(window_mesh.vert2),
-                         dtype=jigsaw_msh_t.VERT2_t),
-                axis=0)
+                np.array(list(window_mesh.vert2), dtype=jigsaw_msh_t.VERT2_t),
+                axis=0,
+            )
             if output_mesh.value.size:
                 output_mesh.value = np.append(
                     output_mesh.value,
-                    np.array(list(window_mesh.value),
-                             dtype=jigsaw_msh_t.REALS_t),
-                    axis=0)
+                    np.array(list(window_mesh.value), dtype=jigsaw_msh_t.REALS_t),
+                    axis=0,
+                )
             else:
                 output_mesh.value = np.array(
-                    list(window_mesh.value),
-                    dtype=jigsaw_msh_t.REALS_t)
+                    list(window_mesh.value), dtype=jigsaw_msh_t.REALS_t
+                )

         # NOTE: In the end we need to return in a CRS that
         # uses meters as units. UTM based on the center of
         # the bounding box of the hfun is used
-        utm_crs = utils.estimate_bounds_utm(
-            self.get_bbox().bounds, self.crs)
+        utm_crs = utils.estimate_bounds_utm(self.get_bbox().bounds, self.crs)
         if utm_crs is not None:
-            transformer = Transformer.from_crs(
-                self.crs, utm_crs, always_xy=True)
-            output_mesh.vert2['coord'] = np.vstack(
+            transformer = Transformer.from_crs(self.crs, utm_crs, always_xy=True)
+            output_mesh.vert2["coord"] = np.vstack(
                 transformer.transform(
-                    output_mesh.vert2['coord'][:, 0],
-                    output_mesh.vert2['coord'][:, 1]
-                )).T
+                    output_mesh.vert2["coord"][:, 0], output_mesh.vert2["coord"][:, 1]
+                )
+            ).T
             output_mesh.crs = utm_crs

         return output_mesh

-
     def apply_added_constraints(self):
         self.apply_constraints(self._constraints)

-
     def apply_constraints(self, constraint_list):

         # TODO: Validate conflicting constraints
@@ -322,22 +324,22 @@ def apply_constraints(self, constraint_list):
                 hfun_values = self.get_values(band=1, window=window)
                 rast_values = self.raster.get_values(band=1, window=window)

-
                 # Get locations
                 utm_crs = utils.estimate_bounds_utm(
-                    self.get_window_bounds(window), self.crs)
+                    self.get_window_bounds(window), self.crs
+                )
                 if utm_crs is not None:
                     xy = self.get_xy_memcache(window, utm_crs)
                 else:
                     xy = self.get_xy(window)

-
                 # Apply custom constraints
-                _logger.debug(f'Processing window {i+1}/{tot}.')
+                _logger.debug(f"Processing window {i+1}/{tot}.")
                 for constraint in constraint_list:
                     hfun_values = constraint.apply(
-                        rast_values, hfun_values, locations=xy)
+                        rast_values, hfun_values, locations=xy
+                    )

                 # Apply global constraints
                 if self.hmin is not None:
@@ -349,53 +351,52 @@ def apply_constraints(self, constraint_list):
                 del rast_values
                 gc.collect()

-
     @_apply_constraints
     def add_topo_bound_constraint(
-            self,
-            value,
-            upper_bound=np.inf,
-            lower_bound=-np.inf,
-            value_type: str = 'min',
-            rate=0.01):
+        self,
+        value,
+        upper_bound=np.inf,
+        lower_bound=-np.inf,
+        value_type: str = "min",
+        rate=0.01,
+    ):

         # TODO: Validate conflicting constraints, right now last one wins
-        self._constraints.append(TopoConstConstraint(
-            value, upper_bound, lower_bound, value_type, rate))
-
+        self._constraints.append(
+            TopoConstConstraint(value, upper_bound, lower_bound, value_type, rate)
+        )

     @_apply_constraints
     def add_topo_func_constraint(
-            self,
-            func=lambda i: i / 2.0,
-            upper_bound=np.inf,
-            lower_bound=-np.inf,
-            value_type: str = 'min',
-            rate=0.01):
-
+        self,
+        func=lambda i: i / 2.0,
+        upper_bound=np.inf,
+        lower_bound=-np.inf,
+        value_type: str = "min",
+        rate=0.01,
+    ):
         # TODO: Validate conflicting constraints, right now last one wins
-        self._constraints.append(TopoFuncConstraint(
-            func, upper_bound, lower_bound, value_type, rate))
-
-
+        self._constraints.append(
+            TopoFuncConstraint(func, upper_bound, lower_bound, value_type, rate)
+        )

     @_apply_constraints
     def add_patch(
-            self,
-            multipolygon: Union[MultiPolygon, Polygon],
-            expansion_rate: float = None,
-            target_size: float = None,
-            nprocs: int = None
-    ):
+        self,
+        multipolygon: Union[MultiPolygon, Polygon],
+        expansion_rate: float = None,
+        target_size: float = None,
+        nprocs: int = None,
+    ):

         # TODO: Add pool input support like add_feature for performance

         # TODO: Support other shapes - call buffer(1) on non polygons(?)
         if not isinstance(multipolygon, (Polygon, MultiPolygon)):
             raise TypeError(
-                f"Wrong type \"{type(multipolygon)}\""
-                f" for multipolygon input.")
+                f'Wrong type "{type(multipolygon)}" for multipolygon input.'
+            )

         if isinstance(multipolygon, Polygon):
             multipolygon = MultiPolygon([multipolygon])
@@ -403,23 +404,23 @@ def add_patch(
         # Check nprocs
         nprocs = -1 if nprocs is None else nprocs
         nprocs = cpu_count() if nprocs == -1 else nprocs
-        _logger.debug(f'Using nprocs={nprocs}')
-
+        _logger.debug(f"Using nprocs={nprocs}")

         # check target size
         target_size = self.hmin if target_size is None else target_size
         if target_size is None:
             # pylint: disable=W0101
-            raise ValueError('Argument target_size must be specified if no '
-                             'global hmin has been set.')
+            raise ValueError(
+                "Argument target_size must be specified if no "
+                "global hmin has been set."
+            )
         if target_size <= 0:
             raise ValueError("Argument target_size must be greater than zero.")

         # For expansion_rate
         if expansion_rate is not None:
             exteriors = [ply.exterior for ply in multipolygon]
-            interiors = [
-                inter for ply in multipolygon for inter in ply.interiors]
+            interiors = [inter for ply in multipolygon for inter in ply.interiors]

             features = MultiLineString([*exteriors, *interiors])
             # pylint: disable=E1123, E1125
@@ -427,24 +428,25 @@ def add_patch(
                 feature=features,
                 expansion_rate=expansion_rate,
                 target_size=target_size,
-                nprocs=nprocs)
+                nprocs=nprocs,
+            )

-        with self.modifying_raster(driver='GTiff') as dst:
+        with self.modifying_raster(driver="GTiff") as dst:
             iter_windows = list(self.iter_windows())
             tot = len(iter_windows)
             for i, window in enumerate(iter_windows):
-                _logger.debug(f'Processing window {i+1}/{tot}.')
+                _logger.debug(f"Processing window {i+1}/{tot}.")
                 # NOTE: We should NOT transform polygon, user just
                 # needs to make sure input polygon has the same CRS
                 # as the hfun (we don't calculate distances in this
                 # method)
-                _logger.info('Creating mask from shape ...')
+                _logger.info("Creating mask from shape...")
                 start = time()
                 try:
                     mask, _, _ = rasterio.mask.raster_geometry_mask(
-                        self.src, multipolygon,
-                        all_touched=True, invert=True)
+                        self.src, multipolygon, all_touched=True, invert=True
+                    )
                     mask = mask[rasterio.windows.window_index(window)]

                 except ValueError:
@@ -452,11 +454,9 @@ def add_patch(
                     # shapes then it throws ValueError, instead of
                     # checking for intersection, if there's a value
                     # error we assume there's no overlap
-                    _logger.debug(
-                        'Polygons don\'t intersect with the raster')
+                    _logger.debug("Polygons don't intersect with the raster")
                     continue
-                _logger.info(
-                    f'Creating mask from shape took {time()-start}.')
+                _logger.info(f"Creating mask from shape took {time()-start}.")

                 values = self.get_values(window=window).copy()
                 if mask.any():
@@ -469,21 +469,20 @@ def add_patch(
                         values[np.where(values > self.hmax)] = self.hmax
                     values = np.minimum(self.get_values(window=window), values)

-                _logger.info('Write array to file...')
+                _logger.info("Write array to file...")
                 start = time()
                 dst.write_band(1, values, window=window)
-                _logger.info(f'Write array to file took {time()-start}.')
-
+                _logger.info(f"Write array to file took {time()-start}.")

     @_apply_constraints
     def add_contour(
-            self,
-            level: Union[List[float], float],
-            expansion_rate: float,
-            target_size: float = None,
-            nprocs: int = None,
+        self,
+        level: Union[List[float], float],
+        expansion_rate: float,
+        target_size: float = None,
+        nprocs: int = None,
     ):
-        """ See https://outline.com/YU7nSM for an excellent explanation about
+        """See https://outline.com/YU7nSM for an excellent explanation about
         tree algorithms.
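
         Contours are extracted from the underlying raster at the given
         level(s) and then applied as line features via add_feature.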
""" if not isinstance(level, list): @@ -503,51 +502,47 @@ def add_contour( contours.append(_cont) if len(contours) == 0: - _logger.info('No contours found!') + _logger.info("No contours found!") return contours = MultiLineString(contours) - _logger.info('Adding contours as features...') + _logger.info("Adding contours as features...") # pylint: disable=E1123, E1125 - self.add_feature( - contours, expansion_rate, target_size, - nprocs=nprocs) + self.add_feature(contours, expansion_rate, target_size, nprocs=nprocs) @_apply_constraints def add_channel( - self, - level: float = 0, - width: float = 1000, # in meters - target_size: float = 200, - expansion_rate: float = None, - nprocs: int = None, - tolerance: Union[None, float] = None + self, + level: float = 0, + width: float = 1000, # in meters + target_size: float = 200, + expansion_rate: float = None, + nprocs: int = None, + tolerance: Union[None, float] = None, ): channels = self.raster.get_channels( - level=level, width=width, tolerance=tolerance) + level=level, width=width, tolerance=tolerance + ) if channels is None: return - self.add_patch( - channels, expansion_rate, target_size, nprocs) - - + self.add_patch(channels, expansion_rate, target_size, nprocs) @_apply_constraints @utils.add_pool_args def add_feature( - self, - feature: Union[LineString, MultiLineString], - expansion_rate: float, - target_size: float = None, - max_verts=200, - *, # kwarg-only comes after this - pool: Pool, + self, + feature: Union[LineString, MultiLineString], + expansion_rate: float, + target_size: float = None, + max_verts=200, + *, # kwarg-only comes after this + pool: Pool, ): - '''Adds a linear distance size function constraint to the mesh. + """Adds a linear distance size function constraint to the mesh. Arguments: feature: shapely.geometryLineString or MultiLineString @@ -559,7 +554,7 @@ def add_feature( TODO: Consider using BallTree with haversine or Vincenty metrics instead of a locally projected window. - ''' + """ # TODO: Partition features if they are too "long" which results in an # improvement for parallel pool. E.g. if a feature is too long, 1 @@ -567,8 +562,9 @@ def add_feature( if not isinstance(feature, (LineString, MultiLineString)): raise TypeError( - f'Argument feature must be of type {LineString} or ' - f'{MultiLineString}, not type {type(feature)}.') + f"Argument feature must be of type {LineString} or " + f"{MultiLineString}, not type {type(feature)}." + ) if isinstance(feature, LineString): feature = [feature] @@ -579,28 +575,31 @@ def add_feature( # check target size target_size = self.hmin if target_size is None else target_size if target_size is None: - raise ValueError('Argument target_size must be specified if no ' - 'global hmin has been set.') + raise ValueError( + "Argument target_size must be specified if no " + "global hmin has been set." 
+ ) if target_size <= 0: raise ValueError("Argument target_size must be greater than zero.") - with self.modifying_raster(driver='GTiff') as dst: + with self.modifying_raster(driver="GTiff") as dst: iter_windows = list(self.iter_windows()) tot = len(iter_windows) for i, window in enumerate(iter_windows): - _logger.debug(f'Processing window {i+1}/{tot}.') + _logger.debug(f"Processing window {i+1}/{tot}.") utm_crs = utils.estimate_bounds_utm( - self.get_window_bounds(window), self.crs) + self.get_window_bounds(window), self.crs + ) - _logger.info('Repartitioning features...') + _logger.info("Repartitioning features...") start = time() res = pool.starmap( utils.repartition_features, - [(linestring, max_verts) for linestring in feature] - ) + [(linestring, max_verts) for linestring in feature], + ) win_feature = functools.reduce(operator.iconcat, res, []) - _logger.info(f'Repartitioning features took {time()-start}.') + _logger.info(f"Repartitioning features took {time()-start}.") - _logger.info('Resampling features on ...') + _logger.info("Resampling features on ...") start = time() # We don't want to recreate the same transformation @@ -610,22 +609,22 @@ def add_feature( if utm_crs is not None: start2 = time() transformer = Transformer.from_crs( - self.src.crs, utm_crs, always_xy=True) - _logger.info( - f"Transform creation took {time() - start2:f}") + self.src.crs, utm_crs, always_xy=True + ) + _logger.info(f"Transform creation took {time() - start2:f}") start2 = time() win_feature = [ ops.transform(transformer.transform, linestring) - for linestring in win_feature] - _logger.info( - f"Transform apply took {time() - start2:f}") + for linestring in win_feature + ] + _logger.info(f"Transform apply took {time() - start2:f}") transformed_features = pool.starmap( utils.transform_linestring, - [(linestring, target_size) for linestring in win_feature] + [(linestring, target_size) for linestring in win_feature], ) - _logger.info(f'Resampling features took {time()-start}.') - _logger.info('Concatenating points...') + _logger.info(f"Resampling features took {time()-start}.") + _logger.info("Concatenating points...") start = time() points = [] for geom in transformed_features: @@ -634,72 +633,72 @@ def add_feature( elif isinstance(geom, MultiLineString): for linestring in geom: points.extend(linestring.coords) - _logger.info(f'Point concatenation took {time()-start}.') + _logger.info(f"Point concatenation took {time()-start}.") - _logger.info('Generating KDTree...') + _logger.info("Generating KDTree...") start = time() tree = cKDTree(np.array(points)) - _logger.info(f'Generating KDTree took {time()-start}.') + _logger.info(f"Generating KDTree took {time()-start}.") if utm_crs is not None: xy = self.get_xy_memcache(window, utm_crs) else: xy = self.get_xy(window) - _logger.info(f'Transforming points took {time()-start}.') - _logger.info('Querying KDTree...') + _logger.info(f"Transforming points took {time()-start}.") + _logger.info("Querying KDTree...") start = time() if self.hmax: r = (self.hmax - target_size) / (expansion_rate * target_size) near_dists, neighbors = tree.query( - xy, workers=pool._processes, distance_upper_bound=r) + xy, workers=pool._processes, distance_upper_bound=r + ) distances = r * np.ones(len(xy)) mask = np.logical_not(np.isinf(near_dists)) distances[mask] = near_dists[mask] else: distances, _ = tree.query(xy, workers=pool._processes) - _logger.info(f'Querying KDTree took {time()-start}.') - values = expansion_rate*target_size*distances + target_size + _logger.info(f"Querying 
KDTree took {time()-start}.") + values = expansion_rate * target_size * distances + target_size values = values.reshape(window.height, window.width).astype( - self.dtype(1)) + self.dtype(1) + ) if self.hmin is not None: values[np.where(values < self.hmin)] = self.hmin if self.hmax is not None: values[np.where(values > self.hmax)] = self.hmax values = np.minimum(self.get_values(window=window), values) - _logger.info('Write array to file...') + _logger.info("Write array to file...") start = time() dst.write_band(1, values, window=window) - _logger.info(f'Write array to file took {time()-start}.') + _logger.info(f"Write array to file took {time()-start}.") def get_xy_memcache(self, window, dst_crs): - tmpfile = self._xy_cache.get(f'{window}{dst_crs}') + tmpfile = self._xy_cache.get(f"{window}{dst_crs}") if tmpfile is None: - _logger.info('Transform points to local CRS...') - transformer = Transformer.from_crs( - self.src.crs, dst_crs, always_xy=True) + _logger.info("Transform points to local CRS...") + transformer = Transformer.from_crs(self.src.crs, dst_crs, always_xy=True) # pylint: disable=R1732 tmpfile = tempfile.NamedTemporaryFile() xy = self.get_xy(window) - fp = np.memmap(tmpfile, dtype='float32', mode='w+', shape=xy.shape) - fp[:] = np.vstack( - transformer.transform(xy[:, 0], xy[:, 1])).T - _logger.info('Saving values to memcache...') + fp = np.memmap(tmpfile, dtype="float32", mode="w+", shape=xy.shape) + fp[:] = np.vstack(transformer.transform(xy[:, 0], xy[:, 1])).T + _logger.info("Saving values to memcache...") fp.flush() - _logger.info('Done!') - self._xy_cache[f'{window}{dst_crs}'] = tmpfile + _logger.info("Done!") + self._xy_cache[f"{window}{dst_crs}"] = tmpfile return fp[:] - _logger.info('Loading values from memcache...') - return np.memmap(tmpfile, dtype='float32', mode='r', - shape=((window.width*window.height), 2))[:] + _logger.info("Loading values from memcache...") + return np.memmap( + tmpfile, + dtype="float32", + mode="r", + shape=((window.width * window.height), 2), + )[:] @_apply_constraints def add_subtidal_flow_limiter( - self, - hmin=None, - hmax=None, - upper_bound=None, - lower_bound=None + self, hmin=None, hmax=None, upper_bound=None, lower_bound=None ): hmin = float(hmin) if hmin is not None else hmin @@ -712,16 +711,15 @@ def add_subtidal_flow_limiter( for i, window in enumerate(iter_windows): - _logger.debug(f'Processing window {i+1}/{tot}.') + _logger.debug(f"Processing window {i+1}/{tot}.") x0, y0, x1, y1 = self.get_window_bounds(window) - utm_crs = utils.estimate_bounds_utm( - (x0, y0, x1, y1), self.crs) + utm_crs = utils.estimate_bounds_utm((x0, y0, x1, y1), self.crs) if utm_crs is not None: transformer = Transformer.from_crs( - self.crs, utm_crs, always_xy=True) - (x0, x1), (y0, y1) = transformer.transform( - [x0, x1], [y0, y1]) + self.crs, utm_crs, always_xy=True + ) + (x0, x1), (y0, y1) = transformer.transform([x0, x1], [y0, y1]) dx = np.diff(np.linspace(x0, x1, window.width))[0] dy = np.diff(np.linspace(y0, y1, window.height))[0] else: @@ -732,19 +730,17 @@ def add_subtidal_flow_limiter( with warnings.catch_warnings(): # in case self._src.values is a masked array warnings.simplefilter("ignore", category=RuntimeWarning) - dh = np.sqrt(dx**2 + dy**2) - dh = np.ma.masked_equal(dh, 0.) 
- hfun_values = np.abs((1./3.)*(topobathy/dh)) + dh = np.sqrt(dx ** 2 + dy ** 2) + dh = np.ma.masked_equal(dh, 0.0) + hfun_values = np.abs((1.0 / 3.0) * (topobathy / dh)) # values = values.filled(np.max(values)) if upper_bound is not None: idxs = np.where(topobathy > upper_bound) - hfun_values[idxs] = self.get_values( - band=1, window=window)[idxs] + hfun_values[idxs] = self.get_values(band=1, window=window)[idxs] if lower_bound is not None: idxs = np.where(topobathy < lower_bound) - hfun_values[idxs] = self.get_values( - band=1, window=window)[idxs] + hfun_values[idxs] = self.get_values(band=1, window=window)[idxs] if hmin is not None: hfun_values[np.where(hfun_values < hmin)] = hmin @@ -758,17 +754,14 @@ def add_subtidal_flow_limiter( hfun_values[np.where(hfun_values > self._hmax)] = self._hmax hfun_values = np.minimum( - self.get_values(band=1, window=window), - hfun_values).astype( - self.dtype(1)) + self.get_values(band=1, window=window), hfun_values + ).astype(self.dtype(1)) dst.write_band(1, hfun_values, window=window) @_apply_constraints def add_constant_value(self, value, lower_bound=None, upper_bound=None): - lower_bound = -float('inf') if lower_bound is None \ - else float(lower_bound) - upper_bound = float('inf') if upper_bound is None \ - else float(upper_bound) + lower_bound = -float("inf") if lower_bound is None else float(lower_bound) + upper_bound = float("inf") if upper_bound is None else float(upper_bound) with self.modifying_raster() as dst: @@ -777,15 +770,20 @@ def add_constant_value(self, value, lower_bound=None, upper_bound=None): for i, window in enumerate(iter_windows): - _logger.debug(f'Processing window {i+1}/{tot}.') + _logger.debug(f"Processing window {i+1}/{tot}.") hfun_values = self.get_values(band=1, window=window) rast_values = self.raster.get_values(band=1, window=window) - hfun_values[np.where(np.logical_and( - rast_values > lower_bound, - rast_values < upper_bound))] = value + hfun_values[ + np.where( + np.logical_and( + rast_values > lower_bound, rast_values < upper_bound + ) + ) + ] = value hfun_values = np.minimum( self.get_values(band=1, window=window), - hfun_values.astype(self.dtype(1))) + hfun_values.astype(self.dtype(1)), + ) dst.write_band(1, hfun_values, window=window) del rast_values gc.collect() @@ -820,15 +818,9 @@ def transform_point(x, y, src_crs, utm_crs): return transformer.transform(x, y) -def transform_polygon( - polygon: Polygon, - src_crs: CRS = None, - utm_crs: CRS = None -): +def transform_polygon(polygon: Polygon, src_crs: CRS = None, utm_crs: CRS = None): if utm_crs is not None: - transformer = Transformer.from_crs( - src_crs, utm_crs, always_xy=True) + transformer = Transformer.from_crs(src_crs, utm_crs, always_xy=True) - polygon = ops.transform( - transformer.transform, polygon) + polygon = ops.transform(transformer.transform, polygon) return polygon diff --git a/ocsmesh/interp.py b/ocsmesh/interp.py index 223fe70a..db10c758 100644 --- a/ocsmesh/interp.py +++ b/ocsmesh/interp.py @@ -2,24 +2,25 @@ """ CLI interface for interpolating rasters into a mesh. 
""" -import sys import argparse import pathlib import shutil +import sys import tempfile from functools import lru_cache from multiprocessing import Pool -import numpy as np import fiona +import numpy as np import requests -from tqdm import tqdm +from geoalchemy2.shape import from_shape from matplotlib.path import Path # type: ignore[import] +from pyproj import CRS, Transformer from scipy.interpolate import RectBivariateSpline # , griddata -from geoalchemy2.shape import from_shape from shapely.geometry import box from shapely.ops import transform -from pyproj import CRS, Transformer +from tqdm import tqdm + from ocsmesh import Mesh, Raster, db @@ -33,24 +34,20 @@ def _mesh_interpolate_worker(args): yi = raster.get_y(window) zi = raster.get_values(window=window) f = RectBivariateSpline( - xi, np.flip(yi), np.fliplr(zi).T, - bbox=[ - np.min(xi), - np.max(xi), - np.min(yi), - np.max(yi)], + xi, + np.flip(yi), + np.fliplr(zi).T, + bbox=[np.min(xi), np.max(xi), np.min(yi), np.max(yi)], kx=3, ky=3, - s=0 + s=0, ) idxs = np.where( np.logical_and( - np.logical_and( - np.min(xi) < coords[:, 0], - np.max(xi) > coords[:, 0]), - np.logical_and( - np.min(yi) < coords[:, 1], - np.max(yi) > coords[:, 1])))[0] + np.logical_and(np.min(xi) < coords[:, 0], np.max(xi) > coords[:, 0]), + np.logical_and(np.min(yi) < coords[:, 1], np.max(yi) > coords[:, 1]), + ) + )[0] values = f.ev(coords[idxs, 0], coords[idxs, 1]) results.append((idxs, values)) @@ -58,7 +55,6 @@ def _mesh_interpolate_worker(args): class Interp: - def __init__(self, args): self._args = args @@ -71,22 +67,19 @@ def main(self): # write output to file self.mesh.write( - self._args.output_mesh_path, - overwrite=self._args.overwrite, - fmt='gr3' - ) + self._args.output_mesh_path, overwrite=self._args.overwrite, fmt="gr3" + ) self.mesh.write( - self._args.output_mesh_path + '.2dm', + self._args.output_mesh_path + ".2dm", overwrite=self._args.overwrite, - fmt='2dm' - ) + fmt="2dm", + ) def _main_serial(self): if self._args.use_anti_aliasing is True: self._initial_values = self.mesh.values.copy() - for tile in ( - tqdm(self._rasters) if self._args.verbose else self._rasters): + for tile in tqdm(self._rasters) if self._args.verbose else self._rasters: raster = Raster(tile) self.mesh.interpolate(raster) @@ -101,25 +94,31 @@ def _main_parallel(self): chunk_size = self._args.chunk_size tmpfile = tempfile.NamedTemporaryFile() fp = np.memmap( - tmpfile.name, - dtype='float32', - mode='w+', - shape=self.mesh.coords.shape - ) + tmpfile.name, dtype="float32", mode="w+", shape=self.mesh.coords.shape + ) fp[:] = self.mesh.coords[:] with Pool(processes=self._args.nprocs) as pool: if self._args.verbose: - res = list(tqdm(pool.imap( - _mesh_interpolate_worker, - [(fp, raster, self.mesh.crs, chunk_size) - for raster in self._rasters] - ), total=len(self._rasters))) + res = list( + tqdm( + pool.imap( + _mesh_interpolate_worker, + [ + (fp, raster, self.mesh.crs, chunk_size) + for raster in self._rasters + ], + ), + total=len(self._rasters), + ) + ) else: res = pool.map( _mesh_interpolate_worker, - [(fp, raster, self.mesh.crs, chunk_size) - for raster in self._rasters] - ) + [ + (fp, raster, self.mesh.crs, chunk_size) + for raster in self._rasters + ], + ) del tmpfile output = np.full(self.mesh.values.shape, np.nan) for _ in res: @@ -135,28 +134,31 @@ def _main_parallel(self): def mesh(self): return Mesh.open( pathlib.Path(self._args.input_mesh_path), - CRS.from_user_input(self._args.crs) - ) + CRS.from_user_input(self._args.crs), + ) def _resolve_aliasing(self, 
raster): xy = self.mesh.get_xy(crs=raster.crs) rbbox = raster.bbox - idxs = np.where(np.logical_and( - np.logical_and(xy[:, 0] >= rbbox.xmin, xy[:, 0] <= rbbox.xmax), - np.logical_and(xy[:, 1] >= rbbox.ymin, xy[:, 1] <= rbbox.ymax) - )) - - zero_cross = np.sign( - self.mesh._values[idxs]) - np.sign(self._initial_values[idxs]) - if np.any(zero_cross == 0.): - _idxs = np.where(zero_cross != 0.) + idxs = np.where( + np.logical_and( + np.logical_and(xy[:, 0] >= rbbox.xmin, xy[:, 0] <= rbbox.xmax), + np.logical_and(xy[:, 1] >= rbbox.ymin, xy[:, 1] <= rbbox.ymax), + ) + ) + + zero_cross = np.sign(self.mesh._values[idxs]) - np.sign( + self._initial_values[idxs] + ) + if np.any(zero_cross == 0.0): + _idxs = np.where(zero_cross != 0.0) if self._args.anti_aliasing_method == "reuse": self.mesh._values[idxs][_idxs] = self._initial_values[idxs][_idxs] elif self._args.anti_aliasing_method == "fv": all_rings = [] for rings in self.mesh.index_ring_collection.values(): - all_rings.extend([rings['exterior'], *rings['interiors']]) + all_rings.extend([rings["exterior"], *rings["interiors"]]) for i, idx in enumerate(idxs): if zero_cross[i] == 0: @@ -172,20 +174,18 @@ def _resolve_aliasing(self, raster): # find midpoint between neighbors for neigh in self.mesh.node_neighbors[idx]: vertices.append( - ((x0 + xy[neigh, 0])/2, (y0 + xy[neigh, 1])/2)) + ((x0 + xy[neigh, 0]) / 2, (y0 + xy[neigh, 1]) / 2) + ) # compute centroids of neighbors elements = self.mesh.triangulation.triangles[ np.where( np.any( - np.isin( - self.mesh.triangulation.triangles, - idx - ), - axis=1 - ) + np.isin(self.mesh.triangulation.triangles, idx), + axis=1, ) - ] + ) + ] cx = np.sum(self.mesh.x[elements], axis=1) / 3 cy = np.sum(self.mesh.y[elements], axis=1) / 3 for j in range(elements.shape[0]): @@ -200,12 +200,14 @@ def _resolve_aliasing(self, raster): np.logical_and( np.logical_and( xy_in[:, 0] >= np.min(path.vertices[:, 0]), - xy_in[:, 0] <= np.max(path.vertices[:, 0]) - ), + xy_in[:, 0] <= np.max(path.vertices[:, 0]), + ), np.logical_and( xy_in[:, 1] >= np.min(path.vertices[:, 1]), - xy_in[:, 1] <= np.max(path.vertices[:, 1])) - )) + xy_in[:, 1] <= np.max(path.vertices[:, 1]), + ), + ) + ) ridxs_mask = path.contains_points(xy_in[ridxs]) rvalues = raster.values.flatten() rvalues = rvalues[ridxs][np.where(ridxs_mask)] @@ -214,11 +216,11 @@ def _resolve_aliasing(self, raster): elif self._initial_values[idx] > 0: self.mesh._values[i] = np.max(rvalues) else: - raise Exception('unreachable') + raise Exception("unreachable") else: - msg = 'duck-type error for anti aliasing method ' - msg += f'{self._args.anti_aliasing_method}' + msg = "duck-type error for anti aliasing method " + msg += f"{self._args.anti_aliasing_method}" raise Exception(msg) def _expand_tile_index(self, path): @@ -226,47 +228,43 @@ def _expand_tile_index(self, path): raster_paths = [] with fiona.open(path) as src: for feature in src: - url = feature['properties']['URL'] + url = feature["properties"]["URL"] # Check if raster is in database res = self._session.query(db.TileIndexRasters).get(url) if res is None: tmpfile = request_raster_from_url(url, self._args.verbose) self._put_raster_in_cache(url, tmpfile) res = self._session.query(db.TileIndexRasters).get(url) - raster_paths.append(self._cache / 'data' / res.name) + raster_paths.append(self._cache / "data" / res.name) return raster_paths def _put_raster_in_cache(self, url, tmpfile): - print('debug:_put_raster_in_cache()') - datadir = self._cache / 'data' + print("debug:_put_raster_in_cache()") + datadir = self._cache / 
"data" datadir.mkdir(exist_ok=True) - target_path = datadir / url.split('/')[-1] + target_path = datadir / url.split("/")[-1] if not target_path.is_file(): shutil.copyfile(tmpfile.name, target_path) - # self._validate_raster_local( - # Raster(target_path), tmpraster.md5 - # os.copyfile() - # tgtraster.save(target_path) + # self._validate_raster_local( + # Raster(target_path), tmpraster.md5 + # os.copyfile() + # tgtraster.save(target_path) raster = Raster(target_path) bbox = raster.bbox # pylint: disable=no-member - geom = box( - bbox.xmin, - bbox.ymin, - bbox.xmax, - bbox.ymax - ) + geom = box(bbox.xmin, bbox.ymin, bbox.xmax, bbox.ymax) geom = transform_polygon(geom, raster.crs, 4326) - # md5 = raster.md5 - self._session.add(db.TileIndexRasters( - geom=from_shape( - geom, - srid=4326 - ), - url=url, - name=target_path.name, - md5=raster.md5)) + # md5 = raster.md5 + self._session.add( + db.TileIndexRasters( + geom=from_shape(geom, srid=4326), + url=url, + name=target_path.name, + md5=raster.md5, + ) + ) self._session.commit() + # return target_path, raster.m # breakpoint() @@ -288,19 +286,19 @@ def _rasters(self): @property @lru_cache(maxsize=None) def _pyenv(self): - return pathlib.Path("/".join(sys.executable.split('/')[:-2])) + return pathlib.Path("/".join(sys.executable.split("/")[:-2])) @property @lru_cache(maxsize=None) def _cache(self): - cache = self._pyenv / '.cache' + cache = self._pyenv / ".cache" cache.mkdir(exist_ok=True) return cache @property @lru_cache(maxsize=None) def _session(self): - return db.spatialite_session(self._cache / 'index.db', echo=False) + return db.spatialite_session(self._cache / "index.db", echo=False) def polygon_sort(corners): @@ -321,7 +319,7 @@ def polygon_sort(corners): def check_if_uri_is_tile_index(uri): try: - fiona.open(uri, 'r') + fiona.open(uri, "r") return True except fiona.errors.DriverError: return False @@ -330,20 +328,21 @@ def check_if_uri_is_tile_index(uri): def validate_tile_index(path): with fiona.open(path) as src: for feature in src: - url = feature['properties'].get("URL") + url = feature["properties"].get("URL") if url is None: raise AttributeError( f"No 'URL' entry for feature with id {feature['id']} " - f"on file {path}") + f"on file {path}" + ) def request_raster_from_url(url, verbose=False): - """ returns :class:`tempfile.NamedTemporaryFile` object """ + """returns :class:`tempfile.NamedTemporaryFile` object""" # pylint: disable=R1732 tmpfile = tempfile.NamedTemporaryFile() - with open(tmpfile.name, 'wb') as f: + with open(tmpfile.name, "wb") as f: response = requests.get(url, stream=True) - total = response.headers.get('content-length') + total = response.headers.get("content-length") if total is None: f.write(response.content) @@ -351,16 +350,16 @@ def request_raster_from_url(url, verbose=False): downloaded = 0 total = int(total) for data in response.iter_content( - chunk_size=max(int(total/1000), 1024*1024)): + chunk_size=max(int(total / 1000), 1024 * 1024) + ): downloaded += len(data) f.write(data) - done = int(50*downloaded/total) + done = int(50 * downloaded / total) if verbose: - sys.stdout.write( - '\r[{}{}]'.format('â–ˆ' * done, '.' * (50-done))) + sys.stdout.write("\r[{}{}]".format("â–ˆ" * done, "." 
* (50 - done))) sys.stdout.flush() if verbose: - sys.stdout.write('\n') + sys.stdout.write("\n") return tmpfile @@ -368,52 +367,58 @@ def transform_polygon(polygon, src_crs, dst_crs): src_crs = CRS.from_user_input(src_crs) dst_crs = CRS.from_user_input(dst_crs) if not dst_crs.equals(src_crs): - transformer = Transformer.from_crs( - src_crs, dst_crs, always_xy=True) + transformer = Transformer.from_crs(src_crs, dst_crs, always_xy=True) polygon = transform(transformer.transform, polygon) return polygon def validate_raster_local(raster, md5): if raster.md5 != md5: - raise Exception(f'Checksum mismatch for path {str(raster.path)}') + raise Exception(f"Checksum mismatch for path {str(raster.path)}") def parse_args(): parser = argparse.ArgumentParser() - parser.add_argument('input_mesh_path', help="Path to input mesh.") - parser.add_argument('output_mesh_path', help="Path to output mesh.") + parser.add_argument("input_mesh_path", help="Path to input mesh.") + parser.add_argument("output_mesh_path", help="Path to output mesh.") parser.add_argument( - 'DEM', nargs="+", + "DEM", + nargs="+", help="List of at least one input DEM(s). " "These will be interpolated in the same order as " "they are given. Recommended is to pass them in order of lowest " - "priority to highest priority." - ) + "priority to highest priority.", + ) parser.add_argument( - "--overwrite", action="store_true", + "--overwrite", + action="store_true", help="Used in case the output_mesh_path exists and the user wants to " - "allow overwrite.") + "allow overwrite.", + ) parser.add_argument( - "--nprocs", type=int, help="Total number of processors to use. This " + "--nprocs", + type=int, + help="Total number of processors to use. This " "algorithm can make use of virtual cores, so this value is not " "restricted to the number of physical cores or the amount of DEM's " - "to be interpolated.") + "to be interpolated.", + ) parser.add_argument( - "--chunk-size", type=int, help="Useful when passing large rasters that" + "--chunk-size", + type=int, + help="Useful when passing large rasters that" " do not fit in memory, it will subdivide the rasters into boxes " " of maximum pixel size chunk-size x chunk-size. If your job runs out " - "of memory, try using --chunk-size=3000") + "of memory, try using --chunk-size=3000", + ) parser.add_argument( - "--crs", - help="Input mesh CRS. Output will have the same CRS as the input.") + "--crs", help="Input mesh CRS. Output will have the same CRS as the input." 
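+ # Parsed downstream with pyproj's CRS.from_user_input, so strings like "EPSG:4326" work.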
+ ) parser.add_argument("--verbose", action="store_true") parser.add_argument("--use-anti-aliasing", action="store_true") parser.add_argument( - "--anti-aliasing-method", - default="reuse", - choices=['reuse', 'fv'] - ) + "--anti-aliasing-method", default="reuse", choices=["reuse", "fv"] + ) return parser.parse_args() @@ -421,5 +426,5 @@ def main(): Interp(parse_args()).main() -if __name__ == '__main__': +if __name__ == "__main__": main() diff --git a/ocsmesh/mesh/__init__.py b/ocsmesh/mesh/__init__.py index 342887d5..9cbcabf2 100644 --- a/ocsmesh/mesh/__init__.py +++ b/ocsmesh/mesh/__init__.py @@ -1,2 +1,3 @@ from ocsmesh.mesh.mesh import Mesh # type: ignore[attr-defined] + __all__ = ["Mesh"] diff --git a/ocsmesh/mesh/base.py b/ocsmesh/mesh/base.py index 23c1e1ce..74dc5768 100644 --- a/ocsmesh/mesh/base.py +++ b/ocsmesh/mesh/base.py @@ -1,14 +1,13 @@ class BaseMesh: - @property def msh_t(self): return self._msh_t @property def coord(self): - if self.msh_t.ndims == 2: # pylint: disable=R1705 - return self.msh_t.vert2['coord'] + if self.msh_t.ndims == 2: # pylint: disable=R1705 + return self.msh_t.vert2["coord"] elif self.msh_t.ndims == 3: - return self.msh_t.vert3['coord'] + return self.msh_t.vert3["coord"] - raise ValueError(f'Unhandled mesh dimensions {self.msh_t.ndims}.') + raise ValueError(f"Unhandled mesh dimensions {self.msh_t.ndims}.") diff --git a/ocsmesh/mesh/mesh.py b/ocsmesh/mesh/mesh.py index e477ba9d..4127b764 100644 --- a/ocsmesh/mesh/mesh.py +++ b/ocsmesh/mesh/mesh.py @@ -1,37 +1,33 @@ -from functools import lru_cache import logging -from multiprocessing import Pool, cpu_count import os import pathlib -from typing import Union, List -from collections import defaultdict import warnings +from collections import defaultdict +from functools import lru_cache +from multiprocessing import Pool, cpu_count +from typing import List, Union import geopandas as gpd -from jigsawpy import jigsaw_msh_t, savemsh, loadmsh, savevtk +import matplotlib.pyplot as plt +import numpy as np +from jigsawpy import jigsaw_msh_t, loadmsh, savemsh, savevtk from matplotlib.path import Path from matplotlib.transforms import Bbox from matplotlib.tri import Triangulation -import matplotlib.pyplot as plt -import numpy as np -from pyproj import CRS, Transformer -from scipy.interpolate import ( - RectBivariateSpline, RegularGridInterpolator) -from shapely.geometry import ( - LineString, box, Polygon, MultiPolygon) -from shapely.ops import polygonize, linemerge - - from ocsmesh import utils -from ocsmesh.raster import Raster from ocsmesh.mesh.base import BaseMesh from ocsmesh.mesh.parsers import grd, sms2dm +from ocsmesh.raster import Raster +from pyproj import CRS, Transformer +from scipy.interpolate import RectBivariateSpline, RegularGridInterpolator +from shapely.geometry import LineString, MultiPolygon, Polygon, box +from shapely.ops import linemerge, polygonize _logger = logging.getLogger(__name__) -class Rings: - def __init__(self, mesh: 'EuclideanMesh'): +class Rings: + def __init__(self, mesh: "EuclideanMesh"): self.mesh = mesh @lru_cache(maxsize=1) @@ -42,30 +38,25 @@ def __call__(self): data = [] bnd_id = 0 for poly in polys: - data.append({ - "geometry": poly.exterior, - "bnd_id": bnd_id, - "type": 'exterior' - }) + data.append( + {"geometry": poly.exterior, "bnd_id": bnd_id, "type": "exterior"} + ) for interior in poly.interiors: - data.append({ - "geometry": interior, - "bnd_id": bnd_id, - "type": 'interior' - }) + data.append( + {"geometry": interior, "bnd_id": bnd_id, "type": "interior"} + ) bnd_id 
= bnd_id + 1 return gpd.GeoDataFrame(data, crs=self.mesh.crs) def exterior(self): - return self().loc[self()['type'] == 'exterior'] + return self().loc[self()["type"] == "exterior"] def interior(self): - return self().loc[self()['type'] == 'interior'] + return self().loc[self()["type"] == "interior"] class Edges: - - def __init__(self, mesh: 'EuclideanMesh'): + def __init__(self, mesh: "EuclideanMesh"): self.mesh = mesh @lru_cache(maxsize=1) @@ -74,22 +65,24 @@ def __call__(self) -> gpd.GeoDataFrame: for ring in self.mesh.hull.rings().itertuples(): coords = ring.geometry.coords for i in range(1, len(coords)): - data.append({ - "geometry": LineString([coords[i-1], coords[i]]), - "bnd_id": ring.bnd_id, - "type": ring.type}) + data.append( + { + "geometry": LineString([coords[i - 1], coords[i]]), + "bnd_id": ring.bnd_id, + "type": ring.type, + } + ) return gpd.GeoDataFrame(data, crs=self.mesh.crs) def exterior(self): - return self().loc[self()['type'] == 'exterior'] + return self().loc[self()["type"] == "exterior"] def interior(self): - return self().loc[self()['type'] == 'interior'] + return self().loc[self()["type"] == "interior"] class Hull: - - def __init__(self, mesh: 'EuclideanMesh'): + def __init__(self, mesh: "EuclideanMesh"): self.mesh = mesh self.rings = Rings(mesh) self.edges = Edges(mesh) @@ -97,41 +90,51 @@ def __init__(self, mesh: 'EuclideanMesh'): @lru_cache(maxsize=1) def __call__(self): data = [] - for bnd_id in np.unique(self.rings()['bnd_id'].tolist()): + for bnd_id in np.unique(self.rings()["bnd_id"].tolist()): exterior = self.rings().loc[ - (self.rings()['bnd_id'] == bnd_id) & - (self.rings()['type'] == 'exterior')] + (self.rings()["bnd_id"] == bnd_id) + & (self.rings()["type"] == "exterior") + ] interiors = self.rings().loc[ - (self.rings()['bnd_id'] == bnd_id) & - (self.rings()['type'] == 'interior')] - data.append({ + (self.rings()["bnd_id"] == bnd_id) + & (self.rings()["type"] == "interior") + ] + data.append( + { "geometry": Polygon( exterior.iloc[0].geometry.coords, - [row.geometry.coords for _, row - in interiors.iterrows()]), - "bnd_id": bnd_id - }) + [row.geometry.coords for _, row in interiors.iterrows()], + ), + "bnd_id": bnd_id, + } + ) return gpd.GeoDataFrame(data, crs=self.mesh.crs) def exterior(self): data = [] - for exterior in self.rings().loc[ - self.rings()['type'] == 'exterior'].itertuples(): + for exterior in ( + self.rings().loc[self.rings()["type"] == "exterior"].itertuples() + ): data.append({"geometry": Polygon(exterior.geometry.coords)}) return gpd.GeoDataFrame(data, crs=self.mesh.crs) def interior(self): data = [] - for interior in self.rings().loc[ - self.rings()['type'] == 'interior'].itertuples(): + for interior in ( + self.rings().loc[self.rings()["type"] == "interior"].itertuples() + ): data.append({"geometry": Polygon(interior.geometry.coords)}) return gpd.GeoDataFrame(data, crs=self.mesh.crs) def implode(self) -> gpd.GeoDataFrame: return gpd.GeoDataFrame( - {"geometry": MultiPolygon([polygon.geometry for polygon - in self().itertuples()])}, - crs=self.mesh.crs) + { + "geometry": MultiPolygon( + [polygon.geometry for polygon in self().itertuples()] + ) + }, + crs=self.mesh.crs, + ) def multipolygon(self) -> MultiPolygon: mp = self.implode().iloc[0].geometry @@ -140,18 +143,13 @@ def multipolygon(self) -> MultiPolygon: return mp def triangulation(self): - triangles = self.mesh.msh_t.tria3['index'].tolist() - for quad in self.mesh.msh_t.quad4['index']: - triangles.extend([ - [quad[0], quad[1], quad[3]], - [quad[1], quad[2], quad[3]] - ]) + 
triangles = self.mesh.msh_t.tria3["index"].tolist() + for quad in self.mesh.msh_t.quad4["index"]: + triangles.extend([[quad[0], quad[1], quad[3]], [quad[1], quad[2], quad[3]]]) return Triangulation(self.mesh.coord[:, 0], self.mesh.coord[:, 1], triangles) - class Nodes: - def __init__(self, mesh: "EuclideanMesh"): self.mesh = mesh self._id_to_index = None @@ -159,7 +157,7 @@ def __init__(self, mesh: "EuclideanMesh"): @lru_cache(maxsize=1) def __call__(self): - return {i+1: coord for i, coord in enumerate(self.coords())} + return {i + 1: coord for i, coord in enumerate(self.coords())} def id(self): return list(self().keys()) @@ -182,8 +180,9 @@ def get_id_by_index(self, index: int): @property def id_to_index(self): if self._id_to_index is None: - self._id_to_index = {node_id: index for index, node_id - in enumerate(self().keys())} + self._id_to_index = { + node_id: index for index, node_id in enumerate(self().keys()) + } return self._id_to_index @property @@ -207,16 +206,20 @@ def index_to_id(self): class Elements: - def __init__(self, mesh: "EuclideanMesh"): self.mesh = mesh @lru_cache(maxsize=1) def __call__(self): - elements = {i+1: index+1 for i, index - in enumerate(self.mesh.msh_t.tria3['index'])} - elements.update({i+len(elements)+1: index+1 for i, index - in enumerate(self.mesh.msh_t.quad4['index'])}) + elements = { + i + 1: index + 1 for i, index in enumerate(self.mesh.msh_t.tria3["index"]) + } + elements.update( + { + i + len(elements) + 1: index + 1 + for i, index in enumerate(self.mesh.msh_t.quad4["index"]) + } + ) return elements @lru_cache(maxsize=1) @@ -232,22 +235,28 @@ def array(self): array = np.full((len(self()), rank), -1) for i, element in enumerate(self().values()): row = np.array(list(map(self.mesh.nodes.get_index_by_id, element))) - array[i, :len(row)] = row + array[i, : len(row)] = row return np.ma.masked_equal(array, -1) @lru_cache(maxsize=1) def triangles(self): return np.array( - [list(map(self.mesh.nodes.get_index_by_id, element)) - for element in self().values() - if len(element) == 3]) + [ + list(map(self.mesh.nodes.get_index_by_id, element)) + for element in self().values() + if len(element) == 3 + ] + ) @lru_cache(maxsize=1) def quads(self): return np.array( - [list(map(self.mesh.nodes.get_index_by_id, element)) - for element in self().values() - if len(element) == 4]) + [ + list(map(self.mesh.nodes.get_index_by_id, element)) + for element in self().values() + if len(element) == 4 + ] + ) def triangulation(self): triangles = self.triangles().tolist() @@ -255,24 +264,25 @@ def triangulation(self): # TODO: Not tested. 
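# Each quad (v0, v1, v2, v3) is split along its v1-v3 diagonal into the triangles (v0, v1, v3) and (v1, v2, v3).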
triangles.append([quad[0], quad[1], quad[3]]) triangles.append([quad[1], quad[2], quad[3]]) - return Triangulation( - self.mesh.coord[:, 0], - self.mesh.coord[:, 1], - triangles) + return Triangulation(self.mesh.coord[:, 0], self.mesh.coord[:, 1], triangles) def geodataframe(self): data = [] for elem_id, element in self().items(): - data.append({ - 'geometry': Polygon( - self.mesh.coord[list( - map(self.mesh.nodes.get_index_by_id, element))]), - 'id': elem_id}) + data.append( + { + "geometry": Polygon( + self.mesh.coord[ + list(map(self.mesh.nodes.get_index_by_id, element)) + ] + ), + "id": elem_id, + } + ) return gpd.GeoDataFrame(data, crs=self.mesh.crs) class Boundaries: - def __init__(self, mesh: "Mesh"): # TODO: Add a way to manually initialize self.mesh = mesh @@ -291,29 +301,35 @@ def _init_dataframes(self): for ibtype, bnds in boundaries.items(): if ibtype is None: for bnd_id, data in bnds.items(): - indexes = list(map(self.mesh.nodes.get_index_by_id, - data['indexes'])) - ocean_boundaries.append({ - 'id': bnd_id, - "index_id": data['indexes'], - "indexes": indexes, - 'geometry': LineString(self.mesh.coord[indexes]) - }) - - elif str(ibtype).endswith('1'): + indexes = list( + map(self.mesh.nodes.get_index_by_id, data["indexes"]) + ) + ocean_boundaries.append( + { + "id": bnd_id, + "index_id": data["indexes"], + "indexes": indexes, + "geometry": LineString(self.mesh.coord[indexes]), + } + ) + + elif str(ibtype).endswith("1"): for bnd_id, data in bnds.items(): - indexes = list(map(self.mesh.nodes.get_index_by_id, - data['indexes'])) - interior_boundaries.append({ - 'id': bnd_id, - 'ibtype': ibtype, - "index_id": data['indexes'], - "indexes": indexes, - 'geometry': LineString(self.mesh.coord[indexes]) - }) + indexes = list( + map(self.mesh.nodes.get_index_by_id, data["indexes"]) + ) + interior_boundaries.append( + { + "id": bnd_id, + "ibtype": ibtype, + "index_id": data["indexes"], + "indexes": indexes, + "geometry": LineString(self.mesh.coord[indexes]), + } + ) else: for bnd_id, data in bnds.items(): - _indexes = np.array(data['indexes']) + _indexes = np.array(data["indexes"]) if _indexes.ndim > 1: # ndim > 1 implies we're dealing with an ADCIRC # mesh that includes boundary pairs, such as weir @@ -326,16 +342,17 @@ def _init_dataframes(self): _indexes = np.array(new_indexes).flatten() else: _indexes = _indexes.flatten() - indexes = list(map(self.mesh.nodes.get_index_by_id, - _indexes)) - - land_boundaries.append({ - 'id': bnd_id, - 'ibtype': ibtype, - "index_id": data['indexes'], - "indexes": indexes, - 'geometry': LineString(self.mesh.coord[indexes]) - }) + indexes = list(map(self.mesh.nodes.get_index_by_id, _indexes)) + + land_boundaries.append( + { + "id": bnd_id, + "ibtype": ibtype, + "index_id": data["indexes"], + "indexes": indexes, + "geometry": LineString(self.mesh.coord[indexes]), + } + ) self._ocean = gpd.GeoDataFrame(ocean_boundaries) self._land = gpd.GeoDataFrame(land_boundaries) @@ -362,28 +379,37 @@ def __call__(self): self._init_dataframes() data = [] for bnd in self.ocean().itertuples(): - data.append({ - 'id': bnd.id, - 'ibtype': None, - "index_id": bnd.index_id, - "indexes": bnd.indexes, - 'geometry': bnd.geometry}) + data.append( + { + "id": bnd.id, + "ibtype": None, + "index_id": bnd.index_id, + "indexes": bnd.indexes, + "geometry": bnd.geometry, + } + ) for bnd in self.land().itertuples(): - data.append({ - 'id': bnd.id, - 'ibtype': bnd.ibtype, - "index_id": bnd.index_id, - "indexes": bnd.indexes, - 'geometry': bnd.geometry}) + data.append( + { + "id": bnd.id, + 
"ibtype": bnd.ibtype, + "index_id": bnd.index_id, + "indexes": bnd.indexes, + "geometry": bnd.geometry, + } + ) for bnd in self.interior().itertuples(): - data.append({ - 'id': bnd.id, - 'ibtype': bnd.ibtype, - "index_id": bnd.index_id, - "indexes": bnd.indexes, - 'geometry': bnd.geometry}) + data.append( + { + "id": bnd.id, + "ibtype": bnd.ibtype, + "index_id": bnd.index_id, + "indexes": bnd.indexes, + "geometry": bnd.geometry, + } + ) return gpd.GeoDataFrame(data, crs=self.mesh.crs) @@ -391,24 +417,22 @@ def __len__(self): return len(self()) def auto_generate( - self, - threshold=0., - land_ibtype=0, - interior_ibtype=1, - ): + self, + threshold=0.0, + land_ibtype=0, + interior_ibtype=1, + ): values = self.mesh.value if np.any(np.isnan(values)): raise Exception( "Mesh contains invalid values. Raster values must" "be interpolated to the mesh before generating " - "boundaries.") - + "boundaries." + ) - coords = self.mesh.msh_t.vert2['coord'] - coo_to_idx = { - tuple(coo): idx - for idx, coo in enumerate(coords)} + coords = self.mesh.msh_t.vert2["coord"] + coo_to_idx = {tuple(coo): idx for idx, coo in enumerate(coords)} polys = utils.get_mesh_polygons(self.mesh.msh_t) @@ -421,21 +445,19 @@ def auto_generate( # generate exterior boundaries for poly in polys: ext_ring_coo = poly.exterior.coords - ext_ring = np.array([ - (coo_to_idx[ext_ring_coo[e]], - coo_to_idx[ext_ring_coo[e + 1]]) - for e, coo in enumerate(ext_ring_coo[:-1])]) + ext_ring = np.array( + [ + (coo_to_idx[ext_ring_coo[e]], coo_to_idx[ext_ring_coo[e + 1]]) + for e, coo in enumerate(ext_ring_coo[:-1]) + ] + ) # find boundary edges edge_tag = np.full(ext_ring.shape, 0) - edge_tag[ - np.where(values[ext_ring[:, 0]] < threshold)[0], 0] = -1 - edge_tag[ - np.where(values[ext_ring[:, 1]] < threshold)[0], 1] = -1 - edge_tag[ - np.where(values[ext_ring[:, 0]] >= threshold)[0], 0] = 1 - edge_tag[ - np.where(values[ext_ring[:, 1]] >= threshold)[0], 1] = 1 + edge_tag[np.where(values[ext_ring[:, 0]] < threshold)[0], 0] = -1 + edge_tag[np.where(values[ext_ring[:, 1]] < threshold)[0], 1] = -1 + edge_tag[np.where(values[ext_ring[:, 0]] >= threshold)[0], 0] = 1 + edge_tag[np.where(values[ext_ring[:, 1]] >= threshold)[0], 1] = 1 # sort boundary edges ocean_boundary = [] land_boundary = [] @@ -444,34 +466,43 @@ def auto_generate( land_boundary.append(tuple(ext_ring[i, :])) elif np.any(np.asarray((e0, e1)) == -1): ocean_boundary.append(tuple(ext_ring[i, :])) -# ocean_boundaries = utils.sort_edges(ocean_boundary) -# land_boundaries = utils.sort_edges(land_boundary) + # ocean_boundaries = utils.sort_edges(ocean_boundary) + # land_boundaries = utils.sort_edges(land_boundary) ocean_boundaries = [] if len(ocean_boundary) != 0: - #pylint: disable=not-an-iterable + # pylint: disable=not-an-iterable ocean_segs = linemerge(coords[np.array(ocean_boundary)]) - ocean_segs = [ocean_segs] if isinstance(ocean_segs, LineString) else ocean_segs + ocean_segs = ( + [ocean_segs] if isinstance(ocean_segs, LineString) else ocean_segs + ) ocean_boundaries = [ - [(coo_to_idx[seg.coords[e]], coo_to_idx[seg.coords[e + 1]]) - for e, coo in enumerate(seg.coords[:-1])] - for seg in ocean_segs] + [ + (coo_to_idx[seg.coords[e]], coo_to_idx[seg.coords[e + 1]]) + for e, coo in enumerate(seg.coords[:-1]) + ] + for seg in ocean_segs + ] land_boundaries = [] if len(land_boundary) != 0: - #pylint: disable=not-an-iterable + # pylint: disable=not-an-iterable land_segs = linemerge(coords[np.array(land_boundary)]) - land_segs = [land_segs] if isinstance(land_segs, LineString) else 
land_segs + land_segs = ( + [land_segs] if isinstance(land_segs, LineString) else land_segs + ) land_boundaries = [ - [(coo_to_idx[seg.coords[e]], coo_to_idx[seg.coords[e + 1]]) - for e, coo in enumerate(seg.coords[:-1])] - for seg in land_segs] + [ + (coo_to_idx[seg.coords[e]], coo_to_idx[seg.coords[e + 1]]) + for e, coo in enumerate(seg.coords[:-1]) + ] + for seg in land_segs + ] _bnd_id = len(boundaries[None]) for bnd in ocean_boundaries: e0, e1 = [list(t) for t in zip(*bnd)] e0 = [get_id(vert) for vert in e0] data = e0 + [get_id(e1[-1])] - boundaries[None][_bnd_id] = bdry_type( - indexes=data, properties={}) + boundaries[None][_bnd_id] = bdry_type(indexes=data, properties={}) _bnd_id += 1 # add land boundaries @@ -481,7 +512,8 @@ def auto_generate( e0 = [get_id(vert) for vert in e0] data = e0 + [get_id(e1[-1])] boundaries[land_ibtype][_bnd_id] = bdry_type( - indexes=data, properties={}) + indexes=data, properties={} + ) _bnd_id += 1 @@ -493,9 +525,9 @@ def auto_generate( for interior in interiors: int_ring_coo = interior.coords int_ring = [ - (coo_to_idx[int_ring_coo[e]], - coo_to_idx[int_ring_coo[e + 1]]) - for e, coo in enumerate(int_ring_coo[:-1])] + (coo_to_idx[int_ring_coo[e]], coo_to_idx[int_ring_coo[e + 1]]) + for e, coo in enumerate(int_ring_coo[:-1]) + ] # TODO: Do we still need these? e0, e1 = [list(t) for t in zip(*int_ring)] @@ -508,8 +540,7 @@ def auto_generate( _bnd_id += 1 for bnd_id, data in interior_boundaries.items(): - boundaries[interior_ibtype][bnd_id] = bdry_type( - indexes=data, properties={}) + boundaries[interior_ibtype][bnd_id] = bdry_type(indexes=data, properties={}) self._data = boundaries self._init_dataframes.cache_clear() @@ -518,53 +549,58 @@ def auto_generate( class EuclideanMesh(BaseMesh): - def __init__(self, mesh: jigsaw_msh_t): if not isinstance(mesh, jigsaw_msh_t): - raise TypeError(f'Argument mesh must be of type {jigsaw_msh_t}, ' - f'not type {type(mesh)}.') - if mesh.mshID != 'euclidean-mesh': - raise ValueError(f'Argument mesh has property mshID={mesh.mshID}, ' - "but expected 'euclidean-mesh'.") - if not hasattr(mesh, 'crs'): - warnings.warn('Input mesh has no CRS information.') + raise TypeError( + f"Argument mesh must be of type {jigsaw_msh_t}, " + f"not type {type(mesh)}." + ) + if mesh.mshID != "euclidean-mesh": + raise ValueError( + f"Argument mesh has property mshID={mesh.mshID}, " + "but expected 'euclidean-mesh'." + ) + if not hasattr(mesh, "crs"): + warnings.warn("Input mesh has no CRS information.") mesh.crs = None else: if not isinstance(mesh.crs, CRS): - raise ValueError(f'crs property must be of type {CRS}, not ' - f'type {type(mesh.crs)}.') + raise ValueError( + f"crs property must be of type {CRS}, not " + f"type {type(mesh.crs)}." 
+ ) self._hull = None self._nodes = None self._elements = None self._msh_t = mesh - def write(self, - path: Union[str, os.PathLike], - overwrite: bool = False, - format='grd', # pylint: disable=W0622 - ): + def write( + self, + path: Union[str, os.PathLike], + overwrite: bool = False, + format="grd", # pylint: disable=W0622 + ): path = pathlib.Path(path) if path.exists() and overwrite is not True: - raise IOError( - f'File {str(path)} exists and overwrite is not True.') - if format == 'grd': + raise IOError(f"File {str(path)} exists and overwrite is not True.") + if format == "grd": grd_dict = utils.msh_t_to_grd(self.msh_t) if self._boundaries and self._boundaries.data: grd_dict.update(boundaries=self._boundaries.data) grd.write(grd_dict, path, overwrite) - elif format == '2dm': + elif format == "2dm": sms2dm.writer(utils.msh_t_to_2dm(self.msh_t), path, overwrite) - elif format == 'msh': + elif format == "msh": savemsh(str(path), self.msh_t) - elif format == 'vtk': + elif format == "vtk": savevtk(str(path), self.msh_t) else: - raise ValueError(f'Unhandled format {format}.') + raise ValueError(f"Unhandled format {format}.") @property def tria3(self): @@ -572,7 +608,7 @@ def tria3(self): @property def triangles(self): - return self.msh_t.tria3['index'] + return self.msh_t.tria3["index"] @property def quad4(self): @@ -580,7 +616,7 @@ def quad4(self): @property def quads(self): - return self.msh_t.quad4['index'] + return self.msh_t.quad4["index"] @property def crs(self): @@ -606,43 +642,43 @@ def elements(self): class EuclideanMesh2D(EuclideanMesh): - def __init__(self, mesh: jigsaw_msh_t): super().__init__(mesh) self._boundaries = None if mesh.ndims != +2: - raise ValueError(f'Argument mesh has property ndims={mesh.ndims}, ' - "but expected ndims=2.") + raise ValueError( + f"Argument mesh has property ndims={mesh.ndims}, " + "but expected ndims=2." 
+ ) if len(self.msh_t.value) == 0: self.msh_t.value = np.array( - np.full((self.vert2['coord'].shape[0], 1), np.nan)) + np.full((self.vert2["coord"].shape[0], 1), np.nan) + ) def get_bbox( - self, - crs: Union[str, CRS] = None, - output_type: str = None + self, crs: Union[str, CRS] = None, output_type: str = None ) -> Union[Polygon, Bbox]: - output_type = 'polygon' if output_type is None else output_type + output_type = "polygon" if output_type is None else output_type xmin, xmax = np.min(self.coord[:, 0]), np.max(self.coord[:, 0]) ymin, ymax = np.min(self.coord[:, 1]), np.max(self.coord[:, 1]) crs = self.crs if crs is None else crs if crs is not None: if not self.crs.equals(crs): - transformer = Transformer.from_crs( - self.crs, crs, always_xy=True) + transformer = Transformer.from_crs(self.crs, crs, always_xy=True) # pylint: disable=E0633 (xmin, xmax), (ymin, ymax) = transformer.transform( - (xmin, xmax), (ymin, ymax)) - if output_type == 'polygon': # pylint: disable=R1705 + (xmin, xmax), (ymin, ymax) + ) + if output_type == "polygon": # pylint: disable=R1705 return box(xmin, ymin, xmax, ymax) - elif output_type == 'bbox': + elif output_type == "bbox": return Bbox([[xmin, ymin], [xmax, ymax]]) raise TypeError( - 'Argument output_type must a string literal \'polygon\' or ' - '\'bbox\'') + "Argument output_type must a string literal 'polygon' or " "'bbox'" + ) @property def boundaries(self): @@ -653,8 +689,9 @@ def boundaries(self): def tricontourf(self, **kwargs): return utils.tricontourf(self.msh_t, **kwargs) - def interpolate(self, raster: Union[Raster, List[Raster]], - method='spline', nprocs=None): + def interpolate( + self, raster: Union[Raster, List[Raster]], method="spline", nprocs=None + ): if isinstance(raster, Raster): raster = [raster] @@ -669,51 +706,63 @@ def interpolate(self, raster: Union[Raster, List[Raster]], with Pool(processes=nprocs) as pool: res = pool.starmap( _mesh_interpolate_worker, - [(self.vert2['coord'], self.crs, - _raster.tmpfile, _raster.chunk_size, method) - for _raster in raster] - ) + [ + ( + self.vert2["coord"], + self.crs, + _raster.tmpfile, + _raster.chunk_size, + method, + ) + for _raster in raster + ], + ) pool.join() else: - res = [_mesh_interpolate_worker( - self.vert2['coord'], self.crs, - _raster.tmpfile, _raster.chunk_size, method) - for _raster in raster] + res = [ + _mesh_interpolate_worker( + self.vert2["coord"], + self.crs, + _raster.tmpfile, + _raster.chunk_size, + method, + ) + for _raster in raster + ] values = self.msh_t.value.flatten() for idxs, _values in res: values[idxs] = _values - self.msh_t.value = np.array(values.reshape((values.shape[0], 1)), - dtype=jigsaw_msh_t.REALS_t) - + self.msh_t.value = np.array( + values.reshape((values.shape[0], 1)), dtype=jigsaw_msh_t.REALS_t + ) def get_contour(self, level: float): # ONLY SUPPORTS TRIANGLES - for attr in ['quad4', 'hexa8']: + for attr in ["quad4", "hexa8"]: if len(getattr(self.msh_t, attr)) > 0: - warnings.warn( - 'Mesh contour extraction only supports triangles') + warnings.warn("Mesh contour extraction only supports triangles") - coords = self.msh_t.vert2['coord'] + coords = self.msh_t.vert2["coord"] values = self.msh_t.value - trias = self.msh_t.tria3['index'] + trias = self.msh_t.tria3["index"] if np.any(np.isnan(values)): raise Exception( "Mesh contains invalid values. Raster values must" "be interpolated to the mesh before generating " - "boundaries.") + "boundaries." 
+ ) x, y = coords[:, 0], coords[:, 1] features = [] with warnings.catch_warnings(): - warnings.simplefilter('ignore', UserWarning) - _logger.debug('Computing contours...') + warnings.simplefilter("ignore", UserWarning) + _logger.debug("Computing contours...") fig, ax = plt.subplots() - ax.tricontour( - x, y, trias, values.ravel(), levels=[level]) + ax.tricontour(x, y, trias, values.ravel(), levels=[level]) plt.close(fig) for path_collection in ax.collections: for path in path_collection.get_paths(): @@ -724,7 +773,6 @@ def get_contour(self, level: float): pass return linemerge(features) - def get_multipolygon(self, zmin=None, zmax=None): values = self.msh_t.value @@ -739,14 +787,12 @@ def get_multipolygon(self, zmin=None, zmax=None): verts_in = np.argwhere(mask).ravel() clipped_mesh = utils.clip_mesh_by_vertex( - self.msh_t, verts_in, - can_use_other_verts=True) + self.msh_t, verts_in, can_use_other_verts=True + ) boundary_edges = utils.get_boundary_edges(clipped_mesh) - coords = clipped_mesh.vert2['coord'] - coo_to_idx = { - tuple(coo): idx - for idx, coo in enumerate(coords)} + coords = clipped_mesh.vert2["coord"] + coo_to_idx = {tuple(coo): idx for idx, coo in enumerate(coords)} poly_gen = polygonize(coords[boundary_edges]) polys = list(poly_gen) polys = sorted(polys, key=lambda p: p.area, reverse=True) @@ -757,9 +803,11 @@ def get_multipolygon(self, zmin=None, zmax=None): for e, ring in enumerate(rings[:-1]): path = Path(ring, closed=True) n_parents = n_parents + np.pad( - np.array([ - path.contains_point(pt) for pt in represent[e+1:]]), - (e+1, 0), 'constant', constant_values=0) + np.array([path.contains_point(pt) for pt in represent[e + 1:]]), + (e + 1, 0), + "constant", + constant_values=0, + ) # Get actual polygons based on logic described above polys = [p for e, p in enumerate(polys) if not n_parents[e] % 2] @@ -785,18 +833,20 @@ class Mesh(BaseMesh): def __new__(cls, mesh: jigsaw_msh_t): if not isinstance(mesh, jigsaw_msh_t): - raise TypeError(f'Argument mesh must be of type {jigsaw_msh_t}, ' - f'not type {type(mesh)}.') + raise TypeError( + f"Argument mesh must be of type {jigsaw_msh_t}, " + f"not type {type(mesh)}." + ) - if mesh.mshID == 'euclidean-mesh': + if mesh.mshID == "euclidean-mesh": if mesh.ndims == 2: return EuclideanMesh2D(mesh) raise NotImplementedError( - f'mshID={mesh.mshID} + mesh.ndims={mesh.ndims} not ' - 'handled.') + f"mshID={mesh.mshID} + mesh.ndims={mesh.ndims} not " "handled." 
+ ) - raise NotImplementedError(f'mshID={mesh.mshID} not handled.') + raise NotImplementedError(f"mshID={mesh.mshID} not handled.") @staticmethod def open(path, crs=None): @@ -805,7 +855,7 @@ def open(path, crs=None): msh_t.value = np.negative(msh_t.value) return Mesh(msh_t) except Exception as e: - if 'not a valid grd file' in str(e): + if "not a valid grd file" in str(e): pass else: raise e @@ -823,10 +873,7 @@ def open(path, crs=None): except Exception: pass - raise TypeError( - f'Unable to automatically determine file type for {str(path)}.') - - + raise TypeError(f"Unable to automatically determine file type for {str(path)}.") def sort_rings(index_rings, vertices): @@ -852,10 +899,7 @@ def sort_rings(index_rings, vertices): areas.pop(idx) _id = 0 _index_rings = {} - _index_rings[_id] = { - 'exterior': np.asarray(exterior), - 'interiors': [] - } + _index_rings[_id] = {"exterior": np.asarray(exterior), "interiors": []} e0, e1 = [list(t) for t in zip(*exterior)] path = Path(vertices[e0 + [e0[0]], :], closed=True) while len(index_rings) > 0: @@ -867,13 +911,13 @@ def sort_rings(index_rings, vertices): potential_interiors.append(i) # filter out nested rings real_interiors = [] - for i, p_interior in reversed( - list(enumerate(potential_interiors))): + for i, p_interior in reversed(list(enumerate(potential_interiors))): _p_interior = index_rings[p_interior] - check = [index_rings[k] - for j, k in - reversed(list(enumerate(potential_interiors))) - if i != j] + check = [ + index_rings[k] + for j, k in reversed(list(enumerate(potential_interiors))) + if i != j + ] has_parent = False for _path in check: e0, e1 = [list(t) for t in zip(*_path)] @@ -884,8 +928,7 @@ def sort_rings(index_rings, vertices): real_interiors.append(p_interior) # pop real rings from collection for i in reversed(sorted(real_interiors)): - _index_rings[_id]['interiors'].append( - np.asarray(index_rings.pop(i))) + _index_rings[_id]["interiors"].append(np.asarray(index_rings.pop(i))) areas.pop(i) # if no internal rings found, initialize next polygon if len(index_rings) > 0: @@ -893,22 +936,13 @@ def sort_rings(index_rings, vertices): exterior = index_rings.pop(idx) areas.pop(idx) _id += 1 - _index_rings[_id] = { - 'exterior': np.asarray(exterior), - 'interiors': [] - } + _index_rings[_id] = {"exterior": np.asarray(exterior), "interiors": []} e0, e1 = [list(t) for t in zip(*exterior)] path = Path(vertices[e0 + [e0[0]], :], closed=True) return _index_rings - -def _mesh_interpolate_worker( - coords, - coords_crs, - raster_path, - chunk_size, - method): +def _mesh_interpolate_worker(coords, coords_crs, raster_path, chunk_size, method): coords = np.array(coords) raster = Raster(raster_path) idxs = [] @@ -916,56 +950,50 @@ def _mesh_interpolate_worker( for window in raster.iter_windows(chunk_size=chunk_size, overlap=2): if not raster.crs.equals(coords_crs): - transformer = Transformer.from_crs( - coords_crs, raster.crs, always_xy=True) + transformer = Transformer.from_crs(coords_crs, raster.crs, always_xy=True) # pylint: disable=E0633 coords[:, 0], coords[:, 1] = transformer.transform( - coords[:, 0], coords[:, 1]) + coords[:, 0], coords[:, 1] + ) xi = raster.get_x(window) yi = raster.get_y(window) # Use masked array to ignore missing values from DEM zi = raster.get_values(window=window, masked=True) _idxs = np.logical_and( - np.logical_and( - np.min(xi) <= coords[:, 0], - np.max(xi) >= coords[:, 0]), - np.logical_and( - np.min(yi) <= coords[:, 1], - np.max(yi) >= coords[:, 1])) + np.logical_and(np.min(xi) <= coords[:, 0], 
np.max(xi) >= coords[:, 0]), + np.logical_and(np.min(yi) <= coords[:, 1], np.max(yi) >= coords[:, 1]), + ) # Inspired by StackOverflow 35807321 interp_mask = None if np.any(zi.mask): m_interp = RegularGridInterpolator( - (xi, np.flip(yi)), - np.flipud(zi.mask).T.astype(bool), - method=method + (xi, np.flip(yi)), np.flipud(zi.mask).T.astype(bool), method=method ) # Pick nodes NOT "contaminated" by masked values interp_mask = m_interp(coords[_idxs]) > 0 - if method == 'spline': + if method == "spline": f = RectBivariateSpline( xi, np.flip(yi), np.flipud(zi).T, - kx=3, ky=3, s=0, + kx=3, + ky=3, + s=0, # bbox=[min(x), max(x), min(y), max(y)] # ?? ) _values = f.ev(coords[_idxs, 0], coords[_idxs, 1]) - elif method in ['nearest', 'linear']: + elif method in ["nearest", "linear"]: f = RegularGridInterpolator( - (xi, np.flip(yi)), - np.flipud(zi).T, - method=method + (xi, np.flip(yi)), np.flipud(zi).T, method=method ) _values = f(coords[_idxs]) else: - raise ValueError( - f"Invalid value method specified <{method}>!") + raise ValueError(f"Invalid value method specified <{method}>!") if interp_mask is not None: # pylint: disable=invalid-unary-operand-type diff --git a/ocsmesh/mesh/parsers/grd.py b/ocsmesh/mesh/parsers/grd.py index 02c4d5fc..38061058 100644 --- a/ocsmesh/mesh/parsers/grd.py +++ b/ocsmesh/mesh/parsers/grd.py @@ -1,9 +1,9 @@ -from collections import defaultdict -import os import numbers +import os import pathlib -from typing import Union, Dict, TextIO import warnings +from collections import defaultdict +from typing import Dict, TextIO, Union import numpy as np # type: ignore[import] from pyproj import CRS # type: ignore[import] @@ -15,7 +15,7 @@ def buffer_to_dict(buf: TextIO): NE, NP = map(int, buf.readline().split()) nodes = {} for _ in range(NP): - line = buf.readline().strip('\n').split() + line = buf.readline().strip("\n").split() # Gr3/fort.14 format cannot distinguish between a 2D mesh with one # vector value (e.g. velocity, which uses 2 columns) or a 3D mesh with # one scalar value. This is a design problem of the mesh format, which @@ -24,12 +24,11 @@ def buffer_to_dict(buf: TextIO): # Here, we assume the input mesh is strictly a 2D mesh, and the data # that follows is an array of values. if len(line[3:]) == 1: - nodes[line[0]] = [ - (float(line[1]), float(line[2])), float(line[3])] + nodes[line[0]] = [(float(line[1]), float(line[2])), float(line[3])] else: nodes[line[0]] = [ (float(line[1]), float(line[2])), - [float(line[i]) for i in range(3, len(line[3:]))] + [float(line[i]) for i in range(3, len(line[3:]))], ] elements = {} for _ in range(NE): @@ -39,9 +38,7 @@ def buffer_to_dict(buf: TextIO): try: NOPE = int(buf.readline().split()[0]) except IndexError: - return {'description': description, - 'nodes': nodes, - 'elements': elements} + return {"description": description, "nodes": nodes, "elements": elements} # let NOPE=-1 mean an ellipsoidal-mesh # reassigning NOPE to 0 until further implementation is applied. 
boundaries: Dict = defaultdict(dict) @@ -51,10 +48,11 @@ def buffer_to_dict(buf: TextIO): NETA = int(buf.readline().split()[0]) _cnt = 0 boundaries[None][_bnd_id] = {} - boundaries[None][_bnd_id]['indexes'] = [] + boundaries[None][_bnd_id]["indexes"] = [] while _cnt < NETA: - boundaries[None][_bnd_id]['indexes'].append( - buf.readline().split()[0].strip()) + boundaries[None][_bnd_id]["indexes"].append( + buf.readline().split()[0].strip() + ) _cnt += 1 _bnd_id += 1 NBOU = int(buf.readline().split()[0]) @@ -68,24 +66,26 @@ def buffer_to_dict(buf: TextIO): else: _bnd_id = len(boundaries[ibtype]) boundaries[ibtype][_bnd_id] = {} - boundaries[ibtype][_bnd_id]['indexes'] = [] + boundaries[ibtype][_bnd_id]["indexes"] = [] while _pnt_cnt < npts: line = buf.readline().split() if len(line) == 1: - boundaries[ibtype][_bnd_id]['indexes'].append(line[0]) + boundaries[ibtype][_bnd_id]["indexes"].append(line[0]) else: index_construct = [] for val in line: - if '.' in val: + if "." in val: continue index_construct.append(val) - boundaries[ibtype][_bnd_id]['indexes'].append(index_construct) + boundaries[ibtype][_bnd_id]["indexes"].append(index_construct) _pnt_cnt += 1 _nbnd_cnt += 1 - return {'description': description, - 'nodes': nodes, - 'elements': elements, - 'boundaries': boundaries} + return { + "description": description, + "nodes": nodes, + "elements": elements, + "boundaries": boundaries, + } def to_string(description, nodes, elements, boundaries=None, crs=None): @@ -119,18 +119,19 @@ def to_string(description, nodes, elements, boundaries=None, crs=None): # ocean boundaries if boundaries is not None: - out.append(f"{len(boundaries[None]):d} " - "! total number of ocean boundaries") + out.append(f"{len(boundaries[None]):d} " "! total number of ocean boundaries") # count total number of ocean boundaries _sum = 0 for bnd in boundaries[None].values(): - _sum += len(bnd['indexes']) + _sum += len(bnd["indexes"]) out.append(f"{int(_sum):d} ! total number of ocean boundary nodes") # write ocean boundary indexes for i, boundary in boundaries[None].items(): - out.append(f"{len(boundary['indexes']):d}" - f" ! number of nodes for ocean_boundary_{i}") - for idx in boundary['indexes']: + out.append( + f"{len(boundary['indexes']):d}" + f" ! number of nodes for ocean_boundary_{i}" + ) + for idx in boundary["indexes"]: out.append(f"{idx}") else: out.append("0 ! total number of ocean boundaries") @@ -148,7 +149,7 @@ def to_string(description, nodes, elements, boundaries=None, crs=None): for ibtype in boundaries: if ibtype is not None: for bnd in boundaries[ibtype].values(): - _cnt += np.asarray(bnd['indexes']).size + _cnt += np.asarray(bnd["indexes"]).size out.append(f"{_cnt:d} ! Total number of non-ocean boundary nodes") # all additional boundaries for ibtype, bndrys in boundaries.items(): @@ -158,9 +159,10 @@ def to_string(description, nodes, elements, boundaries=None, crs=None): line = [ f"{len(boundary['indexes']):d}", f"{ibtype}", - f"! boundary {ibtype}:{bdry_id}"] - out.append(' '.join(line)) - for idx in boundary['indexes']: + f"! 
boundary {ibtype}:{bdry_id}", + ] + out.append(" ".join(line)) + for idx in boundary["indexes"]: out.append(f"{idx}") return "\n".join(out) @@ -174,35 +176,34 @@ def read(resource: Union[str, os.PathLike], boundaries: bool = True, crs=True): :class:`io.StringIO` """ resource = pathlib.Path(resource) - with open(resource, 'r') as stream: + with open(resource, "r") as stream: try: grd = buffer_to_dict(stream) except Exception as excepting: - err_msg = f'Resource {str(resource)} is not a valid grd file' + err_msg = f"Resource {str(resource)} is not a valid grd file" raise Exception(err_msg) from excepting if boundaries is False: - grd.pop('boundaries', None) + grd.pop("boundaries", None) if crs is True: crs = None if crs is None: - for try_crs in grd['description'].split(): + for try_crs in grd["description"].split(): try: crs = CRS.from_user_input(try_crs) break except CRSError: pass if crs is None: - warnings.warn(f'File {str(resource)} does not contain CRS ' - 'information.') + warnings.warn(f"File {str(resource)} does not contain CRS " "information.") if crs is not False: - grd.update({'crs': crs}) + grd.update({"crs": crs}) return grd def write(grd, path, overwrite=False): path = pathlib.Path(path) if path.is_file() and not overwrite: - raise Exception('File exists, pass overwrite=True to allow overwrite.') - with open(path, 'w') as f: + raise Exception("File exists, pass overwrite=True to allow overwrite.") + with open(path, "w") as f: f.write(to_string(**grd)) diff --git a/ocsmesh/mesh/parsers/sms2dm.py b/ocsmesh/mesh/parsers/sms2dm.py index c26fd7b0..aa014874 100644 --- a/ocsmesh/mesh/parsers/sms2dm.py +++ b/ocsmesh/mesh/parsers/sms2dm.py @@ -5,7 +5,7 @@ def read(path, crs=None): sms2dm = {} - with open(pathlib.Path(path), 'r') as f: + with open(pathlib.Path(path), "r") as f: lines = list(map(str.split, f.readlines())) ind = 1 while ind < len(lines): @@ -13,36 +13,32 @@ def read(path, crs=None): ind = ind + 1 if len(line) == 0: break - if line[0] in ['E3T', 'E4Q']: + if line[0] in ["E3T", "E4Q"]: if line[0] not in sms2dm: sms2dm[line[0]] = {} - sms2dm[line[0]].update({ - line[1]: line[2:] - }) - if line[0] == 'ND': + sms2dm[line[0]].update({line[1]: line[2:]}) + if line[0] == "ND": if line[0] not in sms2dm: sms2dm[line[0]] = {} - sms2dm[line[0]].update({ - line[1]: ( - list(map(float, line[2:-1])), float(line[-1]) - ) - }) + sms2dm[line[0]].update( + {line[1]: (list(map(float, line[2:-1])), float(line[-1]))} + ) if crs is not None: - sms2dm['crs'] = CRS.from_user_input(crs) + sms2dm["crs"] = CRS.from_user_input(crs) return sms2dm def writer(sms2dm, path, overwrite=False): path = pathlib.Path(path) if path.is_file() and not overwrite: - msg = 'File exists, pass overwrite=True to allow overwrite.' + msg = "File exists, pass overwrite=True to allow overwrite." 
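+ # Overwriting is opt-in; an existing file is never replaced silently.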
raise Exception(msg) - with open(path, 'w') as f: + with open(path, "w") as f: f.write(to_string(sms2dm)) def to_string(sms2dm): - data = ['MESH2D'] + data = ["MESH2D"] E3T = E3T_string(sms2dm) if E3T is not None: data.append(E3T) @@ -50,44 +46,48 @@ def to_string(sms2dm): if E4Q is not None: data.append(E4Q) data.append(ND_string(sms2dm)) - return '\n'.join(data) + return "\n".join(data) def ND_string(sms2dm): - assert all(int(nd_id) > 0 for nd_id in sms2dm['ND']) + assert all(int(nd_id) > 0 for nd_id in sms2dm["ND"]) lines = [] - for nd_id, (coords, value) in sms2dm['ND'].items(): - lines.append(' '.join([ - 'ND', - f'{int(nd_id):d}', - f"{coords[0]:<.16E}", - f"{coords[1]:<.16E}", - f"{value:<.16E}" - ])) - return '\n'.join(lines) + for nd_id, (coords, value) in sms2dm["ND"].items(): + lines.append( + " ".join( + [ + "ND", + f"{int(nd_id):d}", + f"{coords[0]:<.16E}", + f"{coords[1]:<.16E}", + f"{value:<.16E}", + ] + ) + ) + return "\n".join(lines) def geom_string(geom_type, sms2dm): - assert geom_type in ['E3T', 'E4Q', 'E6T', 'E8Q', 'E9Q'] + assert geom_type in ["E3T", "E4Q", "E6T", "E8Q", "E9Q"] assert all(int(elm_id) > 0 for elm_id in sms2dm[geom_type]) f = [] for elm_id, geom in sms2dm[geom_type].items(): line = [ - f'{geom_type}', - f'{elm_id}', + f"{geom_type}", + f"{elm_id}", ] for j, _ in enumerate(geom): line.append(f"{geom[j]}") - f.append(' '.join(line)) + f.append(" ".join(line)) if len(f) > 0: - return '\n'.join(f) + return "\n".join(f) return None def E3T_string(sms2dm): - return geom_string('E3T', sms2dm) + return geom_string("E3T", sms2dm) def E4Q_string(sms2dm): - return geom_string('E4Q', sms2dm) + return geom_string("E4Q", sms2dm) diff --git a/ocsmesh/ops/__init__.py b/ocsmesh/ops/__init__.py index 0bad428e..803d161f 100644 --- a/ocsmesh/ops/__init__.py +++ b/ocsmesh/ops/__init__.py @@ -1,13 +1,8 @@ from ocsmesh.ops.combine_geom import GeomCombine from ocsmesh.ops.combine_hfun import HfunCombine -combine_geometry = lambda *args, **kwargs: GeomCombine( - *args, **kwargs).run() +combine_geometry = lambda *args, **kwargs: GeomCombine(*args, **kwargs).run() -combine_hfun = lambda *args, **kwargs: HfunCombine( - *args, **kwargs).run() +combine_hfun = lambda *args, **kwargs: HfunCombine(*args, **kwargs).run() -__all__ = [ - "combine_geometry", - "combine_hfun" -] +__all__ = ["combine_geometry", "combine_hfun"] diff --git a/ocsmesh/ops/combine_geom.py b/ocsmesh/ops/combine_geom.py index eca18c40..6f840665 100644 --- a/ocsmesh/ops/combine_geom.py +++ b/ocsmesh/ops/combine_geom.py @@ -1,45 +1,46 @@ import gc import logging -from multiprocessing import Pool, Lock, cpu_count import os import pathlib import tempfile import warnings -from typing import Union, Sequence, Tuple, List +from multiprocessing import Lock, Pool, cpu_count +from typing import List, Sequence, Tuple, Union import geopandas as gpd import numpy as np +from jigsawpy import jigsaw_msh_t, savemsh, savevtk from pyproj import CRS, Transformer from shapely import ops -from shapely.geometry import box, Polygon, MultiPolygon, LinearRing +from shapely.geometry import LinearRing, MultiPolygon, Polygon, box from shapely.validation import explain_validity -from jigsawpy import jigsaw_msh_t, savemsh, savevtk - -from ocsmesh.raster import Raster from ocsmesh.mesh.mesh import Mesh - +from ocsmesh.raster import Raster _logger = logging.getLogger(__name__) + class GeomCombine: _base_mesh_lock = Lock() + def __init__( - self, - dem_files: Union[None, Sequence[Union[str, os.PathLike]]], - out_file: Union[str, os.PathLike], - 
out_format: str = "shapefile", - mesh_file: Union[str, os.PathLike, None] = None, - mesh_multipolygon: Union[MultiPolygon, Polygon] = None, - ignore_mesh_final_boundary : bool = False, - zmin: Union[float, None] = None, - zmax: Union[float, None] = None, - chunk_size: Union[int, None] = None, - overlap: Union[int, None] = None, - nprocs: int = -1, - out_crs: Union[str, CRS] = "EPSG:4326", - base_crs: Union[str, CRS] = None): + self, + dem_files: Union[None, Sequence[Union[str, os.PathLike]]], + out_file: Union[str, os.PathLike], + out_format: str = "shapefile", + mesh_file: Union[str, os.PathLike, None] = None, + mesh_multipolygon: Union[MultiPolygon, Polygon] = None, + ignore_mesh_final_boundary: bool = False, + zmin: Union[float, None] = None, + zmax: Union[float, None] = None, + chunk_size: Union[int, None] = None, + overlap: Union[int, None] = None, + nprocs: int = -1, + out_crs: Union[str, CRS] = "EPSG:4326", + base_crs: Union[str, CRS] = None, + ): self._calc_crs = None self._base_exterior = None @@ -60,23 +61,24 @@ def __init__( overlap=overlap, nprocs=nprocs, out_crs=out_crs, - base_crs=base_crs) + base_crs=base_crs, + ) def run(self): - dem_files = self._operation_info['dem_files'] - out_file = self._operation_info['out_file'] - out_format = self._operation_info['out_format'] - mesh_file = self._operation_info['mesh_file'] - mesh_mp_in = self._operation_info['mesh_mp_in'] - ignore_mesh = self._operation_info['ignore_mesh'] - zmin = self._operation_info['zmin'] - zmax = self._operation_info['zmax'] - chunk_size = self._operation_info['chunk_size'] - overlap = self._operation_info['overlap'] - nprocs = self._operation_info['nprocs'] - out_crs = self._operation_info['out_crs'] - base_crs = self._operation_info['base_crs'] + dem_files = self._operation_info["dem_files"] + out_file = self._operation_info["out_file"] + out_format = self._operation_info["out_format"] + mesh_file = self._operation_info["mesh_file"] + mesh_mp_in = self._operation_info["mesh_mp_in"] + ignore_mesh = self._operation_info["ignore_mesh"] + zmin = self._operation_info["zmin"] + zmax = self._operation_info["zmax"] + chunk_size = self._operation_info["chunk_size"] + overlap = self._operation_info["overlap"] + nprocs = self._operation_info["nprocs"] + out_crs = self._operation_info["out_crs"] + base_crs = self._operation_info["base_crs"] out_dir = pathlib.Path(out_file).parent out_dir.mkdir(exist_ok=True, parents=True) @@ -93,8 +95,8 @@ def run(self): if len(all_crs) == 1: self._calc_crs = list(all_crs)[0] _logger.info( - f"All DEMs have the same CRS:" - f" {self._calc_crs.to_string()}") + f"All DEMs have the same CRS:" f" {self._calc_crs.to_string()}" + ) base_mult_poly = None if mesh_mp_in: @@ -106,9 +108,9 @@ def run(self): if not base_crs.equals(self._calc_crs): _logger.info("Reprojecting base polygon...") transformer = Transformer.from_crs( - base_crs, self._calc_crs, always_xy=True) - base_mult_poly = ops.transform( - transformer.transform, base_mult_poly) + base_crs, self._calc_crs, always_xy=True + ) + base_mult_poly = ops.transform(transformer.transform, base_mult_poly) elif mesh_file and pathlib.Path(mesh_file).is_file(): _logger.info("Creating mesh object from file...") @@ -124,10 +126,10 @@ def run(self): if not self._calc_crs.equals(base_crs): _logger.info("Reprojecting base mesh...") transformer = Transformer.from_crs( - base_crs, self._calc_crs, always_xy=True) + base_crs, self._calc_crs, always_xy=True + ) xy = base_mesh.coord - xy = np.vstack( - transformer.transform(xy[:, 0], xy[:, 1])).T + xy = 
np.vstack(transformer.transform(xy[:, 0], xy[:, 1])).T base_mesh.coord[:] = xy _logger.info("Done") @@ -138,7 +140,6 @@ def run(self): base_mult_poly = self._get_valid_multipolygon(base_mult_poly) - if base_mult_poly: # NOTE: This needs to happen once and before any # modification to basemesh happens (due to overlap @@ -149,44 +150,42 @@ def run(self): # was erosion and we want to make sure new DEMs futher # inland are considered (?) self._base_exterior = MultiPolygon( - list(ops.polygonize( - [poly.exterior for poly in base_mult_poly]))) - + list(ops.polygonize([poly.exterior for poly in base_mult_poly])) + ) z_info = {} if zmin is not None: - z_info['zmin'] = zmin + z_info["zmin"] = zmin if zmax is not None: - z_info['zmax'] = zmax + z_info["zmax"] = zmax poly_files_coll = [] _logger.info(f"Number of processes: {nprocs}") - with tempfile.TemporaryDirectory(dir=out_dir) as temp_dir, \ - tempfile.NamedTemporaryFile() as base_file: + with tempfile.TemporaryDirectory( + dir=out_dir + ) as temp_dir, tempfile.NamedTemporaryFile() as base_file: if base_mult_poly: base_mesh_path = base_file.name - self._multipolygon_to_disk( - base_mesh_path, base_mult_poly, fix=False) + self._multipolygon_to_disk(base_mesh_path, base_mult_poly, fix=False) else: base_mesh_path = None base_mult_poly = None - _logger.info("Processing DEM priorities ...") # Process priority: priority is based on the order, # the last input has the highest priority # (i.e. lowest priority number) priorities = list((range(len(dem_files))))[::-1] # TODO: Needs some code refinement for bbox issue -# priority_args = [] -# for priority, dem_file in zip(priorities, dem_files): -# priority_args.append( -# (priority, temp_dir, dem_file, chunk_size, overlap)) -# -# with Pool(processes=nprocs) as p: -# p.starmap(self._process_priority, priority_args) -# p.join() + # priority_args = [] + # for priority, dem_file in zip(priorities, dem_files): + # priority_args.append( + # (priority, temp_dir, dem_file, chunk_size, overlap)) + # + # with Pool(processes=nprocs) as p: + # p.starmap(self._process_priority, priority_args) + # p.join() _logger.info("Processing DEM contours ...") # Process contours @@ -194,22 +193,33 @@ def run(self): parallel_args = [] for priority, dem_file in zip(priorities, dem_files): parallel_args.append( - (base_mesh_path, temp_dir, - priority, dem_file, - z_info, chunk_size, overlap)) + ( + base_mesh_path, + temp_dir, + priority, + dem_file, + z_info, + chunk_size, + overlap, + ) + ) with Pool(processes=nprocs) as p: poly_files_coll.extend( - p.starmap( - self._parallel_get_polygon_worker, - parallel_args)) + p.starmap(self._parallel_get_polygon_worker, parallel_args) + ) p.join() else: poly_files_coll.extend( self._serial_get_polygon( - base_mesh_path, temp_dir, - priorities, dem_files, - z_info, chunk_size, overlap)) - + base_mesh_path, + temp_dir, + priorities, + dem_files, + z_info, + chunk_size, + overlap, + ) + ) _logger.info("Generating final boundary polygon...") # If a DEM doesn't intersect domain None will @@ -218,26 +228,20 @@ def run(self): if base_mesh_path is not None and not ignore_mesh: poly_files_coll.append(base_mesh_path) - rasters_gdf = gpd.GeoDataFrame( - columns=['geometry'], - crs=self._calc_crs - ) + rasters_gdf = gpd.GeoDataFrame(columns=["geometry"], crs=self._calc_crs) for feather_f in poly_files_coll: rasters_gdf = rasters_gdf.append( gpd.GeoDataFrame( - {'geometry': self._read_multipolygon( - feather_f) - }, - crs=self._calc_crs - ), - ignore_index=True) - + {"geometry": 
self._read_multipolygon(feather_f)}, + crs=self._calc_crs, + ), + ignore_index=True, + ) # The assumption is this returns polygon or multipolygon fin_mult_poly = rasters_gdf.unary_union _logger.info("Done") - # If DEM is not inside input base polygon, the end results # is None if fin_mult_poly: @@ -245,15 +249,13 @@ def run(self): # Is this necessary? It can be expensive if geom is not valid fin_mult_poly = self._get_valid_multipolygon(fin_mult_poly) - self._write_to_file( - out_format, out_file, fin_mult_poly, out_crs) + self._write_to_file(out_format, out_file, fin_mult_poly, out_crs) self._base_exterior = None def _get_valid_multipolygon( - self, - polygon: Union[Polygon, MultiPolygon] - ) -> MultiPolygon: + self, polygon: Union[Polygon, MultiPolygon] + ) -> MultiPolygon: if not polygon.is_valid: polygon = ops.unary_union(polygon) @@ -269,85 +271,73 @@ def _get_valid_multipolygon( return polygon - def _multipolygon_to_disk( - self, - path: Union[str, os.PathLike], - multipolygon: MultiPolygon, - fix: bool = True): + self, + path: Union[str, os.PathLike], + multipolygon: MultiPolygon, + fix: bool = True, + ): if fix: - multipolygon = self._get_valid_multipolygon( - multipolygon) + multipolygon = self._get_valid_multipolygon(multipolygon) if isinstance(multipolygon, Polygon): # In case fix is not True, we need to make sure it's # a multipolygon instead of polygon for dataframe creation multipolygon = MultiPolygon([multipolygon]) - gpd.GeoDataFrame({'geometry': multipolygon}).to_feather(path) - + gpd.GeoDataFrame({"geometry": multipolygon}).to_feather(path) def _read_multipolygon( - self, - path: Union[str, os.PathLike], - fix: bool = True - ) -> MultiPolygon: + self, path: Union[str, os.PathLike], fix: bool = True + ) -> MultiPolygon: - multipolygon = MultiPolygon( - list(gpd.read_feather(path).geometry)) + multipolygon = MultiPolygon(list(gpd.read_feather(path).geometry)) if fix: - multipolygon = self._get_valid_multipolygon( - multipolygon) + multipolygon = self._get_valid_multipolygon(multipolygon) return multipolygon def _read_to_geodf( - self, - path: Union[str, os.PathLike], - ) -> gpd.GeoDataFrame: + self, + path: Union[str, os.PathLike], + ) -> gpd.GeoDataFrame: gdf = gpd.read_feather(path) return gdf - def _process_priority( - self, - priority: int, - temp_dir: Union[str, os.PathLike], - dem_path: Union[str, os.PathLike], - chunk_size: Union[int, None] = None, - overlap: Union[int, None] = None): - - rast = Raster( - dem_path, - chunk_size=chunk_size, - overlap=overlap) + self, + priority: int, + temp_dir: Union[str, os.PathLike], + dem_path: Union[str, os.PathLike], + chunk_size: Union[int, None] = None, + overlap: Union[int, None] = None, + ): + + rast = Raster(dem_path, chunk_size=chunk_size, overlap=overlap) # Can cause issue with bbox(?) 
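# --- Editor's note (illustrative sketch, not part of the patch): the
# --- _multipolygon_to_disk/_read_multipolygon helpers above round-trip a
# --- MultiPolygon through a feather file. A minimal standalone version,
# --- assuming geopandas with pyarrow; the path and sample geometry are
# --- made up, and list(mp.geoms) keeps it running on shapely 2.x, where
# --- MultiPolygon is no longer directly iterable.
import geopandas as gpd
from shapely.geometry import MultiPolygon, box

mp = MultiPolygon([box(0, 0, 1, 1), box(2, 0, 3, 1)])
gpd.GeoDataFrame({"geometry": list(mp.geoms)}).to_feather("/tmp/mp.feather")
restored = MultiPolygon(list(gpd.read_feather("/tmp/mp.feather").geometry))
assert restored.equals(mp)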
if not self._calc_crs.equals(rast.crs): rast.warp(dst_crs=self._calc_crs) - pri_dt_path = ( - pathlib.Path(temp_dir) / f'dem_priority_{priority}.feather') + pri_dt_path = pathlib.Path(temp_dir) / f"dem_priority_{priority}.feather" pri_mult_poly = MultiPolygon([box(*rast.src.bounds)]) - self._multipolygon_to_disk( - pri_dt_path, pri_mult_poly) - + self._multipolygon_to_disk(pri_dt_path, pri_mult_poly) def _serial_get_polygon( - self, - base_mesh_path: Union[str, os.PathLike, None], - temp_dir: Union[str, os.PathLike], - priorities: Sequence[int], - dem_files: Sequence[Union[str, os.PathLike]], - z_info: dict = {}, - chunk_size: Union[int, None] = None, - overlap: Union[int, None] = None): - + self, + base_mesh_path: Union[str, os.PathLike, None], + temp_dir: Union[str, os.PathLike], + priorities: Sequence[int], + dem_files: Sequence[Union[str, os.PathLike]], + z_info: dict = {}, + chunk_size: Union[int, None] = None, + overlap: Union[int, None] = None, + ): _logger.info("Getting DEM info") poly_coll = [] @@ -360,10 +350,7 @@ def _serial_get_polygon( # Calculate Polygon _logger.info("Loading raster from file...") - rast = Raster( - dem_path, - chunk_size=chunk_size, - overlap=overlap) + rast = Raster(dem_path, chunk_size=chunk_size, overlap=overlap) # Can cause issue with bbox(?) if not self._calc_crs.equals(rast.crs): rast.warp(dst_crs=self._calc_crs) @@ -374,12 +361,10 @@ def _serial_get_polygon( # NOTE: We use the exterior from the earlier calc if self._base_exterior and not rast_box.within(self._base_exterior): if not rast_box.intersects(self._base_exterior): - _logger.info( - f"{dem_path} is ignored due to base mesh...") + _logger.info(f"{dem_path} is ignored due to base mesh...") continue - _logger.info( - f"{dem_path} needs clipping by base mesh...") + _logger.info(f"{dem_path} needs clipping by base mesh...") rast.clip(self._base_exterior) rast_box = box(*rast.src.bounds) @@ -388,69 +373,64 @@ def _serial_get_polygon( _logger.info("Getting polygons from geom...") geom_mult_poly = rast.get_multipolygon(**z_info) - geom_mult_poly = self._get_valid_multipolygon( - geom_mult_poly) + geom_mult_poly = self._get_valid_multipolygon(geom_mult_poly) if base_mesh_path is not None: _logger.info("Subtract DEM bounds from base mesh polygons...") self._base_mesh_lock.acquire() try: # Get a valid multipolygon from disk - base_mult_poly = self._read_multipolygon( - base_mesh_path) + base_mult_poly = self._read_multipolygon(base_mesh_path) # Get valid multipolygon after operation and write - base_mult_poly = base_mult_poly.difference( - rast_box) - self._multipolygon_to_disk( - base_mesh_path, base_mult_poly) + base_mult_poly = base_mult_poly.difference(rast_box) + self._multipolygon_to_disk(base_mesh_path, base_mult_poly) finally: self._base_mesh_lock.release() # TODO: Needs some code refinement due to bbox # Processing DEM priority -# priority_geodf = gpd.GeoDataFrame( -# columns=['geometry'], -# crs=self._calc_crs) -# for p in range(priority): -# higher_pri_path = ( -# pathlib.Path(temp_dir) / f'dem_priority_{p}.feather') -# -# if higher_pri_path.is_file(): -# priority_geodf = priority_geodf.append( -# self._read_to_geodf(higher_pri_path)) -# -# if len(priority_geodf): -# op_res = priority_geodf.unary_union -# pri_mult_poly = MultiPolygon() -# if isinstance(op_res, MultiPolygon): -# pri_mult_poly = op_res -# else: -# pri_mult_poly = MultiPolygon([op_res]) -# -# -# if rast_box.within(pri_mult_poly): -# _logger.info( -# f"{dem_path} is ignored due to priority...") -# continue -# -# if 
rast_box.intersects(pri_mult_poly): -# _logger.info( -# f"{dem_path} needs clipping by priority...") -# -# # Clipping raster can cause problem at -# # boundaries due to difference in pixel size -# # between high and low resolution rasters -# # so instead we operate on extracted polygons -# geom_mult_poly = geom_mult_poly.difference( -# pri_mult_poly) - + # priority_geodf = gpd.GeoDataFrame( + # columns=['geometry'], + # crs=self._calc_crs) + # for p in range(priority): + # higher_pri_path = ( + # pathlib.Path(temp_dir) / f'dem_priority_{p}.feather') + # + # if higher_pri_path.is_file(): + # priority_geodf = priority_geodf.append( + # self._read_to_geodf(higher_pri_path)) + # + # if len(priority_geodf): + # op_res = priority_geodf.unary_union + # pri_mult_poly = MultiPolygon() + # if isinstance(op_res, MultiPolygon): + # pri_mult_poly = op_res + # else: + # pri_mult_poly = MultiPolygon([op_res]) + # + # + # if rast_box.within(pri_mult_poly): + # _logger.info( + # f"{dem_path} is ignored due to priority...") + # continue + # + # if rast_box.intersects(pri_mult_poly): + # _logger.info( + # f"{dem_path} needs clipping by priority...") + # + # # Clipping raster can cause problem at + # # boundaries due to difference in pixel size + # # between high and low resolution rasters + # # so instead we operate on extracted polygons + # geom_mult_poly = geom_mult_poly.difference( + # pri_mult_poly) # Write geometry multipolygon to disk temp_path = ( - pathlib.Path(temp_dir) - / f'{pathlib.Path(dem_path).name}.feather') + pathlib.Path(temp_dir) / f"{pathlib.Path(dem_path).name}.feather" + ) try: self._multipolygon_to_disk(temp_path, geom_mult_poly) @@ -464,32 +444,38 @@ def _serial_get_polygon( return poly_coll - def _parallel_get_polygon_worker( - self, - base_mesh_path: Union[str, os.PathLike, None], - temp_dir: Union[str, os.PathLike], - priority: int, - dem_file: Union[str, os.PathLike], - z_info: dict = {}, - chunk_size: Union[int, None] = None, - overlap: Union[int, None] = None): + self, + base_mesh_path: Union[str, os.PathLike, None], + temp_dir: Union[str, os.PathLike], + priority: int, + dem_file: Union[str, os.PathLike], + z_info: dict = {}, + chunk_size: Union[int, None] = None, + overlap: Union[int, None] = None, + ): poly_coll_files = self._serial_get_polygon( - base_mesh_path, temp_dir, [priority], [dem_file], - z_info, chunk_size, overlap) + base_mesh_path, + temp_dir, + [priority], + [dem_file], + z_info, + chunk_size, + overlap, + ) # Only one item passed to serial code at most return poly_coll_files[0] if poly_coll_files else None - def _linearring_to_vert_edge( - self, - coords: List[Tuple[float, float]], - edges: List[Tuple[int, int]], - lin_ring: LinearRing): + self, + coords: List[Tuple[float, float]], + edges: List[Tuple[int, int]], + lin_ring: LinearRing, + ): - '''From shapely LinearRing get coords and edges''' + """From shapely LinearRing get coords and edges""" # NOTE: This function mutates coords and edges @@ -501,38 +487,35 @@ def _linearring_to_vert_edge( idx_e = len(coords) - 1 n_idx = len(coords) - edges.extend([ - (i, (i + 1) % n_idx + idx_b * ((i + 1) // n_idx)) - for i in range(idx_b, idx_e + 1)]) + edges.extend( + [ + (i, (i + 1) % n_idx + idx_b * ((i + 1) // n_idx)) + for i in range(idx_b, idx_e + 1) + ] + ) - - def _write_to_file( - self, out_format, out_file, multi_polygon, crs): + def _write_to_file(self, out_format, out_file, multi_polygon, crs): _logger.info(f"Writing for file ({out_format}) ...") # TODO: Check for correct extension on out_file if out_format == 
"shapefile": - gdf = gpd.GeoDataFrame( - {'geometry': multi_polygon}, - crs=self._calc_crs - ) + gdf = gpd.GeoDataFrame({"geometry": multi_polygon}, crs=self._calc_crs) if not crs.equals(self._calc_crs): _logger.info( f"Project from {self._calc_crs.to_string()} to" - f" {crs.to_string()} ...") + f" {crs.to_string()} ..." + ) gdf = gdf.to_crs(crs) gdf.to_file(out_file) elif out_format == "feather": - gdf = gpd.GeoDataFrame( - {'geometry': multi_polygon}, - crs=self._calc_crs - ) + gdf = gpd.GeoDataFrame({"geometry": multi_polygon}, crs=self._calc_crs) if not crs.equals(self._calc_crs): _logger.info( f"Project from {self._calc_crs.to_string()} to" - f" {crs.to_string()} ...") + f" {crs.to_string()} ..." + ) gdf = gdf.to_crs(crs) gdf.to_feather(out_file) @@ -541,32 +524,24 @@ def _write_to_file( if not crs.equals(self._calc_crs): _logger.info( f"Project from {self._calc_crs.to_string()} to" - f" {crs.to_string()} ...") - transformer = Transformer.from_crs( - self._calc_crs, crs, always_xy=True) - multi_polygon = ops.transform( - transformer.transform, multi_polygon) + f" {crs.to_string()} ..." + ) + transformer = Transformer.from_crs(self._calc_crs, crs, always_xy=True) + multi_polygon = ops.transform(transformer.transform, multi_polygon) msh = jigsaw_msh_t() msh.ndims = +2 - msh.mshID = 'euclidean-mesh' + msh.mshID = "euclidean-mesh" coords = [] edges = [] for polygon in multi_polygon: - self._linearring_to_vert_edge( - coords, edges, polygon.exterior) + self._linearring_to_vert_edge(coords, edges, polygon.exterior) for interior in polygon.interiors: - self._linearring_to_vert_edge( - coords, edges, interior) - - msh.vert2 = np.array( - [(i, 0) for i in coords], - dtype=jigsaw_msh_t.VERT2_t) - msh.edge2 = np.array( - [(i, 0) for i in edges], - dtype=jigsaw_msh_t.EDGE2_t) + self._linearring_to_vert_edge(coords, edges, interior) + msh.vert2 = np.array([(i, 0) for i in coords], dtype=jigsaw_msh_t.VERT2_t) + msh.edge2 = np.array([(i, 0) for i in edges], dtype=jigsaw_msh_t.EDGE2_t) if out_format == "jigsaw": savemsh(out_file, msh) diff --git a/ocsmesh/ops/combine_hfun.py b/ocsmesh/ops/combine_hfun.py index 779af4ad..3c857209 100644 --- a/ocsmesh/ops/combine_hfun.py +++ b/ocsmesh/ops/combine_hfun.py @@ -2,36 +2,35 @@ import os import pathlib from multiprocessing import cpu_count -from typing import Union, Sequence, List +from typing import List, Sequence, Union -from pyproj import CRS from jigsawpy import savemsh, savevtk +from pyproj import CRS -from ocsmesh.raster import Raster +from ocsmesh import utils from ocsmesh.hfun.hfun import Hfun from ocsmesh.mesh.mesh import Mesh -from ocsmesh import utils +from ocsmesh.raster import Raster _logger = logging.getLogger(__name__) class HfunCombine: - def __init__( - self, - dem_files: Sequence[Union[str, os.PathLike]], - out_file: Union[str, os.PathLike], - out_format: str = "shapefile", - mesh_file: Union[str, os.PathLike, None] = None, - hmin: Union[float, None] = None, - hmax: Union[float, None] = None, - contours: List[List[float]] = None, - constants: List[List[float]] = None, - chunk_size: Union[int, None] = None, - overlap: Union[int, None] = None, - method: str = 'exact', - nprocs: int = -1): - + self, + dem_files: Sequence[Union[str, os.PathLike]], + out_file: Union[str, os.PathLike], + out_format: str = "shapefile", + mesh_file: Union[str, os.PathLike, None] = None, + hmin: Union[float, None] = None, + hmax: Union[float, None] = None, + contours: List[List[float]] = None, + constants: List[List[float]] = None, + chunk_size: Union[int, None] = 
None, + overlap: Union[int, None] = None, + method: str = "exact", + nprocs: int = -1, + ): self._base_exterior = None @@ -47,22 +46,23 @@ def __init__( chunk_size=chunk_size, overlap=overlap, method=method, - nprocs=nprocs) + nprocs=nprocs, + ) def run(self): - dem_files = self._operation_info['dem_files'] - out_file = self._operation_info['out_file'] - out_format = self._operation_info['out_format'] - mesh_file = self._operation_info['mesh_file'] - hmin = self._operation_info['hmin'] - hmax = self._operation_info['hmax'] - contours = self._operation_info['contours'] - constants = self._operation_info['constants'] - chunk_size = self._operation_info['chunk_size'] - overlap = self._operation_info['overlap'] - method = self._operation_info['method'] - nprocs = self._operation_info['nprocs'] + dem_files = self._operation_info["dem_files"] + out_file = self._operation_info["out_file"] + out_format = self._operation_info["out_format"] + mesh_file = self._operation_info["mesh_file"] + hmin = self._operation_info["hmin"] + hmax = self._operation_info["hmax"] + contours = self._operation_info["contours"] + constants = self._operation_info["constants"] + chunk_size = self._operation_info["chunk_size"] + overlap = self._operation_info["overlap"] + method = self._operation_info["method"] + nprocs = self._operation_info["nprocs"] nprocs = cpu_count() if nprocs == -1 else nprocs @@ -78,48 +78,53 @@ def run(self): for dem_path in dem_files: logging.info(f"Loading raster {dem_path}...") - rast = Raster( - dem_path, chunk_size=chunk_size, overlap=overlap) + rast = Raster(dem_path, chunk_size=chunk_size, overlap=overlap) rast_list.append(rast) # Create Hfun logging.info("Creating Hfun from rasters...") hfun_collector = Hfun( - rast_list, base_mesh=base_mesh, - hmin=hmin, hmax=hmax, nprocs=nprocs, method=method) + rast_list, + base_mesh=base_mesh, + hmin=hmin, + hmax=hmax, + nprocs=nprocs, + method=method, + ) for contour in contours: logging.info("Adding contour refinement...") if len(contour) > 3: raise ValueError( "Invalid format for contour specification." - " It should be level [expansion target-size].") + " It should be level [expansion target-size]." + ) level, expansion_rate, target_size = [ - *contour, *[None]*(3-len(contour))] + *contour, + *[None] * (3 - len(contour)), + ] if level is None: raise ValueError( "Invalid format for contour specification." - " It should be level [expansion target-size].") + " It should be level [expansion target-size]." + ) if expansion_rate is None: expansion_rate = 0.1 if target_size is None: target_size = hmin - hfun_collector.add_contour( - level, expansion_rate, target_size) + hfun_collector.add_contour(level, expansion_rate, target_size) for lower_bound, target_size in constants: hfun_collector.add_constant_value( - value=target_size, lower_bound=lower_bound) - - self._write_to_file( - out_format, out_file, hfun_collector, 'EPSG:4326') + value=target_size, lower_bound=lower_bound + ) + self._write_to_file(out_format, out_file, hfun_collector, "EPSG:4326") - def _write_to_file( - self, out_format, out_file, hfun_collector, crs): + def _write_to_file(self, out_format, out_file, hfun_collector, crs): _logger.info(f"Writing for file ({out_format}) ...") @@ -138,10 +143,10 @@ def _write_to_file( elif out_format == "vtk": savevtk(out_file, jig_hfun) - elif out_format in ['2dm', 'sms']: + elif out_format in ["2dm", "sms"]: # TODO: How to specify crs in 2dm file? 
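# --- Editor's note (illustrative sketch, not part of the patch): how the
# --- contour specs parsed in HfunCombine.run above unpack. A spec is
# --- [level, expansion_rate, target_size] with the last two optional;
# --- missing slots are padded with None and then defaulted. The helper
# --- name and sample values are assumptions.
def unpack_contour(contour, hmin=100.0):
    if len(contour) > 3:
        raise ValueError("It should be level [expansion target-size].")
    level, expansion_rate, target_size = [*contour, *[None] * (3 - len(contour))]
    if level is None:
        raise ValueError("It should be level [expansion target-size].")
    if expansion_rate is None:
        expansion_rate = 0.1
    if target_size is None:
        target_size = hmin
    return level, expansion_rate, target_size

assert unpack_contour([0.0]) == (0.0, 0.1, 100.0)
assert unpack_contour([0.0, 0.2, 50.0]) == (0.0, 0.2, 50.0)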
mesh = Mesh(jig_hfun) - mesh.write(out_file, format='2dm') + mesh.write(out_file, format="2dm") else: raise NotImplementedError(f"Output type {out_format} is not supported") diff --git a/ocsmesh/raster.py b/ocsmesh/raster.py index 1ae5b0ac..30e1d7ac 100644 --- a/ocsmesh/raster.py +++ b/ocsmesh/raster.py @@ -1,58 +1,53 @@ -import math import hashlib import logging +import math import multiprocessing import os import pathlib import tempfile +import warnings +from contextlib import ExitStack, contextmanager from time import time from typing import Union -from contextlib import contextmanager, ExitStack -import warnings # from matplotlib.colors import LinearSegmentedColormap import geopandas as gpd -from matplotlib.cm import ScalarMappable import matplotlib.pyplot as plt -from matplotlib.transforms import Bbox -from mpl_toolkits.axes_grid1 import make_axes_locatable import numpy as np -from pyproj import CRS, Transformer import rasterio import rasterio.mask -from rasterio import warp +from matplotlib.cm import ScalarMappable +from matplotlib.transforms import Bbox +from mpl_toolkits.axes_grid1 import make_axes_locatable +from pyproj import CRS, Transformer +from rasterio import warp, windows from rasterio.enums import Resampling from rasterio.fill import fillnodata from rasterio.transform import array_bounds -from rasterio import windows from scipy.ndimage import gaussian_filter from shapely import ops -from shapely.geometry import ( - Polygon, MultiPolygon, LineString, MultiLineString, box) +from shapely.geometry import LineString, MultiLineString, MultiPolygon, Polygon, box # from ocsmesh.geom import Geom # from ocsmesh.hfun import Hfun -from ocsmesh import figures -from ocsmesh import utils +from ocsmesh import figures, utils _logger = logging.getLogger(__name__) -tmpdir = str(pathlib.Path(tempfile.gettempdir()+'/ocsmesh'))+'/' +tmpdir = str(pathlib.Path(tempfile.gettempdir() + "/ocsmesh")) + "/" os.makedirs(tmpdir, exist_ok=True) class RasterPath: - def __set__(self, obj, val: Union[str, os.PathLike]): - obj.__dict__['path'] = pathlib.Path(val) + obj.__dict__["path"] = pathlib.Path(val) def __get__(self, obj, val): - return obj.__dict__['path'] + return obj.__dict__["path"] class Crs: - def __set__(self, obj, val: Union[str, CRS, None]): # check if CRS is in file @@ -63,16 +58,17 @@ def __set__(self, obj, val: Union[str, CRS, None]): # Program cannot operate with an undefined CRS. val = src.crs if val is None: - raise IOError( - 'CRS not found in raster file. Must specify CRS.') + raise IOError("CRS not found in raster file. Must specify CRS.") # CRS is specified by user rewrite raster but add CRS to meta else: if isinstance(val, str): val = CRS.from_user_input(val) if not isinstance(val, CRS): - raise TypeError(f'Argument crs must be of type {str} or {CRS},' - f' not type {type(val)}.') + raise TypeError( + f"Argument crs must be of type {str} or {CRS}," + f" not type {type(val)}." + ) # create a temporary copy of the original file and update meta. 
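# --- Editor's note (illustrative sketch, not part of the patch): the
# --- data-descriptor pattern raster.py uses for RasterPath, Crs,
# --- ChunkSize and friends: __set__ normalizes the value and stashes it
# --- in the instance __dict__, and __get__ reads it back. Class names
# --- here are made up.
import os
import pathlib
from typing import Union


class PathAttr:
    def __set__(self, obj, val: Union[str, os.PathLike]):
        obj.__dict__["path"] = pathlib.Path(val)

    def __get__(self, obj, objtype=None):
        return obj.__dict__["path"]


class Demo:
    path = PathAttr()

    def __init__(self, path):
        self.path = path  # routed through PathAttr.__set__


assert isinstance(Demo("a/b.tif").path, pathlib.Path)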
with ExitStack() as stack: @@ -80,62 +76,57 @@ def __set__(self, obj, val: Union[str, CRS, None]): src = stack.enter_context(rasterio.open(obj.path)) if obj.chunk_size is not None: wins = get_iter_windows( - src.width, src.height, chunk_size=obj.chunk_size) + src.width, src.height, chunk_size=obj.chunk_size + ) else: - wins = [windows.Window( - 0, 0, src.width, src.height)] + wins = [windows.Window(0, 0, src.width, src.height)] - dst = stack.enter_context( - obj.modifying_raster(crs=val, driver='GTiff')) + dst = stack.enter_context(obj.modifying_raster(crs=val, driver="GTiff")) for window in wins: dst.write(src.read(window=window), window=window) class TemporaryFile: - def __set__(self, obj, val): - obj.__dict__['tmpfile'] = val + obj.__dict__["tmpfile"] = val obj._src = rasterio.open(val.name) def __get__(self, obj, val) -> pathlib.Path: - tmpfile = obj.__dict__.get('tmpfile') + tmpfile = obj.__dict__.get("tmpfile") if tmpfile is None: return obj.path return pathlib.Path(tmpfile.name) class SourceRaster: - def __get__(self, obj, val) -> rasterio.DatasetReader: - source = obj.__dict__.get('source') + source = obj.__dict__.get("source") if source is None: source = rasterio.open(obj.path) - obj.__dict__['source'] = source + obj.__dict__["source"] = source return source def __set__(self, obj, val: rasterio.DatasetReader): - obj.__dict__['source'] = val + obj.__dict__["source"] = val class ChunkSize: - def __set__(self, obj, val): chunk_size = 0 if val is None else int(val) if not chunk_size >= 0: raise ValueError("Argument chunk_size must be >= 0.") - obj.__dict__['chunk_size'] = val + obj.__dict__["chunk_size"] = val def __get__(self, obj, val): - return obj.__dict__['chunk_size'] + return obj.__dict__["chunk_size"] class Overlap: - def __set__(self, obj, val): - obj.__dict__['overlap'] = 0 if val is None else val + obj.__dict__["overlap"] = 0 if val is None else val def __get__(self, obj, val): - return obj.__dict__['overlap'] + return obj.__dict__["overlap"] class Raster: @@ -148,11 +139,11 @@ class Raster: _src = SourceRaster() def __init__( - self, - path: Union[str, os.PathLike], - crs: Union[str, CRS] = None, - chunk_size=None, - overlap=None + self, + path: Union[str, os.PathLike], + crs: Union[str, CRS] = None, + chunk_size=None, + overlap=None, ): self._chunk_size = chunk_size self._overlap = overlap @@ -175,7 +166,7 @@ def modifying_raster(self, use_src_meta=True, **kwargs): if use_src_meta: new_meta = self.src.meta.copy() new_meta.update(**kwargs) - with rasterio.open(tmpfile.name, 'w', **new_meta) as dst: + with rasterio.open(tmpfile.name, "w", **new_meta) as dst: yield dst no_except = True @@ -186,11 +177,12 @@ def modifying_raster(self, use_src_meta=True, **kwargs): # goes out of scope self._tmpfile = tmpfile - - def get_x(self, window=None): - window = windows.Window(0, 0, self.src.shape[1], self.src.shape[0]) \ - if window is None else window + window = ( + windows.Window(0, 0, self.src.shape[1], self.src.shape[0]) + if window is None + else window + ) if window is not None: assert isinstance(window, windows.Window) width = window.width @@ -200,8 +192,11 @@ def get_x(self, window=None): return np.linspace(x0, x1, width) def get_y(self, window=None): - window = windows.Window(0, 0, self.src.shape[1], self.src.shape[0]) \ - if window is None else window + window = ( + windows.Window(0, 0, self.src.shape[1], self.src.shape[0]) + if window is None + else window + ) if window is not None: assert isinstance(window, windows.Window) height = window.height @@ -216,26 +211,28 @@ def 
get_xy(self, window=None): def get_values(self, window=None, band=None, **kwargs): i = 1 if band is None else band - window = windows.Window(0, 0, self.src.shape[1], self.src.shape[0]) \ - if window is None else window + window = ( + windows.Window(0, 0, self.src.shape[1], self.src.shape[0]) + if window is None + else window + ) if window is not None: assert isinstance(window, windows.Window) return self.src.read(i, window=window, **kwargs) def get_xyz(self, window=None, band=None): xy = self.get_xy(window) - values = self.get_values(window=window, band=band).reshape( - (xy.shape[0], 1)) + values = self.get_values(window=window, band=band).reshape((xy.shape[0], 1)) return np.hstack([xy, values]) def get_multipolygon( - self, - hmin=None, - zmin=None, - zmax=None, - window=None, - overlap=None, - band=1, + self, + hmin=None, + zmin=None, + zmax=None, + window=None, + overlap=None, + band=1, ): polygon_collection = [] if window is None: @@ -261,8 +258,7 @@ def get_multipolygon( fig, ax = plt.subplots() ax.contourf(x, y, new_mask, levels=[0, 1]) plt.close(fig) - polygon_collection.extend( - list(utils.get_multipolygon_from_pathplot(ax))) + polygon_collection.extend(list(utils.get_multipolygon_from_pathplot(ax))) union_result = ops.unary_union(polygon_collection) if not isinstance(union_result, MultiPolygon): @@ -270,46 +266,44 @@ def get_multipolygon( return union_result def get_bbox( - self, - crs: Union[str, CRS] = None, - output_type: str = None + self, crs: Union[str, CRS] = None, output_type: str = None ) -> Union[Polygon, Bbox]: - output_type = 'polygon' if output_type is None else output_type + output_type = "polygon" if output_type is None else output_type xmin, xmax = np.min(self.x), np.max(self.x) ymin, ymax = np.min(self.y), np.max(self.y) crs = self.crs if crs is None else crs if crs is not None: if not self.crs.equals(crs): - transformer = Transformer.from_crs( - self.crs, crs, always_xy=True) + transformer = Transformer.from_crs(self.crs, crs, always_xy=True) # pylint: disable=E0633 (xmin, xmax), (ymin, ymax) = transformer.transform( - (xmin, xmax), (ymin, ymax)) - if output_type == 'polygon': # pylint: disable=R1705 + (xmin, xmax), (ymin, ymax) + ) + if output_type == "polygon": # pylint: disable=R1705 return box(xmin, ymin, xmax, ymax) - elif output_type == 'bbox': + elif output_type == "bbox": return Bbox([[xmin, ymin], [xmax, ymax]]) raise TypeError( - 'Argument output_type must a string literal \'polygon\' or ' - '\'bbox\'') + "Argument output_type must a string literal 'polygon' or " "'bbox'" + ) def contourf( - self, - band=1, - window=None, - axes=None, - vmin=None, - vmax=None, - cmap='topobathy', - levels=None, - show=False, - title=None, - figsize=None, - colors=256, - cbar_label=None, - norm=None, - **kwargs + self, + band=1, + window=None, + axes=None, + vmin=None, + vmax=None, + cmap="topobathy", + levels=None, + show=False, + title=None, + figsize=None, + colors=256, + cbar_label=None, + norm=None, + **kwargs, ): if axes is None: fig = plt.figure(figsize=figsize) @@ -317,8 +311,7 @@ def contourf( values = self.get_values(band=band, masked=True, window=window) vmin = np.min(values) if vmin is None else float(vmin) vmax = np.max(values) if vmax is None else float(vmax) - cmap, norm, levels, col_val = figures.get_topobathy_kwargs( - values, vmin, vmax) + cmap, norm, levels, col_val = figures.get_topobathy_kwargs(values, vmin, vmax) axes.contourf( self.get_x(window), self.get_y(window), @@ -328,9 +321,9 @@ def contourf( norm=norm, vmin=vmin, vmax=vmax, - **kwargs - ) 
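# --- Editor's note (illustrative sketch, not part of the patch): the
# --- contour-to-polygon trick behind get_multipolygon above, which
# --- renders a 0/1 mask with ax.contourf and then harvests the filled
# --- paths (the job utils.get_multipolygon_from_pathplot does in the
# --- source). The 3x3 mask and levels are made-up examples.
import matplotlib.pyplot as plt
import numpy as np
from shapely.geometry import MultiPolygon, Polygon

mask = np.zeros((3, 3))
mask[1, 1] = 1.0
x = y = np.arange(3)
fig, ax = plt.subplots()
ax.contourf(x, y, mask, levels=[0.5, 1.5])
polys = []
for collection in ax.collections:
    for path in collection.get_paths():
        for ring in path.to_polygons(closed_only=True):
            if len(ring) > 3:
                polys.append(Polygon(ring))
plt.close(fig)
result = MultiPolygon(polys)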
- axes.axis('scaled') + **kwargs, + ) + axes.axis("scaled") if title is not None: axes.set_title(title) mappable = ScalarMappable(cmap=cmap) @@ -342,10 +335,10 @@ def contourf( mappable, cax=cax, # extend=cmap_extend, - orientation='horizontal' - ) + orientation="horizontal", + ) if col_val != 0: - cbar.set_ticks([vmin, vmin + col_val * (vmax-vmin), vmax]) + cbar.set_ticks([vmin, vmin + col_val * (vmax - vmin), vmax]) cbar.set_ticklabels([np.around(vmin, 2), 0.0, np.around(vmax, 2)]) else: cbar.set_ticks([vmin, vmax]) @@ -365,10 +358,10 @@ def read(self, i, masked=True, **kwargs): return self.src.read(i, masked=masked, **kwargs) def dtype(self, i): - return self.src.dtypes[i-1] + return self.src.dtypes[i - 1] def nodataval(self, i): - return self.src.nodatavals[i-1] + return self.src.nodatavals[i - 1] def sample(self, xy, i): return self.src.sample(xy, i) @@ -376,13 +369,13 @@ def sample(self, xy, i): def close(self): del self._src - def add_band(self, values, **tags): + def add_band(self, values, **tags): kwargs = self.src.meta.copy() band_id = kwargs["count"] + 1 with self.modifying_raster(count=band_id) as dst: for i in range(1, self.src.count + 1): dst.write_band(i, self.src.read(i)) - dst.write_band(band_id, values.astype(self.src.dtypes[i-1])) + dst.write_band(band_id, values.astype(self.src.dtypes[i - 1])) return band_id def fill_nodata(self): @@ -394,9 +387,8 @@ def fill_nodata(self): with self.modifying_raster() as dst: for window in self.iter_windows(): dst.write( - fillnodata(self.src.read(window=window, masked=True)), - window=window - ) + fillnodata(self.src.read(window=window, masked=True)), window=window + ) def gaussian_filter(self, **kwargs): @@ -408,8 +400,8 @@ def gaussian_filter(self, **kwargs): with self.modifying_raster() as dst: for i in range(1, self.src.count + 1): outband = self.src.read(i) -# # Write orignal band -# dst.write_band(i + n_bands_new // 2, outband) + # # Write orignal band + # dst.write_band(i + n_bands_new // 2, outband) # Write filtered band outband = gaussian_filter(outband, **kwargs) dst.write_band(i, outband) @@ -419,12 +411,12 @@ def mask(self, shapes, i=None, **kwargs): with self.modifying_raster(**kwargs) as dst: if i is None: for j in range(1, self.src.count + 1): - dst.write_band(j, out_images[j-1]) + dst.write_band(j, out_images[j - 1]) dst.update_tags(j, **self.src.tags(j)) else: for j in range(1, self.src.count + 1): if i == j: - dst.write_band(j, out_images[j-1]) + dst.write_band(j, out_images[j - 1]) dst.update_tags(j, **self.src.tags(j)) else: dst.write_band(j, self.src.read(j)) @@ -432,8 +424,7 @@ def mask(self, shapes, i=None, **kwargs): def read_masks(self, i=None): if i is None: - return np.dstack( - [self.src.read_masks(i) for i in range(1, self.count + 1)]) + return np.dstack([self.src.read_masks(i) for i in range(1, self.count + 1)]) return self.src.read_masks(i) @@ -448,14 +439,14 @@ def warp(self, dst_crs, nprocs=-1): self.src.height, *self.src.bounds, dst_width=self.src.width, - dst_height=self.src.height - ) + dst_height=self.src.height, + ) meta_update = { - 'crs': dst_crs.srs, - 'transform': transform, - 'width': width, - 'height': height + "crs": dst_crs.srs, + "transform": transform, + "width": width, + "height": height, } with self.modifying_raster(**meta_update) as dst: for i in range(1, self.src.count + 1): @@ -468,8 +459,7 @@ def warp(self, dst_crs, nprocs=-1): dst_crs=dst_crs.srs, resampling=self.resampling_method, num_threads=nprocs, - ) - + ) def resample(self, scaling_factor, resampling_method=None): if 
resampling_method is None: @@ -482,27 +472,17 @@ def resample(self, scaling_factor, resampling_method=None): width = int(self.src.width * scaling_factor) height = int(self.src.height * scaling_factor) data = self.src.read( - out_shape=( - self.src.count, - height, - width - ), - resampling=resampling_method + out_shape=(self.src.count, height, width), resampling=resampling_method ) transform = self.src.transform * self.src.transform.scale( - (self.src.width / data.shape[-1]), - (self.src.height / data.shape[-2]) + (self.src.width / data.shape[-1]), (self.src.height / data.shape[-2]) ) - meta_update = { - 'transform': transform, - 'width': width, - 'height': height - } + meta_update = {"transform": transform, "width": width, "height": height} with self.modifying_raster(**meta_update) as dst: dst.write(data) def save(self, path): - with rasterio.open(pathlib.Path(path), 'w', **self.src.meta) as dst: + with rasterio.open(pathlib.Path(path), "w", **self.src.meta) as dst: for i in range(1, self.src.count + 1): dst.write_band(i, self.src.read(i)) dst.update_tags(i, **self.src.tags(i)) @@ -510,47 +490,43 @@ def save(self, path): def clip(self, geom: Union[Polygon, MultiPolygon]): if isinstance(geom, Polygon): geom = MultiPolygon([geom]) - out_image, out_transform = rasterio.mask.mask( - self.src, geom, crop=True) + out_image, out_transform = rasterio.mask.mask(self.src, geom, crop=True) meta_update = { "driver": "GTiff", "height": out_image.shape[1], "width": out_image.shape[2], - "transform": out_transform - } + "transform": out_transform, + } with self.modifying_raster(**meta_update) as dest: dest.write(out_image) - def adjust( - self, - geom: Union[Polygon, MultiPolygon], - inside_min=0.5, - outside_max=-0.5): + self, geom: Union[Polygon, MultiPolygon], inside_min=0.5, outside_max=-0.5 + ): if isinstance(geom, Polygon): geom = MultiPolygon([geom]) - with self.modifying_raster(driver='GTiff') as dst: + with self.modifying_raster(driver="GTiff") as dst: iter_windows = list(self.iter_windows()) tot = len(iter_windows) for i, window in enumerate(iter_windows): - _logger.debug(f'Processing window {i+1}/{tot}.') + _logger.debug(f"Processing window {i+1}/{tot}.") # NOTE: We should NOT transform polygon, user just # needs to make sure input polygon has the same CRS # as the hfun (we don't calculate distances in this # method) - _logger.info('Creating mask from shape ...') + _logger.info("Creating mask from shape ...") start = time() values = self.get_values(window=window).copy() mask = np.zeros_like(values) try: mask, _, _ = rasterio.mask.raster_geometry_mask( - self.src, geom, - all_touched=True, invert=True) + self.src, geom, all_touched=True, invert=True + ) mask = mask[rasterio.windows.window_index(window)] except ValueError: @@ -558,36 +534,30 @@ def adjust( # shapes then it throws ValueError, instead of # checking for intersection, if there's a value # error we assume there's no overlap - _logger.debug( - 'Polygons don\'t intersect with the raster') + _logger.debug("Polygons don't intersect with the raster") - _logger.info( - f'Creating mask from shape took {time()-start}.') + _logger.info(f"Creating mask from shape took {time()-start}.") if mask.any(): - values[np.where(np.logical_and( - values < inside_min, mask) - )] = inside_min - - values[np.where(np.logical_and( - values > outside_max, np.logical_not(mask)) - )] = outside_max + values[ + np.where(np.logical_and(values < inside_min, mask)) + ] = inside_min + + values[ + np.where( + np.logical_and(values > outside_max, np.logical_not(mask)) 
+ ) + ] = outside_max else: values[values > outside_max] = outside_max - _logger.info('Write array to file...') + _logger.info("Write array to file...") start = time() dst.write_band(1, values, window=window) - _logger.info(f'Write array to file took {time()-start}.') - + _logger.info(f"Write array to file took {time()-start}.") - def get_contour( - self, - level: float, - window: rasterio.windows.Window = None - ): - _logger.debug( - f'RasterHfun.get_raster_contours(level={level}, window={window})') + def get_contour(self, level: float, window: rasterio.windows.Window = None): + _logger.debug(f"RasterHfun.get_raster_contours(level={level}, window={window})") if window is None: iter_windows = list(self.iter_windows()) else: @@ -598,29 +568,25 @@ def get_contour( return self._get_raster_contour_windowed(level, window) def get_channels( - self, - level: float = 0, - width: float = 1000, # in meters - tolerance: Union[None, float] = None + self, + level: float = 0, + width: float = 1000, # in meters + tolerance: Union[None, float] = None, ): multipoly = self.get_multipolygon(zmax=level) - utm_crs = utils.estimate_bounds_utm( - self.get_bbox().bounds, self.crs) + utm_crs = utils.estimate_bounds_utm(self.get_bbox().bounds, self.crs) if utm_crs is not None: - transformer = Transformer.from_crs( - self.src.crs, utm_crs, always_xy=True) + transformer = Transformer.from_crs(self.src.crs, utm_crs, always_xy=True) multipoly = ops.transform(transformer.transform, multipoly) - channels = utils.get_polygon_channels( - multipoly, width, simplify=tolerance) + channels = utils.get_polygon_channels(multipoly, width, simplify=tolerance) if channels is None: return None if utm_crs is not None: - transformer = Transformer.from_crs( - utm_crs, self.src.crs, always_xy=True) + transformer = Transformer.from_crs(utm_crs, self.src.crs, always_xy=True) channels = ops.transform(transformer.transform, channels) return channels @@ -630,12 +596,12 @@ def _get_raster_contour_windowed(self, level, window): features = [] values = self.get_values(band=1, window=window) with warnings.catch_warnings(): - warnings.simplefilter('ignore', UserWarning) - _logger.debug('Computing contours...') + warnings.simplefilter("ignore", UserWarning) + _logger.debug("Computing contours...") start = time() fig, ax = plt.subplots() ax.contour(x, y, values, levels=[level]) - _logger.debug(f'Took {time()-start}...') + _logger.debug(f"Took {time()-start}...") plt.close(fig) for path_collection in ax.collections: for path in path_collection.get_paths(): @@ -650,25 +616,26 @@ def _get_raster_contour_feathered(self, level, iter_windows): with tempfile.TemporaryDirectory(dir=tmpdir) as feather_dir: results = self._get_raster_contour_feathered_internal( - level, iter_windows, feather_dir) + level, iter_windows, feather_dir + ) return results def _get_raster_contour_feathered_internal(self, level, iter_windows, temp_dir): feathers = [] total_windows = len(iter_windows) - _logger.debug(f'Total windows to process: {total_windows}.') + _logger.debug(f"Total windows to process: {total_windows}.") for i, window in enumerate(iter_windows): x, y = self.get_x(window), self.get_y(window) - _logger.debug(f'Processing window {i+1}/{total_windows}.') + _logger.debug(f"Processing window {i+1}/{total_windows}.") features = [] values = self.get_values(band=1, window=window) with warnings.catch_warnings(): - warnings.simplefilter('ignore', UserWarning) - _logger.debug('Computing contours...') + warnings.simplefilter("ignore", UserWarning) + _logger.debug("Computing 
contours...") start = time() fig, ax = plt.subplots() ax.contour(x, y, values, levels=[level]) - _logger.debug(f'Took {time()-start}...') + _logger.debug(f"Took {time()-start}...") plt.close(fig) for path_collection in ax.collections: for path in path_collection.get_paths(): @@ -678,14 +645,12 @@ def _get_raster_contour_feathered_internal(self, level, iter_windows, temp_dir): # LineStrings must have at least 2 coordinate tuples pass if len(features) > 0: - tmpfile = os.path.join(temp_dir, f'file_{i}.feather') - _logger.debug('Saving feather.') + tmpfile = os.path.join(temp_dir, f"file_{i}.feather") + _logger.debug("Saving feather.") features = ops.linemerge(features) - gpd.GeoDataFrame( - [{'geometry': features}] - ).to_feather(tmpfile) + gpd.GeoDataFrame([{"geometry": features}]).to_feather(tmpfile) feathers.append(tmpfile) - _logger.debug('Concatenating feathers.') + _logger.debug("Concatenating feathers.") features = [] out = gpd.GeoDataFrame() for feather in feathers: @@ -698,7 +663,7 @@ def _get_raster_contour_feathered_internal(self, level, iter_windows, temp_dir): break for linestring in geometry: features.append(linestring) - _logger.debug('Merging features.') + _logger.debug("Merging features.") return ops.linemerge(features) def iter_windows(self, chunk_size=None, overlap=None): @@ -708,8 +673,7 @@ def iter_windows(self, chunk_size=None, overlap=None): yield rasterio.windows.Window(0, 0, self.width, self.height) return - for window in get_iter_windows( - self.width, self.height, chunk_size, overlap): + for window in get_iter_windows(self.width, self.height, chunk_size, overlap): yield window def get_window_data(self, window, masked=True, band=None): @@ -724,9 +688,8 @@ def get_window_data(self, window, masked=True, band=None): def get_window_bounds(self, window): return array_bounds( - window.height, - window.width, - self.get_window_transform(window)) + window.height, window.width, self.get_window_transform(window) + ) def get_window_transform(self, window): if window is None: @@ -831,7 +794,7 @@ def yres(self): @property def resampling_method(self): - if not hasattr(self, '_resampling_method'): + if not hasattr(self, "_resampling_method"): self._resampling_method = Resampling.nearest return self._resampling_method @@ -839,8 +802,9 @@ def resampling_method(self): def resampling_method(self, resampling_method): if not isinstance(resampling_method, Resampling): TypeError( - f'Argument resampling_method must be of type {Resampling}, ' - f'not type {type(resampling_method)}.') + f"Argument resampling_method must be of type {Resampling}, " + f"not type {type(resampling_method)}." 
+ ) self._resampling_method = resampling_method @property @@ -860,14 +824,7 @@ def overlap(self, overlap): self._overlap = overlap -def get_iter_windows( - width, - height, - chunk_size=0, - overlap=0, - row_off=0, - col_off=0 -): +def get_iter_windows(width, height, chunk_size=0, overlap=0, row_off=0, col_off=0): win_h = chunk_size + overlap win_w = chunk_size + overlap n_win_h = math.ceil(height / chunk_size) @@ -884,16 +841,18 @@ def get_iter_windows( def redistribute_vertices(geom, distance): - if geom.geom_type == 'LineString': # pylint: disable=R1705 + if geom.geom_type == "LineString": # pylint: disable=R1705 num_vert = int(round(geom.length / distance)) if num_vert == 0: num_vert = 1 return LineString( - [geom.interpolate(float(n) / num_vert, normalized=True) - for n in range(num_vert + 1)]) - elif geom.geom_type == 'MultiLineString': - parts = [redistribute_vertices(part, distance) - for part in geom] + [ + geom.interpolate(float(n) / num_vert, normalized=True) + for n in range(num_vert + 1) + ] + ) + elif geom.geom_type == "MultiLineString": + parts = [redistribute_vertices(part, distance) for part in geom] return type(geom)([p for p in parts if not p.is_empty]) - raise ValueError(f'unhandled geometry {geom.geom_type}') + raise ValueError(f"unhandled geometry {geom.geom_type}") diff --git a/ocsmesh/utils.py b/ocsmesh/utils.py index 74609ea1..30c6e546 100644 --- a/ocsmesh/utils.py +++ b/ocsmesh/utils.py @@ -1,37 +1,37 @@ from collections import defaultdict -from itertools import permutations -from typing import Union, Dict, Sequence, Tuple -from functools import reduce -from multiprocessing import cpu_count, Pool from copy import deepcopy +from functools import reduce +from itertools import permutations +from multiprocessing import Pool, cpu_count +from typing import Dict, Sequence, Tuple, Union +import geopandas as gpd import jigsawpy +import matplotlib.pyplot as plt # type: ignore[import] +import numpy as np # type: ignore[import] +import utm from jigsawpy import jigsaw_msh_t # type: ignore[import] from matplotlib.path import Path # type: ignore[import] -import matplotlib.pyplot as plt # type: ignore[import] from matplotlib.tri import Triangulation # type: ignore[import] -import numpy as np # type: ignore[import] from pyproj import CRS, Transformer # type: ignore[import] -from scipy.interpolate import ( # type: ignore[import] - RectBivariateSpline, griddata) from scipy import sparse -from shapely.geometry import ( # type: ignore[import] - Polygon, MultiPolygon, - box, GeometryCollection, Point, MultiPoint, - LineString, LinearRing) -from shapely.ops import polygonize, linemerge -import geopandas as gpd -import utm +from scipy.interpolate import RectBivariateSpline # type: ignore[import] +from scipy.interpolate import griddata +from shapely.geometry import GeometryCollection # type: ignore[import] +from shapely.geometry import (LinearRing, LineString, MultiPoint, MultiPolygon, + Point, Polygon, box) +from shapely.ops import linemerge, polygonize +ELEM_2D_TYPES = ["tria3", "quad4", "hexa8"] -ELEM_2D_TYPES = ['tria3', 'quad4', 'hexa8'] def must_be_euclidean_mesh(func): def decorator(mesh, *args, **kwargs): - if mesh.mshID.lower() != 'euclidean-mesh': + if mesh.mshID.lower() != "euclidean-mesh": msg = f"Not implemented for mshID={mesh.mshID}" raise NotImplementedError(msg) return func(mesh, *args, **kwargs) + return decorator @@ -40,52 +40,48 @@ def mesh_to_tri(mesh): mesh is a jigsawpy.jigsaw_msh_t() instance. 
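# --- Editor's note (illustrative sketch, not part of the patch): a tiny
# --- end-to-end use of mesh_to_tri, whose body follows. The
# --- two-triangle mesh is a made-up example; the structured dtypes
# --- (VERT2_t, TRIA3_t) come from jigsawpy.
import numpy as np
from jigsawpy import jigsaw_msh_t
from matplotlib.tri import Triangulation

mesh = jigsaw_msh_t()
mesh.mshID = "euclidean-mesh"
mesh.ndims = +2
mesh.vert2 = np.array(
    [((0.0, 0.0), 0), ((1.0, 0.0), 0), ((1.0, 1.0), 0), ((0.0, 1.0), 0)],
    dtype=jigsaw_msh_t.VERT2_t,
)
mesh.tria3 = np.array([((0, 1, 2), 0), ((0, 2, 3), 0)], dtype=jigsaw_msh_t.TRIA3_t)
tri = Triangulation(
    mesh.vert2["coord"][:, 0], mesh.vert2["coord"][:, 1], mesh.tria3["index"]
)
assert tri.triangles.shape == (2, 3)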
""" return Triangulation( - mesh.vert2['coord'][:, 0], - mesh.vert2['coord'][:, 1], - mesh.tria3['index']) + mesh.vert2["coord"][:, 0], mesh.vert2["coord"][:, 1], mesh.tria3["index"] + ) def cleanup_isolates(mesh): # For triangle only (TODO: add support for other types) - node_indexes = np.arange(mesh.vert2['coord'].shape[0]) - used_indexes = np.unique(mesh.tria3['index']) - vert2_idxs = np.where( - np.isin(node_indexes, used_indexes, assume_unique=True))[0] + node_indexes = np.arange(mesh.vert2["coord"].shape[0]) + used_indexes = np.unique(mesh.tria3["index"]) + vert2_idxs = np.where(np.isin(node_indexes, used_indexes, assume_unique=True))[0] # Since tria simplex refers to node index which always starts from # 0 after removing isolate nodes we can use the map approach - tria3 = mesh.tria3['index'].flatten() + tria3 = mesh.tria3["index"].flatten() renum = {old: new for new, old in enumerate(np.unique(tria3))} tria3 = np.array([renum[i] for i in tria3]) - tria3 = tria3.reshape(mesh.tria3['index'].shape) + tria3 = tria3.reshape(mesh.tria3["index"].shape) mesh.vert2 = mesh.vert2.take(vert2_idxs, axis=0) if len(mesh.value) > 0: mesh.value = mesh.value.take(vert2_idxs, axis=0) mesh.tria3 = np.asarray( - [(tuple(indices), mesh.tria3['IDtag'][i]) - for i, indices in enumerate(tria3)], - dtype=jigsaw_msh_t.TRIA3_t) + [(tuple(indices), mesh.tria3["IDtag"][i]) for i, indices in enumerate(tria3)], + dtype=jigsaw_msh_t.TRIA3_t, + ) def put_edge2(mesh): tri = Triangulation( - mesh.vert2['coord'][:, 0], - mesh.vert2['coord'][:, 1], - mesh.tria3['index']) - mesh.edge2 = np.array( - [(edge, 0) for edge in tri.edges], dtype=jigsaw_msh_t.EDGE2_t) + mesh.vert2["coord"][:, 0], mesh.vert2["coord"][:, 1], mesh.tria3["index"] + ) + mesh.edge2 = np.array([(edge, 0) for edge in tri.edges], dtype=jigsaw_msh_t.EDGE2_t) def geom_to_multipolygon(mesh): - vertices = mesh.vert2['coord'] + vertices = mesh.vert2["coord"] idx_ring_coll = index_ring_collection(mesh) polygon_collection = [] for polygon in idx_ring_coll.values(): - exterior = vertices[polygon['exterior'][:, 0], :] + exterior = vertices[polygon["exterior"][:, 0], :] interiors = [] - for interior in polygon['interiors']: + for interior in polygon["interiors"]: interiors.append(vertices[interior[:, 0], :]) polygon_collection.append(Polygon(exterior, interiors)) return MultiPolygon(polygon_collection) @@ -93,30 +89,25 @@ def geom_to_multipolygon(mesh): def get_boundary_segments(mesh): - coords = mesh.vert2['coord'] + coords = mesh.vert2["coord"] boundary_edges = get_boundary_edges(mesh) boundary_verts = np.unique(boundary_edges) boundary_coords = coords[boundary_verts] - vert_map = { - orig: new for new, orig in enumerate(boundary_verts)} + vert_map = {orig: new for new, orig in enumerate(boundary_verts)} new_boundary_edges = np.array( - [vert_map[v] for v in boundary_edges.ravel()]).reshape( - boundary_edges.shape) + [vert_map[v] for v in boundary_edges.ravel()] + ).reshape(boundary_edges.shape) - graph = sparse.lil_matrix( - (len(boundary_verts), len(boundary_verts))) + graph = sparse.lil_matrix((len(boundary_verts), len(boundary_verts))) for vert1, vert2 in new_boundary_edges: graph[vert1, vert2] = 1 - n_components, labels = sparse.csgraph.connected_components( - graph, directed=False) + n_components, labels = sparse.csgraph.connected_components(graph, directed=False) segments = [] for i in range(n_components): - conn_mask = np.any(np.isin( - new_boundary_edges, np.nonzero(labels == i)), - axis=1) + conn_mask = np.any(np.isin(new_boundary_edges, 
np.nonzero(labels == i)), axis=1) conn_edges = new_boundary_edges[conn_mask] this_segment = linemerge(boundary_coords[conn_edges]) if not this_segment.is_simple: @@ -125,8 +116,7 @@ def get_boundary_segments(mesh): # for other issues like folded elements test_polys = list(polygonize(this_segment)) if not test_polys: - raise ValueError( - "Mesh boundary crosses itself! Folded element(s)!") + raise ValueError("Mesh boundary crosses itself! Folded element(s)!") segments.append(this_segment) return segments @@ -134,7 +124,6 @@ def get_boundary_segments(mesh): def get_mesh_polygons(mesh): - # TODO: Copy mesh? target_mesh = mesh result_polys = [] @@ -144,8 +133,7 @@ def get_mesh_polygons(mesh): # intersect any vertex for find_pass in range(2): - - coords = target_mesh.vert2['coord'] + coords = target_mesh.vert2["coord"] if len(coords) == 0: continue @@ -158,28 +146,24 @@ def get_mesh_polygons(mesh): polys = list(poly_gen) polys = sorted(polys, key=lambda p: p.area, reverse=True) - bndry_verts = np.unique(boundary_edges) if find_pass == 0: - non_bndry_verts = np.setdiff1d( - np.arange(len(coords)), bndry_verts) + non_bndry_verts = np.setdiff1d(np.arange(len(coords)), bndry_verts) pnts = MultiPoint(coords[non_bndry_verts]) else: pnts = MultiPoint(coords[bndry_verts]) - # NOTE: This logic requires polygons to be sorted by area pass_valid_polys = [] while len(pnts): - idx = np.random.randint(len(pnts)) pnt = pnts[idx] polys_gdf = gpd.GeoDataFrame( - {'geometry': polys, 'list_index': range(len(polys))}) - + {"geometry": polys, "list_index": range(len(polys))} + ) res_gdf = polys_gdf[polys_gdf.intersects(pnt)] if len(res_gdf) == 0: @@ -193,8 +177,6 @@ def get_mesh_polygons(mesh): poly = res_gdf.geometry.iloc[0] polys.pop(res_gdf.iloc[0].list_index) - - pass_valid_polys.append(poly) pnts = pnts.difference(poly) if pnts.is_empty: @@ -202,14 +184,13 @@ def get_mesh_polygons(mesh): if isinstance(pnts, Point): pnts = MultiPoint([pnts]) - result_polys.extend(pass_valid_polys) target_mesh = clip_mesh_by_shape( target_mesh, shape=MultiPolygon(pass_valid_polys), - inverse=True, fit_inside=True) - - + inverse=True, + fit_inside=True, + ) return MultiPolygon(result_polys) @@ -218,9 +199,9 @@ def repartition_features(linestring, max_verts): features = [] if len(linestring.coords) > max_verts: new_feat = [] - for segment in list(map(LineString, zip( - linestring.coords[:-1], - linestring.coords[1:]))): + for segment in list( + map(LineString, zip(linestring.coords[:-1], linestring.coords[1:])) + ): new_feat.append(segment) if len(new_feat) == max_verts - 1: features.append(linemerge(new_feat)) @@ -236,18 +217,16 @@ def transform_linestring( linestring: LineString, target_size: float, ): - distances = [0.] 
+ distances = [0.0] while distances[-1] + target_size < linestring.length: distances.append(distances[-1] + target_size) distances.append(linestring.length) - linestring = LineString([ - linestring.interpolate(distance) - for distance in distances - ]) + linestring = LineString( + [linestring.interpolate(distance) for distance in distances] + ) return linestring - def needs_sieve(mesh, area=None): areas = [polygon.area for polygon in geom_to_multipolygon(mesh)] if area is None: @@ -266,21 +245,21 @@ def needs_sieve(mesh, area=None): def put_id_tags(mesh): # start enumerating on 1 to avoid issues with indexing on fortran models mesh.vert2 = np.array( - [(coord, id+1) for id, coord in enumerate(mesh.vert2['coord'])], - dtype=jigsaw_msh_t.VERT2_t - ) + [(coord, id + 1) for id, coord in enumerate(mesh.vert2["coord"])], + dtype=jigsaw_msh_t.VERT2_t, + ) mesh.tria3 = np.array( - [(index, id+1) for id, index in enumerate(mesh.tria3['index'])], - dtype=jigsaw_msh_t.TRIA3_t - ) + [(index, id + 1) for id, index in enumerate(mesh.tria3["index"])], + dtype=jigsaw_msh_t.TRIA3_t, + ) mesh.quad4 = np.array( - [(index, id+1) for id, index in enumerate(mesh.quad4['index'])], - dtype=jigsaw_msh_t.QUAD4_t - ) + [(index, id + 1) for id, index in enumerate(mesh.quad4["index"])], + dtype=jigsaw_msh_t.QUAD4_t, + ) mesh.hexa8 = np.array( - [(index, id+1) for id, index in enumerate(mesh.hexa8['index'])], - dtype=jigsaw_msh_t.HEXA8_t - ) + [(index, id + 1) for id, index in enumerate(mesh.hexa8["index"])], + dtype=jigsaw_msh_t.HEXA8_t, + ) def _get_sieve_mask(mesh, polygons, sieve_area): @@ -296,10 +275,10 @@ def _get_sieve_mask(mesh, polygons, sieve_area): remove.append(idx) # if the path surrounds the node, these need to be removed. - vert2_mask = np.full((mesh.vert2['coord'].shape[0],), False) + vert2_mask = np.full((mesh.vert2["coord"].shape[0],), False) for idx in remove: path = Path(polygons[idx].exterior.coords, closed=True) - vert2_mask = vert2_mask | path.contains_points(mesh.vert2['coord']) + vert2_mask = vert2_mask | path.contains_points(mesh.vert2["coord"]) return vert2_mask @@ -321,13 +300,13 @@ def _sieve_by_mask(mesh, sieve_mask): vert2_mask[lone_elem_verts] = True # Mask out elements containing the unwanted nodes. - tria3_mask = np.any(vert2_mask[mesh.tria3['index']], axis=1) + tria3_mask = np.any(vert2_mask[mesh.tria3["index"]], axis=1) # Tria and node removal and renumbering indexes ... 
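# --- Editor's note (illustrative sketch, not part of the patch): the
# --- map-based renumbering used below and in cleanup_isolates. After
# --- dropping nodes, surviving ids are remapped to a dense 0..n-1 range
# --- via a dict built from np.unique. Arrays are made up.
import numpy as np

tria3_index = np.array([[2, 5, 7], [5, 7, 9]])
renum = {old: new for new, old in enumerate(np.unique(tria3_index))}
compact = np.array([renum[i] for i in tria3_index.flatten()])
compact = compact.reshape(tria3_index.shape)
assert compact.tolist() == [[0, 1, 2], [1, 2, 3]]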
- tria3_id_tag = mesh.tria3['IDtag'].take(np.where(~tria3_mask)[0]) - tria3_index = mesh.tria3['index'][~tria3_mask, :].flatten() + tria3_id_tag = mesh.tria3["IDtag"].take(np.where(~tria3_mask)[0]) + tria3_index = mesh.tria3["index"][~tria3_mask, :].flatten() used_indexes = np.unique(tria3_index) - node_indexes = np.arange(mesh.vert2['coord'].shape[0]) + node_indexes = np.arange(mesh.vert2["coord"].shape[0]) renum = {old: new for new, old in enumerate(np.unique(tria3_index))} tria3_index = np.array([renum[i] for i in tria3_index]) tria3_index = tria3_index.reshape((tria3_id_tag.shape[0], 3)) @@ -342,9 +321,9 @@ def _sieve_by_mask(mesh, sieve_mask): # update tria3 mesh.tria3 = np.array( - [(tuple(indices), tria3_id_tag[i]) - for i, indices in enumerate(tria3_index)], - dtype=jigsaw_msh_t.TRIA3_t) + [(tuple(indices), tria3_id_tag[i]) for i, indices in enumerate(tria3_index)], + dtype=jigsaw_msh_t.TRIA3_t, + ) def finalize_mesh(mesh, sieve_area=None): @@ -359,9 +338,12 @@ def finalize_mesh(mesh, sieve_area=None): no_op = False # TODO drop fewer elements for pinch clip_mesh_by_vertex( - mesh, pinched_nodes, + mesh, + pinched_nodes, can_use_other_verts=True, - inverse=True, in_place=True) + inverse=True, + in_place=True, + ) boundary_polys = get_mesh_polygons(mesh) sieve_mask = _get_sieve_mask(mesh, boundary_polys, sieve_area) @@ -386,9 +368,8 @@ def remesh_small_elements(opts, geom, mesh, hfun): # TODO: Implement for quad, etc. - hmin = np.min(hfun.value) - equilat_area = np.sqrt(3)/4 * hmin**2 + equilat_area = np.sqrt(3) / 4 * hmin ** 2 # List of arbitrary coef of equilateral triangle area for a givven # minimum mesh size to come up with a decent cut off. coeffs = [0.5, 0.2, 0.1, 0.05] @@ -397,20 +378,20 @@ def remesh_small_elements(opts, geom, mesh, hfun): for coef in coeffs: tria_areas = calculate_tria_areas(fixed_mesh) tiny_sz = coef * equilat_area - tiny_verts = np.unique(fixed_mesh.tria3['index'][tria_areas < tiny_sz, :].ravel()) + tiny_verts = np.unique( + fixed_mesh.tria3["index"][tria_areas < tiny_sz, :].ravel() + ) if len(tiny_verts) == 0: break mesh_clip = clip_mesh_by_vertex(fixed_mesh, tiny_verts, inverse=True) fixed_mesh = jigsawpy.jigsaw_msh_t() - fixed_mesh.mshID = 'euclidean-mesh' + fixed_mesh.mshID = "euclidean-mesh" fixed_mesh.ndims = +2 - if hasattr(mesh, 'crs'): + if hasattr(mesh, "crs"): fixed_mesh.crs = mesh.crs - jigsawpy.lib.jigsaw( - opts, geom, fixed_mesh, init=mesh_clip, hfun=hfun) - + jigsawpy.lib.jigsaw(opts, geom, fixed_mesh, init=mesh_clip, hfun=hfun) return fixed_mesh @@ -434,10 +415,10 @@ def sieve(mesh, area=None): remove.append(idx) # if the path surrounds the node, these need to be removed. - vert2_mask = np.full((mesh.vert2['coord'].shape[0],), False) + vert2_mask = np.full((mesh.vert2["coord"].shape[0],), False) for idx in remove: path = Path(multipolygon[idx].exterior.coords, closed=True) - vert2_mask = vert2_mask | path.contains_points(mesh.vert2['coord']) + vert2_mask = vert2_mask | path.contains_points(mesh.vert2["coord"]) # select any connected nodes; these ones are missed by # path.contains_point() because they are at the path edges. @@ -450,9 +431,8 @@ def sieve(mesh, area=None): lone_elem_verts = get_lone_element_verts(mesh) vert2_mask[lone_elem_verts] = True - # Mask out elements containing the unwanted nodes. - tria3_mask = np.any(vert2_mask[mesh.tria3['index']], axis=1) + tria3_mask = np.any(vert2_mask[mesh.tria3["index"]], axis=1) # Renumber indexes ... 
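# --- Editor's note (illustrative sketch, not part of the patch): sieve()
# --- below takes the other renumbering route: walk the removed node ids
# --- from the largest down and shift every index above each one, so
# --- earlier shifts cannot invalidate later comparisons. Arrays are
# --- made up.
import numpy as np

tria3_index = np.array([0, 2, 4, 2, 4, 5])  # node ids 1 and 3 unused
for idx in reversed([1, 3]):
    tria3_index[np.where(tria3_index >= idx)] -= 1
assert tria3_index.tolist() == [0, 1, 2, 1, 2, 3]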
# isolated node removal does not require elimination of triangles from @@ -460,11 +440,11 @@ def sieve(mesh, area=None): # We must simply renumber the tria3 indexes to match the new node indexes. # Essentially subtract one, but going from the bottom of the index table # to the top. - used_indexes = np.unique(mesh.tria3['index']) - node_indexes = np.arange(mesh.vert2['coord'].shape[0]) + used_indexes = np.unique(mesh.tria3["index"]) + node_indexes = np.arange(mesh.vert2["coord"].shape[0]) tria3_idxs = np.where(~np.isin(node_indexes, used_indexes))[0] - tria3_id_tag = mesh.tria3['IDtag'].take(np.where(~tria3_mask)[0]) - tria3_index = mesh.tria3['index'][~tria3_mask, :].flatten() + tria3_id_tag = mesh.tria3["IDtag"].take(np.where(~tria3_mask)[0]) + tria3_index = mesh.tria3["index"][~tria3_mask, :].flatten() for idx in reversed(tria3_idxs): tria3_index[np.where(tria3_index >= idx)] -= 1 tria3_index = tria3_index.reshape((tria3_id_tag.shape[0], 3)) @@ -479,9 +459,9 @@ def sieve(mesh, area=None): # update tria3 mesh.tria3 = np.array( - [(tuple(indices), tria3_id_tag[i]) - for i, indices in enumerate(tria3_index)], - dtype=jigsaw_msh_t.TRIA3_t) + [(tuple(indices), tria3_id_tag[i]) for i, indices in enumerate(tria3_index)], + dtype=jigsaw_msh_t.TRIA3_t, + ) def sort_edges(edges): @@ -505,13 +485,11 @@ def sort_edges(edges): elif ordered_edges[-1][1] in e1: idx = e1.index(ordered_edges[-1][1]) - ordered_edges.append( - list(reversed(edges.pop(idx)))) + ordered_edges.append(list(reversed(edges.pop(idx)))) elif ordered_edges[0][0] in e0: idx = e0.index(ordered_edges[0][0]) - ordered_edges.insert( - 0, list(reversed(edges.pop(idx)))) + ordered_edges.insert(0, list(reversed(edges.pop(idx)))) else: edge_collection.append(tuple(ordered_edges)) @@ -536,16 +514,15 @@ def index_ring_collection(mesh): # see: https://stackoverflow.com/a/23073229/7432462 boundary_edges = [] tri = mesh_to_tri(mesh) - idxs = np.vstack( - list(np.where(tri.neighbors == -1))).T + idxs = np.vstack(list(np.where(tri.neighbors == -1))).T for i, j in idxs: boundary_edges.append( - (int(tri.triangles[i, j]), - int(tri.triangles[i, (j+1) % 3]))) + (int(tri.triangles[i, j]), int(tri.triangles[i, (j + 1) % 3])) + ) init_idx_ring_coll = sort_edges(boundary_edges) # sort index_rings into corresponding "polygons" areas = [] - vertices = mesh.vert2['coord'] + vertices = mesh.vert2["coord"] for index_ring in init_idx_ring_coll: e0, _ = [list(t) for t in zip(*index_ring)] areas.append(float(Polygon(vertices[e0, :]).area)) @@ -556,10 +533,7 @@ def index_ring_collection(mesh): areas.pop(idx) _id = 0 idx_ring_coll = {} - idx_ring_coll[_id] = { - 'exterior': np.asarray(exterior), - 'interiors': [] - } + idx_ring_coll[_id] = {"exterior": np.asarray(exterior), "interiors": []} e0, e1 = [list(t) for t in zip(*exterior)] path = Path(vertices[e0 + [e0[0]], :], closed=True) while len(init_idx_ring_coll) > 0: @@ -573,9 +547,11 @@ def index_ring_collection(mesh): real_interiors = [] for i, p_interior in reversed(list(enumerate(potential_interiors))): _p_interior = init_idx_ring_coll[p_interior] - check = [init_idx_ring_coll[_] - for j, _ in reversed(list(enumerate(potential_interiors))) - if i != j] + check = [ + init_idx_ring_coll[_] + for j, _ in reversed(list(enumerate(potential_interiors))) + if i != j + ] has_parent = False for _path in check: e0, e1 = [list(t) for t in zip(*_path)] @@ -587,8 +563,9 @@ def index_ring_collection(mesh): real_interiors.append(p_interior) # pop real rings from collection for i in reversed(sorted(real_interiors)): - 
idx_ring_coll[_id]['interiors'].append( - np.asarray(init_idx_ring_coll.pop(i))) + idx_ring_coll[_id]["interiors"].append( + np.asarray(init_idx_ring_coll.pop(i)) + ) areas.pop(i) # if no internal rings found, initialize next polygon if len(init_idx_ring_coll) > 0: @@ -596,10 +573,7 @@ def index_ring_collection(mesh): exterior = init_idx_ring_coll.pop(idx) areas.pop(idx) _id += 1 - idx_ring_coll[_id] = { - 'exterior': np.asarray(exterior), - 'interiors': [] - } + idx_ring_coll[_id] = {"exterior": np.asarray(exterior), "interiors": []} e0, e1 = [list(t) for t in zip(*exterior)] path = Path(vertices[e0 + [e0[0]], :], closed=True) return idx_ring_coll @@ -609,7 +583,7 @@ def outer_ring_collection(mesh): idx_ring_coll = index_ring_collection(mesh) exterior_ring_collection = defaultdict() for key, ring in idx_ring_coll.items(): - exterior_ring_collection[key] = ring['exterior'] + exterior_ring_collection[key] = ring["exterior"] return exterior_ring_collection @@ -617,7 +591,7 @@ def inner_ring_collection(mesh): idx_ring_coll = index_ring_collection(mesh) inner_ring_coll = defaultdict() for key, rings in idx_ring_coll.items(): - inner_ring_coll[key] = rings['interiors'] + inner_ring_coll[key] = rings["interiors"] return inner_ring_coll @@ -629,34 +603,31 @@ def get_multipolygon_from_pathplot(ax): polygons = path.to_polygons(closed_only=True) for linear_ring in polygons: if linear_ring.shape[0] > 3: - linear_ring_collection.append( - LinearRing(linear_ring)) + linear_ring_collection.append(LinearRing(linear_ring)) if len(linear_ring_collection) > 1: # reorder linear rings from above - areas = [Polygon(linear_ring).area - for linear_ring in linear_ring_collection] + areas = [Polygon(linear_ring).area for linear_ring in linear_ring_collection] idx = np.where(areas == np.max(areas))[0][0] polygon_collection = [] outer_ring = linear_ring_collection.pop(idx) path = Path(np.asarray(outer_ring.coords), closed=True) while len(linear_ring_collection) > 0: inner_rings = [] - for i, linear_ring in reversed( - list(enumerate(linear_ring_collection))): + for i, linear_ring in reversed(list(enumerate(linear_ring_collection))): xy = np.asarray(linear_ring.coords)[0, :] if path.contains_point(xy): inner_rings.append(linear_ring_collection.pop(i)) polygon_collection.append(Polygon(outer_ring, inner_rings)) if len(linear_ring_collection) > 0: - areas = [Polygon(linear_ring).area - for linear_ring in linear_ring_collection] + areas = [ + Polygon(linear_ring).area for linear_ring in linear_ring_collection + ] idx = np.where(areas == np.max(areas))[0][0] outer_ring = linear_ring_collection.pop(idx) path = Path(np.asarray(outer_ring.coords), closed=True) multipolygon = MultiPolygon(polygon_collection) else: - multipolygon = MultiPolygon( - [Polygon(linear_ring_collection.pop())]) + multipolygon = MultiPolygon([Polygon(linear_ring_collection.pop())]) return multipolygon @@ -672,11 +643,13 @@ def signed_polygon_area(vertices): def vertices_around_vertex(mesh): - if mesh.mshID == 'euclidean-mesh': + if mesh.mshID == "euclidean-mesh": + def append(geom): - for simplex in geom['index']: + for simplex in geom["index"]: for i, j in permutations(simplex, 2): vert_list[i].add(j) + vert_list = defaultdict(set) append(mesh.tria3) append(mesh.quad4) @@ -686,73 +659,72 @@ def append(geom): msg = f"Not implemented for mshID={mesh.mshID}" raise NotImplementedError(msg) + def get_surrounding_elem_verts(mesh, in_vert): - ''' + """ Find vertices of elements connected to input vertices - ''' + """ - tria = mesh.tria3['index'] - quad = 
mesh.quad4['index'] - hexa = mesh.hexa8['index'] + tria = mesh.tria3["index"] + quad = mesh.quad4["index"] + hexa = mesh.hexa8["index"] # NOTE: np.any is used so that vertices that are not in in_verts # triangles but are part of the triangles that include in_verts # are considered too - mark_tria = np.any( - (np.isin(tria.ravel(), in_vert).reshape( - tria.shape)), 1) - mark_quad = np.any( - (np.isin(quad.ravel(), in_vert).reshape( - quad.shape)), 1) - mark_hexa = np.any( - (np.isin(hexa.ravel(), in_vert).reshape( - hexa.shape)), 1) - - conn_verts = np.unique(np.concatenate( - (tria[mark_tria, :].ravel(), - quad[mark_quad, :].ravel(), - hexa[mark_hexa, :].ravel()))) + mark_tria = np.any((np.isin(tria.ravel(), in_vert).reshape(tria.shape)), 1) + mark_quad = np.any((np.isin(quad.ravel(), in_vert).reshape(quad.shape)), 1) + mark_hexa = np.any((np.isin(hexa.ravel(), in_vert).reshape(hexa.shape)), 1) + + conn_verts = np.unique( + np.concatenate( + ( + tria[mark_tria, :].ravel(), + quad[mark_quad, :].ravel(), + hexa[mark_hexa, :].ravel(), + ) + ) + ) return conn_verts + def get_lone_element_verts(mesh): - ''' + """ Also, there might be some dangling triangles without neighbors, which are also missed by path.contains_point() - ''' + """ - tria = mesh.tria3['index'] - quad = mesh.quad4['index'] - hexa = mesh.hexa8['index'] + tria = mesh.tria3["index"] + quad = mesh.quad4["index"] + hexa = mesh.hexa8["index"] unq_verts, counts = np.unique( - np.concatenate((tria.ravel(), quad.ravel(), hexa.ravel())), - return_counts=True) + np.concatenate((tria.ravel(), quad.ravel(), hexa.ravel())), return_counts=True + ) once_verts = unq_verts[counts == 1] # NOTE: np.all so that lone elements are found vs elements that # have nodes that are used only once - mark_tria = np.all( - (np.isin(tria.ravel(), once_verts).reshape( - tria.shape)), 1) - mark_quad = np.all( - (np.isin(quad.ravel(), once_verts).reshape( - quad.shape)), 1) - mark_hexa = np.all( - (np.isin(hexa.ravel(), once_verts).reshape( - hexa.shape)), 1) - - lone_elem_verts = np.unique(np.concatenate( - (tria[mark_tria, :].ravel(), - quad[mark_quad, :].ravel(), - hexa[mark_hexa, :].ravel()))) + mark_tria = np.all((np.isin(tria.ravel(), once_verts).reshape(tria.shape)), 1) + mark_quad = np.all((np.isin(quad.ravel(), once_verts).reshape(quad.shape)), 1) + mark_hexa = np.all((np.isin(hexa.ravel(), once_verts).reshape(hexa.shape)), 1) + + lone_elem_verts = np.unique( + np.concatenate( + ( + tria[mark_tria, :].ravel(), + quad[mark_quad, :].ravel(), + hexa[mark_hexa, :].ravel(), + ) + ) + ) return lone_elem_verts - # https://en.wikipedia.org/wiki/Polygon_mesh#Summary_of_mesh_representation # V-V All vertices around vertex # E-F All edges of a face @@ -763,13 +735,13 @@ def get_lone_element_verts(mesh): # V-E Both vertices of an edge # Flook Find face with given vertices def get_verts_in_shape( - mesh: jigsaw_msh_t, - shape: Union[box, Polygon, MultiPolygon], - from_box: bool = False, - ) -> Sequence[int]: + mesh: jigsaw_msh_t, + shape: Union[box, Polygon, MultiPolygon], + from_box: bool = False, +) -> Sequence[int]: if from_box: - crd = mesh.vert2['coord'] + crd = mesh.vert2["coord"] xmin, ymin, xmax, ymax = shape.bounds @@ -778,81 +750,79 @@ def get_verts_in_shape( in_box_idx_3 = np.arange(len(crd))[crd[:, 1] > ymin] in_box_idx_4 = np.arange(len(crd))[crd[:, 1] < ymax] in_box_idx = reduce( - np.intersect1d, (in_box_idx_1, in_box_idx_2, - in_box_idx_3, in_box_idx_4)) + np.intersect1d, (in_box_idx_1, in_box_idx_2, in_box_idx_3, in_box_idx_4) + ) return in_box_idx - 
- pt_series = gpd.GeoSeries(gpd.points_from_xy( - mesh.vert2['coord'][:,0], mesh.vert2['coord'][:,1])) + pt_series = gpd.GeoSeries( + gpd.points_from_xy(mesh.vert2["coord"][:, 0], mesh.vert2["coord"][:, 1]) + ) shp_series = gpd.GeoSeries(shape) - in_shp_idx = pt_series.sindex.query_bulk( - shp_series, predicate="intersects") + in_shp_idx = pt_series.sindex.query_bulk(shp_series, predicate="intersects") return in_shp_idx @must_be_euclidean_mesh def get_cross_edges( - mesh: jigsaw_msh_t, - shape: Union[box, Polygon, MultiPolygon], - ) -> Sequence[Tuple[int, int]]: + mesh: jigsaw_msh_t, + shape: Union[box, Polygon, MultiPolygon], +) -> Sequence[Tuple[int, int]]: - ''' + """ Return the list of edges crossing the input shape exterior - ''' + """ - coords = mesh.vert2['coord'] + coords = mesh.vert2["coord"] coord_dict = {} for i, coo in enumerate(coords): coord_dict[tuple(coo)] = i - gdf_shape = gpd.GeoDataFrame( - geometry=gpd.GeoSeries(shape)) + gdf_shape = gpd.GeoDataFrame(geometry=gpd.GeoSeries(shape)) exteriors = [pl.exterior for pl in gdf_shape.explode().geometry] # TODO: Reduce domain of search for faster results all_edges = get_mesh_edges(mesh, unique=True) edge_coords = coords[all_edges, :] - gdf_edg = gpd.GeoDataFrame( - geometry=gpd.GeoSeries(linemerge(edge_coords))) + gdf_edg = gpd.GeoDataFrame(geometry=gpd.GeoSeries(linemerge(edge_coords))) gdf_x = gpd.sjoin( - gdf_edg.explode(), - gpd.GeoDataFrame(geometry=gpd.GeoSeries(exteriors)), - how='inner', op='intersects') - + gdf_edg.explode(), + gpd.GeoDataFrame(geometry=gpd.GeoSeries(exteriors)), + how="inner", + op="intersects", + ) cut_coords = [ - list(cooseq) - for cooseq in gdf_x.geometry.apply(lambda i: i.coords).values] - - cut_edges = np.array([ - (coo_list[i], coo_list[i+1]) - for coo_list in cut_coords - for i in range(len(coo_list)-1) ]) + list(cooseq) for cooseq in gdf_x.geometry.apply(lambda i: i.coords).values + ] + + cut_edges = np.array( + [ + (coo_list[i], coo_list[i + 1]) + for coo_list in cut_coords + for i in range(len(coo_list) - 1) + ] + ) cut_edge_idx = np.array( - [coord_dict[tuple(coo)] - for coo in cut_edges.reshape(-1, 2)]).reshape( - cut_edges.shape[:2]) + [coord_dict[tuple(coo)] for coo in cut_edges.reshape(-1, 2)] + ).reshape(cut_edges.shape[:2]) return cut_edge_idx - def clip_mesh_by_shape( - mesh: jigsaw_msh_t, - shape: Union[box, Polygon, MultiPolygon], - use_box_only: bool = False, - fit_inside: bool = True, - inverse: bool = False, - in_place: bool = False, - check_cross_edges: bool = False - ) -> jigsaw_msh_t: - + mesh: jigsaw_msh_t, + shape: Union[box, Polygon, MultiPolygon], + use_box_only: bool = False, + fit_inside: bool = True, + inverse: bool = False, + in_place: bool = False, + check_cross_edges: bool = False, +) -> jigsaw_msh_t: # NOTE: Checking cross edge is only meaningful when # fit inside flag is NOT set @@ -872,14 +842,12 @@ def clip_mesh_by_shape( x_edge_idx = get_cross_edges(mesh, shape_box) in_box_idx = np.append(in_box_idx, np.unique(x_edge_idx)) - mesh = clip_mesh_by_vertex( - mesh, in_box_idx, not fit_inside, inverse, in_place) + mesh = clip_mesh_by_vertex(mesh, in_box_idx, not fit_inside, inverse, in_place) if use_box_only: if edge_flag and inverse: x_edge_idx = get_cross_edges(mesh, shape_box) - mesh = remove_mesh_by_edge( - mesh, x_edge_idx, in_place) + mesh = remove_mesh_by_edge(mesh, x_edge_idx, in_place) return mesh in_shp_idx = get_verts_in_shape(mesh, shape, False) @@ -888,8 +856,7 @@ def clip_mesh_by_shape( x_edge_idx = get_cross_edges(mesh, shape) in_shp_idx = 
np.append(in_shp_idx, np.unique(x_edge_idx))

-    mesh = clip_mesh_by_vertex(
-        mesh, in_shp_idx, not fit_inside, inverse, in_place)
+    mesh = clip_mesh_by_vertex(mesh, in_shp_idx, not fit_inside, inverse, in_place)

     if edge_flag and inverse:
         x_edge_idx = get_cross_edges(mesh, shape)
@@ -899,10 +866,8 @@
 def remove_mesh_by_edge(
-        mesh: jigsaw_msh_t,
-        edges: Sequence[Tuple[int, int]],
-        in_place: bool = False
-        ) -> jigsaw_msh_t:
+    mesh: jigsaw_msh_t, edges: Sequence[Tuple[int, int]], in_place: bool = False
+) -> jigsaw_msh_t:

     mesh_out = mesh
     if not in_place:
@@ -914,39 +879,37 @@

     edge_verts = np.unique(edges)
     for etype in ELEM_2D_TYPES:
-        elems = getattr(mesh, etype)['index']
+        elems = getattr(mesh, etype)["index"]

         # If a given element contains two vertices from
         # a crossing edge, it is dropped.
         test = np.sum(np.isin(elems, edge_verts), axis=1)
         elems = elems[test < 2]

-        setattr(mesh_out, etype, np.array(
+        setattr(
+            mesh_out,
+            etype,
+            np.array(
                 [(idx, 0) for idx in elems],
-                dtype=getattr(
-                    jigsawpy.jigsaw_msh_t, f'{etype.upper()}_t')))
+                dtype=getattr(jigsawpy.jigsaw_msh_t, f"{etype.upper()}_t"),
+            ),
+        )

     return mesh_out


 def clip_mesh_by_vertex(
-        mesh: jigsaw_msh_t,
-        vert_in: Sequence[int],
-        can_use_other_verts: bool = False,
-        inverse: bool = False,
-        in_place: bool = False
-        ) -> jigsaw_msh_t:
+    mesh: jigsaw_msh_t,
+    vert_in: Sequence[int],
+    can_use_other_verts: bool = False,
+    inverse: bool = False,
+    in_place: bool = False,
+) -> jigsaw_msh_t:
-
-    if mesh.mshID == 'euclidean-mesh' and mesh.ndims == 2:
-        coord = mesh.vert2['coord']
+    if mesh.mshID == "euclidean-mesh" and mesh.ndims == 2:
+        coord = mesh.vert2["coord"]

         # TODO: What about edge2 if in_place?
-        mesh_types = {
-            'tria3': 'TRIA3_t',
-            'quad4': 'QUAD4_t',
-            'hexa8': 'HEXA8_t'
-        }
-        elm_dict = {
-            key: getattr(mesh, key)['index'] for key in mesh_types}
+        mesh_types = {"tria3": "TRIA3_t", "quad4": "QUAD4_t", "hexa8": "HEXA8_t"}
+        elm_dict = {key: getattr(mesh, key)["index"] for key in mesh_types}

         # Whether elements that include "in"-vertices can be created
         # using vertices other than "in"-vertices
@@ -955,45 +918,40 @@
             mark_func = np.any

         mark_dict = {
-            key: mark_func(
-                (np.isin(elems.ravel(), vert_in).reshape(
-                    elems.shape)), 1)
-            for key, elems in elm_dict.items()}
-
+            key: mark_func((np.isin(elems.ravel(), vert_in).reshape(elems.shape)), 1)
+            for key, elems in elm_dict.items()
+        }
         # Whether to return elements found by "in" vertices or return
         # all elements except them
         if inverse:
-            mark_dict = {
-                key: np.logical_not(mark)
-                for key, mark in mark_dict.items()}
+            mark_dict = {key: np.logical_not(mark) for key, mark in mark_dict.items()}

         # Find elements based on old vertex index
-        elem_draft_dict = {
-            key: elm_dict[key][mark_dict[key], :]
-            for key in elm_dict}
+        elem_draft_dict = {key: elm_dict[key][mark_dict[key], :] for key in elm_dict}

         crd_old_to_new = {
-            index: i for i, index
-            in enumerate(sorted(np.unique(np.concatenate(
-                [draft.ravel()
-                 for draft in elem_draft_dict.values()]
-            ))))
-        }
+            index: i
+            for i, index in enumerate(
+                sorted(
+                    np.unique(
+                        np.concatenate(
+                            [draft.ravel() for draft in elem_draft_dict.values()]
+                        )
+                    )
+                )
+            )
+        }

         elem_final_dict = {
-            key: np.array(
-                [[crd_old_to_new[x] for x in element]
-                 for element in draft])
+            key: np.array([[crd_old_to_new[x] for x in element] for element in draft])
             for key, draft in elem_draft_dict.items()
         }

         new_coord = coord[list(crd_old_to_new.keys()), :]
         value = np.zeros(shape=(0, 0), dtype=jigsaw_msh_t.REALS_t)
         if len(mesh.value) == len(coord):
-            value = mesh.value.take(
-                list(crd_old_to_new.keys()), axis=0).copy()
-
+            value = mesh.value.take(list(crd_old_to_new.keys()), axis=0).copy()
         mesh_out = mesh
         if not in_place:
@@ -1006,8 +964,8 @@
         mesh_out.value = value
         mesh_out.vert2 = np.array(
-            [(coo, 0) for coo in new_coord],
-            dtype=jigsaw_msh_t.VERT2_t)
+            [(coo, 0) for coo in new_coord], dtype=jigsaw_msh_t.VERT2_t
+        )

         for key, elem_type in mesh_types.items():
             setattr(
@@ -1015,26 +973,23 @@
                 key,
                 np.array(
                     [(con, 0) for con in elem_final_dict[key]],
-                    dtype=getattr(jigsaw_msh_t, elem_type)))
+                    dtype=getattr(jigsaw_msh_t, elem_type),
+                ),
+            )

         return mesh_out

-    msg = (f"Not implemented for"
-           f" mshID={mesh.mshID} and dim={mesh.ndims}")
+    msg = f"Not implemented for mshID={mesh.mshID} and dim={mesh.ndims}"
     raise NotImplementedError(msg)
-
-
-
-
-

 @must_be_euclidean_mesh
 def get_mesh_edges(mesh: jigsaw_msh_t, unique=True):

     # NOTE: For msh_t type vertex id and index are the same
-    trias = mesh.tria3['index']
-    quads = mesh.quad4['index']
-    hexas = mesh.hexa8['index']
+    trias = mesh.tria3["index"]
+    quads = mesh.quad4["index"]
+    hexas = mesh.hexa8["index"]

     # Get unique set of edges by rolling connectivity
     # and joining connectivities in 3rd dimension, then sorting
@@ -1043,10 +998,8 @@
     for elm_type in [trias, quads, hexas]:
         if elm_type.shape[0]:
             edges = np.sort(
-                np.stack(
-                    (elm_type, np.roll(elm_type, shift=1, axis=1)),
-                    axis=2),
-                axis=2)
+                np.stack((elm_type, np.roll(elm_type, shift=1, axis=1)), axis=2), axis=2
+            )
             edges = edges.reshape(np.product(edges.shape[0:2]), 2)
             all_edges = np.vstack((all_edges, edges))
@@ -1059,34 +1012,32 @@

 @must_be_euclidean_mesh
 def calculate_tria_areas(mesh):
-    coord = mesh.vert2['coord']
-    trias = mesh.tria3['index']
+    coord = mesh.vert2["coord"]
+    trias = mesh.tria3["index"]

     tria_coo = coord[
-        np.sort(np.stack((trias, np.roll(trias, shift=1, axis=1)),
-                         axis=2),
-                axis=2)]
+        np.sort(np.stack((trias, np.roll(trias, shift=1, axis=1)), axis=2), axis=2)
+    ]
     tria_side_components = np.diff(tria_coo, axis=2).squeeze()
     tria_sides = np.sqrt(
-        np.sum(np.power(np.abs(tria_side_components), 2),
-               axis=2).squeeze())
+        np.sum(np.power(np.abs(tria_side_components), 2), axis=2).squeeze()
+    )
     perimeter = np.sum(tria_sides, axis=1) / 2
     perimeter = perimeter.reshape(len(perimeter), 1)
     # pylint: disable=W0632
     a_side, b_side, c_side = np.split(tria_sides, 3, axis=1)
     tria_areas = np.sqrt(
-        perimeter*(perimeter-a_side)
-        * (perimeter-b_side)*(perimeter-c_side)
-        ).squeeze()
+        perimeter * (perimeter - a_side) * (perimeter - b_side) * (perimeter - c_side)
+    ).squeeze()
     return tria_areas

+
 @must_be_euclidean_mesh
 def calculate_edge_lengths(mesh, transformer=None):
-    coord = mesh.vert2['coord']
+    coord = mesh.vert2["coord"]
     if transformer is not None:
-        coord = np.vstack(
-            transformer.transform(coord[:, 0], coord[:, 1])).T
+        coord = np.vstack(transformer.transform(coord[:, 0], coord[:, 1])).T

     # Get unique set of edges by rolling connectivity
     # and joining connectivities in 3rd dimension, then sorting
@@ -1102,10 +1053,8 @@

     # Calculate length of all edges based on acquired coords
     edge_lens = np.sqrt(
-        np.sum(
-            np.power(
-                np.abs(np.diff(edge_coords, axis=1)), 2)
-            ,axis=2)).squeeze()
+        np.sum(np.power(np.abs(np.diff(edge_coords, axis=1)), 2), axis=2)
+
).squeeze() edge_dict = defaultdict(float) for i, edge in enumerate(all_edges): @@ -1117,17 +1066,19 @@ def calculate_edge_lengths(mesh, transformer=None): @must_be_euclidean_mesh def elements(mesh): elements_id = [] - elements_id.extend(list(mesh.tria3['IDtag'])) - elements_id.extend(list(mesh.quad4['IDtag'])) - elements_id.extend(list(mesh.hexa8['IDtag'])) - elements_id = range(1, len(elements_id)+1) \ - if len(set(elements_id)) != len(elements_id) else elements_id + elements_id.extend(list(mesh.tria3["IDtag"])) + elements_id.extend(list(mesh.quad4["IDtag"])) + elements_id.extend(list(mesh.hexa8["IDtag"])) + elements_id = ( + range(1, len(elements_id) + 1) + if len(set(elements_id)) != len(elements_id) + else elements_id + ) elems = [] - elems.extend(list(mesh.tria3['index'])) - elems.extend(list(mesh.quad4['index'])) - elems.extend(list(mesh.hexa8['index'])) - elems = { - elements_id[i]: indexes for i, indexes in enumerate(elems)} + elems.extend(list(mesh.tria3["index"])) + elems.extend(list(mesh.quad4["index"])) + elems.extend(list(mesh.hexa8["index"])) + elems = {elements_id[i]: indexes for i, indexes in enumerate(elems)} return elems @@ -1135,24 +1086,24 @@ def elements(mesh): def faces_around_vertex(mesh): _elements = elements(mesh) length = max(map(len, _elements.values())) - y = np.array([xi+[-99999]*(length-len(xi)) for xi in _elements.values()]) + y = np.array([xi + [-99999] * (length - len(xi)) for xi in _elements.values()]) faces_around_vert = defaultdict(set) - for i, coord in enumerate(mesh.vert2['index']): + for i, coord in enumerate(mesh.vert2["index"]): # TODO: pass -# np.isin(i, axis=0) -# faces_around_vert[i].add() + # np.isin(i, axis=0) + # faces_around_vert[i].add() faces_around_vert = defaultdict(set) def get_boundary_edges(mesh): - ''' + """ Find internal and external boundaries of mesh - ''' + """ - coord = mesh.vert2['coord'] + coord = mesh.vert2["coord"] all_edges = get_mesh_edges(mesh, unique=False) @@ -1166,9 +1117,9 @@ def get_boundary_edges(mesh): def get_pinched_nodes(mesh): - ''' + """ Find nodes through which fluid cannot flow - ''' + """ boundary_edges = get_boundary_edges(mesh) @@ -1210,46 +1161,37 @@ def cleanup_pinched_nodes(mesh): all_nodes.extend(np.asarray(ring)[:, 0].tolist()) u, c = np.unique(all_nodes, return_counts=True) mesh.tria3 = mesh.tria3.take( - np.where( - ~np.any(np.isin(mesh.tria3['index'], u[c > 1]), axis=1))[0], - axis=0) + np.where(~np.any(np.isin(mesh.tria3["index"], u[c > 1]), axis=1))[0], axis=0 + ) def interpolate(src: jigsaw_msh_t, dst: jigsaw_msh_t, **kwargs): - if src.mshID == 'euclidean-grid' and dst.mshID == 'euclidean-mesh': + if src.mshID == "euclidean-grid" and dst.mshID == "euclidean-mesh": interpolate_euclidean_grid_to_euclidean_mesh(src, dst, **kwargs) - elif src.mshID == 'euclidean-mesh' and dst.mshID == 'euclidean-mesh': + elif src.mshID == "euclidean-mesh" and dst.mshID == "euclidean-mesh": interpolate_euclidean_mesh_to_euclidean_mesh(src, dst, **kwargs) else: raise NotImplementedError( - f'Not implemented type combination: source={src.mshID}, ' - f'dest={dst.mshID}') + f"Not implemented type combination: source={src.mshID}, " + f"dest={dst.mshID}" + ) def interpolate_euclidean_mesh_to_euclidean_mesh( - src: jigsaw_msh_t, - dst: jigsaw_msh_t, - method='linear', - fill_value=np.nan + src: jigsaw_msh_t, dst: jigsaw_msh_t, method="linear", fill_value=np.nan ): values = griddata( - src.vert2['coord'], + src.vert2["coord"], src.value.flatten(), - dst.vert2['coord'], + dst.vert2["coord"], method=method, - 
fill_value=fill_value + fill_value=fill_value, ) - dst.value = np.array( - values.reshape(len(values), 1), dtype=jigsaw_msh_t.REALS_t) + dst.value = np.array(values.reshape(len(values), 1), dtype=jigsaw_msh_t.REALS_t) def interpolate_euclidean_grid_to_euclidean_mesh( - src: jigsaw_msh_t, - dst: jigsaw_msh_t, - bbox=None, - kx=3, - ky=3, - s=0 + src: jigsaw_msh_t, dst: jigsaw_msh_t, bbox=None, kx=3, ky=3, s=0 ): values = RectBivariateSpline( src.xgrid, @@ -1258,80 +1200,65 @@ def interpolate_euclidean_grid_to_euclidean_mesh( bbox=bbox or [None, None, None, None], kx=kx, ky=ky, - s=s - ).ev( - dst.vert2['coord'][:, 0], - dst.vert2['coord'][:, 1]) - dst.value = np.array( - values.reshape((values.size, 1)), - dtype=jigsaw_msh_t.REALS_t) + s=s, + ).ev(dst.vert2["coord"][:, 0], dst.vert2["coord"][:, 1]) + dst.value = np.array(values.reshape((values.size, 1)), dtype=jigsaw_msh_t.REALS_t) def tricontourf( - mesh, - ax=None, - show=False, - figsize=None, - extend='both', - colorbar=False, - **kwargs + mesh, ax=None, show=False, figsize=None, extend="both", colorbar=False, **kwargs ): if ax is None: fig = plt.figure(figsize=figsize) ax = fig.add_subplot(111) tcf = ax.tricontourf( - mesh.vert2['coord'][:, 0], - mesh.vert2['coord'][:, 1], - mesh.tria3['index'], + mesh.vert2["coord"][:, 0], + mesh.vert2["coord"][:, 1], + mesh.tria3["index"], mesh.value.flatten(), - **kwargs) + **kwargs, + ) if colorbar: plt.colorbar(tcf) if show: - plt.gca().axis('scaled') + plt.gca().axis("scaled") plt.show() return ax def triplot( - mesh, - axes=None, - show=False, - figsize=None, - color='k', - linewidth=0.07, - **kwargs + mesh, axes=None, show=False, figsize=None, color="k", linewidth=0.07, **kwargs ): if axes is None: fig = plt.figure(figsize=figsize) axes = fig.add_subplot(111) axes.triplot( - mesh.vert2['coord'][:, 0], - mesh.vert2['coord'][:, 1], - mesh.tria3['index'], + mesh.vert2["coord"][:, 0], + mesh.vert2["coord"][:, 1], + mesh.tria3["index"], color=color, linewidth=linewidth, - **kwargs) + **kwargs, + ) if show: - axes.axis('scaled') + axes.axis("scaled") plt.show() return axes -def reproject( - mesh: jigsaw_msh_t, - dst_crs: Union[str, CRS] -): +def reproject(mesh: jigsaw_msh_t, dst_crs: Union[str, CRS]): src_crs = mesh.crs dst_crs = CRS.from_user_input(dst_crs) transformer = Transformer.from_crs(src_crs, dst_crs, always_xy=True) # pylint: disable=E0633 - x, y = transformer.transform( - mesh.vert2['coord'][:, 0], mesh.vert2['coord'][:, 1]) + x, y = transformer.transform(mesh.vert2["coord"][:, 0], mesh.vert2["coord"][:, 1]) mesh.vert2 = np.array( - [([x[i], y[i]], mesh.vert2['IDtag'][i]) for i - in range(len(mesh.vert2['IDtag']))], - dtype=jigsaw_msh_t.VERT2_t) + [ + ([x[i], y[i]], mesh.vert2["IDtag"][i]) + for i in range(len(mesh.vert2["IDtag"])) + ], + dtype=jigsaw_msh_t.VERT2_t, + ) mesh.crs = dst_crs @@ -1345,7 +1272,7 @@ def limgrad(mesh, dfdx, imax=100): edge = tri.edges dx = np.subtract(xy[edge[:, 0], 0], xy[edge[:, 1], 0]) dy = np.subtract(xy[edge[:, 0], 1], xy[edge[:, 1], 1]) - elen = np.sqrt(dx**2+dy**2) + elen = np.sqrt(dx ** 2 + dy ** 2) ffun = mesh.value.flatten() aset = np.zeros(ffun.shape) ftol = np.min(ffun) * np.sqrt(np.finfo(float).eps) @@ -1355,9 +1282,9 @@ def limgrad(mesh, dfdx, imax=100): for i, j in permutations(simplex, 2): point_neighbors[i].add(j) # iterative smoothing - for _iter in range(1, imax+1): - aidx = np.where(aset == _iter-1)[0] - if len(aidx) == 0.: + for _iter in range(1, imax + 1): + aidx = np.where(aset == _iter - 1)[0] + if len(aidx) == 0.0: break active_idxs = 
np.argsort(ffun[aidx]) for active_idx in active_idxs: @@ -1365,138 +1292,143 @@ def limgrad(mesh, dfdx, imax=100): for adj_edge in adjacent_edges: if ffun[adj_edge] > ffun[active_idx]: fun1 = ffun[active_idx] + elen[active_idx] * dfdx - if ffun[adj_edge] > fun1+ftol: + if ffun[adj_edge] > fun1 + ftol: ffun[adj_edge] = fun1 aset[adj_edge] = _iter else: fun2 = ffun[adj_edge] + elen[active_idx] * dfdx - if ffun[active_idx] > fun2+ftol: + if ffun[active_idx] > fun2 + ftol: ffun[active_idx] = fun2 aset[active_idx] = _iter if not _iter < imax: - msg = f'limgrad() did not converge within {imax} iterations.' + msg = f"limgrad() did not converge within {imax} iterations." raise Exception(msg) return ffun def msh_t_to_grd(msh: jigsaw_msh_t) -> Dict: - src_crs = msh.crs if hasattr(msh, 'crs') else None - coords = msh.vert2['coord'] + src_crs = msh.crs if hasattr(msh, "crs") else None + coords = msh.vert2["coord"] desc = "EPSG:4326" if src_crs is not None: # TODO: Support non EPSG:4326 CRS -# desc = src_crs.to_string() + # desc = src_crs.to_string() epsg_4326 = CRS.from_epsg(4326) if not src_crs.equals(epsg_4326): - transformer = Transformer.from_crs( - src_crs, epsg_4326, always_xy=True) - coords = np.vstack( - transformer.transform(coords[:, 0], coords[:, 1])).T + transformer = Transformer.from_crs(src_crs, epsg_4326, always_xy=True) + coords = np.vstack(transformer.transform(coords[:, 0], coords[:, 1])).T nodes = { - i + 1: [tuple(p.tolist()), v] for i, (p, v) in - enumerate(zip(coords, -msh.value))} + i + 1: [tuple(p.tolist()), v] + for i, (p, v) in enumerate(zip(coords, -msh.value)) + } # NOTE: Node IDs are node index + 1 - elems = { - i + 1: v + 1 for i, v in enumerate(msh.tria3['index'])} + elems = {i + 1: v + 1 for i, v in enumerate(msh.tria3["index"])} offset = len(elems) - elems.update({ - offset + i + 1: v + 1 for i, v in enumerate(msh.quad4['index'])}) + elems.update({offset + i + 1: v + 1 for i, v in enumerate(msh.quad4["index"])}) - return {'description': desc, - 'nodes': nodes, - 'elements': elems} + return {"description": desc, "nodes": nodes, "elements": elems} def grd_to_msh_t(_grd: Dict) -> jigsaw_msh_t: msh = jigsaw_msh_t() msh.ndims = +2 - msh.mshID = 'euclidean-mesh' - id_to_index = {node_id: index for index, node_id - in enumerate(_grd['nodes'].keys())} - triangles = [list(map(lambda x: id_to_index[x], element)) for element - in _grd['elements'].values() if len(element) == 3] - quads = [list(map(lambda x: id_to_index[x], element)) for element - in _grd['elements'].values() if len(element) == 4] - msh.vert2 = np.array([(coord, 0) for coord, _ in _grd['nodes'].values()], - dtype=jigsaw_msh_t.VERT2_t) - msh.tria3 = np.array([(index, 0) for index in triangles], - dtype=jigsaw_msh_t.TRIA3_t) - msh.quad4 = np.array([(index, 0) for index in quads], - dtype=jigsaw_msh_t.QUAD4_t) - value = [value for _, value in _grd['nodes'].values()] - msh.value = np.array(np.array(value).reshape((len(value), 1)), - dtype=jigsaw_msh_t.REALS_t) - crs = _grd.get('crs') + msh.mshID = "euclidean-mesh" + id_to_index = {node_id: index for index, node_id in enumerate(_grd["nodes"].keys())} + triangles = [ + list(map(lambda x: id_to_index[x], element)) + for element in _grd["elements"].values() + if len(element) == 3 + ] + quads = [ + list(map(lambda x: id_to_index[x], element)) + for element in _grd["elements"].values() + if len(element) == 4 + ] + msh.vert2 = np.array( + [(coord, 0) for coord, _ in _grd["nodes"].values()], dtype=jigsaw_msh_t.VERT2_t + ) + msh.tria3 = np.array( + [(index, 0) for index in 
triangles], dtype=jigsaw_msh_t.TRIA3_t + ) + msh.quad4 = np.array([(index, 0) for index in quads], dtype=jigsaw_msh_t.QUAD4_t) + value = [value for _, value in _grd["nodes"].values()] + msh.value = np.array( + np.array(value).reshape((len(value), 1)), dtype=jigsaw_msh_t.REALS_t + ) + crs = _grd.get("crs") if crs is not None: msh.crs = CRS.from_user_input(crs) return msh def msh_t_to_2dm(msh: jigsaw_msh_t): - coords = msh.vert2['coord'] - src_crs = msh.crs if hasattr(msh, 'crs') else None + coords = msh.vert2["coord"] + src_crs = msh.crs if hasattr(msh, "crs") else None if src_crs is not None: epsg_4326 = CRS.from_epsg(4326) if not src_crs.equals(epsg_4326): - transformer = Transformer.from_crs( - src_crs, epsg_4326, always_xy=True) - coords = np.vstack( - transformer.transform(coords[:, 0], coords[:, 1])).T + transformer = Transformer.from_crs(src_crs, epsg_4326, always_xy=True) + coords = np.vstack(transformer.transform(coords[:, 0], coords[:, 1])).T return { - 'ND': {i+1: (coord, msh.value[i, 0] if not - np.isnan(msh.value[i, 0]) else -99999) - for i, coord in enumerate(coords)}, - 'E3T': {i+1: index+1 for i, index - in enumerate(msh.tria3['index'])}, - 'E4Q': {i+1: index+1 for i, index - in enumerate(msh.quad4['index'])} - } + "ND": { + i + 1: (coord, msh.value[i, 0] if not np.isnan(msh.value[i, 0]) else -99999) + for i, coord in enumerate(coords) + }, + "E3T": {i + 1: index + 1 for i, index in enumerate(msh.tria3["index"])}, + "E4Q": {i + 1: index + 1 for i, index in enumerate(msh.quad4["index"])}, + } def sms2dm_to_msh_t(_sms2dm: Dict) -> jigsaw_msh_t: msh = jigsaw_msh_t() msh.ndims = +2 - msh.mshID = 'euclidean-mesh' - id_to_index = {node_id: index for index, node_id - in enumerate(_sms2dm['ND'].keys())} - if 'E3T' in _sms2dm: - triangles = [list(map(lambda x: id_to_index[x], element)) for element - in _sms2dm['E3T'].values()] - msh.tria3 = np.array([(index, 0) for index in triangles], - dtype=jigsaw_msh_t.TRIA3_t) - if 'E4Q' in _sms2dm: - quads = [list(map(lambda x: id_to_index[x], element)) for element - in _sms2dm['E4Q'].values()] - msh.quad4 = np.array([(index, 0) for index in quads], - dtype=jigsaw_msh_t.QUAD4_t) - msh.vert2 = np.array([(coord, 0) for coord, _ in _sms2dm['ND'].values()], - dtype=jigsaw_msh_t.VERT2_t) - value = [value for _, value in _sms2dm['ND'].values()] - msh.value = np.array(np.array(value).reshape((len(value), 1)), - dtype=jigsaw_msh_t.REALS_t) - crs = _sms2dm.get('crs') + msh.mshID = "euclidean-mesh" + id_to_index = {node_id: index for index, node_id in enumerate(_sms2dm["ND"].keys())} + if "E3T" in _sms2dm: + triangles = [ + list(map(lambda x: id_to_index[x], element)) + for element in _sms2dm["E3T"].values() + ] + msh.tria3 = np.array( + [(index, 0) for index in triangles], dtype=jigsaw_msh_t.TRIA3_t + ) + if "E4Q" in _sms2dm: + quads = [ + list(map(lambda x: id_to_index[x], element)) + for element in _sms2dm["E4Q"].values() + ] + msh.quad4 = np.array( + [(index, 0) for index in quads], dtype=jigsaw_msh_t.QUAD4_t + ) + msh.vert2 = np.array( + [(coord, 0) for coord, _ in _sms2dm["ND"].values()], dtype=jigsaw_msh_t.VERT2_t + ) + value = [value for _, value in _sms2dm["ND"].values()] + msh.value = np.array( + np.array(value).reshape((len(value), 1)), dtype=jigsaw_msh_t.REALS_t + ) + crs = _sms2dm.get("crs") if crs is not None: msh.crs = CRS.from_user_input(crs) return msh + @must_be_euclidean_mesh def msh_t_to_utm(msh): utm_crs = estimate_mesh_utm(msh) if utm_crs is None: return - transformer = Transformer.from_crs( - msh.crs, utm_crs, always_xy=True) + 
transformer = Transformer.from_crs(msh.crs, utm_crs, always_xy=True) - coords = msh.vert2['coord'] + coords = msh.vert2["coord"] # pylint: disable=E0633 - coords[:, 0], coords[:, 1] = transformer.transform( - coords[:, 0], coords[:, 1]) - msh.vert2['coord'][:] = coords + coords[:, 0], coords[:, 1] = transformer.transform(coords[:, 0], coords[:, 1]) + msh.vert2["coord"][:] = coords msh.crs = utm_crs @@ -1504,67 +1436,70 @@ def estimate_bounds_utm(bounds, crs="EPSG:4326"): in_crs = CRS.from_user_input(crs) if in_crs.is_geographic: x0, y0, x1, y1 = bounds - _, _, number, letter = utm.from_latlon( - (y0 + y1)/2, (x0 + x1)/2) + _, _, number, letter = utm.from_latlon((y0 + y1) / 2, (x0 + x1) / 2) # PyProj 3.2.1 throws error if letter is provided utm_crs = CRS( - proj='utm', - zone=f'{number}', - south=(y0 + y1)/2 < 0, - ellps={ - 'GRS 1980': 'GRS80', - 'WGS 84': 'WGS84' - }[in_crs.ellipsoid.name] - ) + proj="utm", + zone=f"{number}", + south=(y0 + y1) / 2 < 0, + ellps={"GRS 1980": "GRS80", "WGS 84": "WGS84"}[in_crs.ellipsoid.name], + ) return utm_crs return None + @must_be_euclidean_mesh def estimate_mesh_utm(msh): - if hasattr(msh, 'crs'): - coords = msh.vert2['coord'] + if hasattr(msh, "crs"): + coords = msh.vert2["coord"] x0, y0, x1, y1 = ( - np.min(coords[:, 0]), np.min(coords[:, 1]), - np.max(coords[:, 0]), np.max(coords[:, 1])) + np.min(coords[:, 0]), + np.min(coords[:, 1]), + np.max(coords[:, 0]), + np.max(coords[:, 1]), + ) utm_crs = estimate_bounds_utm((x0, y0, x1, y1), msh.crs) return utm_crs return None + def get_polygon_channels(polygon, width, simplify=None, join_style=3): # Operations are done without any CRS info consideration - polys_gdf = gpd.GeoDataFrame( - geometry=gpd.GeoSeries(polygon)) + polys_gdf = gpd.GeoDataFrame(geometry=gpd.GeoSeries(polygon)) if isinstance(simplify, (int, float)): polys_gdf = gpd.GeoDataFrame( - geometry=polys_gdf.simplify( - tolerance=simplify, - preserve_topology=False)) + geometry=polys_gdf.simplify(tolerance=simplify, preserve_topology=False) + ) - buffer_size = width/2 + buffer_size = width / 2 buffered_gdf = gpd.GeoDataFrame( - geometry=polys_gdf.buffer(-buffer_size).buffer( - buffer_size, - join_style=join_style)) + geometry=polys_gdf.buffer(-buffer_size).buffer( + buffer_size, join_style=join_style + ) + ) buffered_gdf = buffered_gdf[~buffered_gdf.is_empty] if len(buffered_gdf) == 0: # All is channel! 
return polygon - channels_gdf = gpd.overlay( - polys_gdf, buffered_gdf, how='difference') + channels_gdf = gpd.overlay(polys_gdf, buffered_gdf, how="difference") # Use square - 1/4 circle as cleanup criteria channels_gdf = gpd.GeoDataFrame( - geometry=gpd.GeoSeries( - [p for i in channels_gdf.geometry - for p in i.geoms - if p.area > width**2 * (1-np.pi/4)])) - + geometry=gpd.GeoSeries( + [ + p + for i in channels_gdf.geometry + for p in i.geoms + if p.area > width ** 2 * (1 - np.pi / 4) + ] + ) + ) ret_val = channels_gdf.unary_union if isinstance(ret_val, GeometryCollection): @@ -1577,11 +1512,12 @@ def get_polygon_channels(polygon, width, simplify=None, join_style=3): def merge_msh_t( - *mesh_list, - out_crs="EPSG:4326", - drop_by_bbox=True, - can_overlap=True, - check_cross_edges=False): + *mesh_list, + out_crs="EPSG:4326", + drop_by_bbox=True, + can_overlap=True, + check_cross_edges=False, +): # TODO: Add support for quad4 and hexa8 @@ -1601,8 +1537,8 @@ def merge_msh_t( reproject(mesh, dst_crs) if drop_by_bbox: - x = mesh.vert2['coord'][:, 0] - y = mesh.vert2['coord'][:, 1] + x = mesh.vert2["coord"][:, 0] + y = mesh.vert2["coord"][:, 1] mesh_shape = box(np.min(x), np.min(y), np.max(x), np.max(y)) else: mesh_shape = get_mesh_polygons(mesh) @@ -1611,33 +1547,32 @@ def merge_msh_t( # NOTE: fit_inside = True w/ inverse = True results # in overlap when clipping low-priority mesh mesh = clip_mesh_by_shape( - mesh, ishp, + mesh, + ishp, use_box_only=drop_by_bbox, fit_inside=can_overlap, inverse=True, - check_cross_edges=check_cross_edges) + check_cross_edges=check_cross_edges, + ) mesh_shape_list.append(mesh_shape) - - index.append(mesh.tria3['index'] + offset) - coord.append(mesh.vert2['coord']) + index.append(mesh.tria3["index"] + offset) + coord.append(mesh.vert2["coord"]) value.append(mesh.value) offset += coord[-1].shape[0] composite_mesh = jigsaw_msh_t() - composite_mesh.mshID = 'euclidean-mesh' + composite_mesh.mshID = "euclidean-mesh" composite_mesh.ndims = 2 composite_mesh.vert2 = np.array( - [(coord, 0) for coord in np.vstack(coord)], - dtype=jigsaw_msh_t.VERT2_t) + [(coord, 0) for coord in np.vstack(coord)], dtype=jigsaw_msh_t.VERT2_t + ) composite_mesh.tria3 = np.array( - [(index, 0) for index in np.vstack(index)], - dtype=jigsaw_msh_t.TRIA3_t) - composite_mesh.value = np.array( - np.vstack(value), - dtype=jigsaw_msh_t.REALS_t) + [(index, 0) for index in np.vstack(index)], dtype=jigsaw_msh_t.TRIA3_t + ) + composite_mesh.value = np.array(np.vstack(value), dtype=jigsaw_msh_t.REALS_t) composite_mesh.crs = dst_crs diff --git a/setup.cfg b/setup.cfg index b2c12d45..19f391a3 100644 --- a/setup.cfg +++ b/setup.cfg @@ -13,3 +13,6 @@ url = [nosetests] exe = True tests = tests/ + +[flake8] +ignore = E501,E226,W503 \ No newline at end of file diff --git a/setup.py b/setup.py index a893e020..351523a3 100755 --- a/setup.py +++ b/setup.py @@ -1,30 +1,27 @@ #! 
/usr/bin/env python -import setuptools -import subprocess -import setuptools.command.build_py import distutils.cmd import distutils.util -import shutil +import os import platform +import shutil +import subprocess +import sys from multiprocessing import cpu_count from pathlib import Path -import sys -import os + +import setuptools +import setuptools.command.build_py PARENT = Path(__file__).parent.absolute() -PYENV_PREFIX = Path("/".join(sys.executable.split('/')[:-2])) -SYSLIB = { - "Windows": "jigsaw.dll", - "Linux": "libjigsaw.so", - "Darwin": "libjigsaw.dylib"} +PYENV_PREFIX = Path("/".join(sys.executable.split("/")[:-2])) +SYSLIB = {"Windows": "jigsaw.dll", "Linux": "libjigsaw.so", "Darwin": "libjigsaw.dylib"} if "install_jigsaw" not in sys.argv: if "develop" not in sys.argv: if "install" in sys.argv: - libsaw = PYENV_PREFIX / 'lib' / SYSLIB[platform.system()] + libsaw = PYENV_PREFIX / "lib" / SYSLIB[platform.system()] if not libsaw.is_file(): - subprocess.check_call( - [sys.executable, "setup.py", "install_jigsaw"]) + subprocess.check_call([sys.executable, "setup.py", "install_jigsaw"]) class InstallJigsawCommand(distutils.cmd.Command): @@ -32,98 +29,111 @@ class InstallJigsawCommand(distutils.cmd.Command): user_options = [] - def initialize_options(self): pass + def initialize_options(self): + pass - def finalize_options(self): pass + def finalize_options(self): + pass def run(self): - self.announce('Loading JIGSAWPY from GitHub', level=3) + self.announce("Loading JIGSAWPY from GitHub", level=3) # init jigsaw-python submodule subprocess.check_call( - ["git", "submodule", "update", - "--init", "submodules/jigsaw-python"]) + ["git", "submodule", "update", "--init", "submodules/jigsaw-python"] + ) # install jigsawpy - os.chdir(PARENT / 'submodules/jigsaw-python') + os.chdir(PARENT / "submodules/jigsaw-python") subprocess.check_call(["git", "checkout", "master"]) - self.announce('INSTALLING JIGSAWPY', level=3) + self.announce("INSTALLING JIGSAWPY", level=3) subprocess.check_call(["python", "setup.py", "install"]) # install jigsaw self.announce( - 'INSTALLING JIGSAW LIBRARY AND BINARIES FROM ' - 'https://github.com/dengwirda/jigsaw-python', level=3) + "INSTALLING JIGSAW LIBRARY AND BINARIES FROM " + "https://github.com/dengwirda/jigsaw-python", + level=3, + ) os.chdir("external/jigsaw") os.makedirs("build", exist_ok=True) os.chdir("build") gcc, cpp = self._check_gcc_version() subprocess.check_call( - ["cmake", "..", - "-DCMAKE_BUILD_TYPE=Release", - f"-DCMAKE_INSTALL_PREFIX={PYENV_PREFIX}", - f"-DCMAKE_C_COMPILER={gcc}", - f"-DCMAKE_CXX_COMPILER={cpp}", - ]) + [ + "cmake", + "..", + "-DCMAKE_BUILD_TYPE=Release", + f"-DCMAKE_INSTALL_PREFIX={PYENV_PREFIX}", + f"-DCMAKE_C_COMPILER={gcc}", + f"-DCMAKE_CXX_COMPILER={cpp}", + ] + ) subprocess.check_call(["make", f"-j{cpu_count()}", "install"]) - libsaw_prefix = list(PYENV_PREFIX.glob("**/*jigsawpy*")).pop() / '_lib' + libsaw_prefix = list(PYENV_PREFIX.glob("**/*jigsawpy*")).pop() / "_lib" os.makedirs(libsaw_prefix, exist_ok=True) - envlib = PYENV_PREFIX / 'lib' / SYSLIB[platform.system()] + envlib = PYENV_PREFIX / "lib" / SYSLIB[platform.system()] os.symlink(envlib, libsaw_prefix / envlib.name) os.chdir(PARENT) subprocess.check_call( - ["git", "submodule", "deinit", "-f", "submodules/jigsaw-python"]) + ["git", "submodule", "deinit", "-f", "submodules/jigsaw-python"] + ) def _check_gcc_version(self): cpp = shutil.which("c++") - major, minor, patch = subprocess.check_output( - [cpp, "--version"] - 
-        ).decode('utf-8').split('\n')[0].split()[-1].split('.')
+        version_fields = (
+            subprocess.check_output([cpp, "-dumpversion"])
+            .decode("utf-8")
+            .strip()
+            .split(".")
+        )
+        # NOTE: GCC 7+ builds may report only the major version for
+        # -dumpversion; pad so the three-way unpack below cannot fail.
+        major, minor, patch = (version_fields + ["0", "0"])[:3]
         current_version = float(f"{major}.{minor}")
-        if current_version < 7.:
+        if current_version < 7.0:
             raise Exception(
-                'JIGSAW requires GCC version 7 or later, got '
-                f'{major}.{minor}.{patch} from {cpp}')
+                "JIGSAW requires GCC version 7 or later, got "
+                f"{major}.{minor}.{patch} from {cpp}"
+            )
         return shutil.which("gcc"), cpp


-conf = setuptools.config.read_configuration(PARENT / 'setup.cfg')
-meta = conf['metadata']
+conf = setuptools.config.read_configuration(PARENT / "setup.cfg")
+meta = conf["metadata"]

 setuptools.setup(
-    name=meta['name'],
-    version=meta['version'],
-    author=meta['author'],
-    author_email=meta['author_email'],
-    description=meta['description'],
-    long_description=meta['long_description'],
+    name=meta["name"],
+    version=meta["version"],
+    author=meta["author"],
+    author_email=meta["author_email"],
+    description=meta["description"],
+    long_description=meta["long_description"],
     long_description_content_type="text/markdown",
-    url=meta['url'],
+    url=meta["url"],
     packages=setuptools.find_packages(),
     cmdclass={
-        'install_jigsaw': InstallJigsawCommand,
-        },
-    python_requires='>=3.7, <3.10',
-    setup_requires=['wheel', 'numpy'],
+        "install_jigsaw": InstallJigsawCommand,
+    },
+    python_requires=">=3.7, <3.10",
+    setup_requires=["wheel", "numpy"],
     install_requires=[
-        "jigsawpy",
-        "matplotlib",
-        "netCDF4",
-        "scipy",
-        "pyproj>=3.0",
-        "fiona",
-        "rasterio",
-        'tqdm',
-        # "pysheds",
-        "colored_traceback",
-        "requests",
-        "shapely",
-        "geoalchemy2",
-        "utm",
-        "geopandas",
-        ],
+        "jigsawpy",
+        "matplotlib",
+        "netCDF4",
+        "scipy",
+        "pyproj>=3.0",
+        "fiona",
+        "rasterio",
+        "tqdm",
+        # "pysheds",
+        "colored_traceback",
+        "requests",
+        "shapely",
+        "geoalchemy2",
+        "utm",
+        "geopandas",
+    ],
     entry_points={
-        'console_scripts': [
+        "console_scripts": [
             "ocsmesh=ocsmesh.__main__:main",
-            "interp=ocsmesh.interp:main"
+            "interp=ocsmesh.interp:main",
         ]
     },
-    tests_require=['nose'],
-    test_suite='nose.collector',
+    tests_require=["nose"],
+    test_suite="nose.collector",
 )
diff --git a/tests/api/hfun.py b/tests/api/hfun.py
index 30248560..24565dc9 100755
--- a/tests/api/hfun.py
+++ b/tests/api/hfun.py
@@ -1,14 +1,15 @@
 #! python
 import unittest
 from copy import deepcopy
+
 import numpy as np
+
 import ocsmesh


 class SizeFromMesh(unittest.TestCase):
-
     def setUp(self):
-        rast = ocsmesh.raster.Raster('test_dem.tif')
+        rast = ocsmesh.raster.Raster("test_dem.tif")

         hfun_orig = ocsmesh.hfun.hfun.Hfun(rast, hmin=100, hmax=1500)
         hfun_orig.add_contour(level=0, expansion_rate=0.001, target_size=100)
@@ -29,8 +30,9 @@ def test_calculated_size(self):
         # TODO: Come up with a more robust criterion
         threshold = 0.2
-        err_value = np.max(np.abs(hfun_val_diff))/np.max(self.hfun_orig_val)
+        err_value = np.max(np.abs(hfun_val_diff)) / np.max(self.hfun_orig_val)
         self.assertTrue(err_value < threshold)

-if __name__ == '__main__':
+
+if __name__ == "__main__":
     unittest.main()
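
For reference, the small-element pass preserved by the `remesh_small_elements` hunk above boils down to vectorized Heron's formula (as in `calculate_tria_areas`) plus a cutoff at a fraction of the area of an equilateral triangle with side `hmin`. A minimal standalone sketch of that arithmetic, using NumPy only; the helper name and the sample `coord`/`trias` arrays are invented for illustration and are not part of the ocsmesh API:

    import numpy as np

    def heron_tria_areas(coord, trias):
        """Vectorized Heron's formula; coord is (n, 2), trias is (m, 3)."""
        pts = coord[trias]  # (m, 3, 2) corner coordinates per triangle
        # Side lengths: distance between consecutive corners (rolled by one).
        sides = np.linalg.norm(pts - np.roll(pts, shift=1, axis=1), axis=2)
        s = sides.sum(axis=1) / 2  # semi-perimeter
        a, b, c = sides.T
        return np.sqrt(s * (s - a) * (s - b) * (s - c))

    coord = np.array([[0.0, 0.0], [1.0, 0.0], [0.5, 1.0], [1.001, 0.001]])
    trias = np.array([[0, 1, 2], [1, 3, 2]])
    hmin, coef = 0.5, 0.5  # minimum mesh size and cutoff fraction
    tiny = heron_tria_areas(coord, trias) < coef * np.sqrt(3) / 4 * hmin ** 2
    print(tiny)  # [False  True]: only the sliver triangle is flagged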
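
Similarly, the `get_polygon_channels` hunk performs a morphological opening: erode the polygon by `width / 2`, dilate back, and take the difference, which leaves everything narrower than `width`. A standalone Shapely sketch of that idea; the rectangle and width are invented inputs, and ocsmesh additionally wraps this in GeoDataFrames and applies the area-based cleanup shown in the diff:

    from shapely.geometry import Polygon

    # A long, thin rectangle: every part of it is narrower than `width`,
    # so the negative buffer erases it and the whole shape counts as channel.
    poly = Polygon([(0, 0), (10, 0), (10, 1), (0, 1)])
    width = 2.0
    opened = poly.buffer(-width / 2).buffer(width / 2, join_style=3)
    channels = poly.difference(opened)
    print(channels.area)  # 10.0, mirroring the "All is channel!" branch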