From 9e1e2496f48be5bfb348f88832db4a5ddc6a54e5 Mon Sep 17 00:00:00 2001 From: Shuo Zhong Date: Wed, 2 Apr 2025 10:27:33 -0400 Subject: [PATCH 1/4] WIP:fixing geo coord exporting accuracy --- configure.sh | 4 +- opendm/georef.py | 49 ++++++++++++++ opendm/osfm.py | 5 +- opendm/types.py | 4 ++ stages/mvstex.py | 8 +-- stages/odm_filterpoints.py | 8 +-- stages/odm_georeferencing.py | 123 +++++++++++++++++++++++++++++------ stages/odm_meshing.py | 18 ++--- stages/run_opensfm.py | 14 ++-- 9 files changed, 187 insertions(+), 46 deletions(-) create mode 100644 opendm/georef.py diff --git a/configure.sh b/configure.sh index 93fa32999..775c2770c 100755 --- a/configure.sh +++ b/configure.sh @@ -25,9 +25,9 @@ check_version(){ } if [[ $2 =~ ^[0-9]+$ ]] ; then - processes=$2 + processes=2 else - processes=$(nproc) + processes=2 fi ensure_prereqs() { diff --git a/opendm/georef.py b/opendm/georef.py new file mode 100644 index 000000000..217217acb --- /dev/null +++ b/opendm/georef.py @@ -0,0 +1,49 @@ +import numpy as np +from numpy import ndarray +from typing import Tuple +from pyproj import Proj +import argparse +from opensfm.geo import TopocentricConverter + +def parse_pdal_args(pdal_args: dict) -> argparse.Namespace: + def validate_arg(name: str, data_type: type): + if name not in pdal_args: + raise ValueError(f"PDAL arguments should contain {name}.") + try: + data_type(pdal_args[name]) + except ValueError: + raise ValueError(f"PDAL argument {name} should be of type {data_type}.") + return data_type(pdal_args[name]) + return argparse.Namespace( + reflat=validate_arg('reflat', float), + reflon=validate_arg('reflon', float), + refalt=validate_arg('refalt', float), + x_offset=validate_arg('x_offset', float), + y_offset=validate_arg('y_offset', float), + a_srs=validate_arg('a_srs', str) + ) + +def topocentric_to_georef_pdal(ins, outs): + args = parse_pdal_args(ins) + outs['X'], outs['Y'], outs['Z'] = topocentric_to_georef(args.reflat, args.reflon, args.refalt, args.a_srs, ins['X'], 
ins['Y'], ins['Z'], args.x_offset, args.y_offset) + return True + +def topocentric_to_georef( + reflat: float, + reflon: float, + refalt: float, + a_srs: str, + x: ndarray, + y: ndarray, + z: ndarray, + x_offset: float = 0, + y_offset: float = 0, + ) -> Tuple[ndarray, ndarray, ndarray]: + reference = TopocentricConverter(reflat, reflon, refalt) + projection = Proj(a_srs) + lat, lon, alt = reference.to_lla(x, y, z) + easting, northing = projection(lon, lat) + return easting - x_offset, northing - y_offset, alt + + + diff --git a/opendm/osfm.py b/opendm/osfm.py index 0d49e1e40..d39c88da4 100644 --- a/opendm/osfm.py +++ b/opendm/osfm.py @@ -629,9 +629,12 @@ def ground_control_points(self, proj4): return result + def reference(self): + ds = DataSet(self.opensfm_project_path) + return ds.load_reference_lla() def name(self): - return os.path.basename(os.path.abspath(self.path(".."))) + return os.path.basename(os.path.abspath(self.path(".."))) def get_submodel_argv(args, submodels_path = None, submodel_name = None): """ diff --git a/opendm/types.py b/opendm/types.py index cc5889125..28f5e5bcd 100644 --- a/opendm/types.py +++ b/opendm/types.py @@ -361,16 +361,20 @@ def __init__(self, root_path, gcp_file = None, geo_file = None, align_file = Non self.openmvs_model = os.path.join(self.openmvs, 'scene_dense_dense_filtered.ply') # filter points + self.filtered_point_cloud_topo = os.path.join(self.odm_filterpoints, "point_cloud_topocentric.ply") self.filtered_point_cloud = os.path.join(self.odm_filterpoints, "point_cloud.ply") self.filtered_point_cloud_stats = os.path.join(self.odm_filterpoints, "point_cloud_stats.json") # odm_meshing + self.odm_mesh_topo = os.path.join(self.odm_meshing, 'odm_mesh_topocentric.ply') self.odm_mesh = os.path.join(self.odm_meshing, 'odm_mesh.ply') self.odm_meshing_log = os.path.join(self.odm_meshing, 'odm_meshing_log.txt') + self.odm_25dmesh_topo = os.path.join(self.odm_meshing, 'odm_25dmesh_topocentric.ply') self.odm_25dmesh = 
os.path.join(self.odm_meshing, 'odm_25dmesh.ply') self.odm_25dmeshing_log = os.path.join(self.odm_meshing, 'odm_25dmeshing_log.txt') # texturing + self.odm_textured_model_obj_topo = os.path.join(self.odm_texturing, 'odm_textured_model_topocentric.obj') self.odm_textured_model_obj = 'odm_textured_model_geo.obj' self.odm_textured_model_glb = 'odm_textured_model_geo.glb' diff --git a/stages/mvstex.py b/stages/mvstex.py index 6b4b64b12..6f8274da4 100644 --- a/stages/mvstex.py +++ b/stages/mvstex.py @@ -33,7 +33,7 @@ def add_run(nvm_file, primary=True, band=None): if not args.skip_3dmodel and (primary or args.use_3dmesh): nonloc.runs += [{ 'out_dir': os.path.join(tree.odm_texturing, subdir), - 'model': tree.odm_mesh, + 'model': tree.odm_mesh_topo, 'nadir': False, 'primary': primary, 'nvm_file': nvm_file, @@ -43,7 +43,7 @@ def add_run(nvm_file, primary=True, band=None): if not args.use_3dmesh: nonloc.runs += [{ 'out_dir': os.path.join(tree.odm_25dtexturing, subdir), - 'model': tree.odm_25dmesh, + 'model': tree.odm_25dmesh_topo, 'nadir': True, 'primary': primary, 'nvm_file': nvm_file, @@ -69,7 +69,7 @@ def add_run(nvm_file, primary=True, band=None): if not io.dir_exists(r['out_dir']): system.mkdir_p(r['out_dir']) - odm_textured_model_obj = os.path.join(r['out_dir'], tree.odm_textured_model_obj) + odm_textured_model_obj = os.path.join(r['out_dir'], tree.odm_textured_model_obj_topo) unaligned_obj = io.related_file_path(odm_textured_model_obj, postfix="_unaligned") if not io.file_exists(odm_textured_model_obj) or self.rerun(): @@ -147,7 +147,7 @@ def add_run(nvm_file, primary=True, band=None): shutil.rmtree(packed_dir) try: - obj_pack(os.path.join(r['out_dir'], tree.odm_textured_model_obj), packed_dir, _info=log.ODM_INFO) + obj_pack(os.path.join(r['out_dir'], tree.odm_textured_model_obj_topo), packed_dir, _info=log.ODM_INFO) # Move packed/* into texturing folder system.delete_files(r['out_dir'], (".vec", )) diff --git a/stages/odm_filterpoints.py b/stages/odm_filterpoints.py 
index dac7c3f13..56ed48522 100644 --- a/stages/odm_filterpoints.py +++ b/stages/odm_filterpoints.py @@ -19,7 +19,7 @@ def process(self, args, outputs): inputPointCloud = "" # check if reconstruction was done before - if not io.file_exists(tree.filtered_point_cloud) or self.rerun(): + if not io.file_exists(tree.filtered_point_cloud_topo) or self.rerun(): if args.fast_orthophoto: inputPointCloud = os.path.join(tree.opensfm, 'reconstruction.ply') else: @@ -49,14 +49,14 @@ def process(self, args, outputs): else: log.ODM_WARNING("Not a georeferenced reconstruction, will ignore --auto-boundary") - point_cloud.filter(inputPointCloud, tree.filtered_point_cloud, tree.filtered_point_cloud_stats, + point_cloud.filter(inputPointCloud, tree.filtered_point_cloud_topo, tree.filtered_point_cloud_stats, standard_deviation=args.pc_filter, sample_radius=args.pc_sample, boundary=boundary_offset(outputs.get('boundary'), reconstruction.get_proj_offset()), max_concurrency=args.max_concurrency) # Quick check - info = point_cloud.ply_info(tree.filtered_point_cloud) + info = point_cloud.ply_info(tree.filtered_point_cloud_topo) if info["vertex_count"] == 0: extra_msg = '' if 'boundary' in outputs: @@ -64,7 +64,7 @@ def process(self, args, outputs): raise system.ExitException("Uh oh! We ended up with an empty point cloud. This means that the reconstruction did not succeed. Have you followed best practices for data acquisition? 
See https://docs.opendronemap.org/flying/%s" % extra_msg) else: log.ODM_WARNING('Found a valid point cloud file in: %s' % - tree.filtered_point_cloud) + tree.filtered_point_cloud_topo) if args.optimize_disk_space and inputPointCloud: if os.path.isfile(inputPointCloud): diff --git a/stages/odm_georeferencing.py b/stages/odm_georeferencing.py index de16c374b..cd732dc07 100644 --- a/stages/odm_georeferencing.py +++ b/stages/odm_georeferencing.py @@ -8,7 +8,9 @@ import zipfile import math from collections import OrderedDict +from numpy import rec from pyproj import CRS +import pdal from opendm import io from opendm import log @@ -114,18 +116,83 @@ def process(self, args, outputs): else: log.ODM_WARNING("GCPs could not be loaded for writing to %s" % gcp_export_file) + if reconstruction.is_georeferenced(): + # prepare pipeline stage for topocentric to georeferenced conversion + octx = OSFMContext(tree.opensfm) + reference = octx.reference() + pdalargs = { + 'reflat': reference.lat, + 'reflon': reference.lon, + 'refalt': reference.alt, + 'a_srs': reconstruction.georef.proj4(), + 'x_offset': reconstruction.georef.utm_east_offset, + 'y_offset': reconstruction.georef.utm_north_offset + } + topo_to_georef_def = { + "type": "filters.python", + "module": "opendm.georef", + "function": "topocentric_to_georef_pdal", + "pdalargs": json.dumps(pdalargs) + } + + if not io.file_exists(tree.filtered_point_cloud) or self.rerun(): + log.ODM_INFO("Georeferecing filtered point cloud") + if reconstruction.is_georeferenced(): + ply_georeferencing_pipeline = [ + { + "type": "readers.ply", + "filename": tree.filtered_point_cloud_topo + }, + topo_to_georef_def, + { + "type": "writers.ply", + "filename": tree.filtered_point_cloud + } + ] + pipeline = pdal.Pipeline(json.dumps(ply_georeferencing_pipeline)) + pipeline.execute() + else: + shutil.copy(tree.filtered_point_cloud_topo, tree.filtered_point_cloud) + + if not io.file_exists(tree.odm_textured_model_geo_obj) or self.rerun(): + 
log.ODM_INFO("Georeferecing textured model") + if reconstruction.is_georeferenced(): + obj_georeferencing_pipeline = [ + { + "type": "readers.obj", + "filename": tree.odm_textured_model_obj + }, + topo_to_georef_def, + { + "type": "writers.obj", + "filename": tree.odm_textured_model_geo_obj + } + ] + pipeline = pdal.Pipeline(json.dumps(obj_georeferencing_pipeline)) + pipeline.execute() + else: + shutil.copy(tree.odm_textured_model_obj, tree.odm_textured_model_geo_obj) + if not io.file_exists(tree.odm_georeferencing_model_laz) or self.rerun(): - cmd = f'pdal translate -i "{tree.filtered_point_cloud}" -o \"{tree.odm_georeferencing_model_laz}\"' - stages = ["ferry"] - params = [ - '--filters.ferry.dimensions="views => UserData"' + las_georeferencing_pipeline = [ + { + "type": "readers.las", + "filename": tree.filtered_point_cloud + }, + { + "type": "filters.ferry", + "dimensions": "views => UserData" + } ] - + if reconstruction.is_georeferenced(): log.ODM_INFO("Georeferencing point cloud") - stages.append("transformation") utmoffset = reconstruction.georef.utm_offset() + las_georeferencing_pipeline.append({ + "type": "filters.transformation", + "matrix": f"1 0 0 {utmoffset[0]} 0 1 0 {utmoffset[1]} 0 0 1 0 0 0 0 1" + }) # Establish appropriate las scale for export las_scale = 0.001 @@ -148,27 +215,40 @@ def powerr(r): else: log.ODM_INFO("No point_cloud_stats.json found. 
Using default las scale: %s" % las_scale) - params += [ - f'--filters.transformation.matrix="1 0 0 {utmoffset[0]} 0 1 0 {utmoffset[1]} 0 0 1 0 0 0 0 1"', - f'--writers.las.offset_x={reconstruction.georef.utm_east_offset}' , - f'--writers.las.offset_y={reconstruction.georef.utm_north_offset}', - f'--writers.las.scale_x={las_scale}', - f'--writers.las.scale_y={las_scale}', - f'--writers.las.scale_z={las_scale}', - '--writers.las.offset_z=0', - f'--writers.las.a_srs="{reconstruction.georef.proj4()}"' # HOBU this should maybe be WKT - ] - + las_writer_def = { + "type": "writers.las", + "filename": tree.odm_georeferencing_model_laz, + "a_srs": reconstruction.georef.proj4(), + "offset_x": utmoffset[0], + "offset_y": utmoffset[1], + "offset_z": 0, + "scale_x": las_scale, + "scale_y": las_scale, + "scale_z": las_scale, + } + if reconstruction.has_gcp() and io.file_exists(gcp_geojson_zip_export_file): if os.path.getsize(gcp_geojson_zip_export_file) <= 65535: log.ODM_INFO("Embedding GCP info in point cloud") params += [ '--writers.las.vlrs="{\\\"filename\\\": \\\"%s\\\", \\\"user_id\\\": \\\"ODM\\\", \\\"record_id\\\": 2, \\\"description\\\": \\\"Ground Control Points (zip)\\\"}"' % gcp_geojson_zip_export_file.replace(os.sep, "/") ] + las_writer_def["vlrs"] = json.dumps( + { + "filename": gcp_geojson_zip_export_file.replace(os.sep, "/"), + "user_id": "ODM", + "record_id": 2, + "description": "Ground Control Points (zip)" + } + ) + else: log.ODM_WARNING("Cannot embed GCP info in point cloud, %s is too large" % gcp_geojson_zip_export_file) - system.run(cmd + ' ' + ' '.join(stages) + ' ' + ' '.join(params)) + las_georeferencing_pipeline.append(las_writer_def) + + pipeline = pdal.Pipeline(json.dumps(las_georeferencing_pipeline)) + pipeline.execute() self.update_progress(50) @@ -202,7 +282,12 @@ def powerr(r): export_to_bounds_files(outputs['boundary'], reconstruction.get_proj_srs(), bounds_json, bounds_gpkg) else: log.ODM_INFO("Converting point cloud (non-georeferenced)") - 
system.run(cmd + ' ' + ' '.join(stages) + ' ' + ' '.join(params)) + las_georeferencing_pipeline.append({ + "type": "writers.las", + "filename": tree.odm_georeferencing_model_laz + }) + pipeline = pdal.Pipeline(json.dumps(las_georeferencing_pipeline)) + pipeline.execute() stats_dir = tree.path("opensfm", "stats", "codem") diff --git a/stages/odm_meshing.py b/stages/odm_meshing.py index 6f5f603e6..da0055781 100644 --- a/stages/odm_meshing.py +++ b/stages/odm_meshing.py @@ -19,11 +19,11 @@ def process(self, args, outputs): # Create full 3D model unless --skip-3dmodel is set if not args.skip_3dmodel: - if not io.file_exists(tree.odm_mesh) or self.rerun(): - log.ODM_INFO('Writing ODM Mesh file in: %s' % tree.odm_mesh) + if not io.file_exists(tree.odm_mesh_topo) or self.rerun(): + log.ODM_INFO('Writing ODM Mesh file in: %s' % tree.odm_mesh_topo) - mesh.screened_poisson_reconstruction(tree.filtered_point_cloud, - tree.odm_mesh, + mesh.screened_poisson_reconstruction(tree.filtered_point_cloud_topo, + tree.odm_mesh_topo, depth=self.params.get('oct_tree'), samples=self.params.get('samples'), maxVertexCount=self.params.get('max_vertex'), @@ -31,16 +31,16 @@ def process(self, args, outputs): threads=max(1, self.params.get('max_concurrency') - 1)), # poissonrecon can get stuck on some machines if --threads == all cores else: log.ODM_WARNING('Found a valid ODM Mesh file in: %s' % - tree.odm_mesh) + tree.odm_mesh_topo) self.update_progress(50) # Always generate a 2.5D mesh # unless --use-3dmesh is set. 
if not args.use_3dmesh: - if not io.file_exists(tree.odm_25dmesh) or self.rerun(): + if not io.file_exists(tree.odm_25dmesh_topo) or self.rerun(): - log.ODM_INFO('Writing ODM 2.5D Mesh file in: %s' % tree.odm_25dmesh) + log.ODM_INFO('Writing ODM 2.5D Mesh file in: %s' % tree.odm_25dmesh_topo) multiplier = math.pi / 2.0 radius_steps = commands.get_dem_radius_steps(tree.filtered_point_cloud_stats, 3, args.orthophoto_resolution, multiplier=multiplier) @@ -51,7 +51,7 @@ def process(self, args, outputs): if args.fast_orthophoto: dsm_resolution *= 8.0 - mesh.create_25dmesh(tree.filtered_point_cloud, tree.odm_25dmesh, + mesh.create_25dmesh(tree.filtered_point_cloud_topo, tree.odm_25dmesh_topo, radius_steps, dsm_resolution=dsm_resolution, depth=self.params.get('oct_tree'), @@ -63,5 +63,5 @@ def process(self, args, outputs): max_tiles=None if reconstruction.has_geotagged_photos() else math.ceil(len(reconstruction.photos) / 2)) else: log.ODM_WARNING('Found a valid ODM 2.5D Mesh file in: %s' % - tree.odm_25dmesh) + tree.odm_25dmesh_topo) diff --git a/stages/run_opensfm.py b/stages/run_opensfm.py index 850c6ce83..e3bab12da 100644 --- a/stages/run_opensfm.py +++ b/stages/run_opensfm.py @@ -69,13 +69,13 @@ def cleanup_disk_space(): self.update_progress(75) # We now switch to a geographic CRS - if reconstruction.is_georeferenced() and (not io.file_exists(tree.opensfm_topocentric_reconstruction) or self.rerun()): - octx.run('export_geocoords --reconstruction --proj "%s" --offset-x %s --offset-y %s' % - (reconstruction.georef.proj4(), reconstruction.georef.utm_east_offset, reconstruction.georef.utm_north_offset)) - shutil.move(tree.opensfm_reconstruction, tree.opensfm_topocentric_reconstruction) - shutil.move(tree.opensfm_geocoords_reconstruction, tree.opensfm_reconstruction) - else: - log.ODM_WARNING("Will skip exporting %s" % tree.opensfm_geocoords_reconstruction) + # if reconstruction.is_georeferenced() and (not io.file_exists(tree.opensfm_topocentric_reconstruction) or 
self.rerun()): + # octx.run('export_geocoords --reconstruction --proj "%s" --offset-x %s --offset-y %s' % + # (reconstruction.georef.proj4(), reconstruction.georef.utm_east_offset, reconstruction.georef.utm_north_offset)) + # shutil.move(tree.opensfm_reconstruction, tree.opensfm_topocentric_reconstruction) + # shutil.move(tree.opensfm_geocoords_reconstruction, tree.opensfm_reconstruction) + # else: + # log.ODM_WARNING("Will skip exporting %s" % tree.opensfm_geocoords_reconstruction) self.update_progress(80) From e17c57dba5541983686be85eaad1a0d34f33a319 Mon Sep 17 00:00:00 2001 From: Shuo Zhong Date: Wed, 2 Apr 2025 17:47:59 -0400 Subject: [PATCH 2/4] 1. Add a topocentric to geocoords converter, allowing to convert every points directly. 2. update odm_georeferencing stage to convert and generate ply, laz, and texture model in UTM coordinate system. Some minor change to use pdal python api instead of command line tool --- opendm/georef.py | 55 ++++++++++-------- opendm/osfm.py | 2 +- opendm/types.py | 2 +- stages/mvstex.py | 3 +- stages/odm_georeferencing.py | 106 ++++++++++++----------------------- 5 files changed, 70 insertions(+), 98 deletions(-) diff --git a/opendm/georef.py b/opendm/georef.py index 217217acb..0581e7ae5 100644 --- a/opendm/georef.py +++ b/opendm/georef.py @@ -2,32 +2,8 @@ from numpy import ndarray from typing import Tuple from pyproj import Proj -import argparse from opensfm.geo import TopocentricConverter -def parse_pdal_args(pdal_args: dict) -> argparse.Namespace: - def validate_arg(name: str, data_type: type): - if name not in pdal_args: - raise ValueError(f"PDAL arguments should contain {name}.") - try: - data_type(pdal_args[name]) - except ValueError: - raise ValueError(f"PDAL argument {name} should be of type {data_type}.") - return data_type(pdal_args[name]) - return argparse.Namespace( - reflat=validate_arg('reflat', float), - reflon=validate_arg('reflon', float), - refalt=validate_arg('refalt', float), - x_offset=validate_arg('x_offset', 
float), - y_offset=validate_arg('y_offset', float), - a_srs=validate_arg('a_srs', str) - ) - -def topocentric_to_georef_pdal(ins, outs): - args = parse_pdal_args(ins) - outs['X'], outs['Y'], outs['Z'] = topocentric_to_georef(args.reflat, args.reflon, args.refalt, args.a_srs, ins['X'], ins['Y'], ins['Z'], args.x_offset, args.y_offset) - return True - def topocentric_to_georef( reflat: float, reflon: float, @@ -46,4 +22,35 @@ def topocentric_to_georef( return easting - x_offset, northing - y_offset, alt +class TopocentricToProj: + def __init__(self, reflat:float, reflon:float, refalt:float, a_srs:str): + self.reference = TopocentricConverter(reflat, reflon, refalt) + self.projection = Proj(a_srs) + + def convert_array(self, arr:ndarray, offset_x:float=0, offset_y:float=0): + x, y, z = arr['X'], arr['Y'], arr['Z'] + easting, northing, alt = self.convert_points(x, y, z, offset_x, offset_y) + arr['X'] = easting + arr['Y'] = northing + arr['Z'] = alt + return arr + + def convert_points(self, x:ndarray, y:ndarray, z:ndarray, offset_x:float=0, offset_y:float=0): + lat, lon, alt = self.reference.to_lla(x, y, z) + easting, northing = self.projection(lon, lat) + return easting - offset_x, northing - offset_y, alt + + def convert_obj(self, input_obj:str, output_obj:str, offset_x:float=0, offset_y:float=0): + with open(input_obj, 'r') as fin: + with open(output_obj, 'w') as fout: + lines = fin.readlines() + for line in lines: + if line.startswith("v "): + v = np.fromstring(line.strip()[2:] + " 1", sep=' ', dtype=float) + vt = self.convert_points(v[0], v[1], v[2], offset_x, offset_y) + fout.write("v " + " ".join(map(str, list(vt))) + '\n') + else: + fout.write(line) + + diff --git a/opendm/osfm.py b/opendm/osfm.py index d39c88da4..db0bb43e1 100644 --- a/opendm/osfm.py +++ b/opendm/osfm.py @@ -631,7 +631,7 @@ def ground_control_points(self, proj4): def reference(self): ds = DataSet(self.opensfm_project_path) - return ds.load_reference_lla() + return ds.load_reference() def 
name(self): return os.path.basename(os.path.abspath(self.path(".."))) diff --git a/opendm/types.py b/opendm/types.py index 28f5e5bcd..257857704 100644 --- a/opendm/types.py +++ b/opendm/types.py @@ -374,7 +374,7 @@ def __init__(self, root_path, gcp_file = None, geo_file = None, align_file = Non self.odm_25dmeshing_log = os.path.join(self.odm_meshing, 'odm_25dmeshing_log.txt') # texturing - self.odm_textured_model_obj_topo = os.path.join(self.odm_texturing, 'odm_textured_model_topocentric.obj') + self.odm_textured_model_obj_topo = 'odm_textured_model_topocentric.obj' self.odm_textured_model_obj = 'odm_textured_model_geo.obj' self.odm_textured_model_glb = 'odm_textured_model_geo.glb' diff --git a/stages/mvstex.py b/stages/mvstex.py index 6f8274da4..6afcb7f9e 100644 --- a/stages/mvstex.py +++ b/stages/mvstex.py @@ -71,7 +71,6 @@ def add_run(nvm_file, primary=True, band=None): odm_textured_model_obj = os.path.join(r['out_dir'], tree.odm_textured_model_obj_topo) unaligned_obj = io.related_file_path(odm_textured_model_obj, postfix="_unaligned") - if not io.file_exists(odm_textured_model_obj) or self.rerun(): log.ODM_INFO('Writing MVS Textured file in: %s' % odm_textured_model_obj) @@ -94,7 +93,7 @@ def add_run(nvm_file, primary=True, band=None): # mvstex definitions kwargs = { 'bin': context.mvstex_path, - 'out_dir': os.path.join(r['out_dir'], "odm_textured_model_geo"), + 'out_dir': os.path.join(r['out_dir'], tree.odm_textured_model_obj_topo.split('.obj')[0]), 'model': r['model'], 'dataTerm': 'gmi', 'outlierRemovalType': 'gauss_clamping', diff --git a/stages/odm_georeferencing.py b/stages/odm_georeferencing.py index cd732dc07..6ecf57db9 100644 --- a/stages/odm_georeferencing.py +++ b/stages/odm_georeferencing.py @@ -8,7 +8,6 @@ import zipfile import math from collections import OrderedDict -from numpy import rec from pyproj import CRS import pdal @@ -25,6 +24,7 @@ from opendm.boundary import as_polygon, export_to_bounds_files from opendm.align import 
compute_alignment_matrix, transform_point_cloud, transform_obj from opendm.utils import np_to_json +from opendm.georef import TopocentricToProj class ODMGeoreferencingStage(types.ODM_Stage): def process(self, args, outputs): @@ -115,84 +115,55 @@ def process(self, args, outputs): else: log.ODM_WARNING("GCPs could not be loaded for writing to %s" % gcp_export_file) - + if reconstruction.is_georeferenced(): # prepare pipeline stage for topocentric to georeferenced conversion octx = OSFMContext(tree.opensfm) reference = octx.reference() - pdalargs = { - 'reflat': reference.lat, - 'reflon': reference.lon, - 'refalt': reference.alt, - 'a_srs': reconstruction.georef.proj4(), - 'x_offset': reconstruction.georef.utm_east_offset, - 'y_offset': reconstruction.georef.utm_north_offset - } - topo_to_georef_def = { - "type": "filters.python", - "module": "opendm.georef", - "function": "topocentric_to_georef_pdal", - "pdalargs": json.dumps(pdalargs) - } + converter = TopocentricToProj(reference.lat, reference.lon, reference.alt, reconstruction.georef.proj4()) if not io.file_exists(tree.filtered_point_cloud) or self.rerun(): log.ODM_INFO("Georeferecing filtered point cloud") if reconstruction.is_georeferenced(): - ply_georeferencing_pipeline = [ - { - "type": "readers.ply", - "filename": tree.filtered_point_cloud_topo - }, - topo_to_georef_def, - { - "type": "writers.ply", - "filename": tree.filtered_point_cloud - } - ] - pipeline = pdal.Pipeline(json.dumps(ply_georeferencing_pipeline)) + pipeline = pdal.Reader.ply(tree.filtered_point_cloud_topo).pipeline() + pipeline.execute() + arr = pipeline.arrays[0] + arr = converter.convert_array( + arr, + reconstruction.georef.utm_east_offset, + reconstruction.georef.utm_north_offset + ) + pipeline = pdal.Writer.ply(tree.filtered_point_cloud).pipeline(arr) pipeline.execute() else: shutil.copy(tree.filtered_point_cloud_topo, tree.filtered_point_cloud) - if not io.file_exists(tree.odm_textured_model_geo_obj) or self.rerun(): + if not 
io.file_exists(os.path.join(tree.odm_texturing, tree.odm_textured_model_obj)) or self.rerun(): log.ODM_INFO("Georeferecing textured model") + obj_in = os.path.join(tree.odm_texturing, tree.odm_textured_model_obj_topo) + obj_out = os.path.join(tree.odm_texturing, tree.odm_textured_model_obj) if reconstruction.is_georeferenced(): - obj_georeferencing_pipeline = [ - { - "type": "readers.obj", - "filename": tree.odm_textured_model_obj - }, - topo_to_georef_def, - { - "type": "writers.obj", - "filename": tree.odm_textured_model_geo_obj - } - ] - pipeline = pdal.Pipeline(json.dumps(obj_georeferencing_pipeline)) - pipeline.execute() + converter.convert_obj( + obj_in, + obj_out, + reconstruction.georef.utm_east_offset, + reconstruction.georef.utm_north_offset + ) else: - shutil.copy(tree.odm_textured_model_obj, tree.odm_textured_model_geo_obj) - + shutil.copy(obj_in, obj_out) + if not io.file_exists(tree.odm_georeferencing_model_laz) or self.rerun(): - las_georeferencing_pipeline = [ - { - "type": "readers.las", - "filename": tree.filtered_point_cloud - }, - { - "type": "filters.ferry", - "dimensions": "views => UserData" - } - ] + pipeline = pdal.Pipeline() + pipeline |= pdal.Reader.ply(tree.filtered_point_cloud) + pipeline |= pdal.Filter.ferry(dimensions="views => UserData") if reconstruction.is_georeferenced(): log.ODM_INFO("Georeferencing point cloud") utmoffset = reconstruction.georef.utm_offset() - las_georeferencing_pipeline.append({ - "type": "filters.transformation", - "matrix": f"1 0 0 {utmoffset[0]} 0 1 0 {utmoffset[1]} 0 0 1 0 0 0 0 1" - }) + pipeline |= pdal.Filter.transformation( + matrix=f"1 0 0 {utmoffset[0]} 0 1 0 {utmoffset[1]} 0 0 1 0 0 0 0 1" + ) # Establish appropriate las scale for export las_scale = 0.001 @@ -216,7 +187,6 @@ def powerr(r): log.ODM_INFO("No point_cloud_stats.json found. 
Using default las scale: %s" % las_scale) las_writer_def = { - "type": "writers.las", "filename": tree.odm_georeferencing_model_laz, "a_srs": reconstruction.georef.proj4(), "offset_x": utmoffset[0], @@ -230,9 +200,6 @@ def powerr(r): if reconstruction.has_gcp() and io.file_exists(gcp_geojson_zip_export_file): if os.path.getsize(gcp_geojson_zip_export_file) <= 65535: log.ODM_INFO("Embedding GCP info in point cloud") - params += [ - '--writers.las.vlrs="{\\\"filename\\\": \\\"%s\\\", \\\"user_id\\\": \\\"ODM\\\", \\\"record_id\\\": 2, \\\"description\\\": \\\"Ground Control Points (zip)\\\"}"' % gcp_geojson_zip_export_file.replace(os.sep, "/") - ] las_writer_def["vlrs"] = json.dumps( { "filename": gcp_geojson_zip_export_file.replace(os.sep, "/"), @@ -245,9 +212,10 @@ def powerr(r): else: log.ODM_WARNING("Cannot embed GCP info in point cloud, %s is too large" % gcp_geojson_zip_export_file) - las_georeferencing_pipeline.append(las_writer_def) - - pipeline = pdal.Pipeline(json.dumps(las_georeferencing_pipeline)) + pipeline |= pdal.Writer.las( + **las_writer_def + ) + pipeline.execute() self.update_progress(50) @@ -282,11 +250,9 @@ def powerr(r): export_to_bounds_files(outputs['boundary'], reconstruction.get_proj_srs(), bounds_json, bounds_gpkg) else: log.ODM_INFO("Converting point cloud (non-georeferenced)") - las_georeferencing_pipeline.append({ - "type": "writers.las", - "filename": tree.odm_georeferencing_model_laz - }) - pipeline = pdal.Pipeline(json.dumps(las_georeferencing_pipeline)) + pipeline |= pdal.Writer.las( + tree.odm_georeferencing_model_laz + ) pipeline.execute() From 39f6c347bed72d9134550043705be05b9624a3ce Mon Sep 17 00:00:00 2001 From: Thor Date: Fri, 4 Apr 2025 17:49:53 -0400 Subject: [PATCH 3/4] save georeferenced point cloud in little endian mode; fix geomodel texture files name; remove tmp change --- configure.sh | 4 ++-- stages/mvstex.py | 7 ++++++- stages/odm_georeferencing.py | 5 ++++- 3 files changed, 12 insertions(+), 4 deletions(-) diff --git 
a/configure.sh b/configure.sh index 775c2770c..93fa32999 100755 --- a/configure.sh +++ b/configure.sh @@ -25,9 +25,9 @@ check_version(){ } if [[ $2 =~ ^[0-9]+$ ]] ; then - processes=2 + processes=$2 else - processes=2 + processes=$(nproc) fi ensure_prereqs() { diff --git a/stages/mvstex.py b/stages/mvstex.py index 6afcb7f9e..12de126ee 100644 --- a/stages/mvstex.py +++ b/stages/mvstex.py @@ -90,10 +90,12 @@ def add_run(nvm_file, primary=True, band=None): if (r['nadir']): nadir = '--nadir_mode' + # mvstex definitions + # mtl and texture files would be the same between topo and proj geomodel, so create with the final name kwargs = { 'bin': context.mvstex_path, - 'out_dir': os.path.join(r['out_dir'], tree.odm_textured_model_obj_topo.split('.obj')[0]), + 'out_dir': os.path.join(r['out_dir'], 'odm_textured_model_geo'), 'model': r['model'], 'dataTerm': 'gmi', 'outlierRemovalType': 'gauss_clamping', @@ -124,6 +126,9 @@ def add_run(nvm_file, primary=True, band=None): '{nadirMode} ' '{labelingFile} ' '{maxTextureSize} '.format(**kwargs)) + + # update the obj file name to topo for further conversion + shutil.move(os.path.join(r['out_dir'], tree.odm_textured_model_obj), odm_textured_model_obj) if r['primary'] and (not r['nadir'] or args.skip_3dmodel): # GlTF? 
diff --git a/stages/odm_georeferencing.py b/stages/odm_georeferencing.py index 6ecf57db9..813f5a65f 100644 --- a/stages/odm_georeferencing.py +++ b/stages/odm_georeferencing.py @@ -133,7 +133,10 @@ def process(self, args, outputs): reconstruction.georef.utm_east_offset, reconstruction.georef.utm_north_offset ) - pipeline = pdal.Writer.ply(tree.filtered_point_cloud).pipeline(arr) + pipeline = pdal.Writer.ply( + filename = tree.filtered_point_cloud, + storage_mode = "little endian", + ).pipeline(arr) pipeline.execute() else: shutil.copy(tree.filtered_point_cloud_topo, tree.filtered_point_cloud) From c14c683f86cb73ea02af697f176c7bf5f59fc1d2 Mon Sep 17 00:00:00 2001 From: Thor Date: Fri, 4 Apr 2025 18:44:05 -0400 Subject: [PATCH 4/4] convert all the geomodels --- stages/odm_georeferencing.py | 40 ++++++++++++++++++++++++------------ 1 file changed, 27 insertions(+), 13 deletions(-) diff --git a/stages/odm_georeferencing.py b/stages/odm_georeferencing.py index 813f5a65f..615b3749b 100644 --- a/stages/odm_georeferencing.py +++ b/stages/odm_georeferencing.py @@ -141,19 +141,33 @@ def process(self, args, outputs): else: shutil.copy(tree.filtered_point_cloud_topo, tree.filtered_point_cloud) - if not io.file_exists(os.path.join(tree.odm_texturing, tree.odm_textured_model_obj)) or self.rerun(): - log.ODM_INFO("Georeferecing textured model") - obj_in = os.path.join(tree.odm_texturing, tree.odm_textured_model_obj_topo) - obj_out = os.path.join(tree.odm_texturing, tree.odm_textured_model_obj) - if reconstruction.is_georeferenced(): - converter.convert_obj( - obj_in, - obj_out, - reconstruction.georef.utm_east_offset, - reconstruction.georef.utm_north_offset - ) + def georefernce_textured_model(obj_in, obj_out): + log.ODM_INFO("Georeferecing textured model %s" % obj_in) + if not io.file_exists(obj_out) or self.rerun(): + if reconstruction.is_georeferenced(): + converter.convert_obj( + obj_in, + obj_out, + reconstruction.georef.utm_east_offset, + 
reconstruction.georef.utm_north_offset + ) + else: + shutil.copy(obj_in, obj_out) + + #TODO: maybe parallelize this + #TODO: gltf export? Should probably move the exporting process after this + for texturing in [tree.odm_texturing, tree.odm_25dtexturing]: + if reconstruction.multi_camera: + primary = get_primary_band_name(reconstruction.multi_camera, args.primary_band) + for band in reconstruction.multi_camera: + subdir = "" if band['name'] == primary else band['name'].lower() + obj_in = os.path.join(texturing, subdir, tree.odm_textured_model_obj_topo) + obj_out = os.path.join(texturing, subdir, tree.odm_textured_model_obj) + georefernce_textured_model(obj_in, obj_out) else: - shutil.copy(obj_in, obj_out) + obj_in = os.path.join(texturing, tree.odm_textured_model_obj_topo) + obj_out = os.path.join(texturing, tree.odm_textured_model_obj) + georefernce_textured_model(obj_in, obj_out) if not io.file_exists(tree.odm_georeferencing_model_laz) or self.rerun(): pipeline = pdal.Pipeline() @@ -304,7 +318,7 @@ def transform_textured_model(obj): except Exception as e: log.ODM_WARNING("Cannot transform textured model: %s" % str(e)) os.rename(unaligned_obj, obj) - + #TODO: seems gltf file is not converted in alignment? for texturing in [tree.odm_texturing, tree.odm_25dtexturing]: if reconstruction.multi_camera: primary = get_primary_band_name(reconstruction.multi_camera, args.primary_band)