From d087aa64159d18e919ac4c5bd25ab0d27b497fad Mon Sep 17 00:00:00 2001 From: Yann Noutary Date: Mon, 20 Dec 2021 06:35:02 -0800 Subject: [PATCH 01/81] fix: incorrect point test file in CMakeList of geometry (#840) Summary: Pull Request resolved: https://github.com/mapillary/OpenSfM/pull/840 Does what its says. Reviewed By: chrert Differential Revision: D33233488 fbshipit-source-id: 438f3b8d3b93c689c16b2900a6d67061adc7a5c1 --- opensfm/src/geometry/CMakeLists.txt | 2 +- opensfm/src/geometry/src/triangulation.cc | 3 ++- 2 files changed, 3 insertions(+), 2 deletions(-) diff --git a/opensfm/src/geometry/CMakeLists.txt b/opensfm/src/geometry/CMakeLists.txt index 268465031..b407444c8 100644 --- a/opensfm/src/geometry/CMakeLists.txt +++ b/opensfm/src/geometry/CMakeLists.txt @@ -30,7 +30,7 @@ if (OPENSFM_BUILD_TESTS) test/camera_test.cc test/camera_functions_test.cc test/covariance_test.cc - test/point.cc + test/point_test.cc ) add_executable(geometry_test ${GEOMETRY_TEST_FILES}) target_include_directories(geometry_test PRIVATE ${CMAKE_SOURCE_DIR} ${EIGEN_INCLUDE_DIRS}) diff --git a/opensfm/src/geometry/src/triangulation.cc b/opensfm/src/geometry/src/triangulation.cc index ad60fca57..74134a25d 100644 --- a/opensfm/src/geometry/src/triangulation.cc +++ b/opensfm/src/geometry/src/triangulation.cc @@ -204,12 +204,13 @@ struct BearingErrorCost : public ceres::CostFunction { return true; } - std::vector parameter_blocks; const MatX3d ¢ers_; const MatX3d &bearings_; const Vec3d &point_; }; +constexpr int BearingErrorCost::Size; + Vec3d PointRefinement(const MatX3d ¢ers, const MatX3d &bearings, const Vec3d &point, int iterations) { using BearingCostFunction = From 7b455d9c424c82d96fc60d57a9c1dbf024aa8b3b Mon Sep 17 00:00:00 2001 From: Jim Meyering Date: Thu, 30 Dec 2021 11:23:26 -0800 Subject: [PATCH 02/81] mapillary/opensfm/opensfm/src/sfm/src/ba_helpers.cc: loop variable: do not create a copy from std::string, per -Wrange-loop-construct Summary: We should all be migrating to 
platform010, for the improved performance of its generated code and for its improved diagnostics/portability. This avoids the following errors: mapillary/opensfm/opensfm/src/sfm/src/ba_helpers.cc:422:19: error: loop variable 'shot_id' creates a copy from type 'const std::basic_string' [-Werror,-Wrange-loop-construct] mapillary/opensfm/opensfm/src/sfm/src/ba_helpers.cc:434:19: error: loop variable 'shot_id' creates a copy from type 'const std::basic_string' [-Werror,-Wrange-loop-construct] mapillary/opensfm/opensfm/src/sfm/src/ba_helpers.cc:496:19: error: loop variable 'shot_id' creates a copy from type 'const std::basic_string' [-Werror,-Wrange-loop-construct] Reviewed By: r-barnes Differential Revision: D33343742 fbshipit-source-id: a3036f936c0808fac5310bfe45e3f918ef4f0f24 --- opensfm/src/sfm/src/ba_helpers.cc | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/opensfm/src/sfm/src/ba_helpers.cc b/opensfm/src/sfm/src/ba_helpers.cc index 1fcc4ba59..e5d680086 100644 --- a/opensfm/src/sfm/src/ba_helpers.cc +++ b/opensfm/src/sfm/src/ba_helpers.cc @@ -419,7 +419,7 @@ py::dict BAHelpers::BundleShotPoses( } std::unordered_set added_cameras; - for (const auto shot_id : shot_ids) { + for (const auto& shot_id : shot_ids) { const auto& shot = map.GetShot(shot_id); const auto& cam = *shot.GetCamera(); if (added_cameras.find(cam.id) != added_cameras.end()) { @@ -431,7 +431,7 @@ py::dict BAHelpers::BundleShotPoses( } std::unordered_set landmarks; - for (const auto shot_id : shot_ids) { + for (const auto& shot_id : shot_ids) { const auto& shot = map.GetShot(shot_id); for (const auto& lm_obs : shot.GetLandmarkObservations()) { landmarks.insert(lm_obs.first); @@ -493,7 +493,7 @@ py::dict BAHelpers::BundleShotPoses( } // add observations - for (const auto shot_id : shot_ids) { + for (const auto& shot_id : shot_ids) { const auto& shot = map.GetShot(shot_id); for (const auto& lm_obs : shot.GetLandmarkObservations()) { const auto& obs = lm_obs.second; From 
18667b209b0051e340b46950fd9243a09786c33e Mon Sep 17 00:00:00 2001 From: Yann Noutary Date: Mon, 10 Jan 2022 01:28:52 -0800 Subject: [PATCH 03/81] feat: plug point refinement Summary: This Diff plugs the point refinement in the `reconstruction` module. Reviewed By: paulinus Differential Revision: D33124066 fbshipit-source-id: fc91d55c3dc3dc1ee7de4e078531b2f3d21eb7d6 --- opensfm/config.py | 1 + opensfm/reconstruction.py | 66 +++++++++++++++++++++++------- opensfm/test/test_triangulation.py | 2 +- 3 files changed, 53 insertions(+), 16 deletions(-) diff --git a/opensfm/config.py b/opensfm/config.py index 0f8089192..549d94d5e 100644 --- a/opensfm/config.py +++ b/opensfm/config.py @@ -96,6 +96,7 @@ triangulation_threshold: 0.006 # Outlier threshold for accepting a triangulated point in radians triangulation_min_ray_angle: 1.0 # Minimum angle between views to accept a triangulated point triangulation_type: FULL # Triangulation type : either considering all rays (FULL), or sing a RANSAC variant (ROBUST) +triangulation_refinement_iterations: 10 # Number of LM iterations to run when refining a point resection_threshold: 0.004 # Outlier threshold for resection in radians resection_min_inliers: 10 # Minimum number of resection inliers to accept it diff --git a/opensfm/reconstruction.py b/opensfm/reconstruction.py index 8436f032c..214587d81 100644 --- a/opensfm/reconstruction.py +++ b/opensfm/reconstruction.py @@ -870,7 +870,8 @@ def __init__( self.Rts = {} def triangulate_robust( - self, track: str, reproj_threshold: float, min_ray_angle_degrees: float + self, track: str, reproj_threshold: float, min_ray_angle_degrees: float, + iterations: int ) -> None: """Triangulate track in a RANSAC way and add point to reconstruction.""" os, bs, ids = [], [], [] @@ -886,6 +887,9 @@ def triangulate_robust( if len(ids) < 2: return + os = np.array(os) + bs = np.array(bs) + best_inliers = [] best_point = None combinatiom_tried = set() @@ -901,26 +905,46 @@ def triangulate_robust( i, j = 
all_combinations[random_id] combinatiom_tried.add(random_id) - os_t = [os[i], os[j]] - bs_t = [bs[i], bs[j]] + os_t = np.array([os[i], os[j]]) + bs_t = np.array([bs[i], bs[j]]) valid_triangulation, X = pygeometry.triangulate_bearings_midpoint( - np.asarray(os_t), - np.asarray(bs_t), + os_t, + bs_t, thresholds, np.radians(min_ray_angle_degrees), ) + X = pygeometry.point_refinement(os_t, bs_t, X, iterations) if valid_triangulation: reprojected_bs = X - os reprojected_bs /= np.linalg.norm(reprojected_bs, axis=1)[:, np.newaxis] inliers = np.nonzero( np.linalg.norm(reprojected_bs - bs, axis=1) < reproj_threshold - )[0] + )[0].tolist() if len(inliers) > len(best_inliers): - best_inliers = inliers - best_point = X.tolist() + _, new_X = pygeometry.triangulate_bearings_midpoint( + os[inliers], + bs[inliers], + len(inliers) * [reproj_threshold], + np.radians(min_ray_angle_degrees), + ) + new_X = pygeometry.point_refinement(os[inliers], bs[inliers], X, iterations) + + reprojected_bs = new_X - os + reprojected_bs /= np.linalg.norm(reprojected_bs, axis=1)[ + :, np.newaxis + ] + ls_inliers = np.nonzero( + np.linalg.norm(reprojected_bs - bs, axis=1) < reproj_threshold + )[0] + if len(ls_inliers) > len(inliers): + best_inliers = ls_inliers + best_point = new_X.tolist() + else: + best_inliers = inliers + best_point = X.tolist() pout = 0.99 inliers_ratio = float(len(best_inliers)) / len(ids) @@ -929,7 +953,7 @@ def triangulate_robust( optimal_iter = math.log(1.0 - pout) / math.log( 1.0 - inliers_ratio * inliers_ratio ) - if optimal_iter <= ransac_tries: + if optimal_iter <= i: break if len(best_inliers) > 1: @@ -938,7 +962,8 @@ def triangulate_robust( self._add_track_to_reconstruction(track, ids[i]) def triangulate( - self, track: str, reproj_threshold: float, min_ray_angle_degrees: float + self, track: str, reproj_threshold: float, min_ray_angle_degrees: float, + iterations: int ) -> None: """Triangulate track and add point to reconstruction.""" os, bs, ids = [], [], [] @@ -960,18 
+985,21 @@ def triangulate( np.radians(min_ray_angle_degrees), ) if valid_triangulation: + X = pygeometry.point_refinement(np.array(os), np.array(bs), X, iterations) self.reconstruction.create_point(track, X.tolist()) for shot_id in ids: self._add_track_to_reconstruction(track, shot_id) def triangulate_dlt( - self, track: str, reproj_threshold: float, min_ray_angle_degrees: float + self, track: str, reproj_threshold: float, min_ray_angle_degrees: float, + iterations: int ) -> None: """Triangulate track using DLT and add point to reconstruction.""" - Rts, bs, ids = [], [], [] + Rts, bs, os, ids = [], [], [], [] for shot_id, obs in self.tracks_manager.get_track_observations(track).items(): if shot_id in self.reconstruction.shots: shot = self.reconstruction.shots[shot_id] + os.append(self._shot_origin(shot)) Rts.append(self._shot_Rt(shot)) b = shot.camera.pixel_bearing(np.array(obs.point)) bs.append(b) @@ -985,6 +1013,7 @@ def triangulate_dlt( np.radians(min_ray_angle_degrees), ) if e: + X = pygeometry.point_refinement(np.array(os), np.array(bs), X, iterations) self.reconstruction.create_point(track, X.tolist()) for shot_id in ids: self._add_track_to_reconstruction(track, shot_id) @@ -1027,6 +1056,7 @@ def triangulate_shot_features( """Reconstruct as many tracks seen in shot_id as possible.""" reproj_threshold = config["triangulation_threshold"] min_ray_angle = config["triangulation_min_ray_angle"] + refinement_iterations = config["triangulation_refinement_iterations"] triangulator = TrackTriangulator(tracks_manager, reconstruction) @@ -1039,7 +1069,12 @@ def triangulate_shot_features( } for track in tracks_ids: if track not in reconstruction.points: - triangulator.triangulate(track, reproj_threshold, min_ray_angle) + if config["triangulation_type"] == "ROBUST": + triangulator.triangulate_robust(track, reproj_threshold, min_ray_angle, + refinement_iterations) + elif config["triangulation_type"] == "FULL": + triangulator.triangulate(track, reproj_threshold, 
min_ray_angle, + refinement_iterations) def retriangulate( @@ -1054,6 +1089,7 @@ def retriangulate( threshold = config["triangulation_threshold"] min_ray_angle = config["triangulation_min_ray_angle"] + refinement_iterations = config["triangulation_refinement_iterations"] reconstruction.points = {} @@ -1066,9 +1102,9 @@ def retriangulate( tracks.update(tracks_manager.get_shot_observations(image).keys()) for track in tracks: if config["triangulation_type"] == "ROBUST": - triangulator.triangulate_robust(track, threshold, min_ray_angle) + triangulator.triangulate_robust(track, threshold, min_ray_angle, refinement_iterations) elif config["triangulation_type"] == "FULL": - triangulator.triangulate(track, threshold, min_ray_angle) + triangulator.triangulate(track, threshold, min_ray_angle, refinement_iterations) report["num_points_after"] = len(reconstruction.points) chrono.lap("retriangulate") diff --git a/opensfm/test/test_triangulation.py b/opensfm/test/test_triangulation.py index 6c2393085..5904943af 100644 --- a/opensfm/test/test_triangulation.py +++ b/opensfm/test/test_triangulation.py @@ -39,7 +39,7 @@ def test_track_triangulator_spherical(): ) triangulator = reconstruction.TrackTriangulator(tracks_manager, rec) - triangulator.triangulate("1", 0.01, 2.0) + triangulator.triangulate("1", 0.01, 2.0, 10) assert "1" in rec.points p = rec.points["1"].coordinates assert np.allclose(p, [0, 0, 1.3763819204711]) From 006f3a396edc40781fe6d4c8187bd9f1fa029ba0 Mon Sep 17 00:00:00 2001 From: Yann Noutary Date: Mon, 10 Jan 2022 01:28:52 -0800 Subject: [PATCH 04/81] feat: better retriangulator Summary: This Diff improves the retriangulator by running rounds of reprojection-based guided matching in neighboring images. 
The synopsis is now the following : - (optionally) Run normal matching - Run classic triangulation (track manager + retriangulation) - Attach each track to one image and reproject each triangulated point in neighboring images, looking for close points, both projection and descriptor - Run union-find on resulting tracks (as the above generates conflicting tracks) - Final triangulation We also add some cleaning utilities. We now manage to double the lengths of tracks (average length from 4.5 to 9 images). Reviewed By: Ahmed-Salama Differential Revision: D33187643 fbshipit-source-id: 11007b90e7decf29996523521a82193d933fd079 --- opensfm/actions/create_tracks.py | 2 +- opensfm/config.py | 7 +++++++ opensfm/matching.py | 2 +- opensfm/src/map/python/pybind.cc | 9 ++++++++- opensfm/tracking.py | 3 +-- 5 files changed, 18 insertions(+), 5 deletions(-) diff --git a/opensfm/actions/create_tracks.py b/opensfm/actions/create_tracks.py index 3f61e1064..8fa4b57f7 100644 --- a/opensfm/actions/create_tracks.py +++ b/opensfm/actions/create_tracks.py @@ -21,7 +21,7 @@ def run_dataset(data: DataSetBase): segmentations, instances, matches, - data.config, + data.config["min_track_length"], ) tracks_end = timer() data.save_tracks_manager(tracks_manager) diff --git a/opensfm/config.py b/opensfm/config.py index 549d94d5e..6be7be9ef 100644 --- a/opensfm/config.py +++ b/opensfm/config.py @@ -64,7 +64,14 @@ vlad_file: bow_hahog_root_uchar_64.npz # Params for guided matching +guided_spanning_trees: 5 # Number of randomized spanning-trees to samples over the tracks-graph +guided_spanning_trees_random: 0.5 # Random ratio higher bound edges are multiplied with guided_matching_threshold: 0.006 # Threshold for epipolar distance for accepting a match in radians +guided_min_length_initial: 3 # Minimum track length for initial triangulation +guided_min_length_final: 3 # Minimum track length for final triangulation +guided_extend_threshold: 0.002 # Threshold of reprojection for extending a track within 
a new image (in radians) +guided_extend_image_neighbors: 50 # Number of images considered as neighbors of another one +guided_extend_feature_neighbors: 10 # Maximum number of reprojected neighbors (in the tracks-graph) to check when extending a track within a new image # Params for matching matching_gps_distance: 150 # Maximum gps distance between two images for matching diff --git a/opensfm/matching.py b/opensfm/matching.py index 0a4d7f85b..ff0626282 100644 --- a/opensfm/matching.py +++ b/opensfm/matching.py @@ -312,7 +312,7 @@ def _match_descriptors_guided_impl( relative_pose, overriden_config["guided_matching_threshold"], ) - matches = match_brute_force(d1, d2, overriden_config, epipolar_mask) + matches = match_brute_force_symmetric(d1, d2, overriden_config, epipolar_mask) # Adhoc filters if overriden_config["matching_use_filters"]: diff --git a/opensfm/src/map/python/pybind.cc b/opensfm/src/map/python/pybind.cc index e214ef1d0..028bd5db1 100644 --- a/opensfm/src/map/python/pybind.cc +++ b/opensfm/src/map/python/pybind.cc @@ -77,7 +77,14 @@ PYBIND11_MODULE(pymap, m) { .def_readwrite("segmentation", &map::Observation::segmentation_id) .def_readwrite("instance", &map::Observation::instance_id) .def_readonly_static("NO_SEMANTIC_VALUE", - &map::Observation::NO_SEMANTIC_VALUE); + &map::Observation::NO_SEMANTIC_VALUE) + .def( + "copy", + [](const map::Observation &to_copy) { + map::Observation copy = to_copy; + return copy; + }, + py::return_value_policy::copy); py::class_(m, "Landmark") .def(py::init()) diff --git a/opensfm/tracking.py b/opensfm/tracking.py index 3b3068a9d..3122fd220 100644 --- a/opensfm/tracking.py +++ b/opensfm/tracking.py @@ -63,7 +63,7 @@ def create_tracks_manager( segmentations: t.Dict[str, np.ndarray], instances: t.Dict[str, np.ndarray], matches: t.Dict[t.Tuple[str, str], t.List[t.Tuple[int, int]]], - config: t.Dict[str, t.Any], + min_length: int, ): """Link matches into tracks.""" logger.debug("Merging features onto tracks") @@ -80,7 +80,6 @@ 
def create_tracks_manager( else: sets[p] = [i] - min_length = config["min_track_length"] tracks = [t for t in sets.values() if _good_track(t, min_length)] logger.debug("Good tracks: {}".format(len(tracks))) From 663251580777e497e7da258ca189d4dddc756ced Mon Sep 17 00:00:00 2001 From: Yann Noutary Date: Mon, 10 Jan 2022 01:28:52 -0800 Subject: [PATCH 05/81] fix: last few fixes Summary: This Diff propagates the `reference_lla` when running `reconstruct_from_prior`. Reviewed By: paulinus Differential Revision: D33294997 fbshipit-source-id: c0d47db6cc1521b0f0e1b861daa5dc3015838b55 --- opensfm/reconstruction.py | 1 + 1 file changed, 1 insertion(+) diff --git a/opensfm/reconstruction.py b/opensfm/reconstruction.py index 214587d81..1dd3a64b0 100644 --- a/opensfm/reconstruction.py +++ b/opensfm/reconstruction.py @@ -1593,6 +1593,7 @@ def reconstruct_from_prior( ) -> Tuple[Dict[str, Any], types.Reconstruction]: """Retriangulate a new reconstruction from the rec_prior""" reconstruction = types.Reconstruction() + reconstruction.reference = rec_prior.reference report = {} rec_report = {} report["retriangulate"] = [rec_report] From c250de55ee3053fc7d8b433860664206ba8b2841 Mon Sep 17 00:00:00 2001 From: Yann Noutary Date: Mon, 10 Jan 2022 01:28:52 -0800 Subject: [PATCH 06/81] feat: triangulate before union-find Summary: This Diff refactors the TrackTriangulator, so that inputs an results are decoupled from the triangulation process itself. 
This allows re-using the TrackTriangulator in retriangulate in order to triangulate extended tracks before union-find, removing the issue of union-find emcompassing 75% of the tracks (monster-track issue) Reviewed By: paulinus Differential Revision: D33294998 fbshipit-source-id: f8ec9a7b3b199925cc32ec399d9a356c56ba46c2 --- bin/plot_inliers | 2 +- opensfm/reconstruction.py | 187 ++++++++++++++++++++--------- opensfm/test/test_triangulation.py | 4 +- 3 files changed, 135 insertions(+), 58 deletions(-) diff --git a/bin/plot_inliers b/bin/plot_inliers index 4016f67b7..b970318c7 100755 --- a/bin/plot_inliers +++ b/bin/plot_inliers @@ -241,7 +241,7 @@ def triangulate_tracks(tracks, reconstruction, graph, min_ray_angle): :return: An array of booleans determining if each track was successfully triangulated or not. """ succeeded = [] - triangulator = reconstruct.TrackTriangulator(graph, reconstruction) + triangulator = reconstruct.TrackTriangulator(reconstruction, reconstruct.TrackHandlerTrackManager(graph, reconstruction) for track in tracks: # Triangulate with 1 as reprojection threshold to avoid excluding tracks because of error. 
diff --git a/opensfm/reconstruction.py b/opensfm/reconstruction.py index 1dd3a64b0..0ca43a85f 100644 --- a/opensfm/reconstruction.py +++ b/opensfm/reconstruction.py @@ -4,6 +4,7 @@ import enum import logging import math +from abc import abstractmethod, ABC from collections import defaultdict from itertools import combinations from timeit import default_timer as timer @@ -845,44 +846,102 @@ def add_observation_to_reconstruction( reconstruction.add_observation(shot_id, track_id, observation) +class TrackHandlerBase(ABC): + """Interface for providing/retrieving tracks from/to 'TrackTriangulator'.""" + + @abstractmethod + def get_observations(self, track_id: str) -> Dict[str, pymap.Observation]: + """Returns the observations of 'track_id'""" + pass + + @abstractmethod + def store_track_coordinates(self, track_id: str, coordinates: np.ndarray) -> None: + """Stores coordinates of triangulated track.""" + pass + + @abstractmethod + def store_inliers_observation(self, track_id: str, shot_id: str) -> None: + """Called by the 'TrackTriangulator' for each track inlier found.""" + pass + + +class TrackHandlerTrackManager(TrackHandlerBase): + """Provider that reads tracks from a 'TrackManager' object.""" + + tracks_manager: pymap.TracksManager + reconstruction: types.Reconstruction + + def __init__( + self, + tracks_manager: pymap.TracksManager, + reconstruction: types.Reconstruction, + ) -> None: + self.tracks_manager = tracks_manager + self.reconstruction = reconstruction + + def get_observations(self, track_id: str) -> Dict[str, pymap.Observation]: + """Return the observations of 'track_id', for all + shots that appears in 'self.reconstruction.shots' + """ + return { + k: v + for k, v in self.tracks_manager.get_track_observations(track_id).items() + if k in self.reconstruction.shots + } + + def store_track_coordinates(self, track_id: str, coordinates: np.ndarray) -> None: + """Stores coordinates of triangulated track.""" + self.reconstruction.create_point(track_id, 
coordinates) + + def store_inliers_observation(self, track_id: str, shot_id: str) -> None: + """Stores triangulation inliers in the tracks manager.""" + observation = self.tracks_manager.get_observation(shot_id, track_id) + self.reconstruction.add_observation(shot_id, track_id, observation) + + class TrackTriangulator: """Triangulate tracks in a reconstruction. Caches shot origin and rotation matrix """ - tracks_manager: pymap.TracksManager + # for getting shots reconstruction: types.Reconstruction + + # for storing tracks inliers + tracks_handler: TrackHandlerBase + + # caches origins: Dict[str, np.ndarray] = {} rotation_inverses: Dict[str, np.ndarray] = {} Rts: Dict[str, np.ndarray] = {} def __init__( - self, - tracks_manager: pymap.TracksManager, - reconstruction: types.Reconstruction, + self, reconstruction: types.Reconstruction, tracks_handler: TrackHandlerBase ) -> None: """Build a triangulator for a specific reconstruction.""" - self.tracks_manager = tracks_manager self.reconstruction = reconstruction + self.tracks_handler = tracks_handler self.origins = {} self.rotation_inverses = {} self.Rts = {} def triangulate_robust( - self, track: str, reproj_threshold: float, min_ray_angle_degrees: float, - iterations: int + self, + track: str, + reproj_threshold: float, + min_ray_angle_degrees: float, + iterations: int, ) -> None: """Triangulate track in a RANSAC way and add point to reconstruction.""" os, bs, ids = [], [], [] - for shot_id, obs in self.tracks_manager.get_track_observations(track).items(): - if shot_id in self.reconstruction.shots: - shot = self.reconstruction.shots[shot_id] - os.append(self._shot_origin(shot)) - b = shot.camera.pixel_bearing(np.array(obs.point)) - r = self._shot_rotation_inverse(shot) - bs.append(r.dot(b)) - ids.append(shot_id) + for shot_id, obs in self.tracks_handler.get_observations(track).items(): + shot = self.reconstruction.shots[shot_id] + os.append(self._shot_origin(shot)) + b = shot.camera.pixel_bearing(np.array(obs.point)) 
+ r = self._shot_rotation_inverse(shot) + bs.append(r.dot(b)) + ids.append(shot_id) if len(ids) < 2: return @@ -930,7 +989,9 @@ def triangulate_robust( len(inliers) * [reproj_threshold], np.radians(min_ray_angle_degrees), ) - new_X = pygeometry.point_refinement(os[inliers], bs[inliers], X, iterations) + new_X = pygeometry.point_refinement( + os[inliers], bs[inliers], X, iterations + ) reprojected_bs = new_X - os reprojected_bs /= np.linalg.norm(reprojected_bs, axis=1)[ @@ -957,24 +1018,26 @@ def triangulate_robust( break if len(best_inliers) > 1: - self.reconstruction.create_point(track, best_point) + self.tracks_handler.store_track_coordinates(track, best_point) for i in best_inliers: - self._add_track_to_reconstruction(track, ids[i]) + self.tracks_handler.store_inliers_observation(track, ids[i]) def triangulate( - self, track: str, reproj_threshold: float, min_ray_angle_degrees: float, - iterations: int + self, + track: str, + reproj_threshold: float, + min_ray_angle_degrees: float, + iterations: int, ) -> None: """Triangulate track and add point to reconstruction.""" os, bs, ids = [], [], [] - for shot_id, obs in self.tracks_manager.get_track_observations(track).items(): - if shot_id in self.reconstruction.shots: - shot = self.reconstruction.shots[shot_id] - os.append(self._shot_origin(shot)) - b = shot.camera.pixel_bearing(np.array(obs.point)) - r = self._shot_rotation_inverse(shot) - bs.append(r.dot(b)) - ids.append(shot_id) + for shot_id, obs in self.tracks_handler.get_observations(track).items(): + shot = self.reconstruction.shots[shot_id] + os.append(self._shot_origin(shot)) + b = shot.camera.pixel_bearing(np.array(obs.point)) + r = self._shot_rotation_inverse(shot) + bs.append(r.dot(b)) + ids.append(shot_id) if len(os) >= 2: thresholds = len(os) * [reproj_threshold] @@ -985,25 +1048,29 @@ def triangulate( np.radians(min_ray_angle_degrees), ) if valid_triangulation: - X = pygeometry.point_refinement(np.array(os), np.array(bs), X, iterations) - 
self.reconstruction.create_point(track, X.tolist()) + X = pygeometry.point_refinement( + np.array(os), np.array(bs), X, iterations + ) + self.tracks_handler.store_track_coordinates(track, X.tolist()) for shot_id in ids: - self._add_track_to_reconstruction(track, shot_id) + self.tracks_handler.store_inliers_observation(track, shot_id) def triangulate_dlt( - self, track: str, reproj_threshold: float, min_ray_angle_degrees: float, - iterations: int + self, + track: str, + reproj_threshold: float, + min_ray_angle_degrees: float, + iterations: int, ) -> None: """Triangulate track using DLT and add point to reconstruction.""" Rts, bs, os, ids = [], [], [], [] - for shot_id, obs in self.tracks_manager.get_track_observations(track).items(): - if shot_id in self.reconstruction.shots: - shot = self.reconstruction.shots[shot_id] - os.append(self._shot_origin(shot)) - Rts.append(self._shot_Rt(shot)) - b = shot.camera.pixel_bearing(np.array(obs.point)) - bs.append(b) - ids.append(shot_id) + for shot_id, obs in self.tracks_handler.get_observations(track).items(): + shot = self.reconstruction.shots[shot_id] + os.append(self._shot_origin(shot)) + Rts.append(self._shot_Rt(shot)) + b = shot.camera.pixel_bearing(np.array(obs.point)) + bs.append(b) + ids.append(shot_id) if len(Rts) >= 2: e, X = pygeometry.triangulate_bearings_dlt( @@ -1013,14 +1080,12 @@ def triangulate_dlt( np.radians(min_ray_angle_degrees), ) if e: - X = pygeometry.point_refinement(np.array(os), np.array(bs), X, iterations) - self.reconstruction.create_point(track, X.tolist()) + X = pygeometry.point_refinement( + np.array(os), np.array(bs), X, iterations + ) + self.tracks_handler.store_track_coordinates(track, X.tolist()) for shot_id in ids: - self._add_track_to_reconstruction(track, shot_id) - - def _add_track_to_reconstruction(self, track_id: str, shot_id: str) -> None: - observation = self.tracks_manager.get_observation(shot_id, track_id) - self.reconstruction.add_observation(shot_id, track_id, observation) + 
self.tracks_handler.store_inliers_observation(track, shot_id) def _shot_origin(self, shot: pymap.Shot) -> np.ndarray: if shot.id in self.origins: @@ -1058,7 +1123,9 @@ def triangulate_shot_features( min_ray_angle = config["triangulation_min_ray_angle"] refinement_iterations = config["triangulation_refinement_iterations"] - triangulator = TrackTriangulator(tracks_manager, reconstruction) + triangulator = TrackTriangulator( + reconstruction, TrackHandlerTrackManager(tracks_manager, reconstruction) + ) all_shots_ids = set(tracks_manager.get_shot_ids()) tracks_ids = { @@ -1070,11 +1137,13 @@ def triangulate_shot_features( for track in tracks_ids: if track not in reconstruction.points: if config["triangulation_type"] == "ROBUST": - triangulator.triangulate_robust(track, reproj_threshold, min_ray_angle, - refinement_iterations) + triangulator.triangulate_robust( + track, reproj_threshold, min_ray_angle, refinement_iterations + ) elif config["triangulation_type"] == "FULL": - triangulator.triangulate(track, reproj_threshold, min_ray_angle, - refinement_iterations) + triangulator.triangulate( + track, reproj_threshold, min_ray_angle, refinement_iterations + ) def retriangulate( @@ -1095,16 +1164,22 @@ def retriangulate( all_shots_ids = set(tracks_manager.get_shot_ids()) - triangulator = TrackTriangulator(tracks_manager, reconstruction) + triangulator = TrackTriangulator( + reconstruction, TrackHandlerTrackManager(tracks_manager, reconstruction) + ) tracks = set() for image in reconstruction.shots.keys(): if image in all_shots_ids: tracks.update(tracks_manager.get_shot_observations(image).keys()) for track in tracks: if config["triangulation_type"] == "ROBUST": - triangulator.triangulate_robust(track, threshold, min_ray_angle, refinement_iterations) + triangulator.triangulate_robust( + track, threshold, min_ray_angle, refinement_iterations + ) elif config["triangulation_type"] == "FULL": - triangulator.triangulate(track, threshold, min_ray_angle, refinement_iterations) + 
triangulator.triangulate( + track, threshold, min_ray_angle, refinement_iterations + ) report["num_points_after"] = len(reconstruction.points) chrono.lap("retriangulate") diff --git a/opensfm/test/test_triangulation.py b/opensfm/test/test_triangulation.py index 5904943af..17ba979cf 100644 --- a/opensfm/test/test_triangulation.py +++ b/opensfm/test/test_triangulation.py @@ -38,7 +38,9 @@ def test_track_triangulator_spherical(): } ) - triangulator = reconstruction.TrackTriangulator(tracks_manager, rec) + triangulator = reconstruction.TrackTriangulator( + rec, reconstruction.TrackHandlerTrackManager(tracks_manager, rec) + ) triangulator.triangulate("1", 0.01, 2.0, 10) assert "1" in rec.points p = rec.points["1"].coordinates From 2a1c9be07f47f6e26f87fedf8275fcf8ff7b8487 Mon Sep 17 00:00:00 2001 From: jonasdlindner Date: Fri, 14 Jan 2022 00:50:39 -0800 Subject: [PATCH 07/81] None check for empty descriptor in SIFT, SURF, ORB (#850) Summary: Fixes https://github.com/mapillary/OpenSfM/issues/849 Pull Request resolved: https://github.com/mapillary/OpenSfM/pull/850 Reviewed By: fabianschenk Differential Revision: D33566723 Pulled By: YanNoun fbshipit-source-id: 91950fed9ca1d8694f74d492fe4c26dc7942024f --- opensfm/features.py | 31 ++++++++++++++++++++++--------- 1 file changed, 22 insertions(+), 9 deletions(-) diff --git a/opensfm/features.py b/opensfm/features.py index 5c3a34b97..d1634c446 100644 --- a/opensfm/features.py +++ b/opensfm/features.py @@ -347,9 +347,14 @@ def extract_features_sift( logger.debug("done") break points, desc = descriptor.compute(image, points) - if config["feature_root"]: - desc = root_feature(desc) - points = np.array([(i.pt[0], i.pt[1], i.size, i.angle) for i in points]) + + if desc is not None: + if config["feature_root"]: + desc = root_feature(desc) + points = np.array([(i.pt[0], i.pt[1], i.size, i.angle) for i in points]) + else: + points = np.array(np.zeros((0, 3))) + desc = np.array(np.zeros((0, 3))) return points, desc @@ -398,11 +403,15 @@ 
def extract_features_surf( break points, desc = descriptor.compute(image, points) - if config["feature_root"]: - desc = root_feature_surf(desc, partial=True) - points = np.array([(i.pt[0], i.pt[1], i.size, i.angle) for i in points]) - return points, desc + if desc is not None: + if config["feature_root"]: + desc = root_feature(desc) + points = np.array([(i.pt[0], i.pt[1], i.size, i.angle) for i in points]) + else: + points = np.array(np.zeros((0, 3))) + desc = np.array(np.zeros((0, 3))) + return points, desc def akaze_descriptor_type(name: str) -> pyfeatures.AkazeDescriptorType: d = pyfeatures.AkazeDescriptorType.__dict__ @@ -483,7 +492,11 @@ def extract_features_orb( points = detector.detect(image) points, desc = descriptor.compute(image, points) - points = np.array([(i.pt[0], i.pt[1], i.size, i.angle) for i in points]) + if desc is not None: + points = np.array([(i.pt[0], i.pt[1], i.size, i.angle) for i in points]) + else: + points = np.array(np.zeros((0, 3))) + desc = np.array(np.zeros((0, 3))) logger.debug("Found {0} points in {1}s".format(len(points), time.time() - t)) return points, desc @@ -582,4 +595,4 @@ def build_flann_index(descriptors: np.ndarray, config: Dict[str, Any]) -> Any: else: raise ValueError("FLANN isn't supported for binary features because of poor-performance. Use BRUTEFORCE instead.") - return context.flann_Index(descriptors, flann_params) + return context.flann_Index(descriptors, flann_params) \ No newline at end of file From ace733c5e398c09f998c5d0a1bf1ecbef7b9608b Mon Sep 17 00:00:00 2001 From: Saijin-Naib <19295950+Saijin-Naib@users.noreply.github.com> Date: Thu, 20 Jan 2022 09:21:59 -0800 Subject: [PATCH 08/81] Add Support for Yuneec E90 (#818) Summary: Sensor dimensions appear to be unpublished, so I'm using the dimensions from the same-generation Sony 1" 20MP sensor devices like the RX100. 
Pull Request resolved: https://github.com/mapillary/OpenSfM/pull/818 Reviewed By: paulinus Differential Revision: D32490246 Pulled By: YanNoun fbshipit-source-id: 6e4c85664b1af56e4419550a0961a021c8ec91f3 --- opensfm/data/sensor_data.json | 1 + opensfm/data/sensor_data_detailed.json | 7 +++++++ 2 files changed, 8 insertions(+) diff --git a/opensfm/data/sensor_data.json b/opensfm/data/sensor_data.json index f6db04699..094e728bd 100644 --- a/opensfm/data/sensor_data.json +++ b/opensfm/data/sensor_data.json @@ -3648,6 +3648,7 @@ "Yakumo Mega Image XL": 4.23, "Yakumo Mega Image XS": 6.4, "YTXJ02FM": 6.17, + "Yuneec E90": 13.2, "Google Nexus S": 3.9, "HUAWEI HUAWEI P6-U06": 4.8, "oneplu A000": 4.8, diff --git a/opensfm/data/sensor_data_detailed.json b/opensfm/data/sensor_data_detailed.json index 0e214bfa7..d07082830 100644 --- a/opensfm/data/sensor_data_detailed.json +++ b/opensfm/data/sensor_data_detailed.json @@ -25247,6 +25247,13 @@ "Sensor": "1/2\" (~ 6.4 x 4.8 mm)", "Sensor res. (height)": 1527.0 }, + "Yuneec E90": { + "Sensor height (mm)": 8.8, + "Sensor width (mm)": 13.2, + "Sensor res. (width)": 5472.0, + "Sensor": "1\" (~ 13.2 x 8.8 mm)", + "Sensor res. 
(height)": 3648.0 + }, "GITUP GIT2": { "Sensor height (mm)": 4.662, "Sensor width (mm)": 6.216, From a3e215470fad73be2f078ee0ffb73e9b48ea581e Mon Sep 17 00:00:00 2001 From: Fabian Schenk Date: Mon, 24 Jan 2022 07:58:31 -0800 Subject: [PATCH 09/81] Add type annotations for commands Summary: TSIA Reviewed By: paulinus Differential Revision: D33683343 fbshipit-source-id: 3c4eed403b918e25e9128b1285b413be97ad18f1 --- opensfm/commands/align_submodels.py | 6 ++++-- opensfm/commands/bundle.py | 6 ++++-- opensfm/commands/command.py | 11 ++++++----- opensfm/commands/command_runner.py | 3 ++- opensfm/commands/compute_depthmaps.py | 6 ++++-- opensfm/commands/compute_statistics.py | 6 ++++-- opensfm/commands/create_rig.py | 6 ++++-- opensfm/commands/create_submodels.py | 6 ++++-- opensfm/commands/create_tracks.py | 6 ++++-- opensfm/commands/detect_features.py | 6 ++++-- opensfm/commands/export_bundler.py | 6 ++++-- opensfm/commands/export_colmap.py | 6 ++++-- opensfm/commands/export_geocoords.py | 6 ++++-- opensfm/commands/export_openmvs.py | 6 ++++-- opensfm/commands/export_ply.py | 6 ++++-- opensfm/commands/export_pmvs.py | 6 ++++-- opensfm/commands/export_report.py | 6 ++++-- opensfm/commands/export_visualsfm.py | 6 ++++-- opensfm/commands/extend_reconstruction.py | 6 ++++-- opensfm/commands/extract_metadata.py | 6 ++++-- opensfm/commands/match_features.py | 6 ++++-- opensfm/commands/mesh.py | 6 ++++-- opensfm/commands/reconstruct.py | 6 ++++-- opensfm/commands/reconstruct_from_prior.py | 6 ++++-- opensfm/commands/undistort.py | 6 ++++-- 25 files changed, 100 insertions(+), 52 deletions(-) diff --git a/opensfm/commands/align_submodels.py b/opensfm/commands/align_submodels.py index 26526a4fc..f3bc8c917 100644 --- a/opensfm/commands/align_submodels.py +++ b/opensfm/commands/align_submodels.py @@ -1,14 +1,16 @@ from opensfm.actions import align_submodels from . 
import command +import argparse +from opensfm.dataset import DataSet class Command(command.CommandBase): name = "align_submodels" help = "Align submodel reconstructions" - def run_impl(self, dataset, args): + def run_impl(self, dataset: DataSet, args: argparse.Namespace) -> None: align_submodels.run_dataset(dataset) - def add_arguments_impl(self, parser): + def add_arguments_impl(self, parser: argparse.ArgumentParser) -> None: pass diff --git a/opensfm/commands/bundle.py b/opensfm/commands/bundle.py index e3bfe9b55..b7e6f668a 100644 --- a/opensfm/commands/bundle.py +++ b/opensfm/commands/bundle.py @@ -1,16 +1,18 @@ from opensfm.actions import bundle from . import command +import argparse +from opensfm.dataset import DataSet class Command(command.CommandBase): name = "bundle" help = "Bundle a reconstruction" - def run_impl(self, dataset, args): + def run_impl(self, dataset: DataSet, args: argparse.Namespace) -> None: bundle.run_dataset(dataset, args.input, args.output) - def add_arguments_impl(self, parser): + def add_arguments_impl(self, parser: argparse.ArgumentParser) -> None: parser.add_argument("--input", help="file name of the reconstruction to bundle") parser.add_argument( "--output", help="file name where to store the bundled reconstruction" diff --git a/opensfm/commands/command.py b/opensfm/commands/command.py index 19318f386..077c7c811 100644 --- a/opensfm/commands/command.py +++ b/opensfm/commands/command.py @@ -1,5 +1,6 @@ from timeit import default_timer as timer - +import argparse +from opensfm.dataset import DataSet class CommandBase: """ Base class for executable commands.""" @@ -7,18 +8,18 @@ class CommandBase: name = "Undefined command" help = "Undefined command help" - def run(self, data, args): + def run(self, data, args: argparse.Namespace) -> None: start = timer() self.run_impl(data, args) end = timer() data.append_to_profile_log(f"{type(self).name}: {end - start}\n") - def add_arguments(self, parser): + def add_arguments(self, parser: 
argparse.ArgumentParser) -> None: parser.add_argument("dataset", help="dataset to process") self.add_arguments_impl(parser) - def run_impl(self, dataset, args): + def run_impl(self, dataset: DataSet, args: argparse.Namespace) -> None: raise NotImplementedError("Command " + self.name + " not implemented") - def add_arguments_impl(self, parser): + def add_arguments_impl(self, parser: argparse.ArgumentParser) -> None: raise NotImplementedError("Command " + self.name + " not implemented") diff --git a/opensfm/commands/command_runner.py b/opensfm/commands/command_runner.py index 933fad9ea..6498a7ef6 100644 --- a/opensfm/commands/command_runner.py +++ b/opensfm/commands/command_runner.py @@ -1,9 +1,10 @@ +from typing import Any, Callable, List import argparse from opensfm import log -def command_runner(all_commands_types, dataset_factory, dataset_choices): +def command_runner(all_commands_types: List[Any], dataset_factory: Callable, dataset_choices: List[str]) -> None: """ Main entry point for running the passed SfM commands types.""" log.setup() diff --git a/opensfm/commands/compute_depthmaps.py b/opensfm/commands/compute_depthmaps.py index a7f305072..57c647947 100644 --- a/opensfm/commands/compute_depthmaps.py +++ b/opensfm/commands/compute_depthmaps.py @@ -1,16 +1,18 @@ from opensfm.actions import compute_depthmaps from . 
import command +import argparse +from opensfm.dataset import DataSet class Command(command.CommandBase): name = "compute_depthmaps" help = "Compute depthmap" - def run_impl(self, dataset, args): + def run_impl(self, dataset: DataSet, args: argparse.Namespace) -> None: compute_depthmaps.run_dataset(dataset, args.subfolder, args.interactive) - def add_arguments_impl(self, parser): + def add_arguments_impl(self, parser: argparse.ArgumentParser) -> None: parser.add_argument( "--subfolder", help="undistorted subfolder where to load and store data", diff --git a/opensfm/commands/compute_statistics.py b/opensfm/commands/compute_statistics.py index f270bd524..a445fc7c4 100644 --- a/opensfm/commands/compute_statistics.py +++ b/opensfm/commands/compute_statistics.py @@ -1,4 +1,6 @@ from . import command +import argparse +from opensfm.dataset import DataSet from opensfm.actions import compute_statistics @@ -6,10 +8,10 @@ class Command(command.CommandBase): name = "compute_statistics" help = "Compute statistics and save them in the stats folder" - def run_impl(self, dataset, args): + def run_impl(self, dataset: DataSet, args: argparse.Namespace) -> None: compute_statistics.run_dataset(dataset, args.diagram_max_points) - def add_arguments_impl(self, parser): + def add_arguments_impl(self, parser: argparse.ArgumentParser) -> None: parser.add_argument( "--diagram_max_points", default=-1, diff --git a/opensfm/commands/create_rig.py b/opensfm/commands/create_rig.py index c64b9394d..120e471b9 100644 --- a/opensfm/commands/create_rig.py +++ b/opensfm/commands/create_rig.py @@ -3,16 +3,18 @@ from opensfm.actions import create_rig from . import command +import argparse +from opensfm.dataset import DataSet class Command(command.CommandBase): name = "create_rig" help = "Create rig by creating `rig_cameras.json` and `rig_assignments.json` files." 
- def run_impl(self, dataset, args): + def run_impl(self, dataset: DataSet, args: argparse.Namespace) -> None: create_rig.run_dataset(dataset, args.method, json.loads(args.definition), True) - def add_arguments_impl(self, parser): + def add_arguments_impl(self, parser: argparse.ArgumentParser) -> None: parser.add_argument( "method", help="Method for creating the rigs", diff --git a/opensfm/commands/create_submodels.py b/opensfm/commands/create_submodels.py index b45000a67..bc76dcba8 100644 --- a/opensfm/commands/create_submodels.py +++ b/opensfm/commands/create_submodels.py @@ -1,14 +1,16 @@ from opensfm.actions import create_submodels from . import command +import argparse +from opensfm.dataset import DataSet class Command(command.CommandBase): name = "create_submodels" help = "Split the dataset into smaller submodels" - def run_impl(self, dataset, args): + def run_impl(self, dataset: DataSet, args: argparse.Namespace) -> None: create_submodels.run_dataset(dataset) - def add_arguments_impl(self, parser): + def add_arguments_impl(self, parser: argparse.ArgumentParser) -> None: pass diff --git a/opensfm/commands/create_tracks.py b/opensfm/commands/create_tracks.py index c6de68765..d5ee6e6ac 100644 --- a/opensfm/commands/create_tracks.py +++ b/opensfm/commands/create_tracks.py @@ -1,14 +1,16 @@ from opensfm.actions import create_tracks from . 
import command +import argparse +from opensfm.dataset import DataSet class Command(command.CommandBase): name = "create_tracks" help = "Link matches pair-wise matches into tracks" - def run_impl(self, dataset, args): + def run_impl(self, dataset: DataSet, args: argparse.Namespace) -> None: create_tracks.run_dataset(dataset) - def add_arguments_impl(self, parser): + def add_arguments_impl(self, parser: argparse.ArgumentParser) -> None: pass diff --git a/opensfm/commands/detect_features.py b/opensfm/commands/detect_features.py index 00fc4b768..b94f83289 100644 --- a/opensfm/commands/detect_features.py +++ b/opensfm/commands/detect_features.py @@ -1,14 +1,16 @@ from opensfm.actions import detect_features from . import command +import argparse +from opensfm.dataset import DataSet class Command(command.CommandBase): name = "detect_features" help = "Compute features for all images" - def run_impl(self, dataset, args): + def run_impl(self, dataset: DataSet, args: argparse.Namespace) -> None: detect_features.run_dataset(dataset) - def add_arguments_impl(self, parser): + def add_arguments_impl(self, parser: argparse.ArgumentParser) -> None: pass diff --git a/opensfm/commands/export_bundler.py b/opensfm/commands/export_bundler.py index 339333ad3..13bb1a241 100644 --- a/opensfm/commands/export_bundler.py +++ b/opensfm/commands/export_bundler.py @@ -1,18 +1,20 @@ from opensfm.actions import export_bundler from . 
import command +import argparse +from opensfm.dataset import DataSet class Command(command.CommandBase): name = "export_bundler" help = "Export reconstruction to bundler format" - def run_impl(self, dataset, args): + def run_impl(self, dataset: DataSet, args: argparse.Namespace) -> None: export_bundler.run_dataset( dataset, args.list_path, args.bundle_path, args.undistorted ) - def add_arguments_impl(self, parser): + def add_arguments_impl(self, parser: argparse.ArgumentParser) -> None: parser.add_argument("--list_path", help="path to the list.txt file") parser.add_argument("--bundle_path", help="path to the bundle.out file") parser.add_argument( diff --git a/opensfm/commands/export_colmap.py b/opensfm/commands/export_colmap.py index 8a4217059..b5fab0a00 100644 --- a/opensfm/commands/export_colmap.py +++ b/opensfm/commands/export_colmap.py @@ -1,16 +1,18 @@ from opensfm.actions import export_colmap from . import command +import argparse +from opensfm.dataset import DataSet class Command(command.CommandBase): name = "export_colmap" help = "Export reconstruction to colmap format" - def run_impl(self, dataset, args): + def run_impl(self, dataset: DataSet, args: argparse.Namespace) -> None: export_colmap.run_dataset(dataset, args.binary) - def add_arguments_impl(self, parser): + def add_arguments_impl(self, parser: argparse.ArgumentParser) -> None: parser.add_argument( "--binary", help="export using binary format", action="store_true" ) diff --git a/opensfm/commands/export_geocoords.py b/opensfm/commands/export_geocoords.py index ecb3e28be..155a91bca 100644 --- a/opensfm/commands/export_geocoords.py +++ b/opensfm/commands/export_geocoords.py @@ -1,13 +1,15 @@ from opensfm.actions import export_geocoords from . 
import command +import argparse +from opensfm.dataset import DataSet class Command(command.CommandBase): name = "export_geocoords" help = "Export reconstructions in geographic coordinates" - def run_impl(self, dataset, args): + def run_impl(self, dataset: DataSet, args: argparse.Namespace) -> None: export_geocoords.run_dataset( dataset, args.proj, @@ -18,7 +20,7 @@ def run_impl(self, dataset, args): args.output, ) - def add_arguments_impl(self, parser): + def add_arguments_impl(self, parser: argparse.ArgumentParser) -> None: parser.add_argument("--proj", help="PROJ.4 projection string", required=True) parser.add_argument( "--transformation", diff --git a/opensfm/commands/export_openmvs.py b/opensfm/commands/export_openmvs.py index 670017b91..7476c6fad 100644 --- a/opensfm/commands/export_openmvs.py +++ b/opensfm/commands/export_openmvs.py @@ -1,16 +1,18 @@ from opensfm.actions import export_openmvs from . import command +import argparse +from opensfm.dataset import DataSet class Command(command.CommandBase): name = "export_openmvs" help = "Export reconstruction to openMVS format" - def run_impl(self, dataset, args): + def run_impl(self, dataset: DataSet, args: argparse.Namespace) -> None: export_openmvs.run_dataset(dataset, args.image_list) - def add_arguments_impl(self, parser): + def add_arguments_impl(self, parser: argparse.ArgumentParser) -> None: parser.add_argument( "--image_list", type=str, diff --git a/opensfm/commands/export_ply.py b/opensfm/commands/export_ply.py index 62530755c..7e4892968 100644 --- a/opensfm/commands/export_ply.py +++ b/opensfm/commands/export_ply.py @@ -1,16 +1,18 @@ from opensfm.actions import export_ply from . 
import command +import argparse +from opensfm.dataset import DataSet class Command(command.CommandBase): name = "export_ply" help = "Export reconstruction to PLY format" - def run_impl(self, dataset, args): + def run_impl(self, dataset: DataSet, args: argparse.Namespace) -> None: export_ply.run_dataset(dataset, args.no_cameras, args.no_points, args.depthmaps, args.point_num_views) - def add_arguments_impl(self, parser): + def add_arguments_impl(self, parser: argparse.ArgumentParser) -> None: parser.add_argument( "--no-cameras", action="store_true", diff --git a/opensfm/commands/export_pmvs.py b/opensfm/commands/export_pmvs.py index 7a93817e0..9a39d5ccd 100644 --- a/opensfm/commands/export_pmvs.py +++ b/opensfm/commands/export_pmvs.py @@ -1,18 +1,20 @@ from opensfm.actions import export_pmvs from . import command +import argparse +from opensfm.dataset import DataSet class Command(command.CommandBase): name = "export_pmvs" help = "Export reconstruction to PMVS" - def run_impl(self, dataset, args): + def run_impl(self, dataset: DataSet, args: argparse.Namespace) -> None: export_pmvs.run_dataset( dataset, args.points, args.image_list, args.output, args.undistorted ) - def add_arguments_impl(self, parser): + def add_arguments_impl(self, parser: argparse.ArgumentParser) -> None: parser.add_argument("--points", action="store_true", help="export points") parser.add_argument( "--image_list", diff --git a/opensfm/commands/export_report.py b/opensfm/commands/export_report.py index 8941484c5..ab6722ee2 100644 --- a/opensfm/commands/export_report.py +++ b/opensfm/commands/export_report.py @@ -1,4 +1,6 @@ from . 
import command +import argparse +from opensfm.dataset import DataSet from opensfm.actions import export_report @@ -6,8 +8,8 @@ class Command(command.CommandBase): name = "export_report" help = "Export a nice report based on previously generated statistics" - def run_impl(self, dataset, args): + def run_impl(self, dataset: DataSet, args: argparse.Namespace) -> None: export_report.run_dataset(dataset) - def add_arguments_impl(self, parser): + def add_arguments_impl(self, parser: argparse.ArgumentParser) -> None: pass diff --git a/opensfm/commands/export_visualsfm.py b/opensfm/commands/export_visualsfm.py index c31258659..c0b65eead 100644 --- a/opensfm/commands/export_visualsfm.py +++ b/opensfm/commands/export_visualsfm.py @@ -1,16 +1,18 @@ from opensfm.actions import export_visualsfm from . import command +import argparse +from opensfm.dataset import DataSet class Command(command.CommandBase): name = "export_visualsfm" help = "Export reconstruction to NVM_V3 format from VisualSfM" - def run_impl(self, dataset, args): + def run_impl(self, dataset: DataSet, args: argparse.Namespace) -> None: export_visualsfm.run_dataset(dataset, args.points, args.image_list) - def add_arguments_impl(self, parser): + def add_arguments_impl(self, parser: argparse.ArgumentParser) -> None: parser.add_argument("--points", action="store_true", help="export points") parser.add_argument( "--image_list", diff --git a/opensfm/commands/extend_reconstruction.py b/opensfm/commands/extend_reconstruction.py index cecd54517..2804c5382 100644 --- a/opensfm/commands/extend_reconstruction.py +++ b/opensfm/commands/extend_reconstruction.py @@ -1,16 +1,18 @@ from opensfm.actions import extend_reconstruction from . 
import command +import argparse +from opensfm.dataset import DataSet class Command(command.CommandBase): name = "extend_reconstruction" help = "Extend a reconstruction" - def run_impl(self, dataset, args): + def run_impl(self, dataset: DataSet, args: argparse.Namespace) -> None: extend_reconstruction.run_dataset(dataset, args.input, args.output) - def add_arguments_impl(self, parser): + def add_arguments_impl(self, parser: argparse.ArgumentParser) -> None: parser.add_argument("--input", help="file name of the prior reconstruction") parser.add_argument( "--output", help="file name where to store the reconstruction" diff --git a/opensfm/commands/extract_metadata.py b/opensfm/commands/extract_metadata.py index 94026f6b4..c51879103 100644 --- a/opensfm/commands/extract_metadata.py +++ b/opensfm/commands/extract_metadata.py @@ -1,14 +1,16 @@ from opensfm.actions import extract_metadata from . import command +import argparse +from opensfm.dataset import DataSet class Command(command.CommandBase): name = "extract_metadata" help = "Extract metadata from images' EXIF tag" - def run_impl(self, dataset, args): + def run_impl(self, dataset: DataSet, args: argparse.Namespace) -> None: extract_metadata.run_dataset(dataset) - def add_arguments_impl(self, parser): + def add_arguments_impl(self, parser: argparse.ArgumentParser) -> None: pass diff --git a/opensfm/commands/match_features.py b/opensfm/commands/match_features.py index eec231d64..5739c1801 100644 --- a/opensfm/commands/match_features.py +++ b/opensfm/commands/match_features.py @@ -1,14 +1,16 @@ from opensfm.actions import match_features from . 
import command +import argparse +from opensfm.dataset import DataSet class Command(command.CommandBase): name = "match_features" help = "Match features between image pairs" - def run_impl(self, dataset, args): + def run_impl(self, dataset: DataSet, args: argparse.Namespace) -> None: match_features.run_dataset(dataset) - def add_arguments_impl(self, parser): + def add_arguments_impl(self, parser: argparse.ArgumentParser) -> None: pass diff --git a/opensfm/commands/mesh.py b/opensfm/commands/mesh.py index 1c6342597..c762d97c3 100644 --- a/opensfm/commands/mesh.py +++ b/opensfm/commands/mesh.py @@ -1,14 +1,16 @@ from opensfm.actions import mesh from . import command +import argparse +from opensfm.dataset import DataSet class Command(command.CommandBase): name = "mesh" help = "Add delaunay meshes to the reconstruction" - def run_impl(self, dataset, args): + def run_impl(self, dataset: DataSet, args: argparse.Namespace) -> None: mesh.run_dataset(dataset) - def add_arguments_impl(self, parser): + def add_arguments_impl(self, parser: argparse.ArgumentParser) -> None: pass diff --git a/opensfm/commands/reconstruct.py b/opensfm/commands/reconstruct.py index 5f035aad3..bfccf9eba 100644 --- a/opensfm/commands/reconstruct.py +++ b/opensfm/commands/reconstruct.py @@ -2,16 +2,18 @@ from opensfm.actions import reconstruct from . 
import command +import argparse +from opensfm.dataset import DataSet class Command(command.CommandBase): name = "reconstruct" help = "Compute the reconstruction" - def run_impl(self, dataset, args): + def run_impl(self, dataset: DataSet, args: argparse.Namespace) -> None: reconstruct.run_dataset(dataset, args.algorithm) - def add_arguments_impl(self, parser): + def add_arguments_impl(self, parser: argparse.ArgumentParser) -> None: parser.add_argument( "--algorithm", help="SfM algorithm to use to run reconstrution", diff --git a/opensfm/commands/reconstruct_from_prior.py b/opensfm/commands/reconstruct_from_prior.py index ad20c35d7..f4271e98a 100644 --- a/opensfm/commands/reconstruct_from_prior.py +++ b/opensfm/commands/reconstruct_from_prior.py @@ -1,16 +1,18 @@ from opensfm.actions import reconstruct_from_prior from . import command +import argparse +from opensfm.dataset import DataSet class Command(command.CommandBase): name = "reconstruct_from_prior" help = "Reconstruct from prior reconstruction" - def run_impl(self, dataset, args): + def run_impl(self, dataset: DataSet, args: argparse.Namespace) -> None: reconstruct_from_prior.run_dataset(dataset, args.input, args.output) - def add_arguments_impl(self, parser): + def add_arguments_impl(self, parser: argparse.ArgumentParser) -> None: parser.add_argument("--input", help="file name of the prior reconstruction") parser.add_argument( "--output", help="file name where to store the reconstruction" diff --git a/opensfm/commands/undistort.py b/opensfm/commands/undistort.py index 80519d552..d9f99eb62 100644 --- a/opensfm/commands/undistort.py +++ b/opensfm/commands/undistort.py @@ -1,13 +1,15 @@ from opensfm.actions import undistort from . 
import command +import argparse +from opensfm.dataset import DataSet class Command(command.CommandBase): name = "undistort" help = "Save radially undistorted images" - def run_impl(self, dataset, args): + def run_impl(self, dataset: DataSet, args: argparse.Namespace) -> None: undistort.run_dataset( dataset, args.reconstruction, @@ -17,7 +19,7 @@ def run_impl(self, dataset, args): args.skip_images ) - def add_arguments_impl(self, parser): + def add_arguments_impl(self, parser: argparse.ArgumentParser) -> None: parser.add_argument( "--reconstruction", help="reconstruction to undistort", From 22b0f14ec69e0fa295c73781c52a72dc82040e01 Mon Sep 17 00:00:00 2001 From: Fabian Schenk Date: Tue, 25 Jan 2022 02:50:47 -0800 Subject: [PATCH 10/81] Fix Brown Camera documentation Summary: There was an issue in the Brown Camera documentation as reported in [Github Issue #846](https://github.com/mapillary/OpenSfM/issues/846). Reviewed By: YanNoun Differential Revision: D33682560 fbshipit-source-id: 658511bc66b64e86a14f15046ed19a4c54b32cab --- doc/source/geometry.rst | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/doc/source/geometry.rst b/doc/source/geometry.rst index d8036bdd3..b209bef01 100644 --- a/doc/source/geometry.rst +++ b/doc/source/geometry.rst @@ -144,8 +144,8 @@ Identifier `brown` y_n = \frac{y}{z} \\ r^2 = x_n^2 + y_n^2 \\ d_r = 1 + k_1 r^2 + k_2 r^4 + k_3 r^6\\ - d^t_x = 2p_1\ x_n\ y_n + p_2\ (r^2 + 2x)\\ - d^t_y = 2p_2\ x_n\ y_n + p_1\ (r^2 + 2y)\\ + d^t_x = 2p_1\ x_n\ y_n + p_2\ (r^2 + 2x^2)\\ + d^t_y = 2p_2\ x_n\ y_n + p_1\ (r^2 + 2y^2)\\ u = f_x\ (d_r\ x_n + d^t_x) + c_x \\ v = f_y\ (d_r\ y_n + d^t_y) + c_y \end{array} From da9811f2629e3ce4a71ebc0ac35cd7df0327cdd2 Mon Sep 17 00:00:00 2001 From: Piero Toffanin Date: Wed, 26 Jan 2022 05:31:08 -0800 Subject: [PATCH 11/81] Extract OPK angles from XMP (#838) Summary: Hello :hand: This PR adds support for extracting omega/phi/kappa angles from images' XMP tags (if they are available). 
I've tested this with the `reconstruct --algorithm triangulation` command on a few datasets with a Sensefly and DJI camera and seems to yield the correct results. ![image](https://user-images.githubusercontent.com/1951843/146606081-a77005f5-803f-43c2-b27a-26dd88a1a075.png) Test dataset: https://github.com/pierotofy/drone_dataset_sheffield_cross/ Hope this can be useful to others! :pray: Pull Request resolved: https://github.com/mapillary/OpenSfM/pull/838 Reviewed By: fabianschenk Differential Revision: D33242090 Pulled By: YanNoun fbshipit-source-id: 4aa3896d3c82925ff0e6b357a278aacdffd45ed3 --- opensfm/exif.py | 129 ++++++++++++++++++++++++++++++++++++++++++++++++ 1 file changed, 129 insertions(+) diff --git a/opensfm/exif.py b/opensfm/exif.py index b256363e5..f837b94c2 100644 --- a/opensfm/exif.py +++ b/opensfm/exif.py @@ -8,6 +8,7 @@ import xmltodict as x2d from opensfm import pygeometry from opensfm.dataset_base import DataSetBase +from opensfm.geo import ecef_from_lla from opensfm.sensors import sensor_data logger = logging.getLogger(__name__) @@ -450,6 +451,130 @@ def extract_capture_time(self): ) return 0.0 + def extract_opk(self, geo): + opk = None + + if self.has_xmp() and geo and "latitude" in geo and "longitude" in geo: + ypr = np.array([None, None, None]) + + try: + # YPR conventions (assuming nadir camera) + # Yaw: 0 --> top of image points north + # Yaw: 90 --> top of image points east + # Yaw: 270 --> top of image points west + # Pitch: 0 --> nadir camera + # Pitch: 90 --> camera is looking forward + # Roll: 0 (assuming gimbal) + + if ( + "@Camera:Yaw" in self.xmp[0] + and "@Camera:Pitch" in self.xmp[0] + and "@Camera:Roll" in self.xmp[0] + ): + ypr = np.array( + [ + float(self.xmp[0]["@Camera:Yaw"]), + float(self.xmp[0]["@Camera:Pitch"]), + float(self.xmp[0]["@Camera:Roll"]), + ] + ) + elif ( + "@drone-dji:GimbalYawDegree" in self.xmp[0] + and "@drone-dji:GimbalPitchDegree" in self.xmp[0] + and "@drone-dji:GimbalRollDegree" in self.xmp[0] + ): + ypr 
= np.array( + [ + float(self.xmp[0]["@drone-dji:GimbalYawDegree"]), + float(self.xmp[0]["@drone-dji:GimbalPitchDegree"]), + float(self.xmp[0]["@drone-dji:GimbalRollDegree"]), + ] + ) + ypr[1] += 90 # DJI's values need to be offset + except ValueError: + logger.debug( + 'Invalid yaw/pitch/roll tag in image file "{0:s}"'.format( + self.fileobj_name + ) + ) + + if np.all(ypr) is not None: + ypr = np.radians(ypr) + + # Convert YPR --> OPK + # Ref: New Calibration and Computing Method for Direct + # Georeferencing of Image and Scanner Data Using the + # Position and Angular Data of an Hybrid Inertial Navigation System + # by Manfred Bäumker + y, p, r = ypr + + # YPR rotation matrix + cnb = np.array( + [ + [ + np.cos(y) * np.cos(p), + np.cos(y) * np.sin(p) * np.sin(r) - np.sin(y) * np.cos(r), + np.cos(y) * np.sin(p) * np.cos(r) + np.sin(y) * np.sin(r), + ], + [ + np.sin(y) * np.cos(p), + np.sin(y) * np.sin(p) * np.sin(r) + np.cos(y) * np.cos(r), + np.sin(y) * np.sin(p) * np.cos(r) - np.cos(y) * np.sin(r), + ], + [-np.sin(p), np.cos(p) * np.sin(r), np.cos(p) * np.cos(r)], + ] + ) + + # Convert between image and body coordinates + # Top of image pixels point to flying direction + # and camera is looking down. + # We might need to change this if we want different + # camera mount orientations (e.g. 
backward or sideways) + + # (Swap X/Y, flip Z) + cbb = np.array([[0, 1, 0], [1, 0, 0], [0, 0, -1]]) + + delta = 1e-7 + + p1 = np.array( + ecef_from_lla( + geo["latitude"] + delta, + geo["longitude"], + geo.get("altitude", 0), + ) + ) + p2 = np.array( + ecef_from_lla( + geo["latitude"] - delta, + geo["longitude"], + geo.get("altitude", 0), + ) + ) + xnp = p1 - p2 + m = np.linalg.norm(xnp) + + if m == 0: + logger.debug("Cannot compute OPK angles, divider = 0") + return opk + + # Unit vector pointing north + xnp /= m + + znp = np.array([0, 0, -1]).T + ynp = np.cross(znp, xnp) + + cen = np.array([xnp, ynp, znp]).T + + # OPK rotation matrix + ceb = cen.dot(cnb).dot(cbb) + + opk = {} + opk["omega"] = np.degrees(np.arctan2(-ceb[1][2], ceb[2][2])) + opk["phi"] = np.degrees(np.arcsin(ceb[0][2])) + opk["kappa"] = np.degrees(np.arctan2(-ceb[0][1], ceb[0][0])) + + return opk + def extract_exif(self): width, height = self.extract_image_size() projection_type = self.extract_projection_type() @@ -458,6 +583,7 @@ def extract_exif(self): orientation = self.extract_orientation() geo = self.extract_geo() capture_time = self.extract_capture_time() + opk = self.extract_opk(geo) d = { "make": make, "model": model, @@ -469,6 +595,9 @@ def extract_exif(self): "capture_time": capture_time, "gps": geo, } + if opk: + d["opk"] = opk + d["camera"] = camera_id(d) return d From 4ed46d572fe222fc6b1a28e8543d44ab40843221 Mon Sep 17 00:00:00 2001 From: Yann Noutary Date: Fri, 28 Jan 2022 04:18:25 -0800 Subject: [PATCH 12/81] fix: copy masks folder during rig pre-calibration Summary: This Diff add 'masks' copy to the temporary dataset used when calibrating rigs Reviewed By: DodgySpaniard Differential Revision: D33660483 fbshipit-source-id: 63e170d2e395e27f91e13e2b48282938c1880ae8 --- opensfm/dataset.py | 13 +++++++++++-- 1 file changed, 11 insertions(+), 2 deletions(-) diff --git a/opensfm/dataset.py b/opensfm/dataset.py index d3c95ef0b..87d15f84a 100644 --- a/opensfm/dataset.py +++ 
b/opensfm/dataset.py @@ -618,8 +618,10 @@ def subset(self, name: str, images_subset: List[str]) -> "DataSet": """Create a subset of this dataset by symlinking input data.""" subset_dataset_path = os.path.join(self.data_path, name) self.io_handler.mkdir_p(subset_dataset_path) - self.io_handler.mkdir_p(os.path.join(subset_dataset_path, "images")) - self.io_handler.mkdir_p(os.path.join(subset_dataset_path, "segmentations")) + + folders = ["images", "segmentations", "masks"] + for folder in folders: + self.io_handler.mkdir_p(os.path.join(subset_dataset_path, folder)) subset_dataset = DataSet(subset_dataset_path, self.io_handler) files = [] @@ -648,6 +650,13 @@ def subset(self, name: str, images_subset: List[str]) -> "DataSet": os.path.join(subset_dataset_path, "segmentations", image + ".png"), ) ) + if image in self.mask_files: + files.append( + ( + self.mask_files[image], + os.path.join(subset_dataset_path, "masks", image + ".png"), + ) + ) for src, dst in files: if not self.io_handler.exists(src): From 18cc86992785e167adf00ada088a1dd5b8d33b04 Mon Sep 17 00:00:00 2001 From: Yann Noutary Date: Fri, 28 Jan 2022 04:18:25 -0800 Subject: [PATCH 13/81] fix: fix dummy reconstruction creation Summary: This Diff fixes the output of the reconstructon representing the pre-calibrated rig instances. Was broken few months ago, while making rigs 1st-class citizens. 
Reviewed By: DodgySpaniard Differential Revision: D33660486 fbshipit-source-id: b48b4cf6fddfb91d69c5a7e3e7754609fb7fb9a5 --- opensfm/actions/create_rig.py | 17 ++++++++++------- 1 file changed, 10 insertions(+), 7 deletions(-) diff --git a/opensfm/actions/create_rig.py b/opensfm/actions/create_rig.py index b686ca8a3..bc5bb2b8d 100644 --- a/opensfm/actions/create_rig.py +++ b/opensfm/actions/create_rig.py @@ -1,7 +1,6 @@ import logging -import numpy as np -from opensfm import rig, reconstruction_helpers as helpers, pygeometry, types +from opensfm import pymap, rig, reconstruction_helpers as helpers, types from opensfm.dataset import DataSet, DataSetBase @@ -34,17 +33,22 @@ def _reconstruction_from_rigs_and_assignments(data: DataSetBase): data.init_reference() - base_rotation = np.zeros(3) - reconstruction = types.Reconstruction() reconstruction.cameras = data.load_camera_models() for rig_instance_id, instance in assignments.items(): for image, rig_camera_id in instance: rig_camera = rig_cameras[rig_camera_id] - rig_pose = pygeometry.Pose(base_rotation) - rig_pose.set_origin( + reconstruction.add_rig_camera( + pymap.RigCamera(rig_camera.pose, rig_camera_id) + ) + + instance_obj = reconstruction.add_rig_instance( + pymap.RigInstance(rig_instance_id) + ) + instance_obj.pose.set_origin( helpers.get_image_metadata(data, image).gps_position.value ) + d = data.load_exif(image) shot = reconstruction.create_shot( image, @@ -52,6 +56,5 @@ def _reconstruction_from_rigs_and_assignments(data: DataSetBase): rig_camera_id=rig_camera_id, rig_instance_id=rig_instance_id, ) - shot.pose = rig_camera.pose.compose(rig_pose) shot.metadata = helpers.get_image_metadata(data, image) return [reconstruction] From 7992d33965c4c20eee8e17a1ec5680efa37fdb13 Mon Sep 17 00:00:00 2001 From: Yann Noutary Date: Fri, 28 Jan 2022 04:18:25 -0800 Subject: [PATCH 14/81] fix: handling of camera in synthetic tests Summary: This Diff fixes the handling of cameras in synthetic data generation. 
With rigs as 1st-class citizens, cameras of a given sequence should be supposed to be given as `List` and not as a single `Camera` objects. As were not doing, so, only the first camera model given was used. Test is modified in order to keep its behaviour. Reviewed By: paulinus Differential Revision: D33660484 fbshipit-source-id: 8355949cd7f11166a9f1b601b23f6d1f2a7a9799 --- opensfm/synthetic_data/synthetic_examples.py | 13 +++++------- opensfm/synthetic_data/synthetic_generator.py | 13 ++++++------ opensfm/synthetic_data/synthetic_scene.py | 20 ++++++++++--------- 3 files changed, 23 insertions(+), 23 deletions(-) diff --git a/opensfm/synthetic_data/synthetic_examples.py b/opensfm/synthetic_data/synthetic_examples.py index a08370bcb..be863fc9c 100644 --- a/opensfm/synthetic_data/synthetic_examples.py +++ b/opensfm/synthetic_data/synthetic_examples.py @@ -82,15 +82,12 @@ def synthetic_rig_scene(reference: Optional[geo.TopocentricConverter] = None): [0.0, 1.5707963, 0.0], [0.0, -1.5707963, 0.0], ] - camera_front = ss.get_camera("perspective", "1", 0.7, -0.1, 0.01) - camera_back = ss.get_camera("perspective", "1", 0.7, -0.1, 0.01) - camera_left = ss.get_camera("perspective", "1", 0.9, -0.1, 0.01) - camera_right = ss.get_camera("perspective", "1", 0.9, -0.1, 0.01) + camera = ss.get_camera("perspective", "1", 0.7, -0.1, 0.01) cameras = [ - camera_front, - camera_back, - camera_right, - camera_left, + camera, + camera, + camera, + camera, ] scene.add_rig_camera_sequence( diff --git a/opensfm/synthetic_data/synthetic_generator.py b/opensfm/synthetic_data/synthetic_generator.py index dd5b05561..4de6f6c64 100644 --- a/opensfm/synthetic_data/synthetic_generator.py +++ b/opensfm/synthetic_data/synthetic_generator.py @@ -268,11 +268,12 @@ def add_shots_to_reconstruction( positions: List[np.ndarray], rotations: List[np.ndarray], rig_cameras: List[pymap.RigCamera], - camera: pygeometry.Camera, + cameras: List[pygeometry.Camera], reconstruction: types.Reconstruction, sequence_key: 
str, ): - reconstruction.add_camera(camera) + for camera in cameras: + reconstruction.add_camera(camera) rec_rig_cameras = [] for rig_camera in rig_cameras: @@ -283,9 +284,9 @@ def add_shots_to_reconstruction( rig_instance = reconstruction.add_rig_instance(pymap.RigInstance(instance_id)) rig_instance.pose = pygeometry.Pose(rotation, -rotation.dot(position)) - for s in i_shots: - shot_id = s[0] - rig_camera_id = s[1] + for shot, camera in zip(i_shots, cameras): + shot_id = shot[0] + rig_camera_id = shot[1] shot = reconstruction.create_shot( shot_id, camera.id, @@ -299,7 +300,7 @@ def add_shots_to_reconstruction( def create_reconstruction( points: List[np.ndarray], colors: List[np.ndarray], - cameras: List[pygeometry.Camera], + cameras: List[List[pygeometry.Camera]], shot_ids: List[List[str]], rig_shots: List[List[List[Tuple[str, str]]]], rig_positions: List[np.ndarray], diff --git a/opensfm/synthetic_data/synthetic_scene.py b/opensfm/synthetic_data/synthetic_scene.py index 3d3e73914..58f36f467 100644 --- a/opensfm/synthetic_data/synthetic_scene.py +++ b/opensfm/synthetic_data/synthetic_scene.py @@ -149,14 +149,18 @@ class SyntheticStreetScene(SyntheticScene): wall_points: Optional[np.ndarray] floor_points: Optional[np.ndarray] shot_ids: List[List[str]] - cameras: List[pygeometry.Camera] + cameras: List[List[pygeometry.Camera]] instances_positions: List[np.ndarray] instances_rotations: List[np.ndarray] rig_instances: List[List[List[Tuple[str, str]]]] rig_cameras: List[List[pymap.RigCamera]] width: float - def __init__(self, generator: Optional[Callable], reference: Optional[geo.TopocentricConverter] = None) -> None: + def __init__( + self, + generator: Optional[Callable], + reference: Optional[geo.TopocentricConverter] = None, + ) -> None: self.generator = generator self.reference = reference self.wall_points = None @@ -230,7 +234,7 @@ def set_terrain_hill( self._set_terrain_hill_repeated(height, radius) return self - def _set_terrain_hill_single(self, height: float, 
radius: float)->None: + def _set_terrain_hill_single(self, height: float, radius: float) -> None: wall_points, floor_points = self.wall_points, self.floor_points assert wall_points is not None and floor_points is not None wall_points[:, 2] += height * np.exp( @@ -292,10 +296,10 @@ def add_camera_sequence( if positions_shift: positions += np.array(positions_shift) - shift = 0 if len(self.shot_ids) == 0 else len(self.shot_ids[-1]) + shift = 0 if len(self.shot_ids) == 0 else sum(len(s) for s in self.shot_ids) new_shot_ids = [f"Shot {shift+i:04d}" for i in range(len(positions))] self.shot_ids.append(new_shot_ids) - self.cameras.append(camera) + self.cameras.append([camera]) rig_camera = pymap.RigCamera(pygeometry.Pose(), camera.id) self.rig_cameras.append([rig_camera]) @@ -336,9 +340,7 @@ def add_rig_camera_sequence( sg.perturb_rotations(instances_rotations, rotation_noise) shots_ids_per_camera = [] - for rig_camera_p, rig_camera_r, camera in zip( - relative_positions, relative_rotations, cameras - ): + for rig_camera_p, rig_camera_r in zip(relative_positions, relative_rotations): pose_rig_camera = pygeometry.Pose(rig_camera_r) pose_rig_camera.set_origin(rig_camera_p) @@ -355,7 +357,7 @@ def add_rig_camera_sequence( shots_ids_per_camera.append( [f"Shot {shift+i:04d}" for i in range(len(positions))] ) - self.cameras.append(camera) + self.cameras.append(cameras) self.shot_ids += shots_ids_per_camera rig_camera_ids = [] From b44b5f2b533b6fce8055b3a5a98a59bc22ae2cf6 Mon Sep 17 00:00:00 2001 From: Yann Noutary Date: Fri, 28 Jan 2022 04:18:25 -0800 Subject: [PATCH 15/81] fix: handling of rig cameras in merging Reviewed By: paulinus Differential Revision: D33660489 fbshipit-source-id: 5da48313f7b15c636ada18acc1e9200b6b438448 --- opensfm/src/bundle/src/bundle_adjuster.cc | 88 +++++++++++++------- opensfm/synthetic_data/synthetic_examples.py | 72 +++++++--------- opensfm/synthetic_data/synthetic_scene.py | 4 +- 3 files changed, 86 insertions(+), 78 deletions(-) diff --git 
a/opensfm/src/bundle/src/bundle_adjuster.cc b/opensfm/src/bundle/src/bundle_adjuster.cc index 1ced8cdff..883f47c39 100644 --- a/opensfm/src/bundle/src/bundle_adjuster.cc +++ b/opensfm/src/bundle/src/bundle_adjuster.cc @@ -796,11 +796,15 @@ void BundleAdjuster::Run() { } auto shot_j_rig_camera = shot_j.GetRigCamera()->GetValueData().data(); - if (IsRigCameraUseful(*shot_j.GetRigCamera()) && - shot_j_rig_camera != shot_i_rig_camera) { - cost_function->AddParameterBlock(6); - relative_motion->shot_j_rig_camera_index_ = parameter_blocks.size(); - parameter_blocks.push_back(shot_j_rig_camera); + if (IsRigCameraUseful(*shot_j.GetRigCamera())) { + if (shot_j_rig_camera != shot_i_rig_camera) { + cost_function->AddParameterBlock(6); + relative_motion->shot_j_rig_camera_index_ = parameter_blocks.size(); + parameter_blocks.push_back(shot_j_rig_camera); + } else { + relative_motion->shot_j_rig_camera_index_ = + relative_motion->shot_i_rig_camera_index_; + } } problem.AddResidualBlock(cost_function, relative_motion_loss, parameter_blocks); @@ -841,11 +845,15 @@ void BundleAdjuster::Run() { } auto shot_j_rig_camera = shot_j.GetRigCamera()->GetValueData().data(); - if (IsRigCameraUseful(*shot_j.GetRigCamera()) && - shot_j_rig_camera != shot_i_rig_camera) { - cost_function->AddParameterBlock(6); - relative_similarity->shot_j_rig_camera_index_ = parameter_blocks.size(); - parameter_blocks.push_back(shot_j_rig_camera); + if (IsRigCameraUseful(*shot_j.GetRigCamera())) { + if (shot_j_rig_camera != shot_i_rig_camera) { + cost_function->AddParameterBlock(6); + relative_similarity->shot_j_rig_camera_index_ = parameter_blocks.size(); + parameter_blocks.push_back(shot_j_rig_camera); + } else { + relative_similarity->shot_j_rig_camera_index_ = + relative_similarity->shot_i_rig_camera_index_; + } } problem.AddResidualBlock(cost_function, relative_similarity_loss, parameter_blocks); @@ -882,11 +890,15 @@ void BundleAdjuster::Run() { } auto shot_j_rig_camera = 
shot_j.GetRigCamera()->GetValueData().data(); - if (IsRigCameraUseful(*shot_j.GetRigCamera()) && - shot_j_rig_camera != shot_i_rig_camera) { - cost_function->AddParameterBlock(6); - relative_rotation->shot_j_rig_camera_index_ = parameter_blocks.size(); - parameter_blocks.push_back(shot_j_rig_camera); + if (IsRigCameraUseful(*shot_j.GetRigCamera())) { + if (shot_j_rig_camera != shot_i_rig_camera) { + cost_function->AddParameterBlock(6); + relative_rotation->shot_j_rig_camera_index_ = parameter_blocks.size(); + parameter_blocks.push_back(shot_j_rig_camera); + } else { + relative_rotation->shot_j_rig_camera_index_ = + relative_rotation->shot_i_rig_camera_index_; + } } problem.AddResidualBlock(cost_function, relative_rotation_loss, parameter_blocks); @@ -917,11 +929,15 @@ void BundleAdjuster::Run() { } auto shot_j_rig_camera = shot_j.GetRigCamera()->GetValueData().data(); - if (IsRigCameraUseful(*shot_j.GetRigCamera()) && - shot_j_rig_camera != shot_i_rig_camera) { - cost_function->AddParameterBlock(6); - common_position->shot_j_rig_camera_index_ = parameter_blocks.size(); - parameter_blocks.push_back(shot_j_rig_camera); + if (IsRigCameraUseful(*shot_j.GetRigCamera())) { + if (shot_j_rig_camera != shot_i_rig_camera) { + cost_function->AddParameterBlock(6); + common_position->shot_j_rig_camera_index_ = parameter_blocks.size(); + parameter_blocks.push_back(shot_j_rig_camera); + } else { + common_position->shot_j_rig_camera_index_ = + common_position->shot_i_rig_camera_index_; + } } problem.AddResidualBlock(cost_function, nullptr, parameter_blocks); } @@ -1041,25 +1057,33 @@ void BundleAdjuster::Run() { auto shot0_rig_camera = shot0.GetRigCamera()->GetValueData().data(); if (IsRigCameraUseful(*shot0.GetRigCamera())) { cost_function->AddParameterBlock(6); - linear_motion->shot1_rig_camera_index = parameter_blocks.size(); + linear_motion->shot0_rig_camera_index = parameter_blocks.size(); parameter_blocks.push_back(shot0_rig_camera); } auto shot1_rig_camera = 
shot1.GetRigCamera()->GetValueData().data(); - if (IsRigCameraUseful(*shot1.GetRigCamera()) && - shot1_rig_camera != shot0_rig_camera) { - cost_function->AddParameterBlock(6); - linear_motion->shot1_rig_camera_index = parameter_blocks.size(); - parameter_blocks.push_back(shot1_rig_camera); + if (IsRigCameraUseful(*shot1.GetRigCamera())) { + if (shot1_rig_camera != shot0_rig_camera) { + cost_function->AddParameterBlock(6); + linear_motion->shot1_rig_camera_index = parameter_blocks.size(); + parameter_blocks.push_back(shot1_rig_camera); + } else { + linear_motion->shot1_rig_camera_index = + linear_motion->shot0_rig_camera_index; + } } auto shot2_rig_camera = shot2.GetRigCamera()->GetValueData().data(); - if (IsRigCameraUseful(*shot2.GetRigCamera()) && - shot2_rig_camera != shot0_rig_camera && - shot2_rig_camera != shot1_rig_camera) { - cost_function->AddParameterBlock(6); - linear_motion->shot2_rig_camera_index = parameter_blocks.size(); - parameter_blocks.push_back(shot2_rig_camera); + if (IsRigCameraUseful(*shot2.GetRigCamera())) { + if (shot2_rig_camera != shot0_rig_camera && + shot2_rig_camera != shot1_rig_camera) { + cost_function->AddParameterBlock(6); + linear_motion->shot2_rig_camera_index = parameter_blocks.size(); + parameter_blocks.push_back(shot2_rig_camera); + } else { + linear_motion->shot2_rig_camera_index = + linear_motion->shot0_rig_camera_index; + } } problem.AddResidualBlock(cost_function, linear_motion_prior_loss_, parameter_blocks); diff --git a/opensfm/synthetic_data/synthetic_examples.py b/opensfm/synthetic_data/synthetic_examples.py index be863fc9c..ebef2e684 100644 --- a/opensfm/synthetic_data/synthetic_examples.py +++ b/opensfm/synthetic_data/synthetic_examples.py @@ -5,8 +5,8 @@ def synthetic_circle_scene( - reference: Optional[geo.TopocentricConverter] = None, with_panoshot: bool = False -): + reference: Optional[geo.TopocentricConverter] = None, +) -> ss.SyntheticStreetScene: scene_length = 60 points_count = 5000 generator = 
ss.get_scene_generator("circle", scene_length) @@ -15,53 +15,17 @@ def synthetic_circle_scene( [0.2, 0.2, 0.01] ) - # regular sequence - camera_height = 1.5 - camera_interval = 3 - position_perturbation = [0.2, 0.2, 0.01] - rotation_perturbation = 0.2 - camera1 = ss.get_camera("perspective", "1", 0.7, -0.1, 0.01) - scene.add_camera_sequence( - camera1, - scene_length, - camera_height, - camera_interval, - position_perturbation, - rotation_perturbation, - None, - ) - - # panoshot sequence - if with_panoshot: - panoshot_size = 1e-6 - panoshot_count = 6 - panoshot_interval = panoshot_size / panoshot_count - position_perturbation = [ - panoshot_interval, - panoshot_interval, - panoshot_interval, - ] - rotation_perturbation = 3.14 - camera2 = ss.get_camera("perspective", "2", 0.5, -0.1, 0.01) - scene.add_camera_sequence( - camera2, - 1.0, - camera_height, - panoshot_interval, - position_perturbation, - rotation_perturbation, - None, - panoshot_size, - ) - + make_regular_scene(scene_length, scene) return scene -def synthetic_cube_scene(): +def synthetic_cube_scene() -> ss.SyntheticCubeScene: return ss.SyntheticCubeScene(10, 1000, 0.001) -def synthetic_rig_scene(reference: Optional[geo.TopocentricConverter] = None): +def synthetic_rig_scene( + reference: Optional[geo.TopocentricConverter] = None, +) -> ss.SyntheticStreetScene: scene_length = 20 points_count = 5000 generator = ss.get_scene_generator("line", scene_length) @@ -69,7 +33,11 @@ def synthetic_rig_scene(reference: Optional[geo.TopocentricConverter] = None): scene.add_street(points_count, 15, 12).perturb_floor([0, 0, 0.1]).perturb_walls( [0.2, 0.2, 0.01] ) + make_rig_scene(scene_length, scene) + return scene + +def make_rig_scene(scene_length: float, scene: ss.SyntheticStreetScene) -> None: camera_height = 2 camera_interval = 3 position_perturbation = [0.2, 0.2, 0.01] @@ -100,4 +68,20 @@ def synthetic_rig_scene(reference: Optional[geo.TopocentricConverter] = None): position_perturbation, rotation_perturbation, ) - 
return scene + + +def make_regular_scene(scene_length: float, scene: ss.SyntheticStreetScene) -> None: + camera_height = 1.5 + camera_interval = 3 + position_perturbation = [0.2, 0.2, 0.01] + rotation_perturbation = 0.2 + camera1 = ss.get_camera("perspective", "1", 0.7, -0.1, 0.01) + scene.add_camera_sequence( + camera1, + scene_length, + camera_height, + camera_interval, + position_perturbation, + rotation_perturbation, + None, + ) diff --git a/opensfm/synthetic_data/synthetic_scene.py b/opensfm/synthetic_data/synthetic_scene.py index 58f36f467..c97c93cc7 100644 --- a/opensfm/synthetic_data/synthetic_scene.py +++ b/opensfm/synthetic_data/synthetic_scene.py @@ -480,7 +480,7 @@ def compare( "absolute_position_rmse": sm.rmse(absolute_position), "absolute_position_mad": sm.mad(absolute_position), "absolute_rotation_rmse": sm.rmse(absolute_rotation), - "absolute_rotation_mad": sm.mad(absolute_rotation), + "absolute_rotation_median": np.median(absolute_rotation), "absolute_points_rmse": sm.rmse(absolute_points), "absolute_points_mad": sm.mad(absolute_points), "absolute_gps_rmse": sm.rmse(absolute_gps), @@ -494,7 +494,7 @@ def compare( "aligned_position_rmse": sm.rmse(aligned_position), "aligned_position_mad": sm.mad(aligned_position), "aligned_rotation_rmse": sm.rmse(aligned_rotation), - "aligned_rotation_mad": sm.mad(aligned_rotation), + "aligned_rotation_median": np.median(aligned_rotation), "aligned_gps_rmse": sm.rmse(aligned_gps), "aligned_gps_mad": sm.mad(aligned_gps), "aligned_points_rmse": sm.rmse(aligned_points), From 9766a11e11544fc71fe689f33b34d0610cca2944 Mon Sep 17 00:00:00 2001 From: Pyre Bot Jr <> Date: Wed, 2 Feb 2022 14:01:35 -0800 Subject: [PATCH 16/81] Add annotations to `mapillary/opensfm` Reviewed By: shannonzhu Differential Revision: D33949277 fbshipit-source-id: 5f06d07c0cbf49631646da15e1a5e933cc9516b9 --- annotation_gui_gcp/lib/geometry.py | 4 +- annotation_gui_gcp/main.py | 4 +- opensfm/actions/align_submodels.py | 2 +- opensfm/actions/bundle.py 
| 2 +- opensfm/actions/compute_depthmaps.py | 2 +- opensfm/actions/create_tracks.py | 4 +- opensfm/actions/export_bundler.py | 6 +- opensfm/actions/export_colmap.py | 20 +-- opensfm/actions/export_openmvs.py | 4 +- opensfm/actions/export_ply.py | 2 +- opensfm/actions/export_report.py | 2 +- opensfm/actions/extend_reconstruction.py | 2 +- opensfm/actions/match_features.py | 4 +- opensfm/actions/mesh.py | 2 +- opensfm/actions/reconstruct.py | 2 +- opensfm/actions/reconstruct_from_prior.py | 2 +- opensfm/bow.py | 2 +- opensfm/geo.py | 2 +- opensfm/log.py | 2 +- opensfm/test/data_generation.py | 2 +- opensfm/test/test_bundle.py | 114 +++++++++++-- opensfm/test/test_commands.py | 8 +- opensfm/test/test_dataset.py | 2 +- opensfm/test/test_datastructures.py | 152 ++++++++++-------- opensfm/test/test_dense.py | 6 +- opensfm/test/test_geo.py | 10 +- opensfm/test/test_geometry.py | 8 +- opensfm/test/test_io.py | 18 +-- opensfm/test/test_matching.py | 25 ++- opensfm/test/test_multiview.py | 20 +-- opensfm/test/test_pairs_selection.py | 26 +-- opensfm/test/test_reconstruction_alignment.py | 16 +- .../test/test_reconstruction_incremental.py | 4 +- opensfm/test/test_reconstruction_resect.py | 20 ++- .../test_reconstruction_shot_neighborhood.py | 16 +- .../test/test_reconstruction_triangulation.py | 2 +- opensfm/test/test_robust.py | 28 ++-- opensfm/test/test_stats.py | 28 ++-- opensfm/test/test_triangulation.py | 19 ++- opensfm/test/test_types.py | 36 +++-- opensfm/test/test_undistort.py | 4 +- opensfm/test/test_vlad.py | 8 +- opensfm/video.py | 9 +- 43 files changed, 402 insertions(+), 249 deletions(-) diff --git a/annotation_gui_gcp/lib/geometry.py b/annotation_gui_gcp/lib/geometry.py index 34e8803dd..cdd572c75 100644 --- a/annotation_gui_gcp/lib/geometry.py +++ b/annotation_gui_gcp/lib/geometry.py @@ -1,7 +1,7 @@ from opensfm import dataset -def get_all_track_observations(gcp_database, track_id): +def get_all_track_observations(gcp_database, track_id: str): print(f"Getting 
all observations of track {track_id}") data = dataset.DataSet(gcp_database.path) tracks_manager = data.load_tracks_manager() @@ -9,7 +9,7 @@ def get_all_track_observations(gcp_database, track_id): return {shot_id: obs.point for shot_id, obs in track_obs.items()} -def get_tracks_visible_in_image(gcp_database, image_key, min_len=5): +def get_tracks_visible_in_image(gcp_database, image_key, min_len: int=5): print(f"Getting track observations visible in {image_key}") data = dataset.DataSet(gcp_database.path) tracks_manager = data.load_tracks_manager() diff --git a/annotation_gui_gcp/main.py b/annotation_gui_gcp/main.py index 5e5c439f6..8710cba4f 100644 --- a/annotation_gui_gcp/main.py +++ b/annotation_gui_gcp/main.py @@ -88,7 +88,7 @@ def load_rig_assignments(root: Path) -> t.Dict[str, t.List[str]]: def load_sequence_database_from_file( - root, fname="sequence_database.json", skip_missing=False + root, fname="sequence_database.json", skip_missing: bool=False ): """ Simply loads a sequence file and returns it. @@ -203,7 +203,7 @@ def group_images(args): return groups_from_sequence_database -def find_suitable_cad_paths(path_cad_files, path_dataset, n_paths=6): +def find_suitable_cad_paths(path_cad_files, path_dataset, n_paths: int=6): if path_cad_files is None: return [] diff --git a/opensfm/actions/align_submodels.py b/opensfm/actions/align_submodels.py index 3f1d98db7..3e70501f4 100644 --- a/opensfm/actions/align_submodels.py +++ b/opensfm/actions/align_submodels.py @@ -3,7 +3,7 @@ from opensfm.dataset import DataSet -def run_dataset(data: DataSet): +def run_dataset(data: DataSet) -> None: """ Align submodel reconstructions for of MetaDataSet. 
""" meta_data = metadataset.MetaDataSet(data.data_path) diff --git a/opensfm/actions/bundle.py b/opensfm/actions/bundle.py index a6417f3aa..dea614365 100644 --- a/opensfm/actions/bundle.py +++ b/opensfm/actions/bundle.py @@ -2,7 +2,7 @@ from opensfm.dataset_base import DataSetBase -def run_dataset(dataset: DataSetBase, input, output): +def run_dataset(dataset: DataSetBase, input, output) -> None: """Bundle a reconstructions. Args: diff --git a/opensfm/actions/compute_depthmaps.py b/opensfm/actions/compute_depthmaps.py index dc3249221..3d1a7e7a1 100644 --- a/opensfm/actions/compute_depthmaps.py +++ b/opensfm/actions/compute_depthmaps.py @@ -5,7 +5,7 @@ from opensfm.dataset import DataSet -def run_dataset(data: DataSet, subfolder, interactive): +def run_dataset(data: DataSet, subfolder, interactive) -> None: """Compute depthmap on a dataset with has SfM ran already. Args: diff --git a/opensfm/actions/create_tracks.py b/opensfm/actions/create_tracks.py index 8fa4b57f7..e119eb07f 100644 --- a/opensfm/actions/create_tracks.py +++ b/opensfm/actions/create_tracks.py @@ -5,7 +5,7 @@ from opensfm.dataset_base import DataSetBase -def run_dataset(data: DataSetBase): +def run_dataset(data: DataSetBase) -> None: """Link matches pair-wise matches into tracks.""" start = timer() @@ -36,7 +36,7 @@ def run_dataset(data: DataSetBase): def write_report( data: DataSetBase, tracks_manager, features_time, matches_time, tracks_time -): +) -> None: view_graph = [ (k[0], k[1], v) for k, v in tracks_manager.get_all_pairs_connectivity().items() ] diff --git a/opensfm/actions/export_bundler.py b/opensfm/actions/export_bundler.py index 2b2e55dc9..53e893ad6 100644 --- a/opensfm/actions/export_bundler.py +++ b/opensfm/actions/export_bundler.py @@ -5,7 +5,7 @@ from opensfm.dataset import DataSet -def run_dataset(data: DataSet, list_path, bundle_path, undistorted): +def run_dataset(data: DataSet, list_path, bundle_path, undistorted) -> None: """Export reconstruction to bundler format. 
Args: @@ -36,8 +36,8 @@ def run_dataset(data: DataSet, list_path, bundle_path, undistorted): def export_bundler( - image_list, reconstructions, track_manager, bundle_file_path, list_file_path -): + image_list, reconstructions, track_manager, bundle_file_path: str, list_file_path: str +) -> None: """ Generate a reconstruction file that is consistent with Bundler's format """ diff --git a/opensfm/actions/export_colmap.py b/opensfm/actions/export_colmap.py index 3aae53ad4..e3ff926ab 100644 --- a/opensfm/actions/export_colmap.py +++ b/opensfm/actions/export_colmap.py @@ -47,7 +47,7 @@ I_3 = np.eye(3) -def run_dataset(data: DataSet, binary): +def run_dataset(data: DataSet, binary) -> None: """Export reconstruction to COLMAP format.""" export_folder = os.path.join(data.data_path, "colmap_export") @@ -89,7 +89,7 @@ def run_dataset(data: DataSet, binary): fwb.write(f.read()) -IS_PYTHON3 = int(sys.version_info[0]) >= 3 +IS_PYTHON3: bool = int(sys.version_info[0]) >= 3 MAX_IMAGE_ID = 2 ** 31 - 1 @@ -108,7 +108,7 @@ def run_dataset(data: DataSet, binary): data BLOB, FOREIGN KEY(image_id) REFERENCES images(image_id) ON DELETE CASCADE)""" -CREATE_IMAGES_TABLE = """CREATE TABLE IF NOT EXISTS images ( +CREATE_IMAGES_TABLE: str = """CREATE TABLE IF NOT EXISTS images ( image_id INTEGER PRIMARY KEY AUTOINCREMENT NOT NULL, name TEXT NOT NULL UNIQUE, camera_id INTEGER NOT NULL, @@ -153,7 +153,7 @@ def run_dataset(data: DataSet, binary): CREATE_NAME_INDEX = "CREATE UNIQUE INDEX IF NOT EXISTS index_name ON images(name)" -CREATE_ALL = "; ".join( +CREATE_ALL: str = "; ".join( [ CREATE_CAMERAS_TABLE, CREATE_IMAGES_TABLE, @@ -394,7 +394,7 @@ def export_features(data, db, images_map): return features_map -def export_matches(data, db, features_map, images_map): +def export_matches(data, db, features_map, images_map) -> None: matches_per_pair = {} for image1 in data.images(): matches = data.load_matches(image1) @@ -422,7 +422,7 @@ def export_matches(data, db, features_map, images_map): 
db.add_matches(images_map[pair[0]], images_map[pair[1]], inliers) -def export_cameras_reconstruction(data, path, camera_map, binary=False): +def export_cameras_reconstruction(data, path, camera_map, binary: bool=False) -> None: reconstructions = data.load_reconstruction() cameras = {} for reconstruction in reconstructions: @@ -455,8 +455,8 @@ def export_cameras_reconstruction(data, path, camera_map, binary=False): def export_images_reconstruction( - data, path, camera_map, images_map, features_map, points_map, binary=False -): + data, path, camera_map, images_map, features_map, points_map, binary: bool=False +) -> None: reconstructions = data.load_reconstruction() tracks_manager = data.load_tracks_manager() @@ -529,7 +529,7 @@ def export_images_reconstruction( fout.close() -def export_points_reconstruction(data, path, images_map, binary=False): +def export_points_reconstruction(data, path, images_map, binary: bool=False): reconstructions = data.load_reconstruction() tracks_manager = data.load_tracks_manager() @@ -602,7 +602,7 @@ def angle_axis_to_quaternion(angle_axis): return [qw, qx, qy, qz] -def export_ini_file(path, db_path, images_path, io_handler): +def export_ini_file(path, db_path, images_path, io_handler) -> None: with io_handler.open_wt(os.path.join(path, "project.ini")) as fout: fout.write("log_to_stderr=false\nlog_level=2\n") fout.write("database_path=%s\n" % db_path) diff --git a/opensfm/actions/export_openmvs.py b/opensfm/actions/export_openmvs.py index b5606bbcf..23a346e11 100644 --- a/opensfm/actions/export_openmvs.py +++ b/opensfm/actions/export_openmvs.py @@ -6,7 +6,7 @@ from opensfm.dataset import DataSet, UndistortedDataSet -def run_dataset(data: DataSet, image_list): +def run_dataset(data: DataSet, image_list) -> None: """ Export reconstruction to OpenMVS format. 
""" udata = data.undistorted_dataset() @@ -24,7 +24,7 @@ def run_dataset(data: DataSet, image_list): export(reconstructions[0], tracks_manager, udata, export_only) -def export(reconstruction, tracks_manager, udata: UndistortedDataSet, export_only): +def export(reconstruction, tracks_manager, udata: UndistortedDataSet, export_only) -> None: exporter = pydense.OpenMVSExporter() for camera in reconstruction.cameras.values(): if camera.projection_type == "perspective": diff --git a/opensfm/actions/export_ply.py b/opensfm/actions/export_ply.py index 1d344b5c8..3e718c01b 100644 --- a/opensfm/actions/export_ply.py +++ b/opensfm/actions/export_ply.py @@ -6,7 +6,7 @@ from opensfm.dataset import DataSet -def run_dataset(data: DataSet, no_cameras, no_points, depthmaps, point_num_views): +def run_dataset(data: DataSet, no_cameras: bool, no_points: bool, depthmaps, point_num_views: bool) -> None: """Export reconstruction to PLY format Args: diff --git a/opensfm/actions/export_report.py b/opensfm/actions/export_report.py index 92d75222a..8b50e4f49 100644 --- a/opensfm/actions/export_report.py +++ b/opensfm/actions/export_report.py @@ -2,7 +2,7 @@ from opensfm.dataset import DataSet -def run_dataset(data: DataSet): +def run_dataset(data: DataSet) -> None: """Export a nice report based on previously generated statistics Args: diff --git a/opensfm/actions/extend_reconstruction.py b/opensfm/actions/extend_reconstruction.py index 0994ab4bc..9b1da7b97 100644 --- a/opensfm/actions/extend_reconstruction.py +++ b/opensfm/actions/extend_reconstruction.py @@ -2,7 +2,7 @@ from opensfm.dataset_base import DataSetBase -def run_dataset(data: DataSetBase, input, output): +def run_dataset(data: DataSetBase, input, output) -> None: recs_base = data.load_reconstruction(input) if len(recs_base) == 0: return diff --git a/opensfm/actions/match_features.py b/opensfm/actions/match_features.py index 04b9fbb91..84b1937a1 100644 --- a/opensfm/actions/match_features.py +++ 
b/opensfm/actions/match_features.py @@ -5,7 +5,7 @@ from opensfm.dataset_base import DataSetBase -def run_dataset(data: DataSetBase): +def run_dataset(data: DataSetBase) -> None: """Match features between image pairs.""" images = data.images() @@ -18,7 +18,7 @@ def run_dataset(data: DataSetBase): write_report(data, preport, list(pairs_matches.keys()), end - start) -def write_report(data: DataSetBase, preport, pairs, wall_time): +def write_report(data: DataSetBase, preport, pairs, wall_time) -> None: report = { "wall_time": wall_time, "num_pairs": len(pairs), diff --git a/opensfm/actions/mesh.py b/opensfm/actions/mesh.py index ad86667b7..d3a952c93 100644 --- a/opensfm/actions/mesh.py +++ b/opensfm/actions/mesh.py @@ -2,7 +2,7 @@ from opensfm.dataset_base import DataSetBase -def run_dataset(data: DataSetBase): +def run_dataset(data: DataSetBase) -> None: """Add delaunay meshes to the reconstruction.""" tracks_manager = data.load_tracks_manager() diff --git a/opensfm/actions/reconstruct.py b/opensfm/actions/reconstruct.py index 21fc205ba..9c952fd7d 100644 --- a/opensfm/actions/reconstruct.py +++ b/opensfm/actions/reconstruct.py @@ -3,7 +3,7 @@ from opensfm.dataset_base import DataSetBase -def run_dataset(data: DataSetBase, algorithm: reconstruction.ReconstructionAlgorithm): +def run_dataset(data: DataSetBase, algorithm: reconstruction.ReconstructionAlgorithm) -> None: """Compute the SfM reconstruction.""" tracks_manager = data.load_tracks_manager() diff --git a/opensfm/actions/reconstruct_from_prior.py b/opensfm/actions/reconstruct_from_prior.py index 9107c05c0..067be1fa2 100644 --- a/opensfm/actions/reconstruct_from_prior.py +++ b/opensfm/actions/reconstruct_from_prior.py @@ -3,7 +3,7 @@ from opensfm.dataset_base import DataSetBase -def run_dataset(data: DataSetBase, input: str, output: str): +def run_dataset(data: DataSetBase, input: str, output: str) -> None: """Reconstruct the from a prior reconstruction.""" tracks_manager = data.load_tracks_manager() diff --git 
a/opensfm/bow.py b/opensfm/bow.py index b1942c29b..544bcb079 100644 --- a/opensfm/bow.py +++ b/opensfm/bow.py @@ -59,6 +59,6 @@ def load_vlad_words_and_frequencies(config): return vlad["words"], vlad["frequencies"] -def load_bows(config): +def load_bows(config) -> BagOfWords: words, frequencies = load_bow_words_and_frequencies(config) return BagOfWords(words, frequencies) diff --git a/opensfm/geo.py b/opensfm/geo.py index 1b24fd5f8..a09a584bc 100644 --- a/opensfm/geo.py +++ b/opensfm/geo.py @@ -4,7 +4,7 @@ WGS84_b = 6356752.314245 -def ecef_from_lla(lat, lon, alt): +def ecef_from_lla(lat, lon, alt: float): """ Compute ECEF XYZ from latitude, longitude and altitude. diff --git a/opensfm/log.py b/opensfm/log.py index fc594f91c..d9c4adf16 100644 --- a/opensfm/log.py +++ b/opensfm/log.py @@ -4,7 +4,7 @@ from typing import Optional -def setup(): +def setup() -> None: logging.basicConfig( format="%(asctime)s %(levelname)s: %(message)s", level=logging.DEBUG ) diff --git a/opensfm/test/data_generation.py b/opensfm/test/data_generation.py index ec3916b15..04dc28f18 100644 --- a/opensfm/test/data_generation.py +++ b/opensfm/test/data_generation.py @@ -22,6 +22,6 @@ def create_berlin_test_folder(tmpdir): return opensfm.dataset.DataSet(dst) -def save_config(config, path): +def save_config(config, path) -> None: with io.open_wt(os.path.join(path, "config.yaml")) as fout: yaml.safe_dump(config, fout, default_flow_style=False) diff --git a/opensfm/test/test_bundle.py b/opensfm/test/test_bundle.py index 6ea1411ea..1cd60e55c 100644 --- a/opensfm/test/test_bundle.py +++ b/opensfm/test/test_bundle.py @@ -14,7 +14,7 @@ ) -def test_unicode_strings_in_bundle(): +def test_unicode_strings_in_bundle() -> None: """Test that byte and unicode strings can be used as camera ids.""" ba = pybundle.BundleAdjuster() @@ -26,6 +26,7 @@ def test_unicode_strings_in_bundle(): camera.id = unicode_id ba.add_camera(camera.id, camera, camera, True) + # pyre-fixme[8]: Attribute has type `str`; used as 
`bytes`. camera.id = byte_id ba.add_camera(camera.id, camera, camera, True) @@ -39,11 +40,13 @@ def bundle_adjuster(): return ba -def test_sigleton(bundle_adjuster): +def test_sigleton(bundle_adjuster) -> None: """Single camera test""" sa = bundle_adjuster sa.add_rig_instance( "1", + # pyre-fixme[6]: For 1st param expected `ndarray` but got `List[float]`. + # pyre-fixme[6]: For 2nd param expected `ndarray` but got `List[int]`. pygeometry.Pose([0.5, 0, 0], [0, 0, 0]), {"1": "cam1"}, {"1": "rig_cam1"}, @@ -58,12 +61,14 @@ def test_sigleton(bundle_adjuster): assert np.allclose(s1.translation, [1, 0, 0], atol=1e-6) -def test_singleton_pan_tilt_roll(bundle_adjuster): +def test_singleton_pan_tilt_roll(bundle_adjuster) -> None: """Single camera test with pan, tilt, roll priors.""" pan, tilt, roll = 1, 0.3, 0.2 sa = bundle_adjuster sa.add_rig_instance( "1", + # pyre-fixme[6]: For 1st param expected `ndarray` but got `List[float]`. + # pyre-fixme[6]: For 2nd param expected `ndarray` but got `List[int]`. pygeometry.Pose([0.5, 0, 0], [0, 0, 0]), {"1": "cam1"}, {"1": "rig_cam1"}, @@ -90,7 +95,7 @@ def _projection_errors_std(points): return np.std(all_errors) -def test_bundle_projection_fixed_internals(scene_synthetic): +def test_bundle_projection_fixed_internals(scene_synthetic) -> None: reference = scene_synthetic.reconstruction camera_priors = dict(reference.cameras.items()) rig_priors = dict(reference.rig_cameras.items()) @@ -127,11 +132,13 @@ def test_bundle_projection_fixed_internals(scene_synthetic): assert reference.cameras["1"].k2 == orig_camera.k2 -def test_pair(bundle_adjuster): +def test_pair(bundle_adjuster) -> None: """Simple two camera test""" sa = bundle_adjuster sa.add_rig_instance( "1", + # pyre-fixme[6]: For 1st param expected `ndarray` but got `List[int]`. + # pyre-fixme[6]: For 2nd param expected `ndarray` but got `List[int]`. 
pygeometry.Pose([0, 0, 0], [0, 0, 0]), {"1": "cam1"}, {"1": "rig_cam1"}, @@ -139,6 +146,8 @@ def test_pair(bundle_adjuster): ) sa.add_rig_instance( "2", + # pyre-fixme[6]: For 1st param expected `ndarray` but got `List[int]`. + # pyre-fixme[6]: For 2nd param expected `ndarray` but got `List[int]`. pygeometry.Pose([0, 0, 0], [0, 0, 0]), {"2": "cam1"}, {"2": "rig_cam1"}, @@ -149,6 +158,8 @@ def test_pair(bundle_adjuster): sa.add_reconstruction_shot("12", 4, "2") sa.set_scale_sharing("12", True) sa.add_relative_motion( + # pyre-fixme[6]: For 5th param expected `ndarray` but got `List[int]`. + # pyre-fixme[6]: For 6th param expected `ndarray` but got `List[int]`. pybundle.RelativeMotion("12", "1", "12", "2", [0, 0, 0], [-1, 0, 0], 1) ) sa.add_rig_instance_position_prior("1", [0, 0, 0], [1, 1, 1], "") @@ -165,11 +176,13 @@ def test_pair(bundle_adjuster): assert np.allclose(r12.get_scale("2"), 0.5) -def test_pair_with_points_priors(bundle_adjuster): +def test_pair_with_points_priors(bundle_adjuster) -> None: """Simple two rigs test with a point constraint for anchoring""" sa = bundle_adjuster sa.add_rig_instance( "1", + # pyre-fixme[6]: For 1st param expected `ndarray` but got `List[float]`. + # pyre-fixme[6]: For 2nd param expected `ndarray` but got `List[float]`. pygeometry.Pose([1e-3, 1e-3, 1e-3], [1e-3, 1e-3, 1e-3]), {"1": "cam1"}, {"1": "rig_cam1"}, @@ -177,6 +190,8 @@ def test_pair_with_points_priors(bundle_adjuster): ) sa.add_rig_instance( "2", + # pyre-fixme[6]: For 1st param expected `ndarray` but got `List[float]`. + # pyre-fixme[6]: For 2nd param expected `ndarray` but got `List[float]`. pygeometry.Pose([1e-3, 1e-3, 1e-3], [1e-3, 1e-3, 1e-3]), {"2": "cam1"}, {"2": "rig_cam1"}, @@ -196,6 +211,8 @@ def test_pair_with_points_priors(bundle_adjuster): sa.set_scale_sharing("12", True) sa.add_relative_motion( + # pyre-fixme[6]: For 5th param expected `ndarray` but got `List[int]`. + # pyre-fixme[6]: For 6th param expected `ndarray` but got `List[int]`. 
pybundle.RelativeMotion("12", "1", "12", "2", [0, 0, 0], [-1, 0, 0], 1) ) @@ -222,11 +239,13 @@ def test_pair_with_points_priors(bundle_adjuster): assert np.allclose(r12.get_scale("2"), 0.5) -def test_pair_non_rigid(bundle_adjuster): +def test_pair_non_rigid(bundle_adjuster) -> None: """Simple two rigs test""" sa = bundle_adjuster sa.add_rig_instance( "1", + # pyre-fixme[6]: For 1st param expected `ndarray` but got `List[int]`. + # pyre-fixme[6]: For 2nd param expected `ndarray` but got `List[int]`. pygeometry.Pose([0, 0, 0], [0, 0, 0]), {"1": "cam1"}, {"1": "rig_cam1"}, @@ -234,6 +253,8 @@ def test_pair_non_rigid(bundle_adjuster): ) sa.add_rig_instance( "2", + # pyre-fixme[6]: For 1st param expected `ndarray` but got `List[int]`. + # pyre-fixme[6]: For 2nd param expected `ndarray` but got `List[int]`. pygeometry.Pose([0, 0, 0], [0, 0, 0]), {"2": "cam1"}, {"2": "rig_cam1"}, @@ -244,6 +265,8 @@ def test_pair_non_rigid(bundle_adjuster): sa.add_reconstruction_shot("12", 4, "2") sa.set_scale_sharing("12", False) sa.add_relative_similarity( + # pyre-fixme[6]: For 5th param expected `ndarray` but got `List[int]`. + # pyre-fixme[6]: For 6th param expected `ndarray` but got `List[int]`. pybundle.RelativeSimilarity("12", "1", "12", "2", [0, 0, 0], [-1, 0, 0], 1, 1) ) sa.add_rig_instance_position_prior("1", [0, 0, 0], [1, 1, 1], "") @@ -260,11 +283,13 @@ def test_pair_non_rigid(bundle_adjuster): assert np.allclose(r12.get_scale("2"), 0.5) -def test_four_cams_single_reconstruction(bundle_adjuster): +def test_four_cams_single_reconstruction(bundle_adjuster) -> None: """Four rigs, one reconstruction""" sa = bundle_adjuster sa.add_rig_instance( "1", + # pyre-fixme[6]: For 1st param expected `ndarray` but got `List[int]`. + # pyre-fixme[6]: For 2nd param expected `ndarray` but got `List[int]`. 
pygeometry.Pose([0, 0, 0], [0, 0, 0]), {"1": "cam1"}, {"1": "rig_cam1"}, @@ -272,6 +297,8 @@ def test_four_cams_single_reconstruction(bundle_adjuster): ) sa.add_rig_instance( "2", + # pyre-fixme[6]: For 1st param expected `ndarray` but got `List[int]`. + # pyre-fixme[6]: For 2nd param expected `ndarray` but got `List[int]`. pygeometry.Pose([0, 0, 0], [0, 0, 0]), {"2": "cam1"}, {"2": "rig_cam1"}, @@ -279,6 +306,8 @@ def test_four_cams_single_reconstruction(bundle_adjuster): ) sa.add_rig_instance( "3", + # pyre-fixme[6]: For 1st param expected `ndarray` but got `List[int]`. + # pyre-fixme[6]: For 2nd param expected `ndarray` but got `List[int]`. pygeometry.Pose([0, 0, 0], [0, 0, 0]), {"3": "cam1"}, {"3": "rig_cam1"}, @@ -286,6 +315,8 @@ def test_four_cams_single_reconstruction(bundle_adjuster): ) sa.add_rig_instance( "4", + # pyre-fixme[6]: For 1st param expected `ndarray` but got `List[int]`. + # pyre-fixme[6]: For 2nd param expected `ndarray` but got `List[int]`. pygeometry.Pose([0, 0, 0], [0, 0, 0]), {"4": "cam1"}, {"4": "rig_cam1"}, @@ -298,12 +329,18 @@ def test_four_cams_single_reconstruction(bundle_adjuster): sa.add_reconstruction_shot("1234", 1, "4") sa.set_scale_sharing("1234", True) sa.add_relative_motion( + # pyre-fixme[6]: For 5th param expected `ndarray` but got `List[int]`. + # pyre-fixme[6]: For 6th param expected `ndarray` but got `List[int]`. pybundle.RelativeMotion("1234", "1", "1234", "2", [0, 0, 0], [-1, 0, 0], 1) ) sa.add_relative_motion( + # pyre-fixme[6]: For 5th param expected `ndarray` but got `List[int]`. + # pyre-fixme[6]: For 6th param expected `ndarray` but got `List[int]`. pybundle.RelativeMotion("1234", "1", "1234", "3", [0, 0, 0], [0, -1, 0], 1) ) sa.add_relative_motion( + # pyre-fixme[6]: For 5th param expected `ndarray` but got `List[int]`. + # pyre-fixme[6]: For 6th param expected `ndarray` but got `List[int]`. 
pybundle.RelativeMotion("1234", "1", "1234", "4", [0, 0, 0], [0, 0, -1], 1) ) sa.add_rig_instance_position_prior("1", [0, 0, 0], [1, 1, 1], "") @@ -322,11 +359,13 @@ def test_four_cams_single_reconstruction(bundle_adjuster): assert np.allclose(s4.translation, [0, 0, -2], atol=1e-6) -def test_four_cams_single_reconstruction_non_rigid(bundle_adjuster): +def test_four_cams_single_reconstruction_non_rigid(bundle_adjuster) -> None: """Four rigs, one reconstruction""" sa = bundle_adjuster sa.add_rig_instance( "1", + # pyre-fixme[6]: For 1st param expected `ndarray` but got `List[int]`. + # pyre-fixme[6]: For 2nd param expected `ndarray` but got `List[int]`. pygeometry.Pose([0, 0, 0], [0, 0, 0]), {"1": "cam1"}, {"1": "rig_cam1"}, @@ -334,6 +373,8 @@ def test_four_cams_single_reconstruction_non_rigid(bundle_adjuster): ) sa.add_rig_instance( "2", + # pyre-fixme[6]: For 1st param expected `ndarray` but got `List[int]`. + # pyre-fixme[6]: For 2nd param expected `ndarray` but got `List[int]`. pygeometry.Pose([0, 0, 0], [0, 0, 0]), {"2": "cam1"}, {"2": "rig_cam1"}, @@ -341,6 +382,8 @@ def test_four_cams_single_reconstruction_non_rigid(bundle_adjuster): ) sa.add_rig_instance( "3", + # pyre-fixme[6]: For 1st param expected `ndarray` but got `List[int]`. + # pyre-fixme[6]: For 2nd param expected `ndarray` but got `List[int]`. pygeometry.Pose([0, 0, 0], [0, 0, 0]), {"3": "cam1"}, {"3": "rig_cam1"}, @@ -348,6 +391,8 @@ def test_four_cams_single_reconstruction_non_rigid(bundle_adjuster): ) sa.add_rig_instance( "4", + # pyre-fixme[6]: For 1st param expected `ndarray` but got `List[int]`. + # pyre-fixme[6]: For 2nd param expected `ndarray` but got `List[int]`. pygeometry.Pose([0, 0, 0], [0, 0, 0]), {"4": "cam1"}, {"4": "rig_cam1"}, @@ -362,16 +407,22 @@ def test_four_cams_single_reconstruction_non_rigid(bundle_adjuster): sa.add_relative_similarity( pybundle.RelativeSimilarity( + # pyre-fixme[6]: For 5th param expected `ndarray` but got `List[int]`. 
+ # pyre-fixme[6]: For 6th param expected `ndarray` but got `List[int]`. "1234", "1", "1234", "2", [0, 0, 0], [-1, 0, 0], 1, 1 ) ) sa.add_relative_similarity( pybundle.RelativeSimilarity( + # pyre-fixme[6]: For 5th param expected `ndarray` but got `List[int]`. + # pyre-fixme[6]: For 6th param expected `ndarray` but got `List[int]`. "1234", "2", "1234", "3", [0, 0, 0], [-1, -1, 0], 1, 1 ) ) sa.add_relative_similarity( pybundle.RelativeSimilarity( + # pyre-fixme[6]: For 5th param expected `ndarray` but got `List[int]`. + # pyre-fixme[6]: For 6th param expected `ndarray` but got `List[int]`. "1234", "3", "1234", "4", [0, 0, 0], [0, -1, 0], 1, 1 ) ) @@ -398,11 +449,13 @@ def test_four_cams_single_reconstruction_non_rigid(bundle_adjuster): assert np.allclose(r1234.get_scale("4"), 0.5) -def test_four_cams_one_fixed(bundle_adjuster): +def test_four_cams_one_fixed(bundle_adjuster) -> None: """Four rigs, one reconstruction""" sa = bundle_adjuster sa.add_rig_instance( "1", + # pyre-fixme[6]: For 1st param expected `ndarray` but got `List[int]`. + # pyre-fixme[6]: For 2nd param expected `ndarray` but got `List[int]`. pygeometry.Pose([0, 0, 0], [0, 0, 0]), {"1": "cam1"}, {"1": "rig_cam1"}, @@ -410,6 +463,8 @@ def test_four_cams_one_fixed(bundle_adjuster): ) sa.add_rig_instance( "2", + # pyre-fixme[6]: For 1st param expected `ndarray` but got `List[int]`. + # pyre-fixme[6]: For 2nd param expected `ndarray` but got `List[int]`. pygeometry.Pose([0, 0, 0], [0, 0, 0]), {"2": "cam1"}, {"2": "rig_cam1"}, @@ -417,6 +472,8 @@ def test_four_cams_one_fixed(bundle_adjuster): ) sa.add_rig_instance( "3", + # pyre-fixme[6]: For 1st param expected `ndarray` but got `List[int]`. + # pyre-fixme[6]: For 2nd param expected `ndarray` but got `List[int]`. pygeometry.Pose([0, 0, 0], [0, 0, 0]), {"3": "cam1"}, {"3": "rig_cam1"}, @@ -424,6 +481,8 @@ def test_four_cams_one_fixed(bundle_adjuster): ) sa.add_rig_instance( "4", + # pyre-fixme[6]: For 1st param expected `ndarray` but got `List[float]`. 
+ # pyre-fixme[6]: For 2nd param expected `ndarray` but got `List[int]`. pygeometry.Pose([0.0, 0, 0], [0, 0, 0]), {"4": "cam1"}, {"4": "rig_cam1"}, @@ -436,12 +495,18 @@ def test_four_cams_one_fixed(bundle_adjuster): sa.add_reconstruction_shot("1234", 1, "4") sa.set_scale_sharing("1234", True) sa.add_relative_motion( + # pyre-fixme[6]: For 5th param expected `ndarray` but got `List[int]`. + # pyre-fixme[6]: For 6th param expected `ndarray` but got `List[int]`. pybundle.RelativeMotion("1234", "1", "1234", "2", [0, 0, 0], [-1, 0, 0], 1) ) sa.add_relative_motion( + # pyre-fixme[6]: For 5th param expected `ndarray` but got `List[int]`. + # pyre-fixme[6]: For 6th param expected `ndarray` but got `List[int]`. pybundle.RelativeMotion("1234", "1", "1234", "3", [0, 0, 0], [0, -1, 0], 1) ) sa.add_relative_motion( + # pyre-fixme[6]: For 5th param expected `ndarray` but got `List[int]`. + # pyre-fixme[6]: For 6th param expected `ndarray` but got `List[int]`. pybundle.RelativeMotion("1234", "1", "1234", "4", [0, 0, 0], [0, 0, -1], 1) ) sa.add_rig_instance_position_prior("1", [100, 0, 0], [1, 1, 1], "") @@ -460,11 +525,13 @@ def test_four_cams_one_fixed(bundle_adjuster): assert np.allclose(s4.translation, [0, 0, -2], atol=1e-6) -def test_linear_motion_prior_position(bundle_adjuster): +def test_linear_motion_prior_position(bundle_adjuster) -> None: """Three rigs, middle has no gps info. Translation only""" sa = bundle_adjuster sa.add_rig_instance( "1", + # pyre-fixme[6]: For 1st param expected `ndarray` but got `List[int]`. + # pyre-fixme[6]: For 2nd param expected `ndarray` but got `List[int]`. pygeometry.Pose([0, 0, 0], [0, 0, 0]), {"1": "cam1"}, {"1": "rig_cam1"}, @@ -472,6 +539,8 @@ def test_linear_motion_prior_position(bundle_adjuster): ) sa.add_rig_instance( "2", + # pyre-fixme[6]: For 1st param expected `ndarray` but got `List[int]`. + # pyre-fixme[6]: For 2nd param expected `ndarray` but got `List[int]`. 
pygeometry.Pose([0, 0, 0], [0, 0, 0]), {"2": "cam1"}, {"2": "rig_cam1"}, @@ -479,6 +548,8 @@ def test_linear_motion_prior_position(bundle_adjuster): ) sa.add_rig_instance( "3", + # pyre-fixme[6]: For 1st param expected `ndarray` but got `List[int]`. + # pyre-fixme[6]: For 2nd param expected `ndarray` but got `List[int]`. pygeometry.Pose([0, 0, 0], [0, 0, 0]), {"3": "cam1"}, {"3": "rig_cam1"}, @@ -503,11 +574,13 @@ def test_linear_motion_prior_position(bundle_adjuster): assert np.allclose(s3.translation, [-2, 0, 0], atol=1e-6) -def test_linear_motion_prior_rotation(bundle_adjuster): +def test_linear_motion_prior_rotation(bundle_adjuster) -> None: """Three rigs, middle has no gps or orientation info""" sa = bundle_adjuster sa.add_rig_instance( "1", + # pyre-fixme[6]: For 1st param expected `ndarray` but got `List[int]`. + # pyre-fixme[6]: For 2nd param expected `ndarray` but got `List[int]`. pygeometry.Pose([0, 0, 0], [0, 0, 0]), {"1": "cam1"}, {"1": "rig_cam1"}, @@ -515,6 +588,8 @@ def test_linear_motion_prior_rotation(bundle_adjuster): ) sa.add_rig_instance( "2", + # pyre-fixme[6]: For 1st param expected `ndarray` but got `List[int]`. + # pyre-fixme[6]: For 2nd param expected `ndarray` but got `List[int]`. pygeometry.Pose([0, 0, 0], [0, 0, 0]), {"2": "cam1"}, {"2": "rig_cam1"}, @@ -522,6 +597,8 @@ def test_linear_motion_prior_rotation(bundle_adjuster): ) sa.add_rig_instance( "3", + # pyre-fixme[6]: For 1st param expected `ndarray` but got `List[int]`. + # pyre-fixme[6]: For 2nd param expected `ndarray` but got `List[int]`. 
pygeometry.Pose([0, 1, 0], [0, 0, 0]), {"3": "cam1"}, {"3": "rig_cam1"}, @@ -540,7 +617,7 @@ def test_linear_motion_prior_rotation(bundle_adjuster): assert np.allclose(s2.rotation, [0, 0.3, 0], atol=1e-6) -def test_bundle_void_gps_ignored(): +def test_bundle_void_gps_ignored() -> None: """Test that void gps values are ignored.""" camera = pygeometry.Camera.create_perspective(1.0, 0.0, 0.0) camera.id = "camera1" @@ -580,7 +657,7 @@ def test_bundle_void_gps_ignored(): assert np.allclose(shot.pose.get_origin(), np.zeros(3)) -def test_bundle_alignment_prior(): +def test_bundle_alignment_prior() -> None: """Test that cameras are aligned to have the Y axis pointing down.""" camera = pygeometry.Camera.create_perspective(1.0, 0.0, 0.0) camera.id = "camera1" @@ -590,6 +667,7 @@ def test_bundle_alignment_prior(): shot = r.create_shot( "1", camera.id, pygeometry.Pose(np.random.rand(3), np.random.rand(3)) ) + # pyre-fixme[8]: Attribute has type `ndarray`; used as `List[int]`. shot.metadata.gps_position.value = [0, 0, 0] shot.metadata.gps_accuracy.value = 1 @@ -606,11 +684,13 @@ def test_bundle_alignment_prior(): assert np.allclose(shot.pose.transform([0, 0, 1]), [0, -1, 0], atol=1e-7) -def test_heatmaps_position(bundle_adjuster): +def test_heatmaps_position(bundle_adjuster) -> None: """Three cameras. Same heatmap different offsets""" sa = bundle_adjuster sa.add_rig_instance( "1", + # pyre-fixme[6]: For 1st param expected `ndarray` but got `List[int]`. + # pyre-fixme[6]: For 2nd param expected `ndarray` but got `List[int]`. pygeometry.Pose([0, 0, 0], [0, 0, 0]), {"1": "cam1"}, {"1": "rig_cam1"}, @@ -618,6 +698,8 @@ def test_heatmaps_position(bundle_adjuster): ) sa.add_rig_instance( "2", + # pyre-fixme[6]: For 1st param expected `ndarray` but got `List[int]`. + # pyre-fixme[6]: For 2nd param expected `ndarray` but got `List[int]`. 
pygeometry.Pose([0, 0, 0], [0, 0, 0]), {"2": "cam1"}, {"2": "rig_cam1"}, @@ -625,6 +707,8 @@ def test_heatmaps_position(bundle_adjuster): ) sa.add_rig_instance( "3", + # pyre-fixme[6]: For 1st param expected `ndarray` but got `List[int]`. + # pyre-fixme[6]: For 2nd param expected `ndarray` but got `List[int]`. pygeometry.Pose([0, 0, 0], [0, 0, 0]), {"3": "cam1"}, {"3": "rig_cam1"}, diff --git a/opensfm/test/test_commands.py b/opensfm/test/test_commands.py index 9f1c62e46..2e30159b8 100644 --- a/opensfm/test/test_commands.py +++ b/opensfm/test/test_commands.py @@ -5,14 +5,14 @@ from opensfm.test import data_generation, utils -def run_command(command, args): +def run_command(command, args) -> None: parser = argparse.ArgumentParser() command.add_arguments(parser) parsed_args = parser.parse_args(args) command.run(dataset.DataSet(parsed_args.dataset), parsed_args) -def test_run_all(tmpdir): +def test_run_all(tmpdir) -> None: data = data_generation.create_berlin_test_folder(tmpdir) run_all_commands = [ commands.extract_metadata, @@ -54,13 +54,13 @@ def test_run_all(tmpdir): check_prior(data, output_rec_path) -def check_reconstruction(data: dataset.DataSet): +def check_reconstruction(data: dataset.DataSet) -> None: reconstruction = data.load_reconstruction() assert len(reconstruction[0].shots) == 3 assert len(reconstruction[0].points) > 1000 -def check_prior(data: dataset.DataSet, output_rec_path: str): +def check_prior(data: dataset.DataSet, output_rec_path: str) -> None: reconstruction = data.load_reconstruction() # load old reconstruction prior_rec = data.load_reconstruction(output_rec_path) for shot_id, shot in reconstruction[0].shots.items(): diff --git a/opensfm/test/test_dataset.py b/opensfm/test/test_dataset.py index 9206f3226..d683aab5d 100644 --- a/opensfm/test/test_dataset.py +++ b/opensfm/test/test_dataset.py @@ -3,7 +3,7 @@ from opensfm.test import data_generation -def test_dataset_load_features_sift(tmpdir): +def test_dataset_load_features_sift(tmpdir) -> 
None: data = data_generation.create_berlin_test_folder(tmpdir) assert len(data.images()) == 3 diff --git a/opensfm/test/test_datastructures.py b/opensfm/test/test_datastructures.py index 939322cc1..dbba54f8f 100644 --- a/opensfm/test/test_datastructures.py +++ b/opensfm/test/test_datastructures.py @@ -14,12 +14,12 @@ def _create_reconstruction( - n_cameras=0, + n_cameras: int=0, n_shots_cam=None, n_pano_shots_cam=None, - n_points=0, - dist_to_shots=False, - dist_to_pano_shots=False, + n_points: int=0, + dist_to_shots: bool=False, + dist_to_pano_shots: bool=False, ): """Creates a reconstruction with n_cameras random cameras and shots, where n_shots_cam is a dictionary, containing the @@ -86,7 +86,7 @@ def _create_reconstruction( """ -def test_create_cameras(): +def test_create_cameras() -> None: n_cameras = 100 rec = types.Reconstruction() @@ -106,7 +106,7 @@ def test_create_cameras(): assert len(rec.cameras) == n_cameras -def test_camera_iterators(): +def test_camera_iterators() -> None: n_cameras = 100 rec = _create_reconstruction(n_cameras) @@ -142,14 +142,14 @@ def test_camera_iterators(): assert cam is rec.cameras[cam.id] -def _check_common_cam_properties(cam1, cam2): +def _check_common_cam_properties(cam1, cam2) -> None: assert cam1.id == cam2.id assert cam1.width == cam2.width assert cam1.height == cam2.height assert cam1.projection_type == cam2.projection_type -def test_brown_camera(): +def test_brown_camera() -> None: rec = types.Reconstruction() focal_x = 0.6 focal_y = 0.7 @@ -161,6 +161,8 @@ def test_brown_camera(): p2 = 0.002 k3 = 0.01 cam_cpp = pygeometry.Camera.create_brown( + # pyre-fixme[6]: For 3rd param expected `ndarray` but got `List[float]`. + # pyre-fixme[6]: For 4th param expected `ndarray` but got `List[float]`. 
focal_x, focal_y / focal_x, [c_x, c_y], [k1, k2, k3, p1, p2] ) cam_cpp.width = 800 @@ -179,7 +181,7 @@ def test_brown_camera(): assert cam_cpp.aspect_ratio == c.aspect_ratio -def test_fisheye_camera(): +def test_fisheye_camera() -> None: rec = types.Reconstruction() focal = 0.6 k1 = -0.1 @@ -198,12 +200,14 @@ def test_fisheye_camera(): assert cam_cpp.focal == c.focal -def test_fisheye_opencv_camera(): +def test_fisheye_opencv_camera() -> None: rec = types.Reconstruction() focal = 0.6 aspect_ratio = 0.7 ppoint = [0.51, 0.52] dist = [-0.1, 0.09, 0.08, 0.01] + # pyre-fixme[6]: For 3rd param expected `ndarray` but got `List[float]`. + # pyre-fixme[6]: For 4th param expected `ndarray` but got `List[float]`. cam_cpp = pygeometry.Camera.create_fisheye_opencv(focal, aspect_ratio, ppoint, dist) cam_cpp.width = 800 cam_cpp.height = 600 @@ -220,12 +224,14 @@ def test_fisheye_opencv_camera(): assert cam_cpp.aspect_ratio == c.aspect_ratio -def test_fisheye62_camera(): +def test_fisheye62_camera() -> None: rec = types.Reconstruction() focal = 0.6 aspect_ratio = 0.7 ppoint = [0.51, 0.52] dist = [-0.1, 0.09, 0.08, 0.01, 0.02, 0.05, 0.1, 0.2] # [k1-k6, p1, p2] + # pyre-fixme[6]: For 3rd param expected `ndarray` but got `List[float]`. + # pyre-fixme[6]: For 4th param expected `ndarray` but got `List[float]`. cam_cpp = pygeometry.Camera.create_fisheye62(focal, aspect_ratio, ppoint, dist) cam_cpp.width = 800 cam_cpp.height = 600 @@ -244,12 +250,14 @@ def test_fisheye62_camera(): assert cam_cpp.aspect_ratio == c.aspect_ratio -def test_fisheye624_camera(): +def test_fisheye624_camera() -> None: rec = types.Reconstruction() focal = 0.6 aspect_ratio = 0.7 ppoint = [0.51, 0.52] dist = [-0.1, 0.09, 0.08, 0.01, 0.02, 0.05, 0.1, 0.2, 0.01, -0.003, 0.005, -0.007] # [k1-k6, p1, p2, s0-s3] + # pyre-fixme[6]: For 3rd param expected `ndarray` but got `List[float]`. + # pyre-fixme[6]: For 4th param expected `ndarray` but got `List[float]`. 
cam_cpp = pygeometry.Camera.create_fisheye624(focal, aspect_ratio, ppoint, dist) cam_cpp.width = 800 cam_cpp.height = 600 @@ -270,7 +278,7 @@ def test_fisheye624_camera(): assert cam_cpp.aspect_ratio == c.aspect_ratio -def test_dual_camera(): +def test_dual_camera() -> None: rec = types.Reconstruction() focal = 0.6 k1 = -0.1 @@ -291,7 +299,7 @@ def test_dual_camera(): assert cam_cpp.transition == c.transition -def test_perspective_camera(): +def test_perspective_camera() -> None: rec = types.Reconstruction() focal = 0.6 k1 = -0.1 @@ -310,7 +318,7 @@ def test_perspective_camera(): assert cam_cpp.focal == c.focal -def test_spherical_camera(): +def test_spherical_camera() -> None: rec = types.Reconstruction() cam_cpp = pygeometry.Camera.create_spherical() cam_cpp.width = 800 @@ -321,7 +329,7 @@ def test_spherical_camera(): # Test Metadata -def _help_measurement_test(measurement, attr, val): +def _help_measurement_test(measurement, attr, val) -> None: # Test metadata's has_value properties assert getattr(measurement, attr).has_value is False getattr(measurement, attr).value = val @@ -336,7 +344,7 @@ def _help_measurement_test(measurement, attr, val): assert getattr(measurement, attr).has_value is False -def test_shot_measurement_setter_and_getter(): +def test_shot_measurement_setter_and_getter() -> None: m1 = pymap.ShotMeasurements() # Test basic functionality _help_measurement_test(m1, "capture_time", np.random.rand(1)) @@ -351,7 +359,7 @@ def test_shot_measurement_setter_and_getter(): _help_measurement_test(m1, "sequence_key", "key_test") -def _helper_populate_metadata(m): +def _helper_populate_metadata(m) -> None: m.capture_time.value = np.random.rand(1) m.gps_position.value = np.random.rand(3) m.gps_accuracy.value = np.random.rand(1) @@ -364,7 +372,7 @@ def _helper_populate_metadata(m): m.sequence_key.value = "sequence_key" -def test_shot_measurement_set(): +def test_shot_measurement_set() -> None: m1 = pymap.ShotMeasurements() _helper_populate_metadata(m1) m2 = 
pymap.ShotMeasurements() @@ -378,7 +386,7 @@ def test_shot_measurement_set(): assert_metadata_equal(m1, m3) -def test_shot_create(): +def test_shot_create() -> None: # Given some created shot rec = _create_reconstruction(2) shot1 = rec.create_shot("shot0", "0") @@ -389,7 +397,7 @@ def test_shot_create(): assert len(rec.shots) == 1 -def test_shot_create_existing(): +def test_shot_create_existing() -> None: # Given some created shot rec = _create_reconstruction(2) rec.create_shot("shot0", "0") @@ -400,7 +408,7 @@ def test_shot_create_existing(): rec.create_shot("shot0", "1") -def test_shot_create_more(): +def test_shot_create_more() -> None: # Given some created shot rec = _create_reconstruction(2) rec.create_shot("shot0", "0") @@ -414,7 +422,7 @@ def test_shot_create_more(): assert len(rec.shots) == n_shots -def test_shot_delete_non_existing(): +def test_shot_delete_non_existing() -> None: # Given some created reconstruction rec = _create_reconstruction(2) rec.create_shot("shot0", "0") @@ -425,7 +433,7 @@ def test_shot_delete_non_existing(): rec.remove_shot("abcde") -def test_shot_delete_existing(): +def test_shot_delete_existing() -> None: # Given some created reconstruction n_shots = 10 rec = _create_reconstruction(1, {"0": n_shots}) @@ -439,7 +447,7 @@ def test_shot_delete_existing(): assert len(rec.shots) == n_shots - len(del_shots) -def test_shot_get(): +def test_shot_get() -> None: # Given some created shot rec = _create_reconstruction(1) shot_id = "shot0" @@ -450,7 +458,7 @@ def test_shot_get(): assert shot1 is rec.shots[shot_id] -def test_shot_pose_set(): +def test_shot_pose_set() -> None: # Given some created shot rec = _create_reconstruction(1) shot_id = "shot0" @@ -461,7 +469,7 @@ def test_shot_pose_set(): assert np.allclose(origin, shot.pose.get_origin()) -def test_shot_get_non_existing(): +def test_shot_get_non_existing() -> None: # Given some created shot rec = _create_reconstruction(1) shot_id = "shot0" @@ -474,7 +482,7 @@ def 
test_shot_get_non_existing(): assert shot1 is rec.shots["toto"] -def test_pano_shot_get(): +def test_pano_shot_get() -> None: # Given some created pano shot rec = _create_reconstruction(1) shot_id = "shot0" @@ -485,7 +493,7 @@ def test_pano_shot_get(): assert shot1 is rec.get_pano_shot(shot_id) -def test_pano_shot_get_non_existing(): +def test_pano_shot_get_non_existing() -> None: # Given some created pano shot rec = _create_reconstruction(1) shot_id = "shot0" @@ -498,7 +506,7 @@ def test_pano_shot_get_non_existing(): assert shot1 is rec.shots["toto"] -def test_pano_shot_create(): +def test_pano_shot_create() -> None: # Given some created shot rec = _create_reconstruction(2) shot1 = rec.create_pano_shot("shot0", "0") @@ -509,7 +517,7 @@ def test_pano_shot_create(): assert len(rec.pano_shots) == 1 -def test_pano_shot_create_existing(): +def test_pano_shot_create_existing() -> None: # Given some created pano shot rec = _create_reconstruction(2) rec.create_pano_shot("shot0", "0") @@ -523,7 +531,7 @@ def test_pano_shot_create_existing(): rec.create_pano_shot("shot0", "1") -def test_pano_shot_create_more(): +def test_pano_shot_create_more() -> None: # Given some created pano shot rec = _create_reconstruction(2) rec.create_pano_shot("shot0", "0") @@ -537,7 +545,7 @@ def test_pano_shot_create_more(): assert len(rec.pano_shots) == n_shots -def test_pano_shot_delete_non_existing(): +def test_pano_shot_delete_non_existing() -> None: # Given some created reconstruction rec = _create_reconstruction(2) rec.create_pano_shot("shot0", "0") @@ -548,7 +556,7 @@ def test_pano_shot_delete_non_existing(): rec.remove_pano_shot("abcde") -def test_pano_shot_delete_existing(): +def test_pano_shot_delete_existing() -> None: # Given some created reconstruction n_shots = 10 rec = _create_reconstruction(2) @@ -564,7 +572,7 @@ def test_pano_shot_delete_existing(): assert len(rec.pano_shots) == n_shots - len(del_shots) -def test_shot_merge_cc(): +def test_shot_merge_cc() -> None: # Given some 
created reconstruction rec = _create_reconstruction(1, {"0": 2}) map_shot1 = rec.shots["0"] @@ -576,7 +584,7 @@ def test_shot_merge_cc(): assert map_shot1.merge_cc == 10 -def test_shot_covariance(): +def test_shot_covariance() -> None: # Given some created reconstruction rec = _create_reconstruction(1, {"0": 2}) map_shot1 = rec.shots["0"] @@ -588,7 +596,7 @@ def test_shot_covariance(): assert np.allclose(map_shot1.covariance, np.diag([1, 2, 3])) -def test_shot_covariance_different(): +def test_shot_covariance_different() -> None: # Given some created reconstruction rec = _create_reconstruction(1, {"0": 2}) map_shot1 = rec.shots["0"] @@ -602,7 +610,7 @@ def test_shot_covariance_different(): assert map_shot2.covariance is not map_shot1.covariance -def test_shot_create_remove_create(): +def test_shot_create_remove_create() -> None: # Given some created reconstruction n_shots = 10 rec = _create_reconstruction(1, {"0": n_shots}) @@ -620,7 +628,7 @@ def test_shot_create_remove_create(): assert len(rec.shots) == n_shots -def test_pano_shot_create_remove_create(): +def test_pano_shot_create_remove_create() -> None: # Given some created reconstruction n_shots = 10 rec = _create_reconstruction(1, n_pano_shots_cam={"0": n_shots}) @@ -660,7 +668,7 @@ def _create_rig_instance(): return rec, rig_instance, shot -def test_rig_camera_create(): +def test_rig_camera_create() -> None: rec = _create_reconstruction(1, {"0": 2}) rec.add_rig_camera(_create_rig_camera()) @@ -669,12 +677,12 @@ def test_rig_camera_create(): assert "rig_camera" in rec.rig_cameras.keys() -def test_rig_instance(): +def test_rig_instance() -> None: _, rig_instance, _ = _create_rig_instance() assert list(rig_instance.keys()) == ["0"] -def test_rig_instance_create_default(): +def test_rig_instance_create_default() -> None: # one default rig instance per shot rec, rig_instance, _ = _create_rig_instance() @@ -685,37 +693,37 @@ def test_rig_instance_create_default(): assert list(rec.rig_instances["1"].shots.keys()) 
== ["1"] -def test_rig_instance_create_add_existing(): +def test_rig_instance_create_add_existing() -> None: rec, rig_instance, _ = _create_rig_instance() with pytest.raises(RuntimeError): rec.add_rig_instance(rig_instance) -def test_rig_instance_remove_shot(): +def test_rig_instance_remove_shot() -> None: rec, _, shot = _create_rig_instance() rec.remove_shot(shot.id) assert len(rec.rig_instances["0"].shots) == 0 -def test_rig_shot_modify_pose_raise(): +def test_rig_shot_modify_pose_raise() -> None: _, rig_instance, shot = _create_rig_instance() with pytest.raises(RuntimeError): shot.pose.set_origin(np.array([1, 2, 3])) -def test_rig_shot_modify_pose_succeed(): +def test_rig_shot_modify_pose_succeed() -> None: _, rig_instance, shot = _create_rig_instance() next(iter(rig_instance.rig_cameras.values())).pose = pygeometry.Pose() shot.pose.set_origin(np.array([1, 2, 3])) -def test_rig_shot_set_pose(): +def test_rig_shot_set_pose() -> None: _, rig_instance, shot = _create_rig_instance() with pytest.raises(RuntimeError): shot.pose = pygeometry.Pose() -def test_add_shot_from_shot_correct_value(): +def test_add_shot_from_shot_correct_value() -> None: # Given some created reconstruction (rec) ... 
n_shots = 5 rec = _create_reconstruction(1, n_shots_cam={"0": n_shots}) @@ -737,7 +745,7 @@ def test_add_shot_from_shot_correct_value(): assert_shots_equal(rec.shots[k], rec_new.shots[k]) -def test_shot_metadata_different(): +def test_shot_metadata_different() -> None: # Given some created reconstruction rec = _create_reconstruction(1, n_shots_cam={"0": 2}) shot1 = rec.shots["0"] @@ -748,7 +756,7 @@ def test_shot_metadata_different(): assert shot1.metadata is not shot2.metadata -def test_shot_metadata_assign_equal(): +def test_shot_metadata_assign_equal() -> None: # Given some created reconstruction rec = _create_reconstruction(1, n_shots_cam={"0": 2}) shot1 = rec.shots["0"] @@ -765,7 +773,7 @@ def test_shot_metadata_assign_equal(): assert_metadata_equal(shot1.metadata, shot2.metadata) -def test_add_pano_shot_from_shot_correct_value(): +def test_add_pano_shot_from_shot_correct_value() -> None: # Given some created reconstruction (rec) ... n_shots = 5 rec = _create_reconstruction(1, n_pano_shots_cam={"0": n_shots}) @@ -784,7 +792,7 @@ def test_add_pano_shot_from_shot_correct_value(): assert_shots_equal(rec.pano_shots[k], rec_new.pano_shots[k]) -def test_single_point_create(): +def test_single_point_create() -> None: # Given a created point rec = types.Reconstruction() pt = rec.create_point("0") @@ -794,7 +802,7 @@ def test_single_point_create(): assert len(rec.points) == 1 -def test_single_point_get_existing(): +def test_single_point_get_existing() -> None: # Given a created point rec = types.Reconstruction() pt = rec.create_point("0") @@ -803,7 +811,7 @@ def test_single_point_get_existing(): assert pt == rec.points["0"] and pt == rec.get_point("0") -def test_single_point_get_non_existing(): +def test_single_point_get_non_existing() -> None: # Given a created point rec = types.Reconstruction() rec.create_point("0") @@ -814,7 +822,7 @@ def test_single_point_get_non_existing(): rec.get_point("toto") -def test_single_point_coordinates(): +def 
test_single_point_coordinates() -> None: # Given a created point rec = types.Reconstruction() pt = rec.create_point("0") @@ -827,7 +835,7 @@ def test_single_point_coordinates(): assert np.allclose(pt.coordinates, coord) -def test_single_point_color(): +def test_single_point_color() -> None: # Given a created point rec = types.Reconstruction() pt = rec.create_point("0") @@ -840,7 +848,7 @@ def test_single_point_color(): assert np.allclose(pt.color, color) -def test_point_add_from_point(): +def test_point_add_from_point() -> None: # Given some created reconstruction (rec) ... rec = types.Reconstruction() @@ -864,7 +872,7 @@ def test_point_add_from_point(): assert np.allclose(pt2_1.coordinates, coord2) -def test_point_reproj_errors_assign(): +def test_point_reproj_errors_assign() -> None: # Given some created point rec = _create_reconstruction(n_points=1) pt = rec.points["0"] @@ -878,7 +886,7 @@ def test_point_reproj_errors_assign(): assert np.allclose(pt.reprojection_errors[k], reproj_errors[k]) -def test_point_delete_non_existing(): +def test_point_delete_non_existing() -> None: # Given some created points n_points = 100 rec = _create_reconstruction(n_points=n_points) @@ -889,7 +897,7 @@ def test_point_delete_non_existing(): rec.remove_point("abcdef") -def test_point_delete_existing(): +def test_point_delete_existing() -> None: # Given some created points n_points = 100 rec = _create_reconstruction(n_points=n_points) @@ -903,7 +911,7 @@ def test_point_delete_existing(): assert len(rec.points) == 0 -def test_point_delete_existing_assign_empty(): +def test_point_delete_existing_assign_empty() -> None: # Given some created points n_points = 100 rec = _create_reconstruction(n_points=n_points) @@ -913,7 +921,7 @@ def test_point_delete_existing_assign_empty(): assert len(rec.points) == 0 -def test_single_observation(): +def test_single_observation() -> None: # Given a 1-camera, 1-point reconstruction rec = _create_reconstruction(1, n_shots_cam={"0": 1}, n_points=1) @@ 
-933,7 +941,7 @@ def test_single_observation(): assert obs is not None -def test_single_observation_delete(): +def test_single_observation_delete() -> None: # Given a 1-camera, 1-point reconstruction and corresponding observation rec = _create_reconstruction(1, n_shots_cam={"0": 1}, n_points=1) obs = pymap.Observation(100, 200, 0.5, 255, 0, 0, 100) @@ -950,7 +958,7 @@ def test_single_observation_delete(): assert pt.number_of_observations() == 0 -def test_many_observations_delete(): +def test_many_observations_delete() -> None: # Given a map with 10 shots, 1000 landmarks ... m = pymap.Map() n_cams = 2 @@ -991,7 +999,7 @@ def test_many_observations_delete(): m.clear_observations_and_landmarks() -def test_clean_landmarks_with_min_observations(): +def test_clean_landmarks_with_min_observations() -> None: m = pymap.Map() n_cams = 2 n_shots = 2 @@ -1029,7 +1037,7 @@ def test_clean_landmarks_with_min_observations(): assert len(m.get_landmarks()) == 0 -def test_camera_deepcopy(): +def test_camera_deepcopy() -> None: # Given a camera cam1 = pygeometry.Camera.create_perspective(0.5, 0, 0) @@ -1040,7 +1048,7 @@ def test_camera_deepcopy(): assert cam1.focal == cam2.focal -def test_camera_deepcopy_assign(): +def test_camera_deepcopy_assign() -> None: # Given a camera cam1 = pygeometry.Camera.create_perspective(0.5, 0, 0) @@ -1052,7 +1060,7 @@ def test_camera_deepcopy_assign(): assert cam1.focal != cam2.focal -def test_observation_shot_removal(): +def test_observation_shot_removal() -> None: # Given a reconstruction with 2 shots rec = _create_reconstruction( n_cameras=2, n_shots_cam={"0": 1, "1": 1}, n_points=200, dist_to_shots=True @@ -1073,7 +1081,7 @@ def test_observation_shot_removal(): assert len(p.get_observations()) == 0 -def test_rec_deepcopy(): +def test_rec_deepcopy() -> None: # Given a reconstruction with everything (shots, pano shots, metadata) rec = _create_reconstruction( n_cameras=2, @@ -1088,6 +1096,8 @@ def test_rec_deepcopy(): 
_helper_populate_metadata(shot.metadata) # When we deep-copy it + # pyre-fixme[6]: For 2nd param expected `Optional[Dict[int, typing.Any]]` but + # got `Dict[str, bool]`. rec2 = copy.deepcopy(rec, {"copy_observations": True}) # It has the expected count of data @@ -1133,7 +1143,7 @@ def test_rec_deepcopy(): assert obs1 is not obs_cpy -def test_gcp(): +def test_gcp() -> None: gcp = [] for i in range(0, 10): p = pymap.GroundControlPoint() @@ -1155,7 +1165,7 @@ def test_gcp(): assert pt.observations[1].shot_id == "p2" -def test_add_correspondences_from_tracks_manager(): +def test_add_correspondences_from_tracks_manager() -> None: n_shots = 3 rec = _create_reconstruction( n_cameras=1, diff --git a/opensfm/test/test_dense.py b/opensfm/test/test_dense.py index 058c3dac9..cc83e7e55 100644 --- a/opensfm/test/test_dense.py +++ b/opensfm/test/test_dense.py @@ -4,7 +4,7 @@ from opensfm import types -def test_angle_between_points(): +def test_angle_between_points() -> None: origin = [0, 0, 0] p1 = [1, 0, 0] p2 = [0, 1, 0] @@ -22,7 +22,7 @@ def test_angle_between_points(): assert np.allclose(res, np.pi / 4) -def test_depthmap_to_ply(): +def test_depthmap_to_ply() -> None: height, width = 2, 3 camera = pygeometry.Camera.create_perspective(0.8, 0.0, 0.0) @@ -32,6 +32,8 @@ def test_depthmap_to_ply(): r = types.Reconstruction() r.add_camera(camera) shot = r.create_shot( + # pyre-fixme[6]: For 1st param expected `ndarray` but got `List[float]`. + # pyre-fixme[6]: For 2nd param expected `ndarray` but got `List[float]`. 
"shot1", camera.id, pygeometry.Pose([0.0, 0.0, 0.0], [0.0, 0.0, 0.0]) ) diff --git a/opensfm/test/test_geo.py b/opensfm/test/test_geo.py index beff8b9b9..a6d534be4 100644 --- a/opensfm/test/test_geo.py +++ b/opensfm/test/test_geo.py @@ -2,14 +2,14 @@ from opensfm import geo, pygeo -def test_ecef_lla_consistency(): +def test_ecef_lla_consistency() -> None: lla_before = [46.5274109, 6.5722075, 402.16] ecef = geo.ecef_from_lla(lla_before[0], lla_before[1], lla_before[2]) lla_after = geo.lla_from_ecef(ecef[0], ecef[1], ecef[2]) assert np.allclose(lla_after, lla_before) -def test_ecef_lla_topocentric_consistency(): +def test_ecef_lla_topocentric_consistency() -> None: lla_ref = [46.5, 6.5, 400] lla_before = [46.5274109, 6.5722075, 402.16] enu = geo.topocentric_from_lla( @@ -21,14 +21,14 @@ def test_ecef_lla_topocentric_consistency(): assert np.allclose(lla_after, lla_before) -def test_ecef_lla_consistency_pygeo(): +def test_ecef_lla_consistency_pygeo() -> None: lla_before = [46.5274109, 6.5722075, 402.16] ecef = pygeo.ecef_from_lla(lla_before[0], lla_before[1], lla_before[2]) lla_after = pygeo.lla_from_ecef(ecef[0], ecef[1], ecef[2]) assert np.allclose(lla_after, lla_before) -def test_ecef_lla_topocentric_consistency_pygeo(): +def test_ecef_lla_topocentric_consistency_pygeo() -> None: lla_ref = [46.5, 6.5, 400] lla_before = [46.5274109, 6.5722075, 402.16] enu = pygeo.topocentric_from_lla( @@ -39,6 +39,6 @@ def test_ecef_lla_topocentric_consistency_pygeo(): ) assert np.allclose(lla_after, lla_before) -def test_eq_geo(): +def test_eq_geo() -> None: assert geo.TopocentricConverter(40,30,0) == geo.TopocentricConverter(40,30,0) assert geo.TopocentricConverter(40,32,0) != geo.TopocentricConverter(40,30,0) diff --git a/opensfm/test/test_geometry.py b/opensfm/test/test_geometry.py index f08da355a..ccb5ec728 100644 --- a/opensfm/test/test_geometry.py +++ b/opensfm/test/test_geometry.py @@ -2,19 +2,19 @@ from opensfm import geometry -def test_rotation_from_ptr(): +def 
test_rotation_from_ptr() -> None: ptr = 0.1, 0.2, 0.3 rotation = geometry.rotation_from_ptr(*ptr) assert np.allclose(ptr, geometry.ptr_from_rotation(rotation)) -def test_rotation_from_ptr_v2(): +def test_rotation_from_ptr_v2() -> None: ptr = 0.1, 0.2, 0.3 rotation = geometry.rotation_from_ptr_v2(*ptr) assert np.allclose(ptr, geometry.ptr_from_rotation_v2(rotation)) -def test_rotation_from_ptr_compatibility(): +def test_rotation_from_ptr_compatibility() -> None: """Check the two implementations yield the same rotation.""" ptr = 0.1, 0.2, 0.3 assert np.allclose( @@ -22,7 +22,7 @@ def test_rotation_from_ptr_compatibility(): ) -def test_rotation_from_opk(): +def test_rotation_from_opk() -> None: ptr = 0.1, 0.2, 0.3 rotation = geometry.rotation_from_opk(*ptr) assert np.allclose(ptr, geometry.opk_from_rotation(rotation)) diff --git a/opensfm/test/test_io.py b/opensfm/test/test_io.py index c53f785cb..49fac0f55 100644 --- a/opensfm/test/test_io.py +++ b/opensfm/test/test_io.py @@ -12,7 +12,7 @@ ) -def test_reconstructions_from_json_consistency(): +def test_reconstructions_from_json_consistency() -> None: with open(filename) as fin: obj_before = json.loads(fin.read()) obj_after = io.reconstructions_to_json(io.reconstructions_from_json(obj_before)) @@ -42,7 +42,7 @@ def test_reconstructions_from_json_consistency(): assert obj1 == obj2 -def test_reconstructions_from_json(): +def test_reconstructions_from_json() -> None: with open(filename) as fin: obj = json.loads(fin.read()) @@ -56,7 +56,7 @@ def test_reconstructions_from_json(): assert len(reconstructions[0].rig_instances) == 3 -def test_reconstruction_to_ply(): +def test_reconstruction_to_ply() -> None: with open(filename) as fin: obj = json.loads(fin.read()) reconstructions = io.reconstructions_from_json(obj) @@ -64,7 +64,7 @@ def test_reconstruction_to_ply(): assert len(ply.splitlines()) > len(reconstructions[0].points) -def test_parse_projection(): +def test_parse_projection() -> None: proj = 
io._parse_projection("WGS84") assert proj is None @@ -75,7 +75,7 @@ def test_parse_projection(): assert np.allclose((lat, lon), (plat, plon)) -def test_read_gcp_list(): +def test_read_gcp_list() -> None: text = """WGS84 13.400740745 52.519134104 12.0792090446 2335.0 1416.7 01.jpg 13.400740745 52.519134104 12.0792090446 2639.1 938.0 02.jpg @@ -93,7 +93,7 @@ def test_read_gcp_list(): assert max(a, b) == 2 -def test_read_write_ground_control_points(): +def test_read_write_ground_control_points() -> None: text = """ { "points": [ @@ -153,7 +153,7 @@ def check_points(points): check_points(points_reread) -def test_json_to_and_from_metadata(): +def test_json_to_and_from_metadata() -> None: obj = { "orientation": 10, "capture_time": 1, @@ -175,7 +175,7 @@ def test_json_to_and_from_metadata(): assert obj == io.pymap_metadata_to_json(m) -def test_camera_from_to_vector(): +def test_camera_from_to_vector() -> None: w, h = 640, 480 camera_sizes = [ ("perspective", 3), @@ -198,7 +198,7 @@ def test_camera_from_to_vector(): # specific test for I/O consistency with panoshots # ynoutary : hopefully, candidate for deletion soon -def test_panoshots_consistency(): +def test_panoshots_consistency() -> None: rec_before = types.Reconstruction() camera1 = pygeometry.Camera.create_spherical() diff --git a/opensfm/test/test_matching.py b/opensfm/test/test_matching.py index faeef09f1..dc2256bb4 100644 --- a/opensfm/test/test_matching.py +++ b/opensfm/test/test_matching.py @@ -32,7 +32,7 @@ def example_features(nfeatures, config): return [f1, f2], [w1, w2] -def test_example_features(): +def test_example_features() -> None: nfeatures = 1000 features, words = example_features(nfeatures, config.default_config()) @@ -40,7 +40,7 @@ def test_example_features(): assert len(words[0]) == nfeatures -def test_match_using_words(): +def test_match_using_words() -> None: configuration = config.default_config() nfeatures = 1000 @@ -58,7 +58,7 @@ def test_match_using_words(): assert i == j -def 
test_unfilter_matches(): +def test_unfilter_matches() -> None: matches = np.array([]) m1 = np.array([], dtype=bool) m2 = np.array([], dtype=bool) @@ -76,7 +76,7 @@ def test_unfilter_matches(): assert res[1][1] == 6 -def test_match_images(scene_synthetic): +def test_match_images(scene_synthetic) -> None: reference = scene_synthetic.reconstruction synthetic = synthetic_dataset.SyntheticDataSet( reference, @@ -85,7 +85,15 @@ def test_match_images(scene_synthetic): scene_synthetic.tracks_manager, ) + # pyre-fixme[8]: Attribute has type + # `BoundMethod[typing.Callable(SyntheticDataSet.matches_exists)[[Named(self, + # SyntheticDataSet), Named(image, str)], bool], SyntheticDataSet]`; used as `(im: + # Any) -> bool`. synthetic.matches_exists = lambda im: False + # pyre-fixme[8]: Attribute has type + # `BoundMethod[typing.Callable(DataSet.save_matches)[[Named(self, DataSet), + # Named(image, str), Named(matches, Dict[str, ndarray])], None], + # SyntheticDataSet]`; used as `(im: Any, m: Any) -> bool`. synthetic.save_matches = lambda im, m: False override = {} @@ -102,10 +110,12 @@ def test_match_images(scene_synthetic): matches = pairs.get(pair) if matches is None or len(matches) == 1: matches = pairs.get(pair[::-1]) + # pyre-fixme[6]: For 1st param expected `Sized` but got + # `Optional[List[Tuple[int, int]]]`. assert len(matches) > 25 -def test_ordered_pairs(): +def test_ordered_pairs() -> None: neighbors = [ [1, 3], [1, 2], @@ -114,11 +124,14 @@ def test_ordered_pairs(): [4, 5], ] images = [1, 2, 3] + # pyre-fixme[6]: For 1st param expected `Set[Tuple[str, str]]` but got + # `List[List[int]]`. + # pyre-fixme[6]: For 2nd param expected `List[str]` but got `List[int]`. 
pairs = pairs_selection.ordered_pairs(neighbors, images) assert set(pairs) == {(1, 2), (1, 3), (2, 5), (3, 2)} -def test_triangulation_inliers(pairs_and_their_E): +def test_triangulation_inliers(pairs_and_their_E) -> None: for f1, f2, _, pose in pairs_and_their_E: Rt = pose.get_cam_to_world()[:3] diff --git a/opensfm/test/test_multiview.py b/opensfm/test/test_multiview.py index 704466a49..df85065c3 100644 --- a/opensfm/test/test_multiview.py +++ b/opensfm/test/test_multiview.py @@ -12,7 +12,7 @@ def normalized(x): return x / np.linalg.norm(x) -def test_motion_from_plane_homography(): +def test_motion_from_plane_homography() -> None: R = tf.random_rotation_matrix()[:3, :3] t = normalized(2 * np.random.rand(3) - 1) n = normalized(2 * np.random.rand(3) - 1) @@ -23,6 +23,7 @@ def test_motion_from_plane_homography(): motions = multiview.motion_from_plane_homography(H) goodness = [] + # pyre-fixme[16]: Optional type has no attribute `__iter__`. for Re, te, ne, de in motions: scalee = np.linalg.norm(te) good_R = np.allclose(R, Re) @@ -35,7 +36,7 @@ def test_motion_from_plane_homography(): assert any(goodness) -def test_essential_five_points(pairs_and_their_E): +def test_essential_five_points(pairs_and_their_E) -> None: exact_found = 0 for f1, f2, E, _ in pairs_and_their_E: @@ -54,7 +55,7 @@ def test_essential_five_points(pairs_and_their_E): assert exact_found >= exacts -def test_absolute_pose_three_points(shots_and_their_points): +def test_absolute_pose_three_points(shots_and_their_points) -> None: exact_found = 0 for pose, bearings, points in shots_and_their_points: result = pygeometry.absolute_pose_three_points(bearings, points) @@ -67,7 +68,7 @@ def test_absolute_pose_three_points(shots_and_their_points): assert exact_found >= exacts -def test_absolute_pose_n_points(shots_and_their_points): +def test_absolute_pose_n_points(shots_and_their_points) -> None: for pose, bearings, points in shots_and_their_points: result = pygeometry.absolute_pose_n_points(bearings, points) 
@@ -75,7 +76,7 @@ def test_absolute_pose_n_points(shots_and_their_points): assert np.linalg.norm(expected - result, ord="fro") < 1e-5 -def test_absolute_pose_n_points_known_rotation(shots_and_their_points): +def test_absolute_pose_n_points_known_rotation(shots_and_their_points) -> None: for pose, bearings, points in shots_and_their_points: R = pose.get_rotation_matrix() p_rotated = np.array([R.dot(p) for p in points]) @@ -84,7 +85,7 @@ def test_absolute_pose_n_points_known_rotation(shots_and_their_points): assert np.linalg.norm(pose.translation - result) < 1e-6 -def test_essential_n_points(pairs_and_their_E): +def test_essential_n_points(pairs_and_their_E) -> None: for f1, f2, E, _ in pairs_and_their_E: f1 /= np.linalg.norm(f1, axis=1)[:, None] @@ -100,7 +101,7 @@ def test_essential_n_points(pairs_and_their_E): assert np.linalg.norm(E - E_found, ord="fro") < 1e-6 -def test_relative_pose_from_essential(pairs_and_their_E): +def test_relative_pose_from_essential(pairs_and_their_E) -> None: for f1, f2, E, pose in pairs_and_their_E: result = pygeometry.relative_pose_from_essential(E, f1, f2) @@ -112,7 +113,7 @@ def test_relative_pose_from_essential(pairs_and_their_E): assert np.allclose(expected, result, rtol=1e-10) -def test_relative_rotation(pairs_and_their_E): +def test_relative_rotation(pairs_and_their_E) -> None: for f1, _, _, _ in pairs_and_their_E: vec_x = np.random.rand(3) @@ -126,12 +127,13 @@ def test_relative_rotation(pairs_and_their_E): f1 /= np.linalg.norm(f1, axis=1)[:, None] f2 = [rotation.dot(x) for x in f1] + # pyre-fixme[6]: For 2nd param expected `ndarray` but got `List[typing.Any]`. 
result = pygeometry.relative_rotation_n_points(f1, f2) assert np.allclose(rotation, result, rtol=1e-10) -def test_relative_pose_refinement(pairs_and_their_E): +def test_relative_pose_refinement(pairs_and_their_E) -> None: exact_found = 0 for f1, f2, _, pose in pairs_and_their_E: pose = copy.deepcopy(pose) diff --git a/opensfm/test/test_pairs_selection.py b/opensfm/test/test_pairs_selection.py index 37e10b52b..77d3d1219 100644 --- a/opensfm/test/test_pairs_selection.py +++ b/opensfm/test/test_pairs_selection.py @@ -10,7 +10,7 @@ class Args: - def __init__(self, dataset): + def __init__(self, dataset) -> None: self.dataset = dataset @@ -47,7 +47,7 @@ def lund_path(tmpdir_factory): return path -def match_candidates_from_metadata(data, neighbors=NEIGHBORS, assert_count=NEIGHBORS): +def match_candidates_from_metadata(data, neighbors: int=NEIGHBORS, assert_count: int=NEIGHBORS) -> None: assert neighbors >= assert_count ims = sorted(data.images()) @@ -92,14 +92,14 @@ def create_match_candidates_config(**kwargs): return config -def test_match_candidates_from_metadata_vlad(lund_path): +def test_match_candidates_from_metadata_vlad(lund_path) -> None: config = create_match_candidates_config(matching_vlad_neighbors=NEIGHBORS) data_generation.save_config(config, lund_path) data = dataset.DataSet(lund_path) match_candidates_from_metadata(data, assert_count=5) -def test_match_candidates_from_metadata_bow(lund_path): +def test_match_candidates_from_metadata_bow(lund_path) -> None: config = create_match_candidates_config( matching_bow_neighbors=NEIGHBORS, matcher_type="WORDS" ) @@ -108,28 +108,28 @@ def test_match_candidates_from_metadata_bow(lund_path): match_candidates_from_metadata(data, assert_count=5) -def test_match_candidates_from_metadata_gps(lund_path): +def test_match_candidates_from_metadata_gps(lund_path) -> None: config = create_match_candidates_config(matching_gps_neighbors=NEIGHBORS) data_generation.save_config(config, lund_path) data = dataset.DataSet(lund_path) 
match_candidates_from_metadata(data) -def test_match_candidates_from_metadata_time(lund_path): +def test_match_candidates_from_metadata_time(lund_path) -> None: config = create_match_candidates_config(matching_time_neighbors=NEIGHBORS) data_generation.save_config(config, lund_path) data = dataset.DataSet(lund_path) match_candidates_from_metadata(data) -def test_match_candidates_from_metadata_graph(lund_path): +def test_match_candidates_from_metadata_graph(lund_path) -> None: config = create_match_candidates_config(matching_graph_rounds=50) data_generation.save_config(config, lund_path) data = dataset.DataSet(lund_path) match_candidates_from_metadata(data) -def test_get_gps_point(): +def test_get_gps_point() -> None: reference = geo.TopocentricConverter(0, 0, 0) exifs = {} exifs["gps"] = { @@ -142,7 +142,7 @@ def test_get_gps_point(): assert np.allclose(direction, [[0, 0, 1]]) -def test_get_gps_opk_point(): +def test_get_gps_opk_point() -> None: reference = geo.TopocentricConverter(0, 0, 0) exifs = {} exifs["gps"] = { @@ -160,21 +160,25 @@ def test_get_gps_opk_point(): assert np.allclose(direction, [[0.0, 1.0, -1.0]]) -def test_find_best_altitude_convergent(): +def test_find_best_altitude_convergent() -> None: origins = {"0": [2.0, 0.0, 8.0], "1": [-2.0, 0.0, 8.0]} directions = { "0": np.array([-1.0, 0.0, -1.0]), "1": np.array([1.0, 0.0, -1.0]), } + # pyre-fixme[6]: For 1st param expected `Dict[str, ndarray]` but got `Dict[str, + # List[float]]`. altitude = pairs_selection.find_best_altitude(origins, directions) assert np.allclose([altitude], [2.0], atol=1e-2) -def test_find_best_altitude_divergent(): +def test_find_best_altitude_divergent() -> None: origins = {"0": [2.0, 0.0, 8.0], "1": [-2.0, 0.0, 8.0]} directions = { "0": np.array([1.0, 0.0, -1.0]), "1": np.array([-1.0, 0.0, -1.0]), } + # pyre-fixme[6]: For 1st param expected `Dict[str, ndarray]` but got `Dict[str, + # List[float]]`. 
altitude = pairs_selection.find_best_altitude(origins, directions) assert np.allclose([altitude], pairs_selection.DEFAULT_Z, atol=1e-2) diff --git a/opensfm/test/test_reconstruction_alignment.py b/opensfm/test/test_reconstruction_alignment.py index 551bf51bd..f8c31fb77 100644 --- a/opensfm/test/test_reconstruction_alignment.py +++ b/opensfm/test/test_reconstruction_alignment.py @@ -16,7 +16,7 @@ def get_reconstruction_origin(r): return pose.get_origin() -def test_single_shot(): +def test_single_shot() -> None: """Single shot test.""" ra = pybundle.ReconstructionAlignment() ra.add_shot("1", 0.5, 0, 0, 0, 0, 0, False) @@ -27,7 +27,7 @@ def test_single_shot(): assert np.allclose(get_shot_origin(s1), [1, 0, 0], atol=1e-6) -def test_singleton_reconstruction(): +def test_singleton_reconstruction() -> None: """Single shot in a single reconstruction.""" ra = pybundle.ReconstructionAlignment() ra.add_shot("1", 0, 0, 0, 0, 0, 0, False) @@ -43,7 +43,7 @@ def test_singleton_reconstruction(): assert np.allclose(get_shot_origin(s1), [1, 0, 0], atol=1e-6) -def test_pair(): +def test_pair() -> None: """Simple single reconstruction two shots test.""" ra = pybundle.ReconstructionAlignment() ra.add_shot("1", 0, 0, 0, 0, 0, 0, False) @@ -69,7 +69,7 @@ def test_pair(): assert np.allclose(rec_a.scale, 0.5) -def test_two_shots_one_fixed(): +def test_two_shots_one_fixed() -> None: """Two shot, one reconstruction. 
One shot is fixed""" ra = pybundle.ReconstructionAlignment() ra.add_shot("1", 0, 0, 0, -1, 0, 0, True) @@ -96,7 +96,7 @@ def test_two_shots_one_fixed(): assert np.allclose(rec_a.scale, 0.5) -def test_two_reconstructions_soft_alignment(): +def test_two_reconstructions_soft_alignment() -> None: """Two reconstructions""" ra = pybundle.ReconstructionAlignment() ra.add_shot("1", 0, 0, 0, 0, 0, 0, False) @@ -145,7 +145,7 @@ def test_two_reconstructions_soft_alignment(): assert np.allclose(rec_b.scale, 1) -def test_two_reconstructions_rigid_alignment(): +def test_two_reconstructions_rigid_alignment() -> None: """Two reconstructions""" ra = pybundle.ReconstructionAlignment() @@ -178,7 +178,7 @@ def test_two_reconstructions_rigid_alignment(): assert np.allclose(rec_b.scale, 1) -def test_two_reconstructions_common_camera(): +def test_two_reconstructions_common_camera() -> None: """Two reconstructions""" ra = pybundle.ReconstructionAlignment() @@ -210,7 +210,7 @@ def test_two_reconstructions_common_camera(): assert np.allclose(rec_b.scale, 1) -def test_common_points(): +def test_common_points() -> None: """Two reconstructions, two common points""" ra = pybundle.ReconstructionAlignment() ra.add_reconstruction("a", 0, 0, 0, 0, 0, 0, 1, True) diff --git a/opensfm/test/test_reconstruction_incremental.py b/opensfm/test/test_reconstruction_incremental.py index 36f5ec828..a20141359 100644 --- a/opensfm/test/test_reconstruction_incremental.py +++ b/opensfm/test/test_reconstruction_incremental.py @@ -4,7 +4,7 @@ def test_reconstruction_incremental( scene_synthetic: synthetic_scene.SyntheticInputData, -): +) -> None: reference = scene_synthetic.reconstruction dataset = synthetic_dataset.SyntheticDataSet( reference, @@ -50,7 +50,7 @@ def test_reconstruction_incremental( def test_reconstruction_incremental_rig( scene_synthetic_rig: synthetic_scene.SyntheticInputData, -): +) -> None: reference = scene_synthetic_rig.reconstruction dataset = synthetic_dataset.SyntheticDataSet( reference, 
diff --git a/opensfm/test/test_reconstruction_resect.py b/opensfm/test/test_reconstruction_resect.py index 90ddee4a4..8bd5f605a 100644 --- a/opensfm/test/test_reconstruction_resect.py +++ b/opensfm/test/test_reconstruction_resect.py @@ -2,16 +2,24 @@ from opensfm import config, multiview, pymap, reconstruction, types -def test_corresponding_tracks(): +def test_corresponding_tracks() -> None: t1 = {1: pymap.Observation(1.0, 1.0, 1.0, 0, 0, 0, 1, 1, 1)} t2 = {1: pymap.Observation(1.0, 1.0, 1.0, 0, 0, 0, 2, 2, 2)} + # pyre-fixme[6]: For 1st param expected `Dict[str, Observation]` but got + # `Dict[int, Observation]`. + # pyre-fixme[6]: For 2nd param expected `Dict[str, Observation]` but got + # `Dict[int, Observation]`. correspondences = reconstruction.corresponding_tracks(t1, t2) assert len(correspondences) == 0 t1 = {1: pymap.Observation(1.0, 1.0, 1.0, 0, 0, 0, 3, 3, 3)} t2 = {2: pymap.Observation(1.0, 1.0, 1.0, 0, 0, 0, 3, 3, 3)} + # pyre-fixme[6]: For 1st param expected `Dict[str, Observation]` but got + # `Dict[int, Observation]`. + # pyre-fixme[6]: For 2nd param expected `Dict[str, Observation]` but got + # `Dict[int, Observation]`. correspondences = reconstruction.corresponding_tracks(t1, t2) assert len(correspondences) == 1 assert correspondences[0] == (1, 2) @@ -25,6 +33,10 @@ def test_corresponding_tracks(): 2: pymap.Observation(1.0, 1.0, 1.0, 0, 0, 0, 5, 5, 5), } + # pyre-fixme[6]: For 1st param expected `Dict[str, Observation]` but got + # `Dict[int, Observation]`. + # pyre-fixme[6]: For 2nd param expected `Dict[str, Observation]` but got + # `Dict[int, Observation]`. correspondences = reconstruction.corresponding_tracks(t1, t2) assert len(correspondences) == 1 assert correspondences[0] == (2, 1) @@ -38,6 +50,10 @@ def test_corresponding_tracks(): 4: pymap.Observation(1.0, 1.0, 1.0, 0, 0, 0, 6, 6, 6), } + # pyre-fixme[6]: For 1st param expected `Dict[str, Observation]` but got + # `Dict[int, Observation]`. 
+ # pyre-fixme[6]: For 2nd param expected `Dict[str, Observation]` but got + # `Dict[int, Observation]`. correspondences = reconstruction.corresponding_tracks(t1, t2) correspondences.sort(key=lambda c: c[0] + c[1]) assert len(correspondences) == 2 @@ -79,7 +95,7 @@ def move_and_scale_cluster(cluster): return cluster, translation, scale -def test_absolute_pose_generalized_shot(scene_synthetic_cube): +def test_absolute_pose_generalized_shot(scene_synthetic_cube) -> None: """Whole reconstruction resection (generalized pose) on a toy reconstruction with 0.01 meter point noise and zero outliers.""" noise = 0.01 diff --git a/opensfm/test/test_reconstruction_shot_neighborhood.py b/opensfm/test/test_reconstruction_shot_neighborhood.py index ebc465672..d302a7fa7 100644 --- a/opensfm/test/test_reconstruction_shot_neighborhood.py +++ b/opensfm/test/test_reconstruction_shot_neighborhood.py @@ -6,18 +6,18 @@ from opensfm import types -def _add_shot(rec, shot_id, cam): +def _add_shot(rec, shot_id, cam) -> None: rec.create_shot(shot_id, cam.id) -def _add_point(rec, point_id, observations): +def _add_point(rec, point_id, observations) -> None: rec.create_point(point_id) for shot_id in observations: obs = pymap.Observation(100, 200, 0.5, 255, 0, 0, int(point_id)) rec.add_observation(shot_id, point_id, obs) -def test_shot_neighborhood_linear_graph(): +def test_shot_neighborhood_linear_graph() -> None: rec = types.Reconstruction() cam = pygeometry.Camera.create_perspective(0.5, 0, 0) cam.id = "cam1" @@ -53,7 +53,7 @@ def test_shot_neighborhood_linear_graph(): assert boundary == {"im0"} -def test_shot_neighborhood_linear_graph_cpp(): +def test_shot_neighborhood_linear_graph_cpp() -> None: rec = types.Reconstruction() cam = pygeometry.Camera.create_perspective(0.5, 0, 0) cam.id = "cam1" @@ -89,7 +89,7 @@ def test_shot_neighborhood_linear_graph_cpp(): assert boundary4 == {"im0"} -def test_shot_neighborhood_complete_graph(): +def test_shot_neighborhood_complete_graph() -> None: rec = 
types.Reconstruction() cam = pygeometry.Camera.create_perspective(0.5, 0, 0) cam.id = "cam1" @@ -105,7 +105,7 @@ def test_shot_neighborhood_complete_graph(): assert boundary == set() -def test_shot_neighborhood_sorted_results(): +def test_shot_neighborhood_sorted_results() -> None: rec = types.Reconstruction() cam = pygeometry.Camera.create_perspective(0.5, 0, 0) cam.id = "cam1" @@ -133,7 +133,7 @@ def test_shot_neighborhood_sorted_results(): assert boundary == {"im1"} -def test_shot_neighborhood_complete_graph_cpp(): +def test_shot_neighborhood_complete_graph_cpp() -> None: rec = types.Reconstruction() cam = pygeometry.Camera.create_perspective(0.5, 0, 0) cam.id = "cam1" @@ -147,7 +147,7 @@ def test_shot_neighborhood_complete_graph_cpp(): assert boundary == set() -def test_shot_neighborhood_sorted_results_cpp(): +def test_shot_neighborhood_sorted_results_cpp() -> None: rec = types.Reconstruction() cam = pygeometry.Camera.create_perspective(0.5, 0, 0) cam.id = "cam1" diff --git a/opensfm/test/test_reconstruction_triangulation.py b/opensfm/test/test_reconstruction_triangulation.py index 552010c7f..a1e475be2 100644 --- a/opensfm/test/test_reconstruction_triangulation.py +++ b/opensfm/test/test_reconstruction_triangulation.py @@ -4,7 +4,7 @@ def test_reconstruction_triangulation( scene_synthetic_triangulation: synthetic_scene.SyntheticInputData, -): +) -> None: reference = scene_synthetic_triangulation.reconstruction dataset = synthetic_dataset.SyntheticDataSet( reference, diff --git a/opensfm/test/test_robust.py b/opensfm/test/test_robust.py index beb186111..fcece6881 100644 --- a/opensfm/test/test_robust.py +++ b/opensfm/test/test_robust.py @@ -21,7 +21,7 @@ def similarity_data(): return rotation, translation, scale, x, samples -def add_outliers(ratio_outliers, x, min, max): +def add_outliers(ratio_outliers, x, min, max) -> None: for index in np.random.permutation(len(x))[: int(ratio_outliers * len(x))]: shape = x[index].shape noise = np.random.uniform(min, max, 
size=shape) @@ -32,7 +32,7 @@ def add_outliers(ratio_outliers, x, min, max): x[int(index)] += sign * noise -def test_uniform_line_ransac(): +def test_uniform_line_ransac() -> None: a, b, x, samples = line_data() scale = 2.0 @@ -47,7 +47,7 @@ def test_uniform_line_ransac(): assert len(result.inliers_indices) == samples -def test_outliers_line_ransac(): +def test_outliers_line_ransac() -> None: a, b, x, samples = line_data() scale = 2.0 @@ -67,7 +67,7 @@ def test_outliers_line_ransac(): assert len(result.inliers_indices) == inliers_count -def test_normal_line_msac(): +def test_normal_line_msac() -> None: a, b, x, samples = line_data() sigma = 2.0 @@ -88,7 +88,7 @@ def test_normal_line_msac(): ) -def test_outliers_line_msac(): +def test_outliers_line_msac() -> None: a, b, x, samples = line_data() sigma = 2.0 @@ -114,7 +114,7 @@ def test_outliers_line_msac(): ) -def test_normal_line_LMedS(): +def test_normal_line_LMedS() -> None: a, b, x, samples = line_data() sigma = 2.0 @@ -133,7 +133,7 @@ def test_normal_line_LMedS(): ) -def test_outliers_line_LMedS(): +def test_outliers_line_LMedS() -> None: a, b, x, samples = line_data() sigma = 2.0 @@ -161,7 +161,7 @@ def test_outliers_line_LMedS(): ) -def test_outliers_similarity_ransac(): +def test_outliers_similarity_ransac() -> None: rotation, translation, scale, x, samples = similarity_data() similarity = pygeometry.Similarity(rotation, translation, scale) @@ -184,7 +184,7 @@ def test_outliers_similarity_ransac(): ) -def test_uniform_essential_ransac(pairs_and_their_E): +def test_uniform_essential_ransac(pairs_and_their_E) -> None: for f1, f2, _, _ in pairs_and_their_E: points = np.concatenate((f1, f2), axis=1) @@ -205,7 +205,7 @@ def test_uniform_essential_ransac(pairs_and_their_E): assert len(result.inliers_indices) == len(f1) == len(f2) -def test_outliers_essential_ransac(pairs_and_their_E): +def test_outliers_essential_ransac(pairs_and_their_E) -> None: for f1, f2, _, _ in pairs_and_their_E: points = np.concatenate((f1, 
f2), axis=1) @@ -230,7 +230,7 @@ def test_outliers_essential_ransac(pairs_and_their_E): assert np.isclose(len(result.inliers_indices), inliers_count, rtol=tolerance) -def test_outliers_relative_pose_ransac(pairs_and_their_E): +def test_outliers_relative_pose_ransac(pairs_and_their_E) -> None: for f1, f2, _, pose in pairs_and_their_E: points = np.concatenate((f1, f2), axis=1) @@ -261,7 +261,7 @@ def test_outliers_relative_pose_ransac(pairs_and_their_E): assert np.linalg.norm(expected - result.lo_model, ord="fro") < 16e-2 -def test_outliers_relative_rotation_ransac(pairs_and_their_E): +def test_outliers_relative_rotation_ransac(pairs_and_their_E) -> None: for f1, _, _, _ in pairs_and_their_E: vec_x = np.random.rand(3) @@ -301,7 +301,7 @@ def test_outliers_relative_rotation_ransac(pairs_and_their_E): assert np.linalg.norm(rotation - result.lo_model, ord="fro") < 8e-2 -def test_outliers_absolute_pose_ransac(shots_and_their_points): +def test_outliers_absolute_pose_ransac(shots_and_their_points) -> None: for pose, bearings, points in shots_and_their_points: scale = 1e-3 bearings = copy.deepcopy(bearings) @@ -326,7 +326,7 @@ def test_outliers_absolute_pose_ransac(shots_and_their_points): assert np.linalg.norm(expected - result.lo_model, ord="fro") < 8e-2 -def test_outliers_absolute_pose_known_rotation_ransac(shots_and_their_points): +def test_outliers_absolute_pose_known_rotation_ransac(shots_and_their_points) -> None: for pose, bearings, points in shots_and_their_points: scale = 1e-3 bearings = copy.deepcopy(bearings) diff --git a/opensfm/test/test_stats.py b/opensfm/test/test_stats.py index 7caa8f1c5..b486794eb 100644 --- a/opensfm/test/test_stats.py +++ b/opensfm/test/test_stats.py @@ -4,7 +4,7 @@ def test_processing_statistics_normal( scene_synthetic: synthetic_scene.SyntheticInputData, -): +) -> None: reference = scene_synthetic.reconstruction dataset = synthetic_dataset.SyntheticDataSet( reference, @@ -30,7 +30,7 @@ def test_processing_statistics_normal( def 
test_processing_statistics_null( scene_synthetic: synthetic_scene.SyntheticInputData, null_scene: types.Reconstruction, -): +) -> None: dataset = synthetic_dataset.SyntheticDataSet( null_scene, scene_synthetic.exifs, @@ -54,7 +54,7 @@ def test_processing_statistics_null( def test_features_statistics_normal( scene_synthetic: synthetic_scene.SyntheticInputData, -): +) -> None: reference = scene_synthetic.reconstruction dataset = synthetic_dataset.SyntheticDataSet( reference, @@ -85,7 +85,7 @@ def test_features_statistics_normal( def test_features_statistics_null( scene_synthetic: synthetic_scene.SyntheticInputData, null_scene: types.Reconstruction, -): +) -> None: dataset = synthetic_dataset.SyntheticDataSet( null_scene, scene_synthetic.exifs, @@ -115,7 +115,7 @@ def test_features_statistics_null( def test_reconstruction_statistics_normal( scene_synthetic: synthetic_scene.SyntheticInputData, -): +) -> None: reference = scene_synthetic.reconstruction dataset = synthetic_dataset.SyntheticDataSet( reference, @@ -150,7 +150,7 @@ def test_reconstruction_statistics_normal( def test_reconstruction_statistics_null( scene_synthetic: synthetic_scene.SyntheticInputData, null_scene: types.Reconstruction, -): +) -> None: dataset = synthetic_dataset.SyntheticDataSet( null_scene, scene_synthetic.exifs, @@ -183,7 +183,7 @@ def test_reconstruction_statistics_null( def test_cameras_statistics_normal( scene_synthetic: synthetic_scene.SyntheticInputData, -): +) -> None: reference = scene_synthetic.reconstruction dataset = synthetic_dataset.SyntheticDataSet( reference, @@ -209,7 +209,7 @@ def test_cameras_statistics_normal( def test_cameras_statistics_null( scene_synthetic: synthetic_scene.SyntheticInputData, null_scene: types.Reconstruction, -): +) -> None: dataset = synthetic_dataset.SyntheticDataSet( null_scene, scene_synthetic.exifs, @@ -223,7 +223,7 @@ def test_cameras_statistics_null( def test_rig_statistics_normal( scene_synthetic: synthetic_scene.SyntheticInputData, -): +) -> 
None: reference = scene_synthetic.reconstruction dataset = synthetic_dataset.SyntheticDataSet( reference, @@ -239,7 +239,7 @@ def test_rig_statistics_normal( def test_rig_statistics_null( scene_synthetic: synthetic_scene.SyntheticInputData, null_scene: types.Reconstruction, -): +) -> None: dataset = synthetic_dataset.SyntheticDataSet( null_scene, scene_synthetic.exifs, @@ -253,7 +253,7 @@ def test_rig_statistics_null( def test_gps_errors_normal( scene_synthetic: synthetic_scene.SyntheticInputData, -): +) -> None: reference = scene_synthetic.reconstruction gps_errors = stats.gps_errors([reference]) assert gps_errors == {} @@ -262,14 +262,14 @@ def test_gps_errors_normal( def test_gps_errors_null( scene_synthetic: synthetic_scene.SyntheticInputData, null_scene: types.Reconstruction, -): +) -> None: gps_errors = stats.gps_errors([null_scene]) assert gps_errors == {} def test_gcp_errors_normal( scene_synthetic: synthetic_scene.SyntheticInputData, -): +) -> None: reference = scene_synthetic.reconstruction dataset = synthetic_dataset.SyntheticDataSet( reference, @@ -285,7 +285,7 @@ def test_gcp_errors_normal( def test_gcp_errors_null( scene_synthetic: synthetic_scene.SyntheticInputData, null_scene: types.Reconstruction, -): +) -> None: dataset = synthetic_dataset.SyntheticDataSet( null_scene, scene_synthetic.exifs, diff --git a/opensfm/test/test_triangulation.py b/opensfm/test/test_triangulation.py index 17ba979cf..cc6a2ddb3 100644 --- a/opensfm/test/test_triangulation.py +++ b/opensfm/test/test_triangulation.py @@ -5,7 +5,7 @@ from opensfm import reconstruction -def test_track_triangulator_spherical(): +def test_track_triangulator_spherical() -> None: """Test triangulating tracks of spherical images.""" tracks_manager = pymap.TracksManager() tracks_manager.add_observation("im1", "1", pymap.Observation(0, 0, 1.0, 0, 0, 0, 0)) @@ -48,11 +48,11 @@ def test_track_triangulator_spherical(): assert len(rec.points["1"].get_observations()) == 2 -def unit_vector(x): +def 
unit_vector(x: object): return np.array(x) / np.linalg.norm(x) -def test_triangulate_bearings_dlt(): +def test_triangulate_bearings_dlt() -> None: rt1 = np.append(np.identity(3), [[0], [0], [0]], axis=1) rt2 = np.append(np.identity(3), [[-1], [0], [0]], axis=1) b1 = unit_vector([0.0, 0, 1]) @@ -60,13 +60,14 @@ def test_triangulate_bearings_dlt(): max_reprojection = 0.01 min_ray_angle = np.radians(2.0) res, X = pygeometry.triangulate_bearings_dlt( + # pyre-fixme[6]: For 2nd param expected `ndarray` but got `List[typing.Any]`. [rt1, rt2], [b1, b2], max_reprojection, min_ray_angle ) assert np.allclose(X, [0, 0, 1.0]) assert res is True -def test_triangulate_bearings_midpoint(): +def test_triangulate_bearings_midpoint() -> None: o1 = np.array([0.0, 0, 0]) b1 = unit_vector([0.0, 0, 1]) o2 = np.array([1.0, 0, 0]) @@ -74,23 +75,27 @@ def test_triangulate_bearings_midpoint(): max_reprojection = 0.01 min_ray_angle = np.radians(2.0) valid_triangulation, X = pygeometry.triangulate_bearings_midpoint( + # pyre-fixme[6]: For 1st param expected `ndarray` but got `List[ndarray]`. + # pyre-fixme[6]: For 2nd param expected `ndarray` but got `List[typing.Any]`. [o1, o2], [b1, b2], 2 * [max_reprojection], min_ray_angle ) assert np.allclose(X, [0, 0, 1.0]) assert valid_triangulation is True -def test_triangulate_two_bearings_midpoint(): +def test_triangulate_two_bearings_midpoint() -> None: o1 = np.array([0.0, 0, 0]) b1 = unit_vector([0.0, 0, 1]) o2 = np.array([1.0, 0, 0]) b2 = unit_vector([-1.0, 0, 1]) + # pyre-fixme[6]: For 1st param expected `ndarray` but got `List[ndarray]`. + # pyre-fixme[6]: For 2nd param expected `ndarray` but got `List[typing.Any]`. 
ok, X = pygeometry.triangulate_two_bearings_midpoint([o1, o2], [b1, b2]) assert ok is True assert np.allclose(X, [0, 0, 1.0]) -def test_triangulate_two_bearings_midpoint_failed(): +def test_triangulate_two_bearings_midpoint_failed() -> None: o1 = np.array([0.0, 0, 0]) b1 = unit_vector([0.0, 0, 1]) o2 = np.array([1.0, 0, 0]) @@ -98,5 +103,7 @@ def test_triangulate_two_bearings_midpoint_failed(): # almost parralel. 1e-5 will make it triangulate again. b2 = b1 + np.array([-1e-10, 0, 0]) + # pyre-fixme[6]: For 1st param expected `ndarray` but got `List[ndarray]`. + # pyre-fixme[6]: For 2nd param expected `ndarray` but got `List[typing.Any]`. ok, X = pygeometry.triangulate_two_bearings_midpoint([o1, o2], [b1, b2]) assert ok is False diff --git a/opensfm/test/test_types.py b/opensfm/test/test_types.py index 5ec28abd1..82d1dead5 100644 --- a/opensfm/test/test_types.py +++ b/opensfm/test/test_types.py @@ -7,7 +7,7 @@ from scipy.stats import special_ortho_group -def test_reconstruction_class_initialization(): +def test_reconstruction_class_initialization() -> None: # Instantiate Reconstruction reconstruction = types.Reconstruction() @@ -26,21 +26,27 @@ def test_reconstruction_class_initialization(): metadata.orientation.value = 1 metadata.capture_time.value = 0.0 metadata.gps_accuracy.value = 5.0 + # pyre-fixme[8]: Attribute has type `ndarray`; used as `List[float]`. metadata.gps_position.value = [ 1.0815875281451939, -0.96510451436708888, 1.2042133903991235, ] + # pyre-fixme[8]: Attribute has type `ndarray`; used as `List[float]`. metadata.accelerometer.value = [0.1, 0.9, 0.0] metadata.compass_angle.value = 270.0 metadata.compass_accuracy.value = 15.0 metadata.sequence_key.value = "a_sequence_key" # Instantiate shots + # pyre-fixme[6]: For 1st param expected `ndarray` but got `List[float]`. + # pyre-fixme[6]: For 2nd param expected `ndarray` but got `List[float]`. 
pose0 = pygeometry.Pose([0.0, 0.0, 0.0], [0.0, 0.0, 0.0]) shot0 = reconstruction.create_shot("0", camera.id, pose0) shot0.metadata = metadata + # pyre-fixme[6]: For 1st param expected `ndarray` but got `List[float]`. + # pyre-fixme[6]: For 2nd param expected `ndarray` but got `List[float]`. pose1 = pygeometry.Pose([0.0, 0.0, 0.0], [-1.0, 0.0, 0.0]) shot1 = reconstruction.create_shot("1", camera.id, pose1) shot1.metadata = metadata @@ -55,14 +61,14 @@ def test_reconstruction_class_initialization(): assert reconstruction.get_shot(shot1.id) is not None -def test_is_panorama(): +def test_is_panorama() -> None: """Test spherical projection--backprojection loop.""" assert pygeometry.Camera.is_panorama("spherical") assert pygeometry.Camera.is_panorama("equirectangular") assert not pygeometry.Camera.is_panorama("fisheye") -def test_camera_deepcopy(): +def test_camera_deepcopy() -> None: cam1 = pygeometry.Camera.create_perspective(0.5, 0, 0) cam2 = copy.deepcopy(cam1) assert cam1.focal == cam2.focal @@ -72,7 +78,7 @@ def test_camera_deepcopy(): assert cam3.focal == cam2.focal -def test_shot_measurement(): +def test_shot_measurement() -> None: m = pymap.ShotMeasurementInt() assert not m.has_value m.value = 4 @@ -80,7 +86,7 @@ def test_shot_measurement(): assert m.value == 4 -def _helper_pose_equal_to_T(pose, T_cw): +def _helper_pose_equal_to_T(pose, T_cw) -> None: assert np.allclose(pose.get_R_world_to_cam(), T_cw[0:3, 0:3]) assert np.allclose(pose.get_t_world_to_cam(), T_cw[0:3, 3].reshape(3)) assert np.allclose(pose.translation, T_cw[0:3, 3].reshape(3)) @@ -97,14 +103,14 @@ def _helper_pose_equal_to_T(pose, T_cw): assert np.allclose(pose.get_Rt(), T_cw[0:3, 0:4]) -def _helper_poses_equal_py_cpp(py_pose, cpp_pose): +def _helper_poses_equal_py_cpp(py_pose, cpp_pose) -> None: assert np.allclose(py_pose.translation, cpp_pose.translation) assert np.allclose(py_pose.rotation, cpp_pose.rotation) assert np.allclose(py_pose.get_rotation_matrix(), cpp_pose.get_rotation_matrix()) 
assert np.allclose(py_pose.get_origin(), cpp_pose.get_origin()) -def _heper_poses_equal(pose1, pose2): +def _heper_poses_equal(pose1, pose2) -> None: assert np.allclose(pose1.translation, pose2.translation) assert np.allclose(pose1.rotation, pose2.rotation) assert np.allclose(pose1.get_rotation_matrix(), pose2.get_rotation_matrix()) @@ -118,7 +124,7 @@ def _heper_poses_equal(pose1, pose2): assert np.allclose(pose1.get_Rt(), pose2.get_Rt()) -def test_pose_setter(): +def test_pose_setter() -> None: R_cw = special_ortho_group.rvs(3) t_cw = np.random.rand(3) T_cw = np.vstack((np.column_stack((R_cw, t_cw)), np.array([0, 0, 0, 1]))) @@ -164,7 +170,7 @@ def test_pose_setter(): _helper_pose_equal_to_T(p7, T_cw) -def test_pose_transform(): +def test_pose_transform() -> None: pt = np.random.rand(3) pts = np.random.rand(10, 3) R_cw = special_ortho_group.rvs(3) @@ -180,7 +186,7 @@ def test_pose_transform(): assert np.allclose(p.transform_many(p.transform_inverse_many(pts)), pts) -def test_pose_init(): +def test_pose_init() -> None: R_cw = special_ortho_group.rvs(3) t_cw = np.random.rand(3) T_cw = np.vstack((np.column_stack((R_cw, t_cw)), np.array([0, 0, 0, 1]))) @@ -217,7 +223,7 @@ def test_pose_init(): ) -def test_pose_inverse(): +def test_pose_inverse() -> None: R_cw = special_ortho_group.rvs(3) t_cw = np.random.rand(3) T_cw = np.vstack((np.column_stack((R_cw, t_cw)), np.array([0, 0, 0, 1]))) @@ -228,13 +234,14 @@ def test_pose_inverse(): _heper_poses_equal(pose_inv, pose_inv2) -def test_pixel_to_normalized_conversion(): +def test_pixel_to_normalized_conversion() -> None: cam = pygeometry.Camera.create_perspective(1, 0, 0) width, height = 400, 150 cam.width, cam.height = width, height px_coord = np.array([50, 300]) norm_coord_comp = cam.pixel_to_normalized_coordinates(px_coord) norm_coord_static = pygeometry.Camera.pixel_to_normalized_coordinates_common( + # pyre-fixme[6]: For 1st param expected `Camera` but got `ndarray`. 
px_coord, width, height ) norm_coord_gt = px_coord - np.array([(width - 1.0) / 2.0, (height - 1.0) / 2.0]) @@ -244,13 +251,14 @@ def test_pixel_to_normalized_conversion(): px_coord_comp1 = cam.normalized_to_pixel_coordinates(norm_coord_comp) px_coord_comp2 = pygeometry.Camera.normalized_to_pixel_coordinates_common( + # pyre-fixme[6]: For 1st param expected `Camera` but got `ndarray`. norm_coord_comp, width, height ) assert np.allclose(px_coord, px_coord_comp1) assert np.allclose(px_coord, px_coord_comp2) -def test_shot_view_ref_count(): +def test_shot_view_ref_count() -> None: """Test that accessing shots via shot views maintains the map alive.""" rec = types.Reconstruction() camera1 = pygeometry.Camera.create_spherical() @@ -306,7 +314,7 @@ def _return_shot() -> pymap.Shot: return rec.shots["shot1"] -def test_return_shot_from_local_reconstruction(): +def test_return_shot_from_local_reconstruction() -> None: """Test that one can create a reconstruciton and return shots from it. Without proper ref counting in the python bindings, this crashes as the diff --git a/opensfm/test/test_undistort.py b/opensfm/test/test_undistort.py index 55a182507..ae516c738 100644 --- a/opensfm/test/test_undistort.py +++ b/opensfm/test/test_undistort.py @@ -4,13 +4,15 @@ from opensfm import undistort, pygeometry, types -def test_perspective_views_of_a_panorama(): +def test_perspective_views_of_a_panorama() -> None: reconstruction = types.Reconstruction() camera = pygeometry.Camera.create_spherical() camera.id = "spherical_camera" camera.width = 8000 camera.height = 4000 reconstruction.add_camera(camera) + # pyre-fixme[6]: For 1st param expected `ndarray` but got `List[int]`. + # pyre-fixme[6]: For 2nd param expected `ndarray` but got `List[int]`. 
pose = pygeometry.Pose([1, 2, 3], [4, 5, 6]) spherical_shot = reconstruction.create_shot("shot1", camera.id, pose=pose) diff --git a/opensfm/test/test_vlad.py b/opensfm/test/test_vlad.py index c5acc0e22..bbf5b278e 100644 --- a/opensfm/test/test_vlad.py +++ b/opensfm/test/test_vlad.py @@ -3,7 +3,7 @@ from opensfm import vlad -def test_vlad_distances_order(): +def test_vlad_distances_order() -> None: im = "im1" other_ims = ["im2", "im3"] @@ -24,15 +24,16 @@ def test_vlad_distances_order(): assert other_ims[order_res[1]] == "im2" -def test_signed_square_root_normalize(): +def test_signed_square_root_normalize() -> None: v = [1, 0.01] + # pyre-fixme[6]: For 1st param expected `ndarray` but got `List[float]`. res = vlad.signed_square_root_normalize(v) assert pytest.approx(np.linalg.norm(res), 1e-6) == 1 assert pytest.approx(v[0] / v[1], 1e-6) == 10 * res[0] / res[1] -def test_unnormalized_vlad(): +def test_unnormalized_vlad() -> None: features = np.array([[0, 1.1]]) centers = np.array( [ @@ -43,5 +44,6 @@ def test_unnormalized_vlad(): res = vlad.unnormalized_vlad(features, centers) + # pyre-fixme[16]: Optional type has no attribute `__getitem__`. assert res[0] == res[1] == res[2] == 0 assert pytest.approx(res[3], 1e-6) == 0.1 diff --git a/opensfm/video.py b/opensfm/video.py index 36ca48e6c..49f5c17d1 100644 --- a/opensfm/video.py +++ b/opensfm/video.py @@ -9,8 +9,9 @@ from opensfm import io -def video_orientation(video_file): +def video_orientation(video_file) -> int: # Rotation + # pyre-fixme[16]: Optional type has no attribute `read`. rotation = Popen( ["exiftool", "-Rotation", "-b", video_file], stdout=PIPE ).stdout.read() @@ -26,17 +27,18 @@ def video_orientation(video_file): orientation = 8 else: orientation = 1 + # pyre-fixme[61]: `orientation` is undefined, or not always defined. 
return orientation def import_video_with_gpx( video_file, gpx_file, - output_path, + output_path: str, dx, dt=None, start_time=None, - visual=False, + visual: bool=False, image_description=None, ): @@ -48,6 +50,7 @@ def import_video_with_gpx( video_start_time = dateutil.parser.parse(start_time) else: try: + # pyre-fixme[16]: Optional type has no attribute `read`. exifdate = Popen( ["exiftool", "-CreateDate", "-b", video_file], stdout=PIPE ).stdout.read() From 15929d6d8ec6a69a46511f14b4862daafbd15c9a Mon Sep 17 00:00:00 2001 From: Fabian Schenk Date: Fri, 4 Feb 2022 04:25:08 -0800 Subject: [PATCH 17/81] Clean-up and type annotations Summary: Some clean-ups and type annotations Reviewed By: paulinus Differential Revision: D33916235 fbshipit-source-id: 024f4a2abca8cc64be02bc1d2fc6c9c798e17df3 --- opensfm/context.py | 14 ++++++----- opensfm/log.py | 1 - opensfm/mesh.py | 38 ++++++++++++++++++++++------ opensfm/multiview.py | 7 ++++-- opensfm/reconstruction_helpers.py | 24 +++++++++--------- opensfm/report.py | 41 ++++++++++++++++--------------- 6 files changed, 77 insertions(+), 48 deletions(-) diff --git a/opensfm/context.py b/opensfm/context.py index 909eb0c37..5246a76c1 100644 --- a/opensfm/context.py +++ b/opensfm/context.py @@ -1,11 +1,12 @@ import logging import os + try: import resource except ModuleNotFoundError: - pass # Windows -import sys + pass # Windows import ctypes +import sys from typing import Optional import cv2 @@ -61,7 +62,8 @@ def parallel_map(func, args, num_proc, max_batch_size=1): # Memory usage -if sys.platform == 'win32': +if sys.platform == "win32": + class MEMORYSTATUSEX(ctypes.Structure): _fields_ = [ ("dwLength", ctypes.c_ulong), @@ -93,13 +95,14 @@ def current_memory_usage(): stat = MEMORYSTATUSEX() ctypes.windll.kernel32.GlobalMemoryStatusEx(ctypes.byref(stat)) return (stat.ullTotalPhys - stat.ullAvailPhys) / 1024 + + else: if sys.platform == "darwin": rusage_unit = 1 else: rusage_unit = 1024 - def memory_available() -> Optional[int]: 
"""Available memory in MB. @@ -112,7 +115,6 @@ def memory_available() -> Optional[int]: available_mem = int(lines[1].split()[6]) return available_mem - def current_memory_usage(): return resource.getrusage(resource.RUSAGE_SELF).ru_maxrss * rusage_unit @@ -124,4 +126,4 @@ def processes_that_fit_in_memory(desired: int, per_process: int) -> int: fittable = max(1, int(available_mem / per_process)) return min(desired, fittable) else: - return desired \ No newline at end of file + return desired diff --git a/opensfm/log.py b/opensfm/log.py index d9c4adf16..379a3a53b 100644 --- a/opensfm/log.py +++ b/opensfm/log.py @@ -1,6 +1,5 @@ import logging import os -import sys from typing import Optional diff --git a/opensfm/mesh.py b/opensfm/mesh.py index b08410b5b..016372b19 100644 --- a/opensfm/mesh.py +++ b/opensfm/mesh.py @@ -1,16 +1,19 @@ #!/usr/bin/env python3 import itertools import logging +from typing import Any, Tuple, List import numpy as np import scipy.spatial -from opensfm import pygeometry +from opensfm import pygeometry, pymap, types logger = logging.getLogger(__name__) -def triangle_mesh(shot_id, r, tracks_manager): +def triangle_mesh( + shot_id: str, r: types.Reconstruction, tracks_manager: pymap.TracksManager +): """ Create triangle meshes in a list """ @@ -19,9 +22,20 @@ def triangle_mesh(shot_id, r, tracks_manager): shot = r.shots[shot_id] - if shot.camera.projection_type in ["perspective", "brown", "radial", "simple_radial"]: + if shot.camera.projection_type in [ + "perspective", + "brown", + "radial", + "simple_radial", + ]: return triangle_mesh_perspective(shot_id, r, tracks_manager) - elif shot.camera.projection_type in ["fisheye", "fisheye_opencv", "fisheye62", "fisheye624", "dual"]: + elif shot.camera.projection_type in [ + "fisheye", + "fisheye_opencv", + "fisheye62", + "fisheye624", + "dual", + ]: return triangle_mesh_fisheye(shot_id, r, tracks_manager) elif pygeometry.Camera.is_panorama(shot.camera.projection_type): return 
triangle_mesh_spherical(shot_id, r, tracks_manager) @@ -31,7 +45,9 @@ def triangle_mesh(shot_id, r, tracks_manager): ) -def triangle_mesh_perspective(shot_id, r, tracks_manager): +def triangle_mesh_perspective( + shot_id: str, r: types.Reconstruction, tracks_manager: pymap.TracksManager +) -> Tuple[List[Any], List[Any]]: shot = r.shots[shot_id] cam = shot.camera @@ -74,7 +90,9 @@ def triangle_mesh_perspective(shot_id, r, tracks_manager): return vertices, faces -def back_project_no_distortion(shot, pixel, depth): +def back_project_no_distortion( + shot: pymap.Shot, pixel: List[float], depth: float +) -> np.ndarray: """ Back-project a pixel of a perspective camera ignoring its radial distortion """ @@ -85,7 +103,9 @@ def back_project_no_distortion(shot, pixel, depth): return shot.pose.transform_inverse(p) -def triangle_mesh_fisheye(shot_id, r, tracks_manager): +def triangle_mesh_fisheye( + shot_id: str, r: types.Reconstruction, tracks_manager: pymap.TracksManager +) -> Tuple[List[Any], List[Any]]: shot = r.shots[shot_id] bearings = [] @@ -135,7 +155,9 @@ def good_face(face): return vertices, faces -def triangle_mesh_spherical(shot_id, r, tracks_manager): +def triangle_mesh_spherical( + shot_id: str, r: types.Reconstruction, tracks_manager: pymap.TracksManager +) -> Tuple[List[Any], List[Any]]: shot = r.shots[shot_id] bearings = [] diff --git a/opensfm/multiview.py b/opensfm/multiview.py index 011781016..f53a1c5d5 100644 --- a/opensfm/multiview.py +++ b/opensfm/multiview.py @@ -489,7 +489,7 @@ def camera_compass_angle(rotation_matrix: np.ndarray) -> float: def rotation_matrix_from_up_vector_and_compass( - up_vector: np.ndarray, compass_angle: float + up_vector: List[float], compass_angle: float ) -> np.ndarray: """Camera rotation given up_vector and compass. 
@@ -702,7 +702,10 @@ def triangulate_gcp( if len(os) >= 2: thresholds = len(os) * [reproj_threshold] valid_triangulation, X = pygeometry.triangulate_bearings_midpoint( - np.asarray(os), np.asarray(bs), thresholds, np.radians(min_ray_angle_degrees) + np.asarray(os), + np.asarray(bs), + thresholds, + np.radians(min_ray_angle_degrees), ) if valid_triangulation: return X diff --git a/opensfm/reconstruction_helpers.py b/opensfm/reconstruction_helpers.py index 996dac96b..90b29b916 100644 --- a/opensfm/reconstruction_helpers.py +++ b/opensfm/reconstruction_helpers.py @@ -1,6 +1,6 @@ import logging import math -from typing import Dict, Any, Iterable +from typing import Optional, List, Dict, Any, Iterable import numpy as np from opensfm import ( @@ -18,7 +18,7 @@ logger = logging.getLogger(__name__) -def guess_acceleration_from_orientation_tag(orientation): +def guess_acceleration_from_orientation_tag(orientation: int) -> List[float]: """Guess upward vector in camera coordinates given the orientation tag. 
Assumes camera is looking towards the horizon and horizon is horizontal @@ -41,10 +41,10 @@ def guess_acceleration_from_orientation_tag(orientation): return [1, 0, 0] if orientation == 8: return [1, 0, 0] - logger.error("Unknown orientation tag: {}".format(orientation)) + raise RuntimeError(f"Error: Unknown orientation tag: {orientation}") -def orientation_from_acceleration_in_image_axis(x, y, z): +def orientation_from_acceleration_in_image_axis(x:float, y:float) -> int: """Return the orientation tag corresponding to an acceleration""" if y <= -(np.fabs(x)): return 1 @@ -54,9 +54,11 @@ def orientation_from_acceleration_in_image_axis(x, y, z): return 6 elif x >= np.fabs(y): return 8 + else: + raise RuntimeError(f"Error: Invalid acceleration {x}, {y}!") -def transform_acceleration_from_phone_to_image_axis(x, y, z, orientation): +def transform_acceleration_from_phone_to_image_axis(x:float, y:float, z:float, orientation: int) -> List[float]: """Compute acceleration in image axis. Orientation tag is used to ensure that the resulting acceleration points @@ -75,7 +77,7 @@ def transform_acceleration_from_phone_to_image_axis(x, y, z, orientation): ix, iy, iz = -y, -x, -z for _ in range(4): - if orientation == orientation_from_acceleration_in_image_axis(ix, iy, iz): + if orientation == orientation_from_acceleration_in_image_axis(ix, iy): break else: ix, iy = -iy, ix @@ -83,7 +85,7 @@ def transform_acceleration_from_phone_to_image_axis(x, y, z, orientation): return [ix, iy, iz] -def shot_acceleration_in_image_axis(shot): +def shot_acceleration_in_image_axis(shot: pymap.Shot) -> List[float]: """Get or guess shot's acceleration.""" orientation = shot.metadata.orientation.value if not 1 <= orientation <= 8: @@ -99,14 +101,14 @@ def shot_acceleration_in_image_axis(shot): return guess_acceleration_from_orientation_tag(orientation) -def rotation_from_shot_metadata(shot): +def rotation_from_shot_metadata(shot: pymap.Shot) -> np.ndarray: rotation = rotation_from_angles(shot) if 
rotation is None: rotation = rotation_from_orientation_compass(shot) return rotation -def rotation_from_orientation_compass(shot): +def rotation_from_orientation_compass(shot: pymap.Shot) -> np.ndarray: up_vector = shot_acceleration_in_image_axis(shot) if shot.metadata.compass_angle.has_value: angle = shot.metadata.compass_angle.value @@ -115,7 +117,7 @@ def rotation_from_orientation_compass(shot): return multiview.rotation_matrix_from_up_vector_and_compass(up_vector, angle) -def rotation_from_angles(shot): +def rotation_from_angles(shot: pymap.Shot) -> Optional[np.ndarray]: if not shot.metadata.opk_angles.has_value: return None opk_degrees = shot.metadata.opk_angles.value @@ -123,7 +125,7 @@ def rotation_from_angles(shot): return geometry.rotation_from_opk(*opk_rad) -def reconstruction_from_metadata(data: DataSetBase, images: Iterable[str]): +def reconstruction_from_metadata(data: DataSetBase, images: Iterable[str]) -> types.Reconstruction: """Initialize a reconstruction by using EXIF data for constructing shot poses and cameras.""" data.init_reference() rig_assignments = rig.rig_assignments_per_image(data.load_rig_assignments()) diff --git a/opensfm/report.py b/opensfm/report.py index fa68f07e9..b24a89b92 100644 --- a/opensfm/report.py +++ b/opensfm/report.py @@ -7,12 +7,13 @@ from fpdf import FPDF from opensfm import io from opensfm.dataset import DataSet +from typing import Any, Dict logger = logging.getLogger(__name__) class Report: - def __init__(self, data: DataSet): + def __init__(self, data: DataSet) -> None: self.output_path = os.path.join(data.data_path, "stats") self.dataset_name = os.path.basename(data.data_path) self.io_handler = data.io_handler @@ -37,14 +38,14 @@ def __init__(self, data: DataSet): self.stats = self._read_stats_file("stats.json") - def save_report(self, filename): + def save_report(self, filename: str) -> None: bytestring = self.pdf.output(dest="S") with self.io_handler.open( os.path.join(self.output_path, filename), "wb" ) as fwb: 
fwb.write(bytestring) - def _make_table(self, columns_names, rows, row_header=False): + def _make_table(self, columns_names, rows, row_header=False) -> None: self.pdf.set_font("Helvetica", "", self.h3) self.pdf.set_line_width(0.3) @@ -87,25 +88,25 @@ def _make_table(self, columns_names, rows, row_header=False): self.pdf.cell(size, self.cell_height, col, align="L") self.pdf.set_xy(self.margin, self.pdf.get_y() + self.cell_height) - def _read_stats_file(self, filename): + def _read_stats_file(self, filename) -> Dict[str, Any]: file_path = os.path.join(self.output_path, filename) with self.io_handler.open_rt(file_path) as fin: return io.json_load(fin) - def _make_section(self, title): + def _make_section(self, title: str) -> None: self.pdf.set_font("Helvetica", "B", self.h1) self.pdf.set_text_color(*self.mapi_dark_grey) self.pdf.cell(0, self.margin, title, align="L") self.pdf.set_xy(self.margin, self.pdf.get_y() + 1.5 * self.margin) - def _make_subsection(self, title): + def _make_subsection(self, title: str) -> None: self.pdf.set_xy(self.margin, self.pdf.get_y() - 0.5 * self.margin) self.pdf.set_font("Helvetica", "B", self.h2) self.pdf.set_text_color(*self.mapi_dark_grey) self.pdf.cell(0, self.margin, title, align="L") self.pdf.set_xy(self.margin, self.pdf.get_y() + self.margin) - def _make_centered_image(self, image_path, desired_height): + def _make_centered_image(self, image_path: str, desired_height: float) -> None: with tempfile.TemporaryDirectory() as tmp_local_dir: local_image_path = os.path.join(tmp_local_dir, os.path.basename(image_path)) @@ -129,7 +130,7 @@ def _make_centered_image(self, image_path, desired_height): self.margin, self.pdf.get_y() + desired_height + self.margin ) - def make_title(self): + def make_title(self) -> None: # title self.pdf.set_font("Helvetica", "B", self.title_size) self.pdf.set_text_color(*self.mapi_light_green) @@ -158,7 +159,7 @@ def make_title(self): ) self.pdf.set_xy(self.margin, self.pdf.get_y() + 2 * self.margin) - def 
make_dataset_summary(self): + def make_dataset_summary(self) -> None: self._make_section("Dataset Summary") rows = [ @@ -176,13 +177,13 @@ def make_dataset_summary(self): self._make_table(None, rows, True) self.pdf.set_xy(self.margin, self.pdf.get_y() + self.margin) - def _has_meaningful_gcp(self): + def _has_meaningful_gcp(self) -> bool: return ( self.stats["reconstruction_statistics"]["has_gcp"] and "average_error" in self.stats["gcp_errors"] ) - def make_processing_summary(self): + def make_processing_summary(self) -> None: self._make_section("Processing Summary") rec_shots, init_shots = ( @@ -247,7 +248,7 @@ def make_processing_summary(self): self.pdf.set_xy(self.margin, self.pdf.get_y() + self.margin) - def make_processing_time_details(self): + def make_processing_time_details(self) -> None: self._make_section("Processing Time Details") columns_names = list(self.stats["processing_statistics"]["steps_times"].keys()) @@ -258,7 +259,7 @@ def make_processing_time_details(self): self._make_table(columns_names, rows) self.pdf.set_xy(self.margin, self.pdf.get_y() + 2 * self.margin) - def make_gps_details(self): + def make_gps_details(self) -> None: self._make_section("GPS/GCP Errors Details") # GPS @@ -307,7 +308,7 @@ def make_gps_details(self): self.pdf.set_xy(self.margin, self.pdf.get_y() + self.margin / 2) - def make_features_details(self): + def make_features_details(self) -> None: self._make_section("Features Details") heatmap_height = 60 @@ -333,7 +334,7 @@ def make_features_details(self): self.pdf.set_xy(self.margin, self.pdf.get_y() + self.margin) - def make_reconstruction_details(self): + def make_reconstruction_details(self) -> None: self._make_section("Reconstruction Details") rows = [ @@ -369,7 +370,7 @@ def make_reconstruction_details(self): ) self.pdf.set_xy(self.margin, self.pdf.get_y() + self.margin) - def make_camera_models_details(self): + def make_camera_models_details(self) -> None: self._make_section("Camera Models Details") for camera, params in 
self.stats["camera_errors"].items(): @@ -398,7 +399,7 @@ def make_camera_models_details(self): os.path.join(self.output_path, residual_grids[0]), residual_grid_height ) - def make_rig_cameras_details(self): + def make_rig_cameras_details(self) -> None: if len(self.stats["rig_errors"]) == 0: return @@ -444,7 +445,7 @@ def make_rig_cameras_details(self): self._make_table(columns_names, rows) self.pdf.set_xy(self.margin, self.pdf.get_y() + self.margin / 2) - def make_tracks_details(self): + def make_tracks_details(self) -> None: self._make_section("Tracks Details") matchgraph_height = 80 matchgraph = [ @@ -473,10 +474,10 @@ def make_tracks_details(self): self.pdf.set_xy(self.margin, self.pdf.get_y() + self.margin) - def add_page_break(self): + def add_page_break(self) -> None: self.pdf.add_page("P") - def generate_report(self): + def generate_report(self) -> None: self.make_title() self.make_dataset_summary() self.make_processing_summary() From ed3485daca9cd010fe6b471bfd09fa63b7fafebe Mon Sep 17 00:00:00 2001 From: Yann Noutary Date: Tue, 8 Feb 2022 09:30:09 -0800 Subject: [PATCH 18/81] refactor: move custom VLFeat code to OpenSfM Summary: This Diff removes the modification mades to VLFeat, and put them in OpenSfM implementation instead. we had to copy-paste the orientation extraction since there's was no way to append features the the internal covdet structure, so we use a simple std::vector instead. 
Reviewed By: paulinus Differential Revision: D34042916 fbshipit-source-id: 711e2cac75ead165c65a08a652d58bd19a227d20 --- opensfm/features.py | 8 +- opensfm/src/features/hahog.h | 3 +- opensfm/src/features/python/pybind.cc | 3 +- opensfm/src/features/src/hahog.cc | 142 +++++++++++++++++---- opensfm/src/third_party/vlfeat/vl/covdet.c | 70 ---------- opensfm/src/third_party/vlfeat/vl/covdet.h | 3 - 6 files changed, 127 insertions(+), 102 deletions(-) diff --git a/opensfm/features.py b/opensfm/features.py index d1634c446..4aeb05105 100644 --- a/opensfm/features.py +++ b/opensfm/features.py @@ -413,6 +413,7 @@ def extract_features_surf( desc = np.array(np.zeros((0, 3))) return points, desc + def akaze_descriptor_type(name: str) -> pyfeatures.AkazeDescriptorType: d = pyfeatures.AkazeDescriptorType.__dict__ if name in d: @@ -460,7 +461,6 @@ def extract_features_hahog( peak_threshold=config["hahog_peak_threshold"], edge_threshold=config["hahog_edge_threshold"], target_num_features=features_count, - use_adaptive_suppression=config["feature_use_adaptive_suppression"], ) if config["feature_root"]: @@ -593,6 +593,8 @@ def build_flann_index(descriptors: np.ndarray, config: Dict[str, Any]) -> Any: "tree": config["flann_tree"], } else: - raise ValueError("FLANN isn't supported for binary features because of poor-performance. Use BRUTEFORCE instead.") + raise ValueError( + "FLANN isn't supported for binary features because of poor-performance. Use BRUTEFORCE instead." 
+ ) - return context.flann_Index(descriptors, flann_params) \ No newline at end of file + return context.flann_Index(descriptors, flann_params) diff --git a/opensfm/src/features/hahog.h b/opensfm/src/features/hahog.h index 1a6ed0420..7a833f917 100644 --- a/opensfm/src/features/hahog.h +++ b/opensfm/src/features/hahog.h @@ -5,7 +5,6 @@ namespace features { py::tuple hahog(foundation::pyarray_f image, float peak_threshold, - float edge_threshold, int target_num_features, - bool use_adaptive_suppression); + float edge_threshold, int target_num_features); } diff --git a/opensfm/src/features/python/pybind.cc b/opensfm/src/features/python/pybind.cc index 543c14b20..411dafff4 100644 --- a/opensfm/src/features/python/pybind.cc +++ b/opensfm/src/features/python/pybind.cc @@ -55,8 +55,7 @@ PYBIND11_MODULE(pyfeatures, m) { m.def("hahog", features::hahog, py::arg("image"), py::arg("peak_threshold") = 0.003, py::arg("edge_threshold") = 10, - py::arg("target_num_features") = 0, - py::arg("use_adaptive_suppression") = false); + py::arg("target_num_features") = 0); m.def("match_using_words", features::match_using_words); m.def("compute_vlad_descriptor", features::compute_vlad_descriptor, diff --git a/opensfm/src/features/src/hahog.cc b/opensfm/src/features/src/hahog.cc index 2b3c7f959..f72dd7a6b 100644 --- a/opensfm/src/features/src/hahog.cc +++ b/opensfm/src/features/src/hahog.cc @@ -11,9 +11,118 @@ extern "C" { namespace features { +// from VLFeat implementation of _vl_compare_scores +static int vlfeat_compare_scores(const void *a, const void *b) { + float fa = ((VlCovDetFeature *)a)->peakScore; + float fb = ((VlCovDetFeature *)b)->peakScore; + return (fb > fa) - (fb < fa); + // return (fa > fb) - (fa < fb) ; +} + +// select 'target_num_features' for using feature's scores +vl_size select_best_features(VlCovDet *covdet, vl_size num_features, + vl_size target_num_features) { + if (num_features > target_num_features) { + qsort(vl_covdet_get_features(covdet), num_features, 
sizeof(VlCovDetFeature), + vlfeat_compare_scores); + return target_num_features; + } else { + return num_features; + } +} + +// select 'target_num_features' that have a maximum score in their neighbhood. +// The neighborhood is computing using the feature's scale and +// 'non_extrema_suppression' as : neighborhood = non_extrema_suppression * scale +vl_size run_non_maxima_suppression(VlCovDet *covdet, vl_size num_features, + double non_extrema_suppression) { + vl_index i, j; + double tol = non_extrema_suppression; + VlCovDetFeature *features = (VlCovDetFeature *)vl_covdet_get_features(covdet); + for (i = 0; i < (signed)num_features; ++i) { + double x = features[i].frame.x; + double y = features[i].frame.y; + double sigma = features[i].frame.a11; + double score = features[i].peakScore; + + for (j = 0; j < (signed)num_features; ++j) { + double dx_ = features[j].frame.x - x; + double dy_ = features[j].frame.y - y; + double sigma_ = features[j].frame.a11; + double score_ = features[j].peakScore; + if (score_ == 0) continue; + if (sigma < (1 + tol) * sigma_ && sigma_ < (1 + tol) * sigma && + vl_abs_d(dx_) < tol * sigma && vl_abs_d(dy_) < tol * sigma && + vl_abs_d(score) > vl_abs_d(score_)) { + features[j].peakScore = 0; + } + } + } + j = 0; + for (i = 0; i < (signed)num_features; ++i) { + VlCovDetFeature feature = features[i]; + if (features[i].peakScore != 0) { + features[j++] = feature; + } + } + return j; +} + +vl_size run_features_selection(VlCovDet *covdet, vl_size target_num_features) { + vl_size numFeaturesKept = vl_covdet_get_num_features(covdet); + + // keep only 1.5 x targetNumFeatures for speeding-up duplicate detection + if (target_num_features != 0) { + const int to_keep = 3 * target_num_features / 2; + numFeaturesKept = select_best_features(covdet, numFeaturesKept, to_keep); + } + + // Remove non-maxima-in-their-neighborhood features + const double nonMaximaSuppressionTol = + vl_covdet_get_non_extrema_suppression_threshold(covdet); + if 
(nonMaximaSuppressionTol > 0.) { + numFeaturesKept = run_non_maxima_suppression(covdet, numFeaturesKept, + nonMaximaSuppressionTol); + } + + // Keep the N best + return select_best_features(covdet, numFeaturesKept, target_num_features); +} + +std::vector vlfeat_covdet_extract_orientations( + VlCovDet *covdet, vl_size num_features) { + VlCovDetFeature *features = (VlCovDetFeature *)vl_covdet_get_features(covdet); + std::vector vecFeatures; + vecFeatures.reserve(num_features); + vl_index i, j; + for (i = 0; i < (signed)num_features; ++i) { + vl_size numOrientations; + VlCovDetFeature feature = features[i]; + VlCovDetFeatureOrientation *orientations = + vl_covdet_extract_orientations_for_frame(covdet, &numOrientations, + feature.frame); + + for (j = 0; j < (signed)numOrientations; ++j) { + double A[2 * 2] = {feature.frame.a11, feature.frame.a21, + feature.frame.a12, feature.frame.a22}; + double r1 = cos(orientations[j].angle); + double r2 = sin(orientations[j].angle); + + vecFeatures.emplace_back(features[i]); + VlCovDetFeature &oriented = vecFeatures.back(); + + oriented.orientationScore = orientations[j].score; + oriented.frame.a11 = +A[0] * r1 + A[2] * r2; + oriented.frame.a21 = +A[1] * r1 + A[3] * r2; + oriented.frame.a12 = -A[0] * r2 + A[2] * r1; + oriented.frame.a22 = -A[1] * r2 + A[3] * r1; + } + } + return vecFeatures; +} + py::tuple hahog(foundation::pyarray_f image, float peak_threshold, - float edge_threshold, int target_num_features, - bool use_adaptive_suppression) { + float edge_threshold, int target_num_features) { if (!image.size()) { return py::none(); } @@ -31,35 +140,25 @@ py::tuple hahog(foundation::pyarray_f image, float peak_threshold, VlCovDet *covdet = vl_covdet_new(VL_COVDET_METHOD_HESSIAN); // set various parameters (optional) vl_covdet_set_first_octave(covdet, 0); - // vl_covdet_set_octave_resolution(covdet, octaveResolution); vl_covdet_set_peak_threshold(covdet, peak_threshold); vl_covdet_set_edge_threshold(covdet, edge_threshold); - 
vl_covdet_set_target_num_features(covdet, target_num_features); - vl_covdet_set_use_adaptive_suppression(covdet, use_adaptive_suppression); // process the image and run the detector vl_covdet_put_image(covdet, image.data(), image.shape(1), image.shape(0)); - - // clock_t t_scalespace = clock(); - + vl_covdet_set_non_extrema_suppression_threshold(covdet, 0); vl_covdet_detect(covdet); - // clock_t t_detect = clock(); - - // compute the affine shape of the features (optional) - // vl_covdet_extract_affine_shape(covdet); - - // clock_t t_affine = clock(); + // select the best features to keep + numFeatures = run_features_selection(covdet, target_num_features); // compute the orientation of the features (optional) - vl_covdet_extract_orientations(covdet); + std::vector vecFeatures = + vlfeat_covdet_extract_orientations(covdet, numFeatures); + numFeatures = vecFeatures.size(); // clock_t t_orient = clock(); // get feature descriptors - numFeatures = vl_covdet_get_num_features(covdet); - VlCovDetFeature const *feature = - (VlCovDetFeature const *)vl_covdet_get_features(covdet); VlSiftFilt *sift = vl_sift_new(16, 16, 1, 3, 0); vl_index i; vl_index patchResolution = 15; @@ -74,7 +173,7 @@ py::tuple hahog(foundation::pyarray_f image, float peak_threshold, vl_sift_set_magnif(sift, 3.0); for (i = 0; i < (signed)numFeatures; ++i) { - const VlFrameOrientedEllipse &frame = feature[i].frame; + const VlFrameOrientedEllipse &frame = vecFeatures.at(i).frame; float det = frame.a11 * frame.a22 - frame.a12 * frame.a21; float size = sqrt(fabs(det)); float angle = atan2(frame.a21, frame.a11) * 180.0f / M_PI; @@ -111,9 +210,8 @@ py::tuple hahog(foundation::pyarray_f image, float peak_threshold, } return py::make_tuple( - foundation::py_array_from_data(points.data(), numFeatures, 4), - foundation::py_array_from_data(desc.data(), numFeatures, dimension) - ); + foundation::py_array_from_data(points.data(), numFeatures, 4), + foundation::py_array_from_data(desc.data(), numFeatures, 
dimension)); } } // namespace features diff --git a/opensfm/src/third_party/vlfeat/vl/covdet.c b/opensfm/src/third_party/vlfeat/vl/covdet.c index 9b9c00d1a..f9e8c9377 100644 --- a/opensfm/src/third_party/vlfeat/vl/covdet.c +++ b/opensfm/src/third_party/vlfeat/vl/covdet.c @@ -1461,8 +1461,6 @@ struct _VlCovDet double peakThreshold ; /**< peak threshold. */ double edgeThreshold ; /**< edge threshold. */ double lapPeakThreshold; /**< peak threshold for Laplacian scale selection. */ - vl_size targetNumFeatures ;/**< number of features to keep after adaptive non-extrema suppresion. */ - vl_bool useAdaptiveSuppression ; /**< use adaptive non-maximal suppression rather than keeping the bests scores */ vl_size octaveResolution ; /**< resolution of each octave. */ vl_index firstOctave ; /**< index of the first octave. */ @@ -1535,8 +1533,6 @@ vl_covdet_new (VlCovDetMethod method) default: assert(0) ; } - self->targetNumFeatures = 0 ; - self->useAdaptiveSuppression = 0 ; self->nonExtremaSuppression = 0.5 ; self->features = NULL ; @@ -1911,16 +1907,6 @@ _vl_dog_response (float * dog, } } - -static int -_vl_compare_radius (const void * a, - const void * b) -{ - float fa = ((VlCovDetFeature *)a)->minimumSuppressionRadius ; - float fb = ((VlCovDetFeature *)b)->minimumSuppressionRadius ; - return (fb > fa) - (fb < fa) ; -} - static int _vl_compare_scores (const void * a, const void * b) @@ -2119,15 +2105,6 @@ vl_covdet_detect (VlCovDet * self) break ; } - if (self->targetNumFeatures != 0 && !self->useAdaptiveSuppression) { - // Keep only 1.5 x targetNumFeatures for speeding-up duplicate detection - int to_keep = 3 * self->targetNumFeatures / 2; - if (self->numFeatures > to_keep) { - qsort(self->features, self->numFeatures, sizeof(VlCovDetFeature), _vl_compare_scores); - self->numFeatures = to_keep; - } - } - if (self->nonExtremaSuppression) { vl_index i, j ; double tol = self->nonExtremaSuppression ; @@ -2164,40 +2141,6 @@ vl_covdet_detect (VlCovDet * self) self->numFeatures = j 
; } - if (self->targetNumFeatures != 0) { - if (self->useAdaptiveSuppression) { - vl_index i, j ; - double tol = self->nonExtremaSuppression ; - self->numNonExtremaSuppressed = 0 ; - for (i = 0 ; i < (signed)self->numFeatures ; ++i) { - double x = self->features[i].frame.x ; - double y = self->features[i].frame.y ; - double sigma = self->features[i].frame.a11 ; - double score = self->features[i].peakScore ; - self->features[i].minimumSuppressionRadius = 99999999999; - - for (j = 0 ; j < (signed)self->numFeatures ; ++j) { - double score_ = self->features[j].peakScore ; - if (score_ > score) { - double dx_ = self->features[j].frame.x - x ; - double dy_ = self->features[j].frame.y - y ; - double sigma_ = self->features[j].frame.a11 ; - double radius_ = dx_ * dx_ + dy_ * dy_; // TODO(pau) use sigma to compute a 3d radius - if (radius_ < self->features[i].minimumSuppressionRadius) { - self->features[i].minimumSuppressionRadius = radius_; - } - } - } - } - qsort(self->features, self->numFeatures, sizeof(VlCovDetFeature), _vl_compare_radius); - - if (self->numFeatures > self->targetNumFeatures) self->numFeatures = self->targetNumFeatures; - } else { - qsort(self->features, self->numFeatures, sizeof(VlCovDetFeature), _vl_compare_scores); - if (self->numFeatures > self->targetNumFeatures) self->numFeatures = self->targetNumFeatures; - } - } - if (levelxx) vl_free(levelxx) ; if (levelyy) vl_free(levelyy) ; if (levelxy) vl_free(levelxy) ; @@ -3288,19 +3231,6 @@ vl_covdet_set_laplacian_peak_threshold (VlCovDet * self, double peakThreshold) self->lapPeakThreshold = peakThreshold ; } -void -vl_covdet_set_target_num_features (VlCovDet * self, vl_size target) -{ - self->targetNumFeatures = target ; -} - - -void -vl_covdet_set_use_adaptive_suppression (VlCovDet * self, vl_bool target) -{ - self->useAdaptiveSuppression = target ; -} - /* ---------------------------------------------------------------- */ /** @brief Get the index of the first octave ** @param self object. 
diff --git a/opensfm/src/third_party/vlfeat/vl/covdet.h b/opensfm/src/third_party/vlfeat/vl/covdet.h index 6810c670a..802d97000 100644 --- a/opensfm/src/third_party/vlfeat/vl/covdet.h +++ b/opensfm/src/third_party/vlfeat/vl/covdet.h @@ -141,7 +141,6 @@ typedef struct _VlCovDetFeature float edgeScore ; /**< edge score. */ float orientationScore ; /**< orientation score. */ float laplacianScaleScore ; /**< Laplacian scale score. */ - float minimumSuppressionRadius ; /**< distance to the closest stronger feature **/ } VlCovDetFeature ; /** @brief A detected feature orientation */ @@ -253,8 +252,6 @@ VL_EXPORT void vl_covdet_set_octave_resolution (VlCovDet * self, vl_size r) ; VL_EXPORT void vl_covdet_set_peak_threshold (VlCovDet * self, double peakThreshold) ; VL_EXPORT void vl_covdet_set_edge_threshold (VlCovDet * self, double edgeThreshold) ; VL_EXPORT void vl_covdet_set_laplacian_peak_threshold (VlCovDet * self, double peakThreshold) ; -VL_EXPORT void vl_covdet_set_target_num_features (VlCovDet * self, vl_size target) ; -VL_EXPORT void vl_covdet_set_use_adaptive_suppression (VlCovDet * self, vl_bool use) ; VL_EXPORT void vl_covdet_set_transposed (VlCovDet * self, vl_bool t) ; VL_EXPORT void vl_covdet_set_aa_accurate_smoothing (VlCovDet * self, vl_bool x) ; VL_EXPORT void vl_covdet_set_non_extrema_suppression_threshold (VlCovDet * self, double x) ; From 3314ae7b72e1d7073d9b26cac0d45c9d14ed9985 Mon Sep 17 00:00:00 2001 From: Jason Yu-Tseh Chi Date: Tue, 8 Feb 2022 19:33:00 -0800 Subject: [PATCH 19/81] opensfm use vlfeat in //third-party (#859) Summary: Switch the vlfeat used in opensfm to //third-party/vlfeat/0.9.20. Disable the TARGET for vlfeat under //mapillary/opensfm/opensfm/src/third_party/vlfeat. 
Pull Request resolved: https://github.com/mapillary/OpenSfM/pull/859 Reviewed By: YanNoun Differential Revision: D33965002 fbshipit-source-id: 12f37fbf325618f4f0660fc67b1be9e919609982 --- opensfm/src/CMakeLists.txt | 5 +++++ opensfm/src/features/src/hahog.cc | 19 +++---------------- opensfm/src/third_party/vlfeat/CMakeLists.txt | 2 ++ opensfm/src/third_party/vlfeat/README.opensfm | 3 ++- opensfm/src/third_party/vlfeat/vl/covdet.c | 6 +++++- opensfm/src/third_party/vlfeat/vl/covdet.h | 2 +- 6 files changed, 18 insertions(+), 19 deletions(-) diff --git a/opensfm/src/CMakeLists.txt b/opensfm/src/CMakeLists.txt index 5eb681868..8f67492e9 100644 --- a/opensfm/src/CMakeLists.txt +++ b/opensfm/src/CMakeLists.txt @@ -37,6 +37,9 @@ endif() # For compiling VLFeat add_definitions(-DVL_DISABLE_AVX) +# Use the version of vlfeat in ./src/third_party/vlfeat +add_definitions(-DINPLACE_VLFEAT) + if (WIN32) # Missing math constant add_definitions(-DM_PI=3.14159265358979323846) @@ -71,6 +74,8 @@ add_subdirectory(third_party/pybind11) add_subdirectory(third_party/akaze) add_subdirectory(third_party/vlfeat) +include_directories(third_party/vlfeat) + ####### Debugging ####### if (CMAKE_BUILD_TYPE MATCHES DEBUG) add_executable(debug_c_extension diff --git a/opensfm/src/features/src/hahog.cc b/opensfm/src/features/src/hahog.cc index f72dd7a6b..6f8002c64 100644 --- a/opensfm/src/features/src/hahog.cc +++ b/opensfm/src/features/src/hahog.cc @@ -4,9 +4,9 @@ #include extern "C" { -#include -#include #include +#include +#include } namespace features { @@ -16,7 +16,6 @@ static int vlfeat_compare_scores(const void *a, const void *b) { float fa = ((VlCovDetFeature *)a)->peakScore; float fb = ((VlCovDetFeature *)b)->peakScore; return (fb > fa) - (fb < fa); - // return (fa > fb) - (fa < fb) ; } // select 'target_num_features' for using feature's scores @@ -135,7 +134,6 @@ py::tuple hahog(foundation::pyarray_f image, float peak_threshold, { py::gil_scoped_release release; - // clock_t t_start = 
clock(); // create a detector object VlCovDet *covdet = vl_covdet_new(VL_COVDET_METHOD_HESSIAN); // set various parameters (optional) @@ -146,7 +144,7 @@ py::tuple hahog(foundation::pyarray_f image, float peak_threshold, // process the image and run the detector vl_covdet_put_image(covdet, image.data(), image.shape(1), image.shape(0)); vl_covdet_set_non_extrema_suppression_threshold(covdet, 0); - vl_covdet_detect(covdet); + vl_covdet_detect(covdet, std::numeric_limits::max()); // select the best features to keep numFeatures = run_features_selection(covdet, target_num_features); @@ -156,8 +154,6 @@ py::tuple hahog(foundation::pyarray_f image, float peak_threshold, vlfeat_covdet_extract_orientations(covdet, numFeatures); numFeatures = vecFeatures.size(); - // clock_t t_orient = clock(); - // get feature descriptors VlSiftFilt *sift = vl_sift_new(16, 16, 1, 3, 0); vl_index i; @@ -198,15 +194,6 @@ py::tuple hahog(foundation::pyarray_f image, float peak_threshold, } vl_sift_delete(sift); vl_covdet_delete(covdet); - - // clock_t t_description = clock(); - // std::cout << "t_scalespace " << float(t_scalespace - - // t_start)/CLOCKS_PER_SEC << "\n"; std::cout << "t_detect " << - // float(t_detect - t_scalespace)/CLOCKS_PER_SEC << "\n"; std::cout << - // "t_affine " << float(t_affine - t_detect)/CLOCKS_PER_SEC << "\n"; - // std::cout << "t_orient " << float(t_orient - t_affine)/CLOCKS_PER_SEC << - // "\n"; std::cout << "description " << float(t_description - - // t_orient)/CLOCKS_PER_SEC << "\n"; } return py::make_tuple( diff --git a/opensfm/src/third_party/vlfeat/CMakeLists.txt b/opensfm/src/third_party/vlfeat/CMakeLists.txt index 64d5c3ecd..c24198f68 100644 --- a/opensfm/src/third_party/vlfeat/CMakeLists.txt +++ b/opensfm/src/third_party/vlfeat/CMakeLists.txt @@ -9,6 +9,8 @@ if(WIN32) endif() add_library(vl ${VLFEAT_SRCS}) + + target_include_directories(vl PRIVATE ${CMAKE_SOURCE_DIR}/third_party/vlfeat diff --git a/opensfm/src/third_party/vlfeat/README.opensfm 
b/opensfm/src/third_party/vlfeat/README.opensfm index 00fcf2971..bf5f12f02 100644 --- a/opensfm/src/third_party/vlfeat/README.opensfm +++ b/opensfm/src/third_party/vlfeat/README.opensfm @@ -4,10 +4,11 @@ License: BSD Version: 0.9.20 2015-01-14 Local modifications: + * Strip the library to contain only relevant source files. * Set vl_size to be signed. This avoids some UndefinedBehaviorSanitizer runtime errors. The following code is common inside the library: ptr = ptr - some_negative_signed_int * some_vl_size ; -The UBSAN complains about possible overflow errors when substracting an unsigned int. Explicitly casting the vl_size var to int solves the issue. Instead, we switch vl_size to be signed from the begining. It would otherwise be difficult to find all the instances of this problem since it is only found at runtime. +The UBSAN complains about possible overflow errors when substracting an unsigned int. Explicitly casting the vl_size var to int solves the issue. Instead, we switch vl_size to be signed from the begini diff --git a/opensfm/src/third_party/vlfeat/vl/covdet.c b/opensfm/src/third_party/vlfeat/vl/covdet.c index f9e8c9377..5bba8f6d9 100644 --- a/opensfm/src/third_party/vlfeat/vl/covdet.c +++ b/opensfm/src/third_party/vlfeat/vl/covdet.c @@ -1929,7 +1929,7 @@ _vl_compare_scores (const void * a, **/ void -vl_covdet_detect (VlCovDet * self) +vl_covdet_detect (VlCovDet * self, vl_size max_num_features) { VlScaleSpaceGeometry geom = vl_scalespace_get_geometry(self->gss) ; VlScaleSpaceGeometry cgeom ; @@ -2090,6 +2090,9 @@ vl_covdet_detect (VlCovDet * self) break ; } } + if (self->numFeatures >= max_num_features) { + break; + } } /* next octave */ if (extrema) { vl_free(extrema) ; extrema = 0 ; } @@ -3231,6 +3234,7 @@ vl_covdet_set_laplacian_peak_threshold (VlCovDet * self, double peakThreshold) self->lapPeakThreshold = peakThreshold ; } + /* ---------------------------------------------------------------- */ /** @brief Get the index of the first octave ** 
@param self object. diff --git a/opensfm/src/third_party/vlfeat/vl/covdet.h b/opensfm/src/third_party/vlfeat/vl/covdet.h index 802d97000..94c4d65a9 100644 --- a/opensfm/src/third_party/vlfeat/vl/covdet.h +++ b/opensfm/src/third_party/vlfeat/vl/covdet.h @@ -195,7 +195,7 @@ VL_EXPORT int vl_covdet_put_image (VlCovDet * self, float const * image, vl_size width, vl_size height) ; -VL_EXPORT void vl_covdet_detect (VlCovDet * self) ; +VL_EXPORT void vl_covdet_detect (VlCovDet * self, vl_size max_num_features ) ; VL_EXPORT int vl_covdet_append_feature (VlCovDet * self, VlCovDetFeature const * feature) ; VL_EXPORT void vl_covdet_extract_orientations (VlCovDet * self) ; VL_EXPORT void vl_covdet_extract_laplacian_scales (VlCovDet * self) ; From b3bb7638337c81436f1c1c2ca4701e6eb361e7ab Mon Sep 17 00:00:00 2001 From: Jason Yu-Tseh Chi Date: Tue, 8 Feb 2022 23:59:23 -0800 Subject: [PATCH 20/81] Update vlfeat to version 0.9.20 (#862) Summary: This is a simple copy & paste from 0.9.20 to opensfm/src/third_party/vlfeat. 
Reviewed By: paulinus Differential Revision: D34099647 fbshipit-source-id: b0d592e167f07262259b1cfd79200d1f498c2468 --- opensfm/src/third_party/vlfeat/README.opensfm | 4 +- opensfm/src/third_party/vlfeat/vl/covdet.c | 21 ++-- opensfm/src/third_party/vlfeat/vl/covdet.h | 6 +- .../vlfeat/vl/{float.th => float.h} | 2 +- opensfm/src/third_party/vlfeat/vl/generic.c | 99 ++++++++----------- opensfm/src/third_party/vlfeat/vl/generic.h | 3 + opensfm/src/third_party/vlfeat/vl/gmm.c | 20 ++-- opensfm/src/third_party/vlfeat/vl/homkermap.c | 2 +- opensfm/src/third_party/vlfeat/vl/host.c | 2 + opensfm/src/third_party/vlfeat/vl/host.h | 6 +- opensfm/src/third_party/vlfeat/vl/imopv.c | 2 +- .../src/third_party/vlfeat/vl/imopv_sse2.c | 2 +- opensfm/src/third_party/vlfeat/vl/kmeans.c | 11 ++- opensfm/src/third_party/vlfeat/vl/mathop.c | 2 +- .../src/third_party/vlfeat/vl/mathop_avx.c | 2 +- .../src/third_party/vlfeat/vl/mathop_avx.h | 2 +- .../src/third_party/vlfeat/vl/mathop_sse2.c | 2 +- .../src/third_party/vlfeat/vl/mathop_sse2.h | 2 +- .../src/third_party/vlfeat/vl/svmdataset.c | 2 +- opensfm/src/third_party/vlfeat/vl/vlad.c | 2 +- 20 files changed, 86 insertions(+), 108 deletions(-) rename opensfm/src/third_party/vlfeat/vl/{float.th => float.h} (99%) diff --git a/opensfm/src/third_party/vlfeat/README.opensfm b/opensfm/src/third_party/vlfeat/README.opensfm index bf5f12f02..c755884ee 100644 --- a/opensfm/src/third_party/vlfeat/README.opensfm +++ b/opensfm/src/third_party/vlfeat/README.opensfm @@ -5,10 +5,8 @@ Version: 0.9.20 2015-01-14 Local modifications: -* Strip the library to contain only relevant source files. - * Set vl_size to be signed. This avoids some UndefinedBehaviorSanitizer runtime errors. The following code is common inside the library: ptr = ptr - some_negative_signed_int * some_vl_size ; -The UBSAN complains about possible overflow errors when substracting an unsigned int. Explicitly casting the vl_size var to int solves the issue. 
Instead, we switch vl_size to be signed from the begini +The UBSAN complains about possible overflow errors when substracting an unsigned int. Explicitly casting the vl_size var to int solves the issue. Instead, we switch vl_size to be signed from the begining. It would otherwise be difficult to find all the instances of this problem since it is only found at runtime. diff --git a/opensfm/src/third_party/vlfeat/vl/covdet.c b/opensfm/src/third_party/vlfeat/vl/covdet.c index 5bba8f6d9..0b0592f0d 100644 --- a/opensfm/src/third_party/vlfeat/vl/covdet.c +++ b/opensfm/src/third_party/vlfeat/vl/covdet.c @@ -944,7 +944,6 @@ taking the difference of successive scale space images #include "covdet.h" #include -#include /** @brief Reallocate buffer ** @param buffer @@ -1907,16 +1906,6 @@ _vl_dog_response (float * dog, } } -static int -_vl_compare_scores (const void * a, - const void * b) -{ - float fa = ((VlCovDetFeature *)a)->peakScore ; - float fb = ((VlCovDetFeature *)b)->peakScore ; - return (fb > fa) - (fb < fa) ; -//return (fa > fb) - (fa < fb) ; -} - /* ---------------------------------------------------------------- */ /* Detect features */ /* ---------------------------------------------------------------- */ @@ -2005,7 +1994,7 @@ vl_covdet_detect (VlCovDet * self, vl_size max_num_features) vl_size extremaBufferSize = 0 ; vl_size numExtrema ; vl_size index ; - for (o = cgeom.firstOctave ; o <= cgeom.lastOctave ; ++o) { + for (o = cgeom.lastOctave; o >= cgeom.firstOctave; --o) { VlScaleSpaceOctaveGeometry octgeom = vl_scalespace_get_octave_geometry(self->css, o) ; double step = octgeom.step ; vl_size width = octgeom.width ; @@ -2044,6 +2033,8 @@ vl_covdet_detect (VlCovDet * self, vl_size max_num_features) feature.frame.a12 = 0.0 ; feature.frame.a21 = 0.0 ; feature.frame.a22 = sigma ; + feature.o = o ; + feature.s = round(refined.z) ; feature.peakScore = refined.peakScore ; feature.edgeScore = refined.edgeScore ; vl_covdet_append_feature(self, &feature) ; @@ -2081,6 
+2072,8 @@ vl_covdet_detect (VlCovDet * self, vl_size max_num_features) feature.frame.a12 = 0.0 ; feature.frame.a21 = 0.0 ; feature.frame.a22 = sigma ; + feature.o = o ; + feature.s = s ; feature.peakScore = refined.peakScore ; feature.edgeScore = refined.edgeScore ; vl_covdet_append_feature(self, &feature) ; @@ -2117,6 +2110,7 @@ vl_covdet_detect (VlCovDet * self, vl_size max_num_features) double y = self->features[i].frame.y ; double sigma = self->features[i].frame.a11 ; double score = self->features[i].peakScore ; + if (score == 0) continue ; for (j = 0 ; j < (signed)self->numFeatures ; ++j) { double dx_ = self->features[j].frame.x - x ; @@ -3234,7 +3228,6 @@ vl_covdet_set_laplacian_peak_threshold (VlCovDet * self, double peakThreshold) self->lapPeakThreshold = peakThreshold ; } - /* ---------------------------------------------------------------- */ /** @brief Get the index of the first octave ** @param self object. @@ -3356,7 +3349,7 @@ vl_covdet_get_num_features (VlCovDet const * self) /** @brief Get the stored frames ** @return frames stored in the detector. **/ -void * +VlCovDetFeature * vl_covdet_get_features (VlCovDet * self) { return self->features ; diff --git a/opensfm/src/third_party/vlfeat/vl/covdet.h b/opensfm/src/third_party/vlfeat/vl/covdet.h index 94c4d65a9..c0fdabb72 100644 --- a/opensfm/src/third_party/vlfeat/vl/covdet.h +++ b/opensfm/src/third_party/vlfeat/vl/covdet.h @@ -137,6 +137,8 @@ vl_get_frame_type (vl_bool affineAdaptation, vl_bool orientation) typedef struct _VlCovDetFeature { VlFrameOrientedEllipse frame ; /**< feature frame. */ + int o ; /**< Detected octave. */ + int s ; /**< Octave subdivision. */ float peakScore ; /**< peak score. */ float edgeScore ; /**< edge score. */ float orientationScore ; /**< orientation score. 
*/ @@ -195,7 +197,7 @@ VL_EXPORT int vl_covdet_put_image (VlCovDet * self, float const * image, vl_size width, vl_size height) ; -VL_EXPORT void vl_covdet_detect (VlCovDet * self, vl_size max_num_features ) ; +VL_EXPORT void vl_covdet_detect (VlCovDet * self, vl_size max_num_features) ; VL_EXPORT int vl_covdet_append_feature (VlCovDet * self, VlCovDetFeature const * feature) ; VL_EXPORT void vl_covdet_extract_orientations (VlCovDet * self) ; VL_EXPORT void vl_covdet_extract_laplacian_scales (VlCovDet * self) ; @@ -229,7 +231,7 @@ vl_covdet_drop_features_outside (VlCovDet * self, double margin) ; /** @name Retrieve data and parameters ** @{ */ VL_EXPORT vl_size vl_covdet_get_num_features (VlCovDet const * self) ; -VL_EXPORT void * vl_covdet_get_features (VlCovDet * self) ; +VL_EXPORT VlCovDetFeature * vl_covdet_get_features (VlCovDet * self) ; VL_EXPORT vl_index vl_covdet_get_first_octave (VlCovDet const * self) ; VL_EXPORT vl_size vl_covdet_get_octave_resolution (VlCovDet const * self) ; VL_EXPORT double vl_covdet_get_peak_threshold (VlCovDet const * self) ; diff --git a/opensfm/src/third_party/vlfeat/vl/float.th b/opensfm/src/third_party/vlfeat/vl/float.h similarity index 99% rename from opensfm/src/third_party/vlfeat/vl/float.th rename to opensfm/src/third_party/vlfeat/vl/float.h index a66cbf0a4..267ca98f8 100644 --- a/opensfm/src/third_party/vlfeat/vl/float.th +++ b/opensfm/src/third_party/vlfeat/vl/float.h @@ -1,4 +1,4 @@ -/** @file float.th +/** @file float.h ** @brief Float - Template ** @author Andrea Vedaldi ** @author David Novotny diff --git a/opensfm/src/third_party/vlfeat/vl/generic.c b/opensfm/src/third_party/vlfeat/vl/generic.c index 11460b0a8..d4982e43b 100644 --- a/opensfm/src/third_party/vlfeat/vl/generic.c +++ b/opensfm/src/third_party/vlfeat/vl/generic.c @@ -777,16 +777,14 @@ calling @c omp_set_num_threads() in the application. Note that: #if defined(VL_OS_WIN) #include +#else +#include #endif #if ! 
defined(VL_DISABLE_THREADS) && defined(VL_THREADS_POSIX) #include #endif -#if defined(VL_OS_MACOSX) || defined(VL_OS_LINUX) -#include -#endif - #if defined(_OPENMP) #include #endif @@ -1504,66 +1502,13 @@ vl_thread_specific_state_delete (VlThreadState * self) #endif free (self) ; } -/* ---------------------------------------------------------------- */ -/* DLL entry and exit points */ -/* ---------------------------------------------------------------- */ -/* A constructor and a destructor must be called to initialize or dispose of VLFeat - * state when the DLL is loaded or unloaded. This is obtained - * in different ways depending on the operating system. - */ - -#if (defined(VL_OS_LINUX) || defined(VL_OS_MACOSX)) && defined(VL_COMPILER_GNUC) -static void vl_constructor () __attribute__ ((constructor)) ; -static void vl_destructor () __attribute__ ((destructor)) ; -#endif - -#if defined(VL_OS_WIN) -static void vl_constructor () ; -static void vl_destructor () ; - -BOOL WINAPI DllMain( - HINSTANCE hinstDLL, // handle to DLL module - DWORD fdwReason, // reason for calling function - LPVOID lpReserved ) // reserved -{ - VlState * state ; - VlThreadState * threadState ; - switch (fdwReason) { - case DLL_PROCESS_ATTACH: - /* Initialize once for each new process */ - vl_constructor () ; - break ; - - case DLL_THREAD_ATTACH: - /* Do thread-specific initialization */ - break ; - - case DLL_THREAD_DETACH: - /* Do thread-specific cleanup */ -#if ! 
defined(VL_DISABLE_THREADS) && defined(VL_THREADS_WIN) - state = vl_get_state() ; - threadState = (VlThreadState*) TlsGetValue(state->tlsIndex) ; - if (threadState) { - vl_thread_specific_state_delete (threadState) ; - } -#endif - break; - - case DLL_PROCESS_DETACH: - /* Perform any necessary cleanup */ - vl_destructor () ; - break; - } - return TRUE ; /* Successful DLL_PROCESS_ATTACH */ -} -#endif /* VL_OS_WIN */ /* ---------------------------------------------------------------- */ /* Library constructor and destructor */ /* ---------------------------------------------------------------- */ /** @internal @brief Initialize VLFeat state */ -static void +void vl_constructor (void) { VlState * state ; @@ -1617,7 +1562,7 @@ vl_constructor (void) GetSystemInfo (&info) ; state->numCPUs = info.dwNumberOfProcessors ; } -#elif defined(VL_OS_MACOSX) || defined(VL_OS_LINUX) +#elif defined(_SC_NPROCESSORS_ONLN) state->numCPUs = sysconf(_SC_NPROCESSORS_ONLN) ; #else state->numCPUs = 1 ; @@ -1637,7 +1582,7 @@ vl_constructor (void) } /** @internal @brief Destruct VLFeat */ -static void +void vl_destructor () { VlState * state ; @@ -1698,3 +1643,37 @@ vl_destructor () printf("VLFeat DEBUG: destructor ends.\n") ; #endif } + +/* ---------------------------------------------------------------- */ +/* Cross-platform call to constructor/destructor */ +/* ---------------------------------------------------------------- */ + +#ifdef __cplusplus + #define INITIALIZER(f) \ + static void f(void); \ + struct f##_t_ { f##_t_(void) { f(); } }; static f##_t_ f##_; \ + static void f(void) +#elif defined(_MSC_VER) + #pragma section(".CRT$XCU",read) + #define INITIALIZER2_(f,p) \ + static void f(void); \ + __declspec(allocate(".CRT$XCU")) void (*f##_)(void) = f; \ + __pragma(comment(linker,"/include:" p #f "_")) \ + static void f(void) + #ifdef _WIN64 + #define INITIALIZER(f) INITIALIZER2_(f,"") + #else + #define INITIALIZER(f) INITIALIZER2_(f,"_") + #endif +#else + #define INITIALIZER(f) \ + 
static void f(void) __attribute__((constructor)); \ + static void f(void) +#endif + +INITIALIZER(vl_initialize) +{ + vl_constructor(); + atexit(vl_destructor); +} + diff --git a/opensfm/src/third_party/vlfeat/vl/generic.h b/opensfm/src/third_party/vlfeat/vl/generic.h index 888c4315e..4a6296e8c 100644 --- a/opensfm/src/third_party/vlfeat/vl/generic.h +++ b/opensfm/src/third_party/vlfeat/vl/generic.h @@ -54,6 +54,9 @@ typedef vl_uint32 vl_type ; ** ::VL_TYPE_UINT8, ::VL_TYPE_UINT16, ::VL_TYPE_UINT32, ::VL_TYPE_UINT64. **/ +void vl_constructor(); +void vl_destructor(); + VL_INLINE char const * vl_get_type_name (vl_type type) { diff --git a/opensfm/src/third_party/vlfeat/vl/gmm.c b/opensfm/src/third_party/vlfeat/vl/gmm.c index 8712c58f3..585044576 100644 --- a/opensfm/src/third_party/vlfeat/vl/gmm.c +++ b/opensfm/src/third_party/vlfeat/vl/gmm.c @@ -719,7 +719,7 @@ VL_XCAT(vl_get_gmm_data_posteriors_, SFX) TYPE const * covariances, TYPE const * data) { - vl_index i_d = 0, i_cl = 0; + vl_index i_d, i_cl; vl_size dim; double LL = 0; @@ -899,20 +899,20 @@ VL_XCAT(_vl_gmm_restart_empty_modes_, SFX) (VlGMM * self, TYPE const * data) Search for the Gaussian components that (approximately) maximally contribute to make the negative log-likelihood of the data large. Then split the worst offender. - + To do so, we approximate the exptected log-likelihood of the GMM: - + E[-log(f(x))] = H(f) = - log \int f(x) log f(x) - + where the density f(x) = sum_k pk gk(x) is a GMM. This is intractable but it is easy to approximate if we suppose that supp gk is disjoint with supp gq for all components k ~= q. In this canse - + H(f) ~= sum_k [ - pk log(pk) + pk H(gk) ] - + where H(gk) is the entropy of component k taken alone. 
The entropy of the latter is given by: - + H(gk) = D/2 (1 + log(2pi) + 1/2 sum_{i=0}^D log sigma_i^2 */ @@ -1044,7 +1044,7 @@ VL_XCAT(_vl_gmm_maximization_, SFX) vl_size numData) { vl_size numClusters = self->numClusters; - vl_index i_d = 0, i_cl = 0; + vl_index i_d, i_cl; vl_size dim ; TYPE * oldMeans ; double time = 0 ; @@ -1102,13 +1102,13 @@ VL_XCAT(_vl_gmm_maximization_, SFX) #ifndef VL_DISABLE_AVX if (vl_get_simd_enabled() && vl_cpu_has_avx()) { - VL_XCAT(_vl_weighted_mean_sse2_, SFX) + VL_XCAT(_vl_weighted_mean_avx_, SFX) (self->dimension, means_+ i_cl * self->dimension, data + i_d * self->dimension, p) ; - VL_XCAT(_vl_weighted_sigma_sse2_, SFX) + VL_XCAT(_vl_weighted_sigma_avx_, SFX) (self->dimension, covariances_ + i_cl * self->dimension, data + i_d * self->dimension, diff --git a/opensfm/src/third_party/vlfeat/vl/homkermap.c b/opensfm/src/third_party/vlfeat/vl/homkermap.c index d7e4e9c2f..9113990fb 100644 --- a/opensfm/src/third_party/vlfeat/vl/homkermap.c +++ b/opensfm/src/third_party/vlfeat/vl/homkermap.c @@ -526,7 +526,7 @@ vl_homogeneouskernelmap_get_window_type (VlHomogeneousKernelMap const * self) #ifdef VL_HOMKERMAP_INSTANTIATING /* ---------------------------------------------------------------- */ -#include "float.th" +#include "float.h" void VL_XCAT(vl_homogeneouskernelmap_evaluate_,SFX) diff --git a/opensfm/src/third_party/vlfeat/vl/host.c b/opensfm/src/third_party/vlfeat/vl/host.c index 499db6484..50a0f0f40 100644 --- a/opensfm/src/third_party/vlfeat/vl/host.c +++ b/opensfm/src/third_party/vlfeat/vl/host.c @@ -442,6 +442,7 @@ _vl_cpuid (vl_int32* info, int function) #endif #if defined(HAS_CPUID) + void _vl_x86cpu_info_init (VlX86CpuInfo *self) { @@ -464,6 +465,7 @@ _vl_x86cpu_info_init (VlX86CpuInfo *self) self->hasAVX = info[2] & (1 << 28) ; } } + #endif char * diff --git a/opensfm/src/third_party/vlfeat/vl/host.h b/opensfm/src/third_party/vlfeat/vl/host.h index 837093af7..fb2700c77 100644 --- a/opensfm/src/third_party/vlfeat/vl/host.h +++ 
b/opensfm/src/third_party/vlfeat/vl/host.h @@ -199,7 +199,7 @@ the terms of the BSD license (see the COPYING file). /** @name Identifying the host threading library ** @{ */ -#if defined(VL_OS_MACOSX) || defined(VL_OS_LINUX) || \ +#if !defined(VL_OS_WIN) && !defined(VL_OS_WIN64) || \ defined(__DOXYGEN__) #define VL_THREADS_POSIX 1 #endif @@ -312,7 +312,7 @@ defined(__DOXYGEN__) #if defined(VL_COMPILER_MSC) & ! defined(__DOXYGEN__) # define VL_UNUSED # define VL_INLINE static __inline -#if _MSC_VER < 1900 +#if defined _MSC_VER && _MSC_VER < 1900 # define snprintf _snprintf #endif # define isnan _isnan @@ -389,7 +389,7 @@ typedef unsigned int vl_uint ; /**< @brief Same as unsigned int< typedef int vl_bool ; /**< @brief Boolean. */ typedef vl_int64 vl_intptr ; /**< @brief Integer holding a pointer. */ typedef vl_uint64 vl_uintptr ; /**< @brief Unsigned integer holding a pointer. */ -typedef vl_int64 vl_size ; /**< @brief Integer holding the size of a memory block. */ +typedef vl_int64 vl_size ; /**< @brief Signed integer holding the size of a memory block. */ typedef vl_int64 vl_index ; /**< @brief Signed version of ::vl_size and ::vl_uindex */ typedef vl_uint64 vl_uindex ; /**< @brief Same as ::vl_size */ #endif diff --git a/opensfm/src/third_party/vlfeat/vl/imopv.c b/opensfm/src/third_party/vlfeat/vl/imopv.c index 225b73324..984b5061a 100644 --- a/opensfm/src/third_party/vlfeat/vl/imopv.c +++ b/opensfm/src/third_party/vlfeat/vl/imopv.c @@ -59,7 +59,7 @@ the terms of the BSD license (see the COPYING file). 
#if defined(VL_IMOPV_INSTANTIATING) || defined(__DOXYGEN__) -#include "float.th" +#include "float.h" /* ---------------------------------------------------------------- */ /* Image Convolution */ diff --git a/opensfm/src/third_party/vlfeat/vl/imopv_sse2.c b/opensfm/src/third_party/vlfeat/vl/imopv_sse2.c index 016c23f2f..11df6d858 100644 --- a/opensfm/src/third_party/vlfeat/vl/imopv_sse2.c +++ b/opensfm/src/third_party/vlfeat/vl/imopv_sse2.c @@ -36,7 +36,7 @@ the terms of the BSD license (see the COPYING file). /* VL_IMOPV_SSE2_INSTANTIATING */ #else -#include "float.th" +#include "float.h" /* ---------------------------------------------------------------- */ void diff --git a/opensfm/src/third_party/vlfeat/vl/kmeans.c b/opensfm/src/third_party/vlfeat/vl/kmeans.c index 6eb62fab0..b7c93088b 100644 --- a/opensfm/src/third_party/vlfeat/vl/kmeans.c +++ b/opensfm/src/third_party/vlfeat/vl/kmeans.c @@ -660,7 +660,7 @@ VL_XCAT(_vl_kmeans_quantize_, SFX) TYPE const * data, vl_size numData) { - vl_index i = 0 ; + vl_index i ; #if (FLT == VL_TYPE_FLOAT) VlFloatVectorComparisonFunction distFn = vl_get_vector_comparison_function_f(self->distance) ; @@ -726,7 +726,7 @@ VL_XCAT(_vl_kmeans_quantize_ann_, SFX) vl_kdforest_build(forest,self->numCenters,self->centers); #ifdef _OPENMP -#pragma omp parallel \ +#pragma omp parallel default(none) \ num_threads(vl_get_max_threads()) \ shared(self, forest, update, assignments, distances, data, numData, distFn) #endif @@ -880,7 +880,7 @@ VL_XCAT(_vl_kmeans_refine_centers_lloyd_, SFX) } break ; } - + if (iteration == 0) { initialEnergy = energy ; } else { @@ -892,7 +892,7 @@ VL_XCAT(_vl_kmeans_refine_centers_lloyd_, SFX) break ; } } - + /* begin next iteration */ previousEnergy = energy ; @@ -1056,7 +1056,7 @@ VL_XCAT(_vl_kmeans_refine_centers_ann_, SFX) } break ; } - + if (iteration == 0) { initialEnergy = energy ; } else { @@ -1489,6 +1489,7 @@ VL_XCAT(_vl_kmeans_refine_centers_elkan_, SFX) #if defined(_OPENMP) #pragma omp parallel for \ 
+ default(none) \ shared(self,numData, \ pointToClosestCenterUB,pointToCenterLB, \ nextCenterDistances,pointToClosestCenterUBIsStrict, \ diff --git a/opensfm/src/third_party/vlfeat/vl/mathop.c b/opensfm/src/third_party/vlfeat/vl/mathop.c index 27bd7d47b..d2a292697 100644 --- a/opensfm/src/third_party/vlfeat/vl/mathop.c +++ b/opensfm/src/third_party/vlfeat/vl/mathop.c @@ -279,7 +279,7 @@ optimal because $2^k > r$. This yields the algorithm: /* ---------------------------------------------------------------- */ #ifdef VL_MATHOP_INSTANTIATING -#include "float.th" +#include "float.h" #undef COMPARISONFUNCTION_TYPE #undef COMPARISONFUNCTION3_TYPE diff --git a/opensfm/src/third_party/vlfeat/vl/mathop_avx.c b/opensfm/src/third_party/vlfeat/vl/mathop_avx.c index 1792be88f..03dba22d8 100644 --- a/opensfm/src/third_party/vlfeat/vl/mathop_avx.c +++ b/opensfm/src/third_party/vlfeat/vl/mathop_avx.c @@ -38,7 +38,7 @@ the terms of the BSD license (see the COPYING file). #include #include "generic.h" #include "mathop.h" -#include "float.th" +#include "float.h" VL_INLINE T VL_XCAT(_vl_vhsum_avx_, SFX)(VTYPEavx x) diff --git a/opensfm/src/third_party/vlfeat/vl/mathop_avx.h b/opensfm/src/third_party/vlfeat/vl/mathop_avx.h index d3a934e4c..2acebf0fd 100644 --- a/opensfm/src/third_party/vlfeat/vl/mathop_avx.h +++ b/opensfm/src/third_party/vlfeat/vl/mathop_avx.h @@ -36,7 +36,7 @@ the terms of the BSD license (see the COPYING file). #ifndef VL_DISABLE_AVX #include "generic.h" -#include "float.th" +#include "float.h" VL_EXPORT T VL_XCAT(_vl_distance_mahalanobis_sq_avx_, SFX) diff --git a/opensfm/src/third_party/vlfeat/vl/mathop_sse2.c b/opensfm/src/third_party/vlfeat/vl/mathop_sse2.c index 788df01ae..dd98a5bea 100644 --- a/opensfm/src/third_party/vlfeat/vl/mathop_sse2.c +++ b/opensfm/src/third_party/vlfeat/vl/mathop_sse2.c @@ -38,7 +38,7 @@ the terms of the BSD license (see the COPYING file). 
#include #include "mathop.h" #include "generic.h" -#include "float.th" +#include "float.h" VL_INLINE T VL_XCAT(_vl_vhsum_sse2_, SFX)(VTYPE x) diff --git a/opensfm/src/third_party/vlfeat/vl/mathop_sse2.h b/opensfm/src/third_party/vlfeat/vl/mathop_sse2.h index cc10f753e..553cea12f 100644 --- a/opensfm/src/third_party/vlfeat/vl/mathop_sse2.h +++ b/opensfm/src/third_party/vlfeat/vl/mathop_sse2.h @@ -37,7 +37,7 @@ the terms of the BSD license (see the COPYING file). #ifndef VL_DISABLE_SSE2 #include "generic.h" -#include "float.th" +#include "float.h" VL_EXPORT T VL_XCAT(_vl_dot_sse2_, SFX) diff --git a/opensfm/src/third_party/vlfeat/vl/svmdataset.c b/opensfm/src/third_party/vlfeat/vl/svmdataset.c index 7b4196343..a4100d11c 100644 --- a/opensfm/src/third_party/vlfeat/vl/svmdataset.c +++ b/opensfm/src/third_party/vlfeat/vl/svmdataset.c @@ -324,7 +324,7 @@ vl_svmdataset_get_inner_product_function (VlSvmDataset const *self) #ifdef VL_SVMDATASET_INSTANTIATING /* ---------------------------------------------------------------- */ -#include "float.th" +#include "float.h" double VL_XCAT(_vl_svmdataset_inner_product_,SFX) (VlSvmDataset const *self, diff --git a/opensfm/src/third_party/vlfeat/vl/vlad.c b/opensfm/src/third_party/vlfeat/vl/vlad.c index fb5be6c3b..8b6bd6ac1 100644 --- a/opensfm/src/third_party/vlfeat/vl/vlad.c +++ b/opensfm/src/third_party/vlfeat/vl/vlad.c @@ -169,7 +169,7 @@ VL_XCAT(_vl_vlad_encode_, SFX) int flags) { vl_uindex dim ; - vl_index i_cl=0, i_d ; + vl_index i_cl, i_d ; memset(enc, 0, sizeof(TYPE) * dimension * numClusters) ; From 53399382cb232791069067e56d90c8b7974f4879 Mon Sep 17 00:00:00 2001 From: Yann Noutary Date: Mon, 14 Feb 2022 06:48:16 -0800 Subject: [PATCH 21/81] fix: fix cluster pipeline test Summary: We rename the rig camera setup function. 
Reviewed By: fabianschenk Differential Revision: D34147603 fbshipit-source-id: 6b60cce1b611ecfd621ec19644077b17ce0d3321 --- opensfm/synthetic_data/synthetic_examples.py | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/opensfm/synthetic_data/synthetic_examples.py b/opensfm/synthetic_data/synthetic_examples.py index ebef2e684..c17589395 100644 --- a/opensfm/synthetic_data/synthetic_examples.py +++ b/opensfm/synthetic_data/synthetic_examples.py @@ -33,11 +33,13 @@ def synthetic_rig_scene( scene.add_street(points_count, 15, 12).perturb_floor([0, 0, 0.1]).perturb_walls( [0.2, 0.2, 0.01] ) - make_rig_scene(scene_length, scene) + make_4_cameras_rig_scene(scene_length, scene) return scene -def make_rig_scene(scene_length: float, scene: ss.SyntheticStreetScene) -> None: +def make_4_cameras_rig_scene( + scene_length: float, scene: ss.SyntheticStreetScene +) -> None: camera_height = 2 camera_interval = 3 position_perturbation = [0.2, 0.2, 0.01] From ea7e9253bd916a203715baf5c7d954e80b18e473 Mon Sep 17 00:00:00 2001 From: Yann Noutary Date: Mon, 14 Feb 2022 06:48:16 -0800 Subject: [PATCH 22/81] fix: catch failed rigid computation Summary: This Diff catches failure in numpy SVD that arise in some configuration with few points. 
Reviewed By: paulinus Differential Revision: D34147638 fbshipit-source-id: ef88f2bdd02735ab69bc37c01c247a142a11f724 --- opensfm/align.py | 9 ++++++--- 1 file changed, 6 insertions(+), 3 deletions(-) diff --git a/opensfm/align.py b/opensfm/align.py index c27ee6d81..fe8981306 100644 --- a/opensfm/align.py +++ b/opensfm/align.py @@ -281,9 +281,12 @@ def compute_orientation_prior_similarity( b = max_scale * b / current_scale s = max_scale / current_scale else: - T = tf.affine_matrix_from_points( - X.T[:2], Xp.T[:2], shear=False, scale=use_scale - ) + try: + T = tf.affine_matrix_from_points( + X.T[:2], Xp.T[:2], shear=False, scale=use_scale + ) + except ValueError: + return None s = np.linalg.det(T[:2, :2]) ** 0.5 A = np.eye(3) A[:2, :2] = T[:2, :2] / s From 519be6ab03b4fbe0e0705d96a1f8d2f0dee8d5ea Mon Sep 17 00:00:00 2001 From: Yann Noutary Date: Mon, 14 Feb 2022 10:50:17 -0800 Subject: [PATCH 23/81] feat: rig-instance-wise relative constraints Summary: This Diff changes the similarity averaging so that relative constraints are on rig instances and not on shots. 
The behaviour isn't changed, but in the future we'd like to draw relatives from a rig-instane-wise graph instead of a shot-wise one Reviewed By: fabianschenk Differential Revision: D33914788 fbshipit-source-id: 622f15a5c9baabd4f294ed022c30f9e05815e1f5 --- opensfm/src/bundle/bundle_adjuster.h | 35 ++++---- .../src/bundle/error/relative_motion_errors.h | 23 ++--- opensfm/src/bundle/pybundle.pyi | 14 +-- opensfm/src/bundle/python/pybind.cc | 15 ++-- opensfm/src/bundle/src/bundle_adjuster.cc | 80 ++++------------- opensfm/test/test_bundle.py | 87 +++++++++++-------- 6 files changed, 111 insertions(+), 143 deletions(-) diff --git a/opensfm/src/bundle/bundle_adjuster.h b/opensfm/src/bundle/bundle_adjuster.h index 51b09cc9c..c434b4577 100644 --- a/opensfm/src/bundle/bundle_adjuster.h +++ b/opensfm/src/bundle/bundle_adjuster.h @@ -38,7 +38,7 @@ struct Reconstruction { if (shared) { return &(scales.begin()->second); } - return &(scales[shot]); + return &(scales.at(shot)); } double GetScale(const std::string &shot) const { @@ -64,14 +64,15 @@ struct PointProjectionObservation { }; struct RelativeMotion { - RelativeMotion(const std::string &reconstruction_i, const std::string &shot_i, - const std::string &reconstruction_j, const std::string &shot_j, - const Vec3d &rotation, const Vec3d &translation, - double robust_multiplier) { + RelativeMotion(const std::string &reconstruction_i, + const std::string &rig_instance_i, + const std::string &reconstruction_j, + const std::string &rig_instance_j, const Vec3d &rotation, + const Vec3d &translation, double robust_multiplier) { reconstruction_id_i = reconstruction_i; - shot_id_i = shot_i; + rig_instance_id_i = rig_instance_i; reconstruction_id_j = reconstruction_j; - shot_id_j = shot_j; + rig_instance_id_j = rig_instance_j; parameters.resize(Pose::Parameter::NUM_PARAMS); parameters.segment(Pose::Parameter::RX, 3) = rotation; parameters.segment(Pose::Parameter::TX, 3) = translation; @@ -96,9 +97,10 @@ struct RelativeMotion { void 
SetScaleMatrix(const MatXd &s) { scale_matrix = s; } std::string reconstruction_id_i; - std::string shot_id_i; + std::string rig_instance_id_i; std::string reconstruction_id_j; - std::string shot_id_j; + std::string rig_instance_id_j; + VecXd parameters; MatXd scale_matrix; double robust_multiplier; @@ -106,13 +108,14 @@ struct RelativeMotion { struct RelativeSimilarity : public RelativeMotion { RelativeSimilarity(const std::string &reconstruction_i, - const std::string &shot_i, + const std::string &rig_instance_i, const std::string &reconstruction_j, - const std::string &shot_j, const Vec3d &rotation, + const std::string &rig_instance_j, const Vec3d &rotation, const Vec3d &translation, double s, double robust_multiplier) - : RelativeMotion(reconstruction_i, shot_i, reconstruction_j, shot_j, - rotation, translation, robust_multiplier), + : RelativeMotion(reconstruction_i, rig_instance_i, reconstruction_j, + rig_instance_j, rotation, translation, + robust_multiplier), scale(s) { scale_matrix.resize(Pose::Parameter::NUM_PARAMS + 1, Pose::Parameter::NUM_PARAMS + 1); @@ -219,8 +222,8 @@ class BundleAdjuster { // Cluster-SfM related void AddReconstruction(const std::string &id, bool constant); - void AddReconstructionShot(const std::string &reconstruction_id, double scale, - const std::string &shot_id); + void AddReconstructionInstance(const std::string &reconstruction_id, + double scale, const std::string &instance_id); void SetScaleSharing(const std::string &id, bool share); // Real bundle adjustment : point projections @@ -292,7 +295,6 @@ class BundleAdjuster { geometry::Camera GetCamera(const std::string &id) const; geometry::Similarity GetBias(const std::string &id) const; Reconstruction GetReconstruction(const std::string &reconstruction_id) const; - Reconstruction GetShotReconstruction(const std::string &shot_id) const; Point GetPoint(const std::string &id) const; RigCamera GetRigCamera(const std::string &rig_camera_id) const; RigInstance GetRigInstance(const 
std::string &instance_id) const; @@ -317,7 +319,6 @@ class BundleAdjuster { std::map rig_cameras_; std::map rig_instances_; - std::unordered_map shot_to_reconstruction_; bool use_analytic_{false}; // minimization constraints diff --git a/opensfm/src/bundle/error/relative_motion_errors.h b/opensfm/src/bundle/error/relative_motion_errors.h index bfae6c601..968f71039 100644 --- a/opensfm/src/bundle/error/relative_motion_errors.h +++ b/opensfm/src/bundle/error/relative_motion_errors.h @@ -11,22 +11,19 @@ namespace bundle { struct RelativeMotionError { RelativeMotionError(const Eigen::VectorXd& Rtij, const Eigen::MatrixXd& scale_matrix) - : Rtij_(Rtij), - scale_matrix_(scale_matrix), - shot_i_rig_camera_index_(FUNCTOR_NOT_SET), - shot_j_rig_camera_index_(FUNCTOR_NOT_SET) {} + : Rtij_(Rtij), scale_matrix_(scale_matrix) {} template Eigen::Matrix Error(T const* const* p) const { // Get rotation and translation values. - Vec3 Ri = ShotRotationFunctor(shot_i_rig_instance_index_, - shot_i_rig_camera_index_)(p); - Vec3 ti = ShotPositionFunctor(shot_i_rig_instance_index_, - shot_i_rig_camera_index_)(p); - Vec3 Rj = ShotRotationFunctor(shot_j_rig_instance_index_, - shot_j_rig_camera_index_)(p); - Vec3 tj = ShotPositionFunctor(shot_j_rig_instance_index_, - shot_j_rig_camera_index_)(p); + Vec3 Ri = + ShotRotationFunctor(shot_i_rig_instance_index_, FUNCTOR_NOT_SET)(p); + Vec3 ti = + ShotPositionFunctor(shot_i_rig_instance_index_, FUNCTOR_NOT_SET)(p); + Vec3 Rj = + ShotRotationFunctor(shot_j_rig_instance_index_, FUNCTOR_NOT_SET)(p); + Vec3 tj = + ShotPositionFunctor(shot_j_rig_instance_index_, FUNCTOR_NOT_SET)(p); Eigen::Matrix residual; // Compute rotation residual: log( Rij Ri Rj^t ) -> log( Rij Ri^t Rj) @@ -52,8 +49,6 @@ struct RelativeMotionError { Eigen::VectorXd Rtij_; Eigen::MatrixXd scale_matrix_; - int shot_i_rig_camera_index_{FUNCTOR_NOT_SET}; - int shot_j_rig_camera_index_{FUNCTOR_NOT_SET}; static constexpr int shot_i_rig_instance_index_ = 0; static constexpr int 
shot_j_rig_instance_index_ = 1; static constexpr int scale_index_ = 2; diff --git a/opensfm/src/bundle/pybundle.pyi b/opensfm/src/bundle/pybundle.pyi index bf6dd8fc5..0a96baf1a 100644 --- a/opensfm/src/bundle/pybundle.pyi +++ b/opensfm/src/bundle/pybundle.pyi @@ -34,7 +34,7 @@ class BundleAdjuster: def add_point_prior(self, arg0: str, arg1: numpy.ndarray, arg2: numpy.ndarray, arg3: bool) -> None: ... def add_point_projection_observation(self, arg0: str, arg1: str, arg2: numpy.ndarray, arg3: float) -> None: ... def add_reconstruction(self, arg0: str, arg1: bool) -> None: ... - def add_reconstruction_shot(self, arg0: str, arg1: float, arg2: str) -> None: ... + def add_reconstruction_instance(self, arg0: str, arg1: float, arg2: str) -> None: ... def add_relative_motion(self, arg0: RelativeMotion) -> None: ... def add_relative_rotation(self, arg0: RelativeRotation) -> None: ... def add_relative_similarity(self, arg0: RelativeSimilarity) -> None: ... @@ -207,13 +207,13 @@ class RelativeMotion: @reconstruction_j.setter def reconstruction_j(self, arg0: str) -> None:... @property - def shot_i(self) -> str:... - @shot_i.setter - def shot_i(self, arg0: str) -> None:... + def rig_instance_i(self) -> str:... + @rig_instance_i.setter + def rig_instance_i(self, arg0: str) -> None:... @property - def shot_j(self) -> str:... - @shot_j.setter - def shot_j(self, arg0: str) -> None:... + def rig_instance_j(self) -> str:... + @rig_instance_j.setter + def rig_instance_j(self, arg0: str) -> None:... @property def t(self) -> numpy.ndarray:... 
@t.setter diff --git a/opensfm/src/bundle/python/pybind.cc b/opensfm/src/bundle/python/pybind.cc index 5dde90aab..c486c6217 100644 --- a/opensfm/src/bundle/python/pybind.cc +++ b/opensfm/src/bundle/python/pybind.cc @@ -6,18 +6,20 @@ #include PYBIND11_MODULE(pybundle, m) { - py::module::import("opensfm.pygeometry"); + py::module::import("opensfm.pygeometry"); - py::class_(m, "RelativeMotion") + py::class_(m, "RelativeMotion") .def(py::init()) .def_readwrite("reconstruction_i", &bundle::RelativeMotion::reconstruction_id_i) - .def_readwrite("shot_i", &bundle::RelativeMotion::shot_id_i) + .def_readwrite("rig_instance_i", + &bundle::RelativeMotion::rig_instance_id_i) .def_readwrite("reconstruction_j", &bundle::RelativeMotion::reconstruction_id_j) - .def_readwrite("shot_j", &bundle::RelativeMotion::shot_id_j) + .def_readwrite("rig_instance_j", + &bundle::RelativeMotion::rig_instance_id_j) .def_property("r", &bundle::RelativeMotion::GetRotation, &bundle::RelativeMotion::SetRotation) .def_property("t", &bundle::RelativeMotion::GetTranslation, @@ -84,8 +86,8 @@ PYBIND11_MODULE(pybundle, m) { .def("add_point_prior", &bundle::BundleAdjuster::AddPointPrior) .def("get_point", &bundle::BundleAdjuster::GetPoint) .def("add_reconstruction", &bundle::BundleAdjuster::AddReconstruction) - .def("add_reconstruction_shot", - &bundle::BundleAdjuster::AddReconstructionShot) + .def("add_reconstruction_instance", + &bundle::BundleAdjuster::AddReconstructionInstance) .def("add_point_projection_observation", &bundle::BundleAdjuster::AddPointProjectionObservation) .def("add_relative_motion", &bundle::BundleAdjuster::AddRelativeMotion) @@ -123,7 +125,6 @@ PYBIND11_MODULE(pybundle, m) { .def("brief_report", &bundle::BundleAdjuster::BriefReport) .def("full_report", &bundle::BundleAdjuster::FullReport); - /////////////////////////////////// // Reconstruction Aligment // diff --git a/opensfm/src/bundle/src/bundle_adjuster.cc b/opensfm/src/bundle/src/bundle_adjuster.cc index 883f47c39..bce5939f1 
100644 --- a/opensfm/src/bundle/src/bundle_adjuster.cc +++ b/opensfm/src/bundle/src/bundle_adjuster.cc @@ -196,15 +196,14 @@ void BundleAdjuster::AddReconstruction(const std::string &id, bool constant) { reconstructions_[id] = r; } -void BundleAdjuster::AddReconstructionShot(const std::string &reconstruction_id, - double scale, - const std::string &shot_id) { +void BundleAdjuster::AddReconstructionInstance( + const std::string &reconstruction_id, double scale, + const std::string &instance_id) { const auto find = reconstructions_.find(reconstruction_id); if (find == reconstructions_.end()) { return; } - find->second.scales[shot_id] = scale; - shot_to_reconstruction_[shot_id] = reconstruction_id; + find->second.scales[instance_id] = scale; } void BundleAdjuster::AddPoint(const std::string &id, const Vec3d &position, @@ -780,32 +779,15 @@ void BundleAdjuster::Run() { cost_function->AddParameterBlock(1); cost_function->SetNumResiduals(6); - auto &shot_i = shots_.at(rp.shot_id_i); - auto &shot_j = shots_.at(rp.shot_id_j); + auto &rig_instance_i = rig_instances_.at(rp.rig_instance_id_i); + auto &rig_instance_j = rig_instances_.at(rp.rig_instance_id_j); auto parameter_blocks = std::vector( - {shot_i.GetRigInstance()->GetValueData().data(), - shot_j.GetRigInstance()->GetValueData().data(), - reconstructions_[rp.reconstruction_id_i].GetScalePtr(rp.shot_id_i)}); + {rig_instance_i.GetValueData().data(), + rig_instance_j.GetValueData().data(), + reconstructions_[rp.reconstruction_id_i].GetScalePtr( + rp.rig_instance_id_i)}); - auto shot_i_rig_camera = shot_i.GetRigCamera()->GetValueData().data(); - if (IsRigCameraUseful(*shot_i.GetRigCamera())) { - cost_function->AddParameterBlock(6); - relative_motion->shot_i_rig_camera_index_ = parameter_blocks.size(); - parameter_blocks.push_back(shot_i_rig_camera); - } - - auto shot_j_rig_camera = shot_j.GetRigCamera()->GetValueData().data(); - if (IsRigCameraUseful(*shot_j.GetRigCamera())) { - if (shot_j_rig_camera != shot_i_rig_camera) 
{ - cost_function->AddParameterBlock(6); - relative_motion->shot_j_rig_camera_index_ = parameter_blocks.size(); - parameter_blocks.push_back(shot_j_rig_camera); - } else { - relative_motion->shot_j_rig_camera_index_ = - relative_motion->shot_i_rig_camera_index_; - } - } problem.AddResidualBlock(cost_function, relative_motion_loss, parameter_blocks); } @@ -828,33 +810,17 @@ void BundleAdjuster::Run() { cost_function->AddParameterBlock(1); cost_function->SetNumResiduals(7); - auto &shot_i = shots_.at(rp.shot_id_i); - auto &shot_j = shots_.at(rp.shot_id_j); + auto &rig_instance_i = rig_instances_.at(rp.rig_instance_id_i); + auto &rig_instance_j = rig_instances_.at(rp.rig_instance_id_j); auto parameter_blocks = std::vector( - {shot_i.GetRigInstance()->GetValueData().data(), - shot_j.GetRigInstance()->GetValueData().data(), - reconstructions_[rp.reconstruction_id_i].GetScalePtr(rp.shot_id_i), - reconstructions_[rp.reconstruction_id_j].GetScalePtr(rp.shot_id_j)}); + {rig_instance_i.GetValueData().data(), + rig_instance_j.GetValueData().data(), + reconstructions_[rp.reconstruction_id_i].GetScalePtr( + rp.rig_instance_id_i), + reconstructions_[rp.reconstruction_id_j].GetScalePtr( + rp.rig_instance_id_j)}); - auto shot_i_rig_camera = shot_i.GetRigCamera()->GetValueData().data(); - if (IsRigCameraUseful(*shot_i.GetRigCamera())) { - cost_function->AddParameterBlock(6); - relative_similarity->shot_i_rig_camera_index_ = parameter_blocks.size(); - parameter_blocks.push_back(shot_i_rig_camera); - } - - auto shot_j_rig_camera = shot_j.GetRigCamera()->GetValueData().data(); - if (IsRigCameraUseful(*shot_j.GetRigCamera())) { - if (shot_j_rig_camera != shot_i_rig_camera) { - cost_function->AddParameterBlock(6); - relative_similarity->shot_j_rig_camera_index_ = parameter_blocks.size(); - parameter_blocks.push_back(shot_j_rig_camera); - } else { - relative_similarity->shot_j_rig_camera_index_ = - relative_similarity->shot_i_rig_camera_index_; - } - } 
problem.AddResidualBlock(cost_function, relative_similarity_loss, parameter_blocks); } @@ -1224,16 +1190,6 @@ Reconstruction BundleAdjuster::GetReconstruction( return it->second; } -Reconstruction BundleAdjuster::GetShotReconstruction( - const std::string &shot_id) const { - const auto it = shot_to_reconstruction_.find(shot_id); - if (it == shot_to_reconstruction_.end()) { - throw std::runtime_error("Shot " + shot_id + - " doesn't belong to a reconstruction"); - } - return reconstructions_.at(it->second); -} - RigCamera BundleAdjuster::GetRigCamera(const std::string &rig_camera_id) const { if (rig_cameras_.find(rig_camera_id) == rig_cameras_.end()) { throw std::runtime_error("Rig camera " + rig_camera_id + " doesn't exists"); diff --git a/opensfm/test/test_bundle.py b/opensfm/test/test_bundle.py index 1cd60e55c..076c8e4cd 100644 --- a/opensfm/test/test_bundle.py +++ b/opensfm/test/test_bundle.py @@ -154,8 +154,8 @@ def test_pair(bundle_adjuster) -> None: False, ) sa.add_reconstruction("12", False) - sa.add_reconstruction_shot("12", 4, "1") - sa.add_reconstruction_shot("12", 4, "2") + sa.add_reconstruction_instance("12", 4, "1") + sa.add_reconstruction_instance("12", 4, "2") sa.set_scale_sharing("12", True) sa.add_relative_motion( # pyre-fixme[6]: For 5th param expected `ndarray` but got `List[int]`. 
@@ -201,8 +201,8 @@ def test_pair_with_points_priors(bundle_adjuster) -> None: sa.add_point("p2", [0, 0, 0], False) sa.add_reconstruction("12", False) - sa.add_reconstruction_shot("12", 4, "1") - sa.add_reconstruction_shot("12", 4, "2") + sa.add_reconstruction_instance("12", 4, "1") + sa.add_reconstruction_instance("12", 4, "2") # identity rotation with pan/tilt/roll sa.add_absolute_roll("1", np.radians(90), 1) @@ -261,8 +261,8 @@ def test_pair_non_rigid(bundle_adjuster) -> None: False, ) sa.add_reconstruction("12", False) - sa.add_reconstruction_shot("12", 4, "1") - sa.add_reconstruction_shot("12", 4, "2") + sa.add_reconstruction_instance("12", 4, "1") + sa.add_reconstruction_instance("12", 4, "2") sa.set_scale_sharing("12", False) sa.add_relative_similarity( # pyre-fixme[6]: For 5th param expected `ndarray` but got `List[int]`. @@ -323,10 +323,10 @@ def test_four_cams_single_reconstruction(bundle_adjuster) -> None: False, ) sa.add_reconstruction("1234", False) - sa.add_reconstruction_shot("1234", 1, "1") - sa.add_reconstruction_shot("1234", 1, "2") - sa.add_reconstruction_shot("1234", 1, "3") - sa.add_reconstruction_shot("1234", 1, "4") + sa.add_reconstruction_instance("1234", 1, "1") + sa.add_reconstruction_instance("1234", 1, "2") + sa.add_reconstruction_instance("1234", 1, "3") + sa.add_reconstruction_instance("1234", 1, "4") sa.set_scale_sharing("1234", True) sa.add_relative_motion( # pyre-fixme[6]: For 5th param expected `ndarray` but got `List[int]`. 
@@ -399,31 +399,46 @@ def test_four_cams_single_reconstruction_non_rigid(bundle_adjuster) -> None: False, ) sa.add_reconstruction("1234", False) - sa.add_reconstruction_shot("1234", 1, "1") - sa.add_reconstruction_shot("1234", 1, "2") - sa.add_reconstruction_shot("1234", 1, "3") - sa.add_reconstruction_shot("1234", 1, "4") + sa.add_reconstruction_instance("1234", 1, "1") + sa.add_reconstruction_instance("1234", 1, "2") + sa.add_reconstruction_instance("1234", 1, "3") + sa.add_reconstruction_instance("1234", 1, "4") sa.set_scale_sharing("1234", False) sa.add_relative_similarity( pybundle.RelativeSimilarity( - # pyre-fixme[6]: For 5th param expected `ndarray` but got `List[int]`. - # pyre-fixme[6]: For 6th param expected `ndarray` but got `List[int]`. - "1234", "1", "1234", "2", [0, 0, 0], [-1, 0, 0], 1, 1 + "1234", + "1", + "1234", + "2", + np.array([0, 0, 0]), + np.array([-1, 0, 0]), + 1, + 1, ) ) sa.add_relative_similarity( pybundle.RelativeSimilarity( - # pyre-fixme[6]: For 5th param expected `ndarray` but got `List[int]`. - # pyre-fixme[6]: For 6th param expected `ndarray` but got `List[int]`. - "1234", "2", "1234", "3", [0, 0, 0], [-1, -1, 0], 1, 1 + "1234", + "2", + "1234", + "3", + np.array([0, 0, 0]), + np.array([-1, -1, 0]), + 1, + 1, ) ) sa.add_relative_similarity( pybundle.RelativeSimilarity( - # pyre-fixme[6]: For 5th param expected `ndarray` but got `List[int]`. - # pyre-fixme[6]: For 6th param expected `ndarray` but got `List[int]`. 
- "1234", "3", "1234", "4", [0, 0, 0], [0, -1, 0], 1, 1 + "1234", + "3", + "1234", + "4", + np.array([0, 0, 0]), + np.array([0, -1, 0]), + 1, + 1, ) ) sa.add_rig_instance_position_prior("1", [0, 0, 0], [1, 1, 1], "") @@ -489,10 +504,10 @@ def test_four_cams_one_fixed(bundle_adjuster) -> None: False, ) sa.add_reconstruction("1234", False) - sa.add_reconstruction_shot("1234", 1, "1") - sa.add_reconstruction_shot("1234", 1, "2") - sa.add_reconstruction_shot("1234", 1, "3") - sa.add_reconstruction_shot("1234", 1, "4") + sa.add_reconstruction_instance("1234", 1, "1") + sa.add_reconstruction_instance("1234", 1, "2") + sa.add_reconstruction_instance("1234", 1, "3") + sa.add_reconstruction_instance("1234", 1, "4") sa.set_scale_sharing("1234", True) sa.add_relative_motion( # pyre-fixme[6]: For 5th param expected `ndarray` but got `List[int]`. @@ -556,9 +571,9 @@ def test_linear_motion_prior_position(bundle_adjuster) -> None: False, ) sa.add_reconstruction("123", False) - sa.add_reconstruction_shot("123", 1, "1") - sa.add_reconstruction_shot("123", 1, "2") - sa.add_reconstruction_shot("123", 1, "3") + sa.add_reconstruction_instance("123", 1, "1") + sa.add_reconstruction_instance("123", 1, "2") + sa.add_reconstruction_instance("123", 1, "3") sa.set_scale_sharing("123", True) sa.add_rig_instance_position_prior("1", [0, 0, 0], [1, 1, 1], "") sa.add_rig_instance_position_prior("3", [2, 0, 0], [1, 1, 1], "") @@ -605,9 +620,9 @@ def test_linear_motion_prior_rotation(bundle_adjuster) -> None: True, ) sa.add_reconstruction("123", False) - sa.add_reconstruction_shot("123", 1, "1") - sa.add_reconstruction_shot("123", 1, "2") - sa.add_reconstruction_shot("123", 1, "3") + sa.add_reconstruction_instance("123", 1, "1") + sa.add_reconstruction_instance("123", 1, "2") + sa.add_reconstruction_instance("123", 1, "3") sa.set_scale_sharing("123", True) sa.add_linear_motion("1", "2", "3", 0.3, 0.1, 0.1) @@ -715,9 +730,9 @@ def test_heatmaps_position(bundle_adjuster) -> None: False, ) 
sa.add_reconstruction("123", True) - sa.add_reconstruction_shot("123", 1, "1") - sa.add_reconstruction_shot("123", 1, "2") - sa.add_reconstruction_shot("123", 1, "3") + sa.add_reconstruction_instance("123", 1, "1") + sa.add_reconstruction_instance("123", 1, "2") + sa.add_reconstruction_instance("123", 1, "3") sa.set_scale_sharing("123", True) def bell_heatmap(size, r, mu_x, mu_y): From 2cdfd62313c6ff55489142d299de4b0c8052a420 Mon Sep 17 00:00:00 2001 From: Pyre Bot Jr <> Date: Mon, 14 Feb 2022 14:24:14 -0800 Subject: [PATCH 24/81] Add annotations to `mapillary/opensfm` Reviewed By: shannonzhu Differential Revision: D34221752 fbshipit-source-id: 1dce0c2bb7b7e25760691852479c9b0e1a37a77b --- opensfm/actions/export_colmap.py | 2 +- opensfm/geo.py | 6 +++--- 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/opensfm/actions/export_colmap.py b/opensfm/actions/export_colmap.py index e3ff926ab..b7d0bfea2 100644 --- a/opensfm/actions/export_colmap.py +++ b/opensfm/actions/export_colmap.py @@ -47,7 +47,7 @@ I_3 = np.eye(3) -def run_dataset(data: DataSet, binary) -> None: +def run_dataset(data: DataSet, binary: bool) -> None: """Export reconstruction to COLMAP format.""" export_folder = os.path.join(data.data_path, "colmap_export") diff --git a/opensfm/geo.py b/opensfm/geo.py index a09a584bc..62847e4e1 100644 --- a/opensfm/geo.py +++ b/opensfm/geo.py @@ -50,7 +50,7 @@ def lla_from_ecef(x, y, z): return np.degrees(lat), np.degrees(lon), alt -def ecef_from_topocentric_transform(lat, lon, alt): +def ecef_from_topocentric_transform(lat, lon, alt: float): """ Transformation from a topocentric frame at reference position to ECEF. @@ -77,7 +77,7 @@ def ecef_from_topocentric_transform(lat, lon, alt): ) -def ecef_from_topocentric_transform_finite_diff(lat, lon, alt): +def ecef_from_topocentric_transform_finite_diff(lat, lon, alt: float): """ Transformation from a topocentric frame at reference position to ECEF. 
@@ -124,7 +124,7 @@ def ecef_from_topocentric_transform_finite_diff(lat, lon, alt): ) -def topocentric_from_lla(lat, lon, alt, reflat, reflon, refalt): +def topocentric_from_lla(lat, lon, alt: float, reflat, reflon, refalt): """ Transform from lat, lon, alt to topocentric XYZ. From c6c27b232e8aca3068e0e7cb85c3de79a91da737 Mon Sep 17 00:00:00 2001 From: Fabian Schenk Date: Tue, 15 Feb 2022 02:20:10 -0800 Subject: [PATCH 25/81] Fix potentially failing test Summary: `test_robust` occasionally finds more inliers (see output below). This happens because `random` can also be very close to zero. This diff fixes this problem by adding a `0.01` offset Output: ``` def test_outliers_line_ransac() -> None: a, b, x, samples = line_data() scale = 2.0 y = a * x + b + np.random.rand(x.shape[0]) * scale ratio_outliers = 0.4 outliers_max = 5.0 add_outliers(ratio_outliers, x, scale, outliers_max) data = np.array([x, y]).transpose() params = pyrobust.RobustEstimatorParams() result = pyrobust.ransac_line(data, scale, params, pyrobust.RansacType.RANSAC) inliers_count = (1 - ratio_outliers) * samples > assert result.score == inliers_count E assert 61.0 == 60.0 E + where 61.0 = .score ``` Reviewed By: YanNoun Differential Revision: D34211202 fbshipit-source-id: c903f6bc33e1f298234604aa444b05a6b0be0e51 --- opensfm/test/test_robust.py | 15 ++++++++------- 1 file changed, 8 insertions(+), 7 deletions(-) diff --git a/opensfm/test/test_robust.py b/opensfm/test/test_robust.py index fcece6881..39942b753 100644 --- a/opensfm/test/test_robust.py +++ b/opensfm/test/test_robust.py @@ -1,19 +1,20 @@ import copy +from typing import Tuple import numpy as np from opensfm import pyrobust, pygeometry -def line_data(): +def line_data() -> Tuple[int, int, np.ndarray, int]: a, b = 2, 3 samples = 100 x = np.linspace(0, 100, samples) return a, b, x, samples -def similarity_data(): - rotation = (0.1, 0.2, 0.3) - translation = (4, 5, 6) +def similarity_data() -> Tuple[np.ndarray, np.ndarray, int, np.ndarray, 
int]: + rotation = np.array([0.1, 0.2, 0.3]) + translation = np.array([4, 5, 6]) scale = 2 samples = 100 @@ -21,7 +22,7 @@ def similarity_data(): return rotation, translation, scale, x, samples -def add_outliers(ratio_outliers, x, min, max) -> None: +def add_outliers(ratio_outliers: float, x: np.ndarray, min: float, max: float) -> None: for index in np.random.permutation(len(x))[: int(ratio_outliers * len(x))]: shape = x[index].shape noise = np.random.uniform(min, max, size=shape) @@ -63,8 +64,8 @@ def test_outliers_line_ransac() -> None: result = pyrobust.ransac_line(data, scale, params, pyrobust.RansacType.RANSAC) inliers_count = (1 - ratio_outliers) * samples - assert result.score == inliers_count - assert len(result.inliers_indices) == inliers_count + assert np.allclose(result.score, inliers_count, atol=1) + assert np.allclose(len(result.inliers_indices), inliers_count, atol=1) def test_normal_line_msac() -> None: From ac92ac62f7dfe9189f9dc5386b787e551cdc7f0b Mon Sep 17 00:00:00 2001 From: Fabian Schenk Date: Tue, 15 Feb 2022 02:20:10 -0800 Subject: [PATCH 26/81] Synthetic dataset does not require images Summary: This diff disables the check for `image_list` for the synthetic dataset. 
Reviewed By: YanNoun Differential Revision: D34209941 fbshipit-source-id: 3546d5569b72e0dbea0ae8d779333e37aafba78f --- opensfm/synthetic_data/synthetic_dataset.py | 26 ++++++++++++--------- 1 file changed, 15 insertions(+), 11 deletions(-) diff --git a/opensfm/synthetic_data/synthetic_dataset.py b/opensfm/synthetic_data/synthetic_dataset.py index e2d5ff4d8..a7f237c9a 100644 --- a/opensfm/synthetic_data/synthetic_dataset.py +++ b/opensfm/synthetic_data/synthetic_dataset.py @@ -8,14 +8,13 @@ from opensfm import tracking, features as oft, types, pymap, pygeometry, io, geo from opensfm.dataset import DataSet - logger = logging.getLogger(__name__) class SyntheticFeatures(collections.abc.MutableMapping): database: Union[Dict[str, oft.FeaturesData], shelve.Shelf] - def __init__(self, on_disk_filename: Optional[str]): + def __init__(self, on_disk_filename: Optional[str]) -> None: if on_disk_filename: self.database = shelve.open(on_disk_filename, flag="n") else: @@ -24,11 +23,12 @@ def __init__(self, on_disk_filename: Optional[str]): for m in ["keys", "items", "values", "get"]: setattr(self, m, getattr(self.database, m)) - def sync(self): - if type(self.database) is dict: + def sync(self) -> None: + database = self.database + if type(database) is dict: return else: - self.database.sync() + database.sync() def __getitem__(self, key): return self.database.__getitem__(key) @@ -61,7 +61,7 @@ def __init__( tracks_manager: Optional[pymap.TracksManager] = None, gcps: Optional[Dict[str, pymap.GroundControlPoint]] = None, output_path: Optional[str] = None, - ): + ) -> None: data_path = "" if not output_path else output_path if data_path: io.mkdir_p(data_path) @@ -125,7 +125,7 @@ def load_features(self, image: str) -> Optional[oft.FeaturesData]: return None return feat[image] - def save_features(self, image: str, features_data: oft.FeaturesData): + def save_features(self, image: str, features_data: oft.FeaturesData) -> None: pass def matches_exists(self, image: str) -> bool: @@ 
-141,22 +141,26 @@ def load_matches(self, image: str) -> Dict[str, np.ndarray]: else: return {} - def _check_and_create_matches(self): + def load_image_list(self) -> None: + pass + + def _check_and_create_matches(self) -> None: if self.matches is None: self.matches = self._construct_matches() - def _construct_matches(self): + def _construct_matches(self) -> Dict[str, Any]: matches = {} + tracks_manager = self.load_tracks_manager() for im1 in self.images(): for im2 in self.images(): if im1 == im2: continue image_matches = matches.setdefault(im1, {}) - tracks = tracking.common_tracks(self.tracks_manager, im1, im2)[0] + tracks = tracking.common_tracks(tracks_manager, im1, im2)[0] if len(tracks) > 10: pair_matches = [] for t in tracks: - observations = self.tracks_manager.get_track_observations(t) + observations = tracks_manager.get_track_observations(t) pair_matches.append( np.array([observations[im1].id, observations[im2].id]) ) From bf48ad277f29670c3442b533847eb32596b0a422 Mon Sep 17 00:00:00 2001 From: jonasdlindner Date: Tue, 15 Feb 2022 02:20:10 -0800 Subject: [PATCH 27/81] Raise Error when Image Path is empty (#865) Summary: Fixes https://github.com/mapillary/OpenSfM/issues/864 Pull Request resolved: https://github.com/mapillary/OpenSfM/pull/865 Reviewed By: YanNoun Differential Revision: D34172531 Pulled By: fabianschenk fbshipit-source-id: 372ce55520509faba5a388a018a283e94e3be392 --- opensfm/dataset.py | 10 +++++++++- 1 file changed, 9 insertions(+), 1 deletion(-) diff --git a/opensfm/dataset.py b/opensfm/dataset.py index 87d15f84a..ecaf75078 100644 --- a/opensfm/dataset.py +++ b/opensfm/dataset.py @@ -69,12 +69,20 @@ def _image_list_file(self) -> str: def load_image_list(self) -> None: """Load image list from image_list.txt or list images/ folder.""" image_list_file = self._image_list_file() + image_list_path = os.path.join(self.data_path, "images") + if self.io_handler.isfile(image_list_file): with self.io_handler.open_rt(image_list_file) as fin: lines = 
fin.read().splitlines() self._set_image_list(lines) else: - self._set_image_path(os.path.join(self.data_path, "images")) + self._set_image_path(image_list_path) + + if self.data_path and not self.image_list: + raise IOError( + "No Images found in {}" + .format(image_list_path) + ) def images(self) -> List[str]: """List of file names of all images in the dataset.""" From 4e63ca6b117489e1e7259b25ad3f092e08eb1d6a Mon Sep 17 00:00:00 2001 From: Piero Toffanin Date: Wed, 16 Feb 2022 05:50:12 -0800 Subject: [PATCH 28/81] Adds support for Phase One iXM-RS100F (#867) Summary: Adds support this camera: https://www.ggs-solutions.eu/wp-content/uploads/technical_details_iXM-RS150F_iXM-RS100F-.pdf Pull Request resolved: https://github.com/mapillary/OpenSfM/pull/867 Reviewed By: tobias-o Differential Revision: D34240166 Pulled By: fabianschenk fbshipit-source-id: 6d0a35e1fe97c2fa0769cb6912185db80a63f8ff --- opensfm/data/sensor_data.json | 1 + 1 file changed, 1 insertion(+) diff --git a/opensfm/data/sensor_data.json b/opensfm/data/sensor_data.json index 094e728bd..b68c35061 100644 --- a/opensfm/data/sensor_data.json +++ b/opensfm/data/sensor_data.json @@ -2575,6 +2575,7 @@ "Pentax X70": 6.08, "Pentax X90": 6.08, "Pentax XG-1": 6.08, + "Phase One iXM-RS100F": 53.4, "Praktica DC 20": 6.4, "Praktica DC 21": 5.33, "Praktica DC 22": 5.33, From 9ef8e67204bfa27773b4fc67b61abeab25bdd004 Mon Sep 17 00:00:00 2001 From: Someone Serge Date: Wed, 16 Feb 2022 05:51:32 -0800 Subject: [PATCH 29/81] fix[report.py]: encode fpdf str result into bytes (#854) Summary: Fixes https://github.com/mapillary/OpenSfM/issues/853 I call `encode` only conditionally, because I have a suspicion that different versions of `fpdf` might actually have different interfaces... 
Pull Request resolved: https://github.com/mapillary/OpenSfM/pull/854 Reviewed By: tobias-o Differential Revision: D34240168 Pulled By: fabianschenk fbshipit-source-id: ecafe1cf26d5b84163612d46f96544a84abfc814 --- opensfm/report.py | 3 +++ 1 file changed, 3 insertions(+) diff --git a/opensfm/report.py b/opensfm/report.py index b24a89b92..b57473cd6 100644 --- a/opensfm/report.py +++ b/opensfm/report.py @@ -40,6 +40,9 @@ def __init__(self, data: DataSet) -> None: def save_report(self, filename: str) -> None: bytestring = self.pdf.output(dest="S") + if isinstance(bytestring, str): + bytestring = bytestring.encode("utf8") + with self.io_handler.open( os.path.join(self.output_path, filename), "wb" ) as fwb: From 399740cb2c9d9dc853e1901d1dded6666367e209 Mon Sep 17 00:00:00 2001 From: Maciej Skrzypkowski Date: Thu, 17 Feb 2022 06:16:52 -0800 Subject: [PATCH 30/81] Fixed misspelling in quality_report.rst (#858) Summary: Misspelling of Reconstructed Points Pull Request resolved: https://github.com/mapillary/OpenSfM/pull/858 Reviewed By: fabianschenk Differential Revision: D34208356 Pulled By: YanNoun fbshipit-source-id: 71430f82da7deb3b785377e54b8ded36fd9a803c --- doc/source/quality_report.rst | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/doc/source/quality_report.rst b/doc/source/quality_report.rst index 64a22408f..c55f36ada 100644 --- a/doc/source/quality_report.rst +++ b/doc/source/quality_report.rst @@ -24,7 +24,7 @@ Processing Summary |processing| - Reconstructed Images : reconstructed images over total number of images - - Reconstructed Images : reconstructed points over total number of points in the `tracks.csv` file (`create_tracks`) + - Reconstructed Points : reconstructed points over total number of points in the `tracks.csv` file (`create_tracks`) - Reconstructed Components : number of continously reconstructed sets of images - Detected Features : median number (accross images) of detected features - Reconstructed Features : median number 
(accross images) of reconstructed features From 79aa4bdd8bd08dc0cd9e3086d170cedb29ac9760 Mon Sep 17 00:00:00 2001 From: Pyre Bot Jr <> Date: Thu, 17 Feb 2022 22:40:46 -0800 Subject: [PATCH 31/81] Add annotations to `mapillary/opensfm` Reviewed By: shannonzhu Differential Revision: D34332163 fbshipit-source-id: fd6c8fb9f7b70459922e49d150dcff5f021b8f2f --- opensfm/geo.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/opensfm/geo.py b/opensfm/geo.py index 62847e4e1..e2746cd2f 100644 --- a/opensfm/geo.py +++ b/opensfm/geo.py @@ -124,7 +124,7 @@ def ecef_from_topocentric_transform_finite_diff(lat, lon, alt: float): ) -def topocentric_from_lla(lat, lon, alt: float, reflat, reflon, refalt): +def topocentric_from_lla(lat, lon, alt: float, reflat, reflon, refalt: float): """ Transform from lat, lon, alt to topocentric XYZ. @@ -145,7 +145,7 @@ def topocentric_from_lla(lat, lon, alt: float, reflat, reflon, refalt): return tx, ty, tz -def lla_from_topocentric(x, y, z, reflat, reflon, refalt): +def lla_from_topocentric(x, y, z, reflat, reflon, refalt: float): """ Transform from topocentric XYZ to lat, lon, alt. 
""" From b26c150193d6d78796794120a0bc172d6fa9ba8d Mon Sep 17 00:00:00 2001 From: Fabian Schenk Date: Mon, 21 Feb 2022 04:11:29 -0800 Subject: [PATCH 32/81] Fix camera bindings Summary: Bind the static methods correctly: - `pixel_to_normalized_coordinates_common` - `pixel_to_normalized_coordinates_many_common` - `normalized_to_pixel_coordinates_common` - `normalized_to_pixel_coordinates_many_common` Reviewed By: tobias-o Differential Revision: D34303208 fbshipit-source-id: 898c1c19b6d14f4a1416b50ceb80e69294ddfefb --- opensfm/src/bundle/pybundle.pyi | 1 + opensfm/src/dense/pydense.pyi | 1 + opensfm/src/features/pyfeatures.pyi | 3 ++- opensfm/src/geo/pygeo.pyi | 11 ++++++----- opensfm/src/geometry/pygeometry.pyi | 13 +++++++++---- opensfm/src/geometry/python/pybind.cc | 8 ++++---- opensfm/src/map/pymap.pyi | 2 ++ opensfm/src/robust/pyrobust.pyi | 1 + opensfm/src/sfm/pysfm.pyi | 1 + opensfm/test/test_types.py | 2 -- 10 files changed, 27 insertions(+), 16 deletions(-) diff --git a/opensfm/src/bundle/pybundle.pyi b/opensfm/src/bundle/pybundle.pyi index 0a96baf1a..fa1fbae9a 100644 --- a/opensfm/src/bundle/pybundle.pyi +++ b/opensfm/src/bundle/pybundle.pyi @@ -2,6 +2,7 @@ # Do not manually edit # To regenerate: # $ buck run //mapillary/opensfm/opensfm/src/bundle:pybundle_stubgen +# Use proper mode, e.g. @arvr/mode/linux/dev for arvr # @generated import numpy diff --git a/opensfm/src/dense/pydense.pyi b/opensfm/src/dense/pydense.pyi index bc17f04b0..13a398277 100644 --- a/opensfm/src/dense/pydense.pyi +++ b/opensfm/src/dense/pydense.pyi @@ -2,6 +2,7 @@ # Do not manually edit # To regenerate: # $ buck run //mapillary/opensfm/opensfm/src/dense:pydense_stubgen +# Use proper mode, e.g. 
@arvr/mode/linux/dev for arvr # @generated import numpy diff --git a/opensfm/src/features/pyfeatures.pyi b/opensfm/src/features/pyfeatures.pyi index 84e4989e7..755d7c89d 100644 --- a/opensfm/src/features/pyfeatures.pyi +++ b/opensfm/src/features/pyfeatures.pyi @@ -2,6 +2,7 @@ # Do not manually edit # To regenerate: # $ buck run //mapillary/opensfm/opensfm/src/features:pyfeatures_stubgen +# Use proper mode, e.g. @arvr/mode/linux/dev for arvr # @generated import numpy @@ -135,5 +136,5 @@ class AkazeDiffusivityType: def akaze(arg0: numpy.ndarray, arg1: AKAZEOptions) -> tuple:... def compute_vlad_descriptor(arg0: numpy.ndarray, arg1: numpy.ndarray) -> numpy.ndarray:... def compute_vlad_distances(arg0: Dict[str, numpy.ndarray], arg1: str, arg2: Set[str]) -> Tuple[List[float], List[str]]:... -def hahog(image: numpy.ndarray, peak_threshold: float = 0.003, edge_threshold: float = 10, target_num_features: int = 0, use_adaptive_suppression: bool = False) -> tuple:... +def hahog(image: numpy.ndarray, peak_threshold: float = 0.003, edge_threshold: float = 10, target_num_features: int = 0) -> tuple:... def match_using_words(arg0: numpy.ndarray, arg1: numpy.ndarray, arg2: numpy.ndarray, arg3: numpy.ndarray, arg4: float, arg5: int) -> numpy.ndarray:... diff --git a/opensfm/src/geo/pygeo.pyi b/opensfm/src/geo/pygeo.pyi index e81db755a..0e5f2774f 100644 --- a/opensfm/src/geo/pygeo.pyi +++ b/opensfm/src/geo/pygeo.pyi @@ -2,6 +2,7 @@ # Do not manually edit # To regenerate: # $ buck run //mapillary/opensfm/opensfm/src/geo:pygeo_stubgen +# Use proper mode, e.g. @arvr/mode/linux/dev for arvr # @generated import numpy @@ -32,19 +33,19 @@ def ecef_from_lla(arg0: float, arg1: float, arg2: float) -> numpy.ndarray:... @overload def ecef_from_lla(arg0: numpy.ndarray) -> numpy.ndarray:... @overload -def ecef_from_topocentric_transform(arg0: numpy.ndarray) -> numpy.ndarray:... -@overload def ecef_from_topocentric_transform(arg0: float, arg1: float, arg2: float) -> numpy.ndarray:... 
@overload -def ecef_from_topocentric_transform_finite_diff(arg0: float, arg1: float, arg2: float) -> numpy.ndarray:... +def ecef_from_topocentric_transform(arg0: numpy.ndarray) -> numpy.ndarray:... @overload def ecef_from_topocentric_transform_finite_diff(arg0: numpy.ndarray) -> numpy.ndarray:... -def gps_distance(arg0: numpy.ndarray, arg1: numpy.ndarray) -> float:... @overload -def lla_from_ecef(arg0: float, arg1: float, arg2: float) -> numpy.ndarray:... +def ecef_from_topocentric_transform_finite_diff(arg0: float, arg1: float, arg2: float) -> numpy.ndarray:... +def gps_distance(arg0: numpy.ndarray, arg1: numpy.ndarray) -> float:... @overload def lla_from_ecef(arg0: numpy.ndarray) -> numpy.ndarray:... @overload +def lla_from_ecef(arg0: float, arg1: float, arg2: float) -> numpy.ndarray:... +@overload def lla_from_topocentric(arg0: numpy.ndarray, arg1: numpy.ndarray) -> numpy.ndarray:... @overload def lla_from_topocentric(arg0: float, arg1: float, arg2: float, arg3: float, arg4: float, arg5: float) -> numpy.ndarray:... diff --git a/opensfm/src/geometry/pygeometry.pyi b/opensfm/src/geometry/pygeometry.pyi index 7a846383c..9eb576777 100644 --- a/opensfm/src/geometry/pygeometry.pyi +++ b/opensfm/src/geometry/pygeometry.pyi @@ -2,6 +2,7 @@ # Do not manually edit # To regenerate: # $ buck run //mapillary/opensfm/opensfm/src/geometry:pygeometry_stubgen +# Use proper mode, e.g. @arvr/mode/linux/dev for arvr # @generated import numpy @@ -89,15 +90,19 @@ class Camera: @staticmethod def is_panorama(arg0: str) -> bool: ... def normalized_to_pixel_coordinates(self, arg0: numpy.ndarray) -> numpy.ndarray: ... - def normalized_to_pixel_coordinates_common(self, arg0: int, arg1: int) -> numpy.ndarray: ... + @staticmethod + def normalized_to_pixel_coordinates_common(arg0: numpy.ndarray, arg1: int, arg2: int) -> numpy.ndarray: ... def normalized_to_pixel_coordinates_many(self, arg0: numpy.ndarray) -> numpy.ndarray: ... 
- def normalized_to_pixel_coordinates_many_common(self, arg0: int, arg1: int) -> numpy.ndarray: ... + @staticmethod + def normalized_to_pixel_coordinates_many_common(arg0: numpy.ndarray, arg1: int, arg2: int) -> numpy.ndarray: ... def pixel_bearing(self, arg0: numpy.ndarray) -> numpy.ndarray: ... def pixel_bearing_many(self, arg0: numpy.ndarray) -> numpy.ndarray: ... def pixel_to_normalized_coordinates(self, arg0: numpy.ndarray) -> numpy.ndarray: ... - def pixel_to_normalized_coordinates_common(self, arg0: int, arg1: int) -> numpy.ndarray: ... + @staticmethod + def pixel_to_normalized_coordinates_common(arg0: numpy.ndarray, arg1: int, arg2: int) -> numpy.ndarray: ... def pixel_to_normalized_coordinates_many(self, arg0: numpy.ndarray) -> numpy.ndarray: ... - def pixel_to_normalized_coordinates_many_common(self, arg0: int, arg1: int) -> numpy.ndarray: ... + @staticmethod + def pixel_to_normalized_coordinates_many_common(arg0: numpy.ndarray, arg1: int, arg2: int) -> numpy.ndarray: ... def project(self, arg0: numpy.ndarray) -> numpy.ndarray: ... def project_many(self, arg0: numpy.ndarray) -> numpy.ndarray: ... def set_parameter_value(self, arg0: CameraParameters, arg1: float) -> None: ... 
diff --git a/opensfm/src/geometry/python/pybind.cc b/opensfm/src/geometry/python/pybind.cc index 451ae9005..9412cd790 100644 --- a/opensfm/src/geometry/python/pybind.cc +++ b/opensfm/src/geometry/python/pybind.cc @@ -76,10 +76,10 @@ PYBIND11_MODULE(pygeometry, m) { .def("get_parameters_values", &geometry::Camera::GetParametersValues) .def("get_parameters_types", &geometry::Camera::GetParametersTypes) .def("get_parameters_map", &geometry::Camera::GetParametersMap) - .def("pixel_to_normalized_coordinates_common", + .def_static("pixel_to_normalized_coordinates_common", (Vec2d(*)(const Vec2d&, const int, const int)) & geometry::Camera::PixelToNormalizedCoordinates) - .def("pixel_to_normalized_coordinates_many_common", + .def_static("pixel_to_normalized_coordinates_many_common", (MatX2d(*)(const MatX2d&, const int, const int)) & geometry::Camera::PixelToNormalizedCoordinatesMany) .def("pixel_to_normalized_coordinates", @@ -88,10 +88,10 @@ PYBIND11_MODULE(pygeometry, m) { .def("pixel_to_normalized_coordinates_many", (MatX2d(geometry::Camera::*)(const MatX2d&) const) & geometry::Camera::PixelToNormalizedCoordinatesMany) - .def("normalized_to_pixel_coordinates_common", + .def_static("normalized_to_pixel_coordinates_common", (Vec2d(*)(const Vec2d&, const int, const int)) & geometry::Camera::NormalizedToPixelCoordinates) - .def("normalized_to_pixel_coordinates_many_common", + .def_static("normalized_to_pixel_coordinates_many_common", (MatX2d(*)(const MatX2d&, const int, const int)) & geometry::Camera::NormalizedToPixelCoordinatesMany) .def("normalized_to_pixel_coordinates", diff --git a/opensfm/src/map/pymap.pyi b/opensfm/src/map/pymap.pyi index b189a1e56..11f00531f 100644 --- a/opensfm/src/map/pymap.pyi +++ b/opensfm/src/map/pymap.pyi @@ -2,6 +2,7 @@ # Do not manually edit # To regenerate: # $ buck run //mapillary/opensfm/opensfm/src/map:pymap_stubgen +# Use proper mode, e.g. 
@arvr/mode/linux/dev for arvr # @generated import numpy @@ -176,6 +177,7 @@ class Map: def update_shot(self, arg0: Shot) -> Shot: ... class Observation: def __init__(self, x: float, y: float, s: float, r: int, g: int, b: int, feature: int, segmentation: int = -1, instance: int = -1) -> None: ... + def copy(self) -> Observation: ... @property def color(self) -> numpy.ndarray:... @color.setter diff --git a/opensfm/src/robust/pyrobust.pyi b/opensfm/src/robust/pyrobust.pyi index 16489af50..0846303e9 100644 --- a/opensfm/src/robust/pyrobust.pyi +++ b/opensfm/src/robust/pyrobust.pyi @@ -2,6 +2,7 @@ # Do not manually edit # To regenerate: # $ buck run //mapillary/opensfm/opensfm/src/robust:pyrobust_stubgen +# Use proper mode, e.g. @arvr/mode/linux/dev for arvr # @generated import numpy diff --git a/opensfm/src/sfm/pysfm.pyi b/opensfm/src/sfm/pysfm.pyi index ca9169388..d957c13c2 100644 --- a/opensfm/src/sfm/pysfm.pyi +++ b/opensfm/src/sfm/pysfm.pyi @@ -2,6 +2,7 @@ # Do not manually edit # To regenerate: # $ buck run //mapillary/opensfm/opensfm/src/sfm:pysfm_stubgen +# Use proper mode, e.g. @arvr/mode/linux/dev for arvr # @generated import opensfm.pybundle diff --git a/opensfm/test/test_types.py b/opensfm/test/test_types.py index 82d1dead5..91c69e98f 100644 --- a/opensfm/test/test_types.py +++ b/opensfm/test/test_types.py @@ -241,7 +241,6 @@ def test_pixel_to_normalized_conversion() -> None: px_coord = np.array([50, 300]) norm_coord_comp = cam.pixel_to_normalized_coordinates(px_coord) norm_coord_static = pygeometry.Camera.pixel_to_normalized_coordinates_common( - # pyre-fixme[6]: For 1st param expected `Camera` but got `ndarray`. 
px_coord, width, height ) norm_coord_gt = px_coord - np.array([(width - 1.0) / 2.0, (height - 1.0) / 2.0]) @@ -251,7 +250,6 @@ def test_pixel_to_normalized_conversion() -> None: px_coord_comp1 = cam.normalized_to_pixel_coordinates(norm_coord_comp) px_coord_comp2 = pygeometry.Camera.normalized_to_pixel_coordinates_common( - # pyre-fixme[6]: For 1st param expected `Camera` but got `ndarray`. norm_coord_comp, width, height ) assert np.allclose(px_coord, px_coord_comp1) From c2cde23000325cc73793160063b24459a15e4ccb Mon Sep 17 00:00:00 2001 From: Yann Noutary Date: Mon, 21 Feb 2022 11:51:15 -0800 Subject: [PATCH 33/81] fix: robustness to large-frame camera Summary: This Diff strenghten the behaviour of parrallel processing steps with images, namely feature extraction and undistortion. We take into account the memory needed for processing a given image in both processes, in order to better pick a suitable number of // processes and queue size. We also refactor slightly PLY export, so that no intermediate string is created and instead, data is directly flushed to file handler. 
Reviewed By: mlopezantequera Differential Revision: D34302997 fbshipit-source-id: b988b29450e2036567892492ed78489ad11af3bc --- opensfm/features_processing.py | 37 +++++++- opensfm/io.py | 168 +++++++++++++++++---------------- opensfm/undistort.py | 24 ++++- 3 files changed, 142 insertions(+), 87 deletions(-) diff --git a/opensfm/features_processing.py b/opensfm/features_processing.py index 69f7aa6b1..7e545687b 100644 --- a/opensfm/features_processing.py +++ b/opensfm/features_processing.py @@ -19,22 +19,36 @@ def run_features_processing(data: DataSetBase, images: List[str], force: bool) - """Main entry point for running features extraction on a list of images.""" default_queue_size = 10 max_queue_size = 200 + mem_available = log.memory_available() + processes = data.config["processes"] if mem_available: + # Use 90% of available memory + ratio_use = 0.9 + mem_available *= ratio_use + logger.info( + f"Planning to use {mem_available} MB of RAM for both processing queue and parallel processing." + ) + + # 50% for the queue / 50% for parralel processing expected_mb = mem_available / 2 expected_images = min( max_queue_size, int(expected_mb / average_image_size(data)) ) - logger.info(f"Capping memory usage to ~ {expected_mb} MB") + processing_size = average_processing_size(data) + logger.info( + f"Scale-space expected size of a single image : {processing_size} MB" + ) + processes = min(max(1, int(expected_mb / processing_size)), processes) else: expected_images = default_queue_size - logger.info(f"Expecting to process {expected_images} images.") + logger.info( + f"Expecting to queue at most {expected_images} images while parallel processing of {processes} images." 
+ ) process_queue = queue.Queue(expected_images) arguments: List[Tuple[str, Any]] = [] - processes = data.config["processes"] - if processes == 1: for image in images: counter = Counter() @@ -72,6 +86,21 @@ def average_image_size(data: DataSetBase) -> float: return average_size_mb / max(1, len(data.load_camera_models())) +def average_processing_size(data: DataSetBase) -> float: + processing_size = data.config["feature_process_size"] + + min_octave_size = 16 # from covdet.c + octaveResolution = 3 # from covdet.c + start_size = processing_size * processing_size * 4 / 1024 / 1024 + last_octave = math.floor(math.log2(processing_size / min_octave_size)) + + total_size = 0 + for _ in range(last_octave + 1): + total_size += start_size * octaveResolution + start_size /= 2 + return total_size + + def is_high_res_panorama( data: DataSetBase, image_key: str, image_array: np.ndarray ) -> bool: diff --git a/opensfm/io.py b/opensfm/io.py index 043317a7c..26086e132 100644 --- a/opensfm/io.py +++ b/opensfm/io.py @@ -29,13 +29,15 @@ def camera_from_json(key: str, obj: Dict[str, Any]) -> pygeometry.Camera: obj["focal_x"], obj["focal_y"] / obj["focal_x"], np.array([obj.get("c_x", 0.0), obj.get("c_y", 0.0)]), - np.array([ - obj.get("k1", 0.0), - obj.get("k2", 0.0), - obj.get("k3", 0.0), - obj.get("p1", 0.0), - obj.get("p2", 0.0), - ]), + np.array( + [ + obj.get("k1", 0.0), + obj.get("k2", 0.0), + obj.get("k3", 0.0), + obj.get("p1", 0.0), + obj.get("p2", 0.0), + ] + ), ) elif pt == "fisheye": camera = pygeometry.Camera.create_fisheye( @@ -46,58 +48,66 @@ def camera_from_json(key: str, obj: Dict[str, Any]) -> pygeometry.Camera: obj["focal_x"], obj["focal_y"] / obj["focal_x"], np.array([obj.get("c_x", 0.0), obj.get("c_y", 0.0)]), - np.array([ - obj.get("k1", 0.0), - obj.get("k2", 0.0), - obj.get("k3", 0.0), - obj.get("k4", 0.0), - ]), + np.array( + [ + obj.get("k1", 0.0), + obj.get("k2", 0.0), + obj.get("k3", 0.0), + obj.get("k4", 0.0), + ] + ), ) elif pt == "fisheye62": camera = 
pygeometry.Camera.create_fisheye62( obj["focal_x"], obj["focal_y"] / obj["focal_x"], np.array([obj.get("c_x", 0.0), obj.get("c_y", 0.0)]), - np.array([ - obj.get("k1", 0.0), - obj.get("k2", 0.0), - obj.get("k3", 0.0), - obj.get("k4", 0.0), - obj.get("k5", 0.0), - obj.get("k6", 0.0), - obj.get("p1", 0.0), - obj.get("p2", 0.0), - ]), + np.array( + [ + obj.get("k1", 0.0), + obj.get("k2", 0.0), + obj.get("k3", 0.0), + obj.get("k4", 0.0), + obj.get("k5", 0.0), + obj.get("k6", 0.0), + obj.get("p1", 0.0), + obj.get("p2", 0.0), + ] + ), ) elif pt == "fisheye624": camera = pygeometry.Camera.create_fisheye624( obj["focal_x"], obj["focal_y"] / obj["focal_x"], np.array([obj.get("c_x", 0.0), obj.get("c_y", 0.0)]), - np.array([ - obj.get("k1", 0.0), - obj.get("k2", 0.0), - obj.get("k3", 0.0), - obj.get("k4", 0.0), - obj.get("k5", 0.0), - obj.get("k6", 0.0), - obj.get("p1", 0.0), - obj.get("p2", 0.0), - obj.get("s0", 0.0), - obj.get("s1", 0.0), - obj.get("s2", 0.0), - obj.get("s3", 0.0), - ]), + np.array( + [ + obj.get("k1", 0.0), + obj.get("k2", 0.0), + obj.get("k3", 0.0), + obj.get("k4", 0.0), + obj.get("k5", 0.0), + obj.get("k6", 0.0), + obj.get("p1", 0.0), + obj.get("p2", 0.0), + obj.get("s0", 0.0), + obj.get("s1", 0.0), + obj.get("s2", 0.0), + obj.get("s3", 0.0), + ] + ), ) elif pt == "radial": camera = pygeometry.Camera.create_radial( obj["focal_x"], obj["focal_y"] / obj["focal_x"], np.array([obj.get("c_x", 0.0), obj.get("c_y", 0.0)]), - np.array([ - obj.get("k1", 0.0), - obj.get("k2", 0.0), - ]), + np.array( + [ + obj.get("k1", 0.0), + obj.get("k2", 0.0), + ] + ), ) elif pt == "simple_radial": camera = pygeometry.Camera.create_simple_radial( @@ -685,14 +695,21 @@ def camera_from_vector( elif projection_type == "fisheye624": fx, fy, cx, cy, k1, k2, k3, k4, k5, k6, p1, p2, s0, s1, s2, s3 = parameters camera = pygeometry.Camera.create_fisheye624( - fx, fy / fx, np.array([cx, cy]), np.array([k1, k2, k3, k4, k5, k6, p1, p2, s0, s1, s2, s3]) + fx, + fy / fx, + np.array([cx, 
cy]), + np.array([k1, k2, k3, k4, k5, k6, p1, p2, s0, s1, s2, s3]), ) elif projection_type == "radial": fx, fy, cx, cy, k1, k2 = parameters - camera = pygeometry.Camera.create_radial(fx, fy / fx, np.array([cx, cy]), np.array([k1, k2])) + camera = pygeometry.Camera.create_radial( + fx, fy / fx, np.array([cx, cy]), np.array([k1, k2]) + ) elif projection_type == "simple_radial": fx, fy, cx, cy, k1 = parameters - camera = pygeometry.Camera.create_simple_radial(fx, fy / fx, np.array([cx, cy]), k1) + camera = pygeometry.Camera.create_simple_radial( + fx, fy / fx, np.array([cx, cy]), k1 + ) elif projection_type == "dual": focal, k1, k2, transition = parameters camera = pygeometry.Camera.create_dual(transition, focal, k1, k2) @@ -1107,47 +1124,38 @@ def point_cloud_to_ply( colors: np.ndarray, labels: np.ndarray, fp: TextIO, -) -> None: - """Export depthmap points as a PLY string""" - lines = _point_cloud_to_ply_lines(points, normals, colors, labels) - fp.writelines(lines) - - -def _point_cloud_to_ply_lines( - points: np.ndarray, - normals: np.ndarray, - colors: np.ndarray, - labels: np.ndarray, ): - yield "ply\n" - yield "format ascii 1.0\n" - yield "element vertex {}\n".format(len(points)) - yield "property float x\n" - yield "property float y\n" - yield "property float z\n" - yield "property float nx\n" - yield "property float ny\n" - yield "property float nz\n" - yield "property uchar diffuse_red\n" - yield "property uchar diffuse_green\n" - yield "property uchar diffuse_blue\n" - yield "property uchar class\n" - yield "end_header\n" + fp.write("ply\n") + fp.write("format ascii 1.0\n") + fp.write("element vertex {}\n".format(len(points))) + fp.write("property float x\n") + fp.write("property float y\n") + fp.write("property float z\n") + fp.write("property float nx\n") + fp.write("property float ny\n") + fp.write("property float nz\n") + fp.write("property uchar diffuse_red\n") + fp.write("property uchar diffuse_green\n") + fp.write("property uchar diffuse_blue\n") + 
fp.write("property uchar class\n") + fp.write("end_header\n") template = "{:.4f} {:.4f} {:.4f} {:.3f} {:.3f} {:.3f} {} {} {} {}\n" for i in range(len(points)): p, n, c, l = points[i], normals[i], colors[i], labels[i] - yield template.format( - p[0], - p[1], - p[2], - n[0], - n[1], - n[2], - int(c[0]), - int(c[1]), - int(c[2]), - int(l), + fp.write( + template.format( + p[0], + p[1], + p[2], + n[0], + n[1], + n[2], + int(c[0]), + int(c[1]), + int(c[2]), + int(l), + ) ) diff --git a/opensfm/undistort.py b/opensfm/undistort.py index 876c62ccd..6e2053e8e 100644 --- a/opensfm/undistort.py +++ b/opensfm/undistort.py @@ -11,6 +11,7 @@ pymap, transformations as tf, types, + features_processing, ) from opensfm.context import parallel_map from opensfm.dataset import UndistortedDataSet @@ -55,9 +56,7 @@ def undistort_reconstruction( shot, subshot_width, urec, image_format, rig_instance_count ) else: - logger.warning( - f"Not undistorting {shot.id} with unknown camera type." - ) + logger.warning(f"Not undistorting {shot.id} with unknown camera type.") continue for subshot in subshots: @@ -95,6 +94,25 @@ def undistort_reconstruction_with_images( arguments.append((reconstruction.shots[shot_id], subshots, data, udata)) processes = data.config["processes"] + + # trim processes to available memory, otherwise, pray + mem_available = log.memory_available() + if mem_available: + # Use 90% of available memory + ratio_use = 0.9 + mem_available *= ratio_use + + processing_size = data.config["depthmap_resolution"] + output_size = processing_size * processing_size * 4 / 1024 / 1024 + + undistort_factor = 3 # 1 for original image, 2 for (U,V) remapping + input_size = features_processing.average_image_size(data) * undistort_factor + processing_size = output_size + input_size + processes = min(max(1, int(mem_available / processing_size)), processes) + logger.info( + f"Undistorting in parallel with {processes} processes ({processing_size} MB per image)" + ) + 
parallel_map(undistort_image_and_masks, arguments, processes) return undistorted_shots From 21c22a9b3a506dcc6da4f4a0931a6b6b56019fb8 Mon Sep 17 00:00:00 2001 From: Yann Noutary Date: Mon, 21 Feb 2022 11:51:15 -0800 Subject: [PATCH 34/81] feat: data importer Summary: Adds a save_exif_overrides Reviewed By: mlopezantequera Differential Revision: D34303071 fbshipit-source-id: b44625ad9807d8f6679505789c43ceb4b8fddd7c --- opensfm/dataset.py | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/opensfm/dataset.py b/opensfm/dataset.py index ecaf75078..e0215a577 100644 --- a/opensfm/dataset.py +++ b/opensfm/dataset.py @@ -500,6 +500,11 @@ def load_exif_overrides(self) -> Dict[str, Any]: with self.io_handler.open_rt(self._exif_overrides_file()) as fin: return json.load(fin) + def save_exif_overrides(self, exif_overrides: Dict[str, Any]) -> None: + """Load EXIF overrides data.""" + with self.io_handler.open_wt(self._exif_overrides_file()) as fout: + io.json_dump(exif_overrides, fout) + def _rig_cameras_file(self) -> str: """Return path of rig models file""" return os.path.join(self.data_path, "rig_cameras.json") From f66e51df7200fac676d8487499ebaf40e3de3e88 Mon Sep 17 00:00:00 2001 From: Fabian Schenk Date: Wed, 23 Feb 2022 06:43:55 -0800 Subject: [PATCH 35/81] Remove pyre-fixme Summary: Remove `pyre-fixme`s and annotate a few things Reviewed By: tobias-o Differential Revision: D34303359 fbshipit-source-id: 67736abd744ba7c59b405168a5fd61c0933e0a9a --- opensfm/actions/export_pmvs.py | 6 +- opensfm/pairs_selection.py | 2 - opensfm/test/test_bundle.py | 235 ++++++------------ opensfm/test/test_datastructures.py | 24 +- opensfm/test/test_dense.py | 4 +- opensfm/test/test_io.py | 4 +- opensfm/test/test_matching.py | 38 +-- opensfm/test/test_multiview.py | 10 +- opensfm/test/test_pairs_selection.py | 27 +- opensfm/test/test_reconstruction_alignment.py | 6 +- opensfm/test/test_reconstruction_resect.py | 60 ++--- opensfm/test/test_triangulation.py | 24 +- opensfm/test/test_types.py 
| 16 +- opensfm/test/test_undistort.py | 4 +- opensfm/test/test_vlad.py | 6 +- opensfm/video.py | 3 +- 16 files changed, 179 insertions(+), 290 deletions(-) diff --git a/opensfm/actions/export_pmvs.py b/opensfm/actions/export_pmvs.py index e1180deb2..69f9232e0 100644 --- a/opensfm/actions/export_pmvs.py +++ b/opensfm/actions/export_pmvs.py @@ -12,7 +12,7 @@ logger = logging.getLogger(__name__) -def run_dataset(data: DataSet, points, image_list, output, undistorted): +def run_dataset(data: DataSet, points, image_list, output, undistorted) -> None: """Export reconstruction to PLY format Args: @@ -45,6 +45,7 @@ def run_dataset(data: DataSet, points, image_list, output, undistorted): image_graph = tracking.as_weighted_graph(tracks_manager) except IOError: image_graph = None + tracks_manager = None export_only = None if image_list: @@ -58,7 +59,6 @@ def run_dataset(data: DataSet, points, image_list, output, undistorted): reconstruction, h, image_graph, - # pyre-fixme[61]: `tracks_manager` may not be initialized here. 
tracks_manager, base_output_path, data, @@ -80,7 +80,7 @@ def export( udata: UndistortedDataSet, with_points, export_only, -): +) -> None: logger.info("Reconstruction %d" % index) output_path = os.path.join(base_output_path, "recon%d" % index) io.mkdir_p(output_path) diff --git a/opensfm/pairs_selection.py b/opensfm/pairs_selection.py index 535d86981..819345b8f 100644 --- a/opensfm/pairs_selection.py +++ b/opensfm/pairs_selection.py @@ -796,7 +796,6 @@ def ordered_pairs( while next_image: im1 = next_image next_image = None - for im2 in per_image[im1]: if (im2, im1) not in ordered: ordered.add((im1, im2)) @@ -806,5 +805,4 @@ def ordered_pairs( if not next_image and remaining: next_image = remaining.pop() - return list(ordered) diff --git a/opensfm/test/test_bundle.py b/opensfm/test/test_bundle.py index 076c8e4cd..5e64cfb83 100644 --- a/opensfm/test/test_bundle.py +++ b/opensfm/test/test_bundle.py @@ -32,7 +32,7 @@ def test_unicode_strings_in_bundle() -> None: @pytest.fixture() -def bundle_adjuster(): +def bundle_adjuster() -> pybundle.BundleAdjuster: ba = pybundle.BundleAdjuster() camera = pygeometry.Camera.create_perspective(1.0, 0.0, 0.0) ba.add_camera("cam1", camera, camera, True) @@ -40,20 +40,18 @@ def bundle_adjuster(): return ba -def test_sigleton(bundle_adjuster) -> None: +def test_sigleton(bundle_adjuster: pybundle.BundleAdjuster) -> None: """Single camera test""" sa = bundle_adjuster sa.add_rig_instance( "1", - # pyre-fixme[6]: For 1st param expected `ndarray` but got `List[float]`. - # pyre-fixme[6]: For 2nd param expected `ndarray` but got `List[int]`. 
- pygeometry.Pose([0.5, 0, 0], [0, 0, 0]), + pygeometry.Pose(np.array([0.5, 0, 0]), np.array([0, 0, 0])), {"1": "cam1"}, {"1": "rig_cam1"}, False, ) - sa.add_rig_instance_position_prior("1", [1, 0, 0], [1, 1, 1], "") - sa.add_absolute_up_vector("1", [0, -1, 0], 1) + sa.add_rig_instance_position_prior("1", np.array([1, 0, 0]), np.array([1, 1, 1]), "") + sa.add_absolute_up_vector("1", np.array([0, -1, 0]), 1) sa.add_absolute_pan("1", np.radians(180), 1) sa.run() @@ -61,20 +59,18 @@ def test_sigleton(bundle_adjuster) -> None: assert np.allclose(s1.translation, [1, 0, 0], atol=1e-6) -def test_singleton_pan_tilt_roll(bundle_adjuster) -> None: +def test_singleton_pan_tilt_roll(bundle_adjuster: pybundle.BundleAdjuster) -> None: """Single camera test with pan, tilt, roll priors.""" pan, tilt, roll = 1, 0.3, 0.2 sa = bundle_adjuster sa.add_rig_instance( "1", - # pyre-fixme[6]: For 1st param expected `ndarray` but got `List[float]`. - # pyre-fixme[6]: For 2nd param expected `ndarray` but got `List[int]`. 
- pygeometry.Pose([0.5, 0, 0], [0, 0, 0]), + pygeometry.Pose(np.array([0.5, 0, 0]), np.array([0, 0, 0])), {"1": "cam1"}, {"1": "rig_cam1"}, False, ) - sa.add_rig_instance_position_prior("1", [1, 0, 0], [1, 1, 1], "") + sa.add_rig_instance_position_prior("1", np.array([1, 0, 0]), np.array([1, 1, 1]), "") sa.add_absolute_pan("1", pan, 1) sa.add_absolute_tilt("1", tilt, 1) sa.add_absolute_roll("1", roll, 1) @@ -88,7 +84,7 @@ def test_singleton_pan_tilt_roll(bundle_adjuster) -> None: assert np.allclose(ptr, (pan, tilt, roll)) -def _projection_errors_std(points): +def _projection_errors_std(points) -> float: all_errors = [] for p in points.values(): all_errors += p.reprojection_errors.values() @@ -132,23 +128,19 @@ def test_bundle_projection_fixed_internals(scene_synthetic) -> None: assert reference.cameras["1"].k2 == orig_camera.k2 -def test_pair(bundle_adjuster) -> None: +def test_pair(bundle_adjuster: pybundle.BundleAdjuster) -> None: """Simple two camera test""" sa = bundle_adjuster sa.add_rig_instance( "1", - # pyre-fixme[6]: For 1st param expected `ndarray` but got `List[int]`. - # pyre-fixme[6]: For 2nd param expected `ndarray` but got `List[int]`. - pygeometry.Pose([0, 0, 0], [0, 0, 0]), + pygeometry.Pose(np.array([0, 0, 0]), np.array([0, 0, 0])), {"1": "cam1"}, {"1": "rig_cam1"}, False, ) sa.add_rig_instance( "2", - # pyre-fixme[6]: For 1st param expected `ndarray` but got `List[int]`. - # pyre-fixme[6]: For 2nd param expected `ndarray` but got `List[int]`. - pygeometry.Pose([0, 0, 0], [0, 0, 0]), + pygeometry.Pose(np.array([0, 0, 0]), np.array([0, 0, 0])), {"2": "cam1"}, {"2": "rig_cam1"}, False, @@ -158,12 +150,10 @@ def test_pair(bundle_adjuster) -> None: sa.add_reconstruction_instance("12", 4, "2") sa.set_scale_sharing("12", True) sa.add_relative_motion( - # pyre-fixme[6]: For 5th param expected `ndarray` but got `List[int]`. - # pyre-fixme[6]: For 6th param expected `ndarray` but got `List[int]`. 
- pybundle.RelativeMotion("12", "1", "12", "2", [0, 0, 0], [-1, 0, 0], 1) + pybundle.RelativeMotion("12", "1", "12", "2", np.array([0, 0, 0]), np.array([-1, 0, 0]), 1) ) - sa.add_rig_instance_position_prior("1", [0, 0, 0], [1, 1, 1], "") - sa.add_rig_instance_position_prior("2", [2, 0, 0], [1, 1, 1], "") + sa.add_rig_instance_position_prior("1", np.array([0, 0, 0]), np.array([1, 1, 1]), "") + sa.add_rig_instance_position_prior("2", np.array([2, 0, 0]), np.array([1, 1, 1]), "") sa.run() s1 = sa.get_rig_instance_pose("1") @@ -176,29 +166,25 @@ def test_pair(bundle_adjuster) -> None: assert np.allclose(r12.get_scale("2"), 0.5) -def test_pair_with_points_priors(bundle_adjuster) -> None: +def test_pair_with_points_priors(bundle_adjuster: pybundle.BundleAdjuster) -> None: """Simple two rigs test with a point constraint for anchoring""" sa = bundle_adjuster sa.add_rig_instance( "1", - # pyre-fixme[6]: For 1st param expected `ndarray` but got `List[float]`. - # pyre-fixme[6]: For 2nd param expected `ndarray` but got `List[float]`. - pygeometry.Pose([1e-3, 1e-3, 1e-3], [1e-3, 1e-3, 1e-3]), + pygeometry.Pose(np.array([1e-3, 1e-3, 1e-3]), np.array([1e-3, 1e-3, 1e-3])), {"1": "cam1"}, {"1": "rig_cam1"}, False, ) sa.add_rig_instance( "2", - # pyre-fixme[6]: For 1st param expected `ndarray` but got `List[float]`. - # pyre-fixme[6]: For 2nd param expected `ndarray` but got `List[float]`. 
- pygeometry.Pose([1e-3, 1e-3, 1e-3], [1e-3, 1e-3, 1e-3]), + pygeometry.Pose(np.array([1e-3, 1e-3, 1e-3]), np.array([1e-3, 1e-3, 1e-3])), {"2": "cam1"}, {"2": "rig_cam1"}, False, ) - sa.add_point("p1", [0, 0, 0], False) - sa.add_point("p2", [0, 0, 0], False) + sa.add_point("p1", np.array([0, 0, 0]), False) + sa.add_point("p2", np.array([0, 0, 0]), False) sa.add_reconstruction("12", False) sa.add_reconstruction_instance("12", 4, "1") @@ -211,18 +197,16 @@ def test_pair_with_points_priors(bundle_adjuster) -> None: sa.set_scale_sharing("12", True) sa.add_relative_motion( - # pyre-fixme[6]: For 5th param expected `ndarray` but got `List[int]`. - # pyre-fixme[6]: For 6th param expected `ndarray` but got `List[int]`. - pybundle.RelativeMotion("12", "1", "12", "2", [0, 0, 0], [-1, 0, 0], 1) + pybundle.RelativeMotion("12", "1", "12", "2", np.array([0, 0, 0]), np.array([-1, 0, 0]), 1) ) - sa.add_point_projection_observation("1", "p1", [0, 0], 1) - sa.add_point_projection_observation("2", "p1", [-0.5, 0], 1) - sa.add_point_prior("p1", [-0.5, 2, 2], [1, 1, 1], True) + sa.add_point_projection_observation("1", "p1", np.array([0, 0]), 1) + sa.add_point_projection_observation("2", "p1", np.array([-0.5, 0]), 1) + sa.add_point_prior("p1", np.array([-0.5, 2, 2]), np.array([1, 1, 1]), True) - sa.add_point_projection_observation("2", "p2", [0, 0], 1) - sa.add_point_projection_observation("1", "p2", [0.5, 0], 1) - sa.add_point_prior("p2", [1.5, 2, 2], [1, 1, 1], True) + sa.add_point_projection_observation("2", "p2", np.array([0, 0]), 1) + sa.add_point_projection_observation("1", "p2", np.array([0.5, 0]), 1) + sa.add_point_prior("p2", np.array([1.5, 2, 2]), np.array([1, 1, 1]), True) sa.run() s1 = sa.get_rig_instance_pose("1") @@ -239,23 +223,19 @@ def test_pair_with_points_priors(bundle_adjuster) -> None: assert np.allclose(r12.get_scale("2"), 0.5) -def test_pair_non_rigid(bundle_adjuster) -> None: +def test_pair_non_rigid(bundle_adjuster: pybundle.BundleAdjuster) -> None: """Simple 
two rigs test""" sa = bundle_adjuster sa.add_rig_instance( "1", - # pyre-fixme[6]: For 1st param expected `ndarray` but got `List[int]`. - # pyre-fixme[6]: For 2nd param expected `ndarray` but got `List[int]`. - pygeometry.Pose([0, 0, 0], [0, 0, 0]), + pygeometry.Pose(np.array([0, 0, 0]), np.array([0, 0, 0])), {"1": "cam1"}, {"1": "rig_cam1"}, False, ) sa.add_rig_instance( "2", - # pyre-fixme[6]: For 1st param expected `ndarray` but got `List[int]`. - # pyre-fixme[6]: For 2nd param expected `ndarray` but got `List[int]`. - pygeometry.Pose([0, 0, 0], [0, 0, 0]), + pygeometry.Pose(np.array([0, 0, 0]), np.array([0, 0, 0])), {"2": "cam1"}, {"2": "rig_cam1"}, False, @@ -265,12 +245,10 @@ def test_pair_non_rigid(bundle_adjuster) -> None: sa.add_reconstruction_instance("12", 4, "2") sa.set_scale_sharing("12", False) sa.add_relative_similarity( - # pyre-fixme[6]: For 5th param expected `ndarray` but got `List[int]`. - # pyre-fixme[6]: For 6th param expected `ndarray` but got `List[int]`. - pybundle.RelativeSimilarity("12", "1", "12", "2", [0, 0, 0], [-1, 0, 0], 1, 1) + pybundle.RelativeSimilarity("12", "1", "12", "2", np.array([0, 0, 0]), np.array([-1, 0, 0]), 1, 1) ) - sa.add_rig_instance_position_prior("1", [0, 0, 0], [1, 1, 1], "") - sa.add_rig_instance_position_prior("2", [2, 0, 0], [1, 1, 1], "") + sa.add_rig_instance_position_prior("1", np.array([0, 0, 0]), np.array([1, 1, 1]), "") + sa.add_rig_instance_position_prior("2", np.array([2, 0, 0]), np.array([1, 1, 1]), "") sa.run() s1 = sa.get_rig_instance_pose("1") @@ -283,41 +261,33 @@ def test_pair_non_rigid(bundle_adjuster) -> None: assert np.allclose(r12.get_scale("2"), 0.5) -def test_four_cams_single_reconstruction(bundle_adjuster) -> None: +def test_four_cams_single_reconstruction(bundle_adjuster: pybundle.BundleAdjuster) -> None: """Four rigs, one reconstruction""" sa = bundle_adjuster sa.add_rig_instance( "1", - # pyre-fixme[6]: For 1st param expected `ndarray` but got `List[int]`. 
- # pyre-fixme[6]: For 2nd param expected `ndarray` but got `List[int]`. - pygeometry.Pose([0, 0, 0], [0, 0, 0]), + pygeometry.Pose(np.array([0, 0, 0]), np.array([0, 0, 0])), {"1": "cam1"}, {"1": "rig_cam1"}, False, ) sa.add_rig_instance( "2", - # pyre-fixme[6]: For 1st param expected `ndarray` but got `List[int]`. - # pyre-fixme[6]: For 2nd param expected `ndarray` but got `List[int]`. - pygeometry.Pose([0, 0, 0], [0, 0, 0]), + pygeometry.Pose(np.array([0, 0, 0]), np.array([0, 0, 0])), {"2": "cam1"}, {"2": "rig_cam1"}, False, ) sa.add_rig_instance( "3", - # pyre-fixme[6]: For 1st param expected `ndarray` but got `List[int]`. - # pyre-fixme[6]: For 2nd param expected `ndarray` but got `List[int]`. - pygeometry.Pose([0, 0, 0], [0, 0, 0]), + pygeometry.Pose(np.array([0, 0, 0]), np.array([0, 0, 0])), {"3": "cam1"}, {"3": "rig_cam1"}, False, ) sa.add_rig_instance( "4", - # pyre-fixme[6]: For 1st param expected `ndarray` but got `List[int]`. - # pyre-fixme[6]: For 2nd param expected `ndarray` but got `List[int]`. - pygeometry.Pose([0, 0, 0], [0, 0, 0]), + pygeometry.Pose(np.array([0, 0, 0]), np.array([0, 0, 0])), {"4": "cam1"}, {"4": "rig_cam1"}, False, @@ -329,23 +299,17 @@ def test_four_cams_single_reconstruction(bundle_adjuster) -> None: sa.add_reconstruction_instance("1234", 1, "4") sa.set_scale_sharing("1234", True) sa.add_relative_motion( - # pyre-fixme[6]: For 5th param expected `ndarray` but got `List[int]`. - # pyre-fixme[6]: For 6th param expected `ndarray` but got `List[int]`. - pybundle.RelativeMotion("1234", "1", "1234", "2", [0, 0, 0], [-1, 0, 0], 1) + pybundle.RelativeMotion("1234", "1", "1234", "2", np.array([0, 0, 0]), np.array([-1, 0, 0]), 1) ) sa.add_relative_motion( - # pyre-fixme[6]: For 5th param expected `ndarray` but got `List[int]`. - # pyre-fixme[6]: For 6th param expected `ndarray` but got `List[int]`. 
- pybundle.RelativeMotion("1234", "1", "1234", "3", [0, 0, 0], [0, -1, 0], 1) + pybundle.RelativeMotion("1234", "1", "1234", "3", np.array([0, 0, 0]), np.array([0, -1, 0]), 1) ) sa.add_relative_motion( - # pyre-fixme[6]: For 5th param expected `ndarray` but got `List[int]`. - # pyre-fixme[6]: For 6th param expected `ndarray` but got `List[int]`. - pybundle.RelativeMotion("1234", "1", "1234", "4", [0, 0, 0], [0, 0, -1], 1) + pybundle.RelativeMotion("1234", "1", "1234", "4", np.array([0, 0, 0]), np.array([0, 0, -1]), 1) ) - sa.add_rig_instance_position_prior("1", [0, 0, 0], [1, 1, 1], "") - sa.add_rig_instance_position_prior("2", [2, 0, 0], [1, 1, 1], "") - sa.add_rig_instance_position_prior("3", [0, 2, 0], [1, 1, 1], "") + sa.add_rig_instance_position_prior("1", np.array([0, 0, 0]), np.array([1, 1, 1]), "") + sa.add_rig_instance_position_prior("2", np.array([2, 0, 0]), np.array([1, 1, 1]), "") + sa.add_rig_instance_position_prior("3", np.array([0, 2, 0]), np.array([1, 1, 1]), "") sa.run() s1 = sa.get_rig_instance_pose("1") @@ -359,41 +323,33 @@ def test_four_cams_single_reconstruction(bundle_adjuster) -> None: assert np.allclose(s4.translation, [0, 0, -2], atol=1e-6) -def test_four_cams_single_reconstruction_non_rigid(bundle_adjuster) -> None: +def test_four_cams_single_reconstruction_non_rigid(bundle_adjuster: pybundle.BundleAdjuster) -> None: """Four rigs, one reconstruction""" sa = bundle_adjuster sa.add_rig_instance( "1", - # pyre-fixme[6]: For 1st param expected `ndarray` but got `List[int]`. - # pyre-fixme[6]: For 2nd param expected `ndarray` but got `List[int]`. - pygeometry.Pose([0, 0, 0], [0, 0, 0]), + pygeometry.Pose(np.array([0, 0, 0]), np.array([0, 0, 0])), {"1": "cam1"}, {"1": "rig_cam1"}, False, ) sa.add_rig_instance( "2", - # pyre-fixme[6]: For 1st param expected `ndarray` but got `List[int]`. - # pyre-fixme[6]: For 2nd param expected `ndarray` but got `List[int]`. 
- pygeometry.Pose([0, 0, 0], [0, 0, 0]), + pygeometry.Pose(np.array([0, 0, 0]), np.array([0, 0, 0])), {"2": "cam1"}, {"2": "rig_cam1"}, False, ) sa.add_rig_instance( "3", - # pyre-fixme[6]: For 1st param expected `ndarray` but got `List[int]`. - # pyre-fixme[6]: For 2nd param expected `ndarray` but got `List[int]`. - pygeometry.Pose([0, 0, 0], [0, 0, 0]), + pygeometry.Pose(np.array([0, 0, 0]), np.array([0, 0, 0])), {"3": "cam1"}, {"3": "rig_cam1"}, False, ) sa.add_rig_instance( "4", - # pyre-fixme[6]: For 1st param expected `ndarray` but got `List[int]`. - # pyre-fixme[6]: For 2nd param expected `ndarray` but got `List[int]`. - pygeometry.Pose([0, 0, 0], [0, 0, 0]), + pygeometry.Pose(np.array([0, 0, 0]), np.array([0, 0, 0])), {"4": "cam1"}, {"4": "rig_cam1"}, False, @@ -441,10 +397,10 @@ def test_four_cams_single_reconstruction_non_rigid(bundle_adjuster) -> None: 1, ) ) - sa.add_rig_instance_position_prior("1", [0, 0, 0], [1, 1, 1], "") - sa.add_rig_instance_position_prior("2", [2, 0, 0], [1, 1, 1], "") - sa.add_rig_instance_position_prior("3", [4, 2, 0], [1, 1, 1], "") - sa.add_rig_instance_position_prior("4", [4, 4, 0], [1, 1, 1], "") + sa.add_rig_instance_position_prior("1", np.array([0, 0, 0]), np.array([1, 1, 1]), "") + sa.add_rig_instance_position_prior("2", np.array([2, 0, 0]), np.array([1, 1, 1]), "") + sa.add_rig_instance_position_prior("3", np.array([4, 2, 0]), np.array([1, 1, 1]), "") + sa.add_rig_instance_position_prior("4", np.array([4, 4, 0]), np.array([1, 1, 1]), "") sa.run() s1 = sa.get_rig_instance_pose("1") @@ -464,41 +420,33 @@ def test_four_cams_single_reconstruction_non_rigid(bundle_adjuster) -> None: assert np.allclose(r1234.get_scale("4"), 0.5) -def test_four_cams_one_fixed(bundle_adjuster) -> None: +def test_four_cams_one_fixed(bundle_adjuster: pybundle.BundleAdjuster) -> None: """Four rigs, one reconstruction""" sa = bundle_adjuster sa.add_rig_instance( "1", - # pyre-fixme[6]: For 1st param expected `ndarray` but got `List[int]`. 
- # pyre-fixme[6]: For 2nd param expected `ndarray` but got `List[int]`. - pygeometry.Pose([0, 0, 0], [0, 0, 0]), + pygeometry.Pose(np.array([0, 0, 0]), np.array([0, 0, 0])), {"1": "cam1"}, {"1": "rig_cam1"}, True, ) sa.add_rig_instance( "2", - # pyre-fixme[6]: For 1st param expected `ndarray` but got `List[int]`. - # pyre-fixme[6]: For 2nd param expected `ndarray` but got `List[int]`. - pygeometry.Pose([0, 0, 0], [0, 0, 0]), + pygeometry.Pose(np.array([0, 0, 0]), np.array([0, 0, 0])), {"2": "cam1"}, {"2": "rig_cam1"}, False, ) sa.add_rig_instance( "3", - # pyre-fixme[6]: For 1st param expected `ndarray` but got `List[int]`. - # pyre-fixme[6]: For 2nd param expected `ndarray` but got `List[int]`. - pygeometry.Pose([0, 0, 0], [0, 0, 0]), + pygeometry.Pose(np.array([0, 0, 0]), np.array([0, 0, 0])), {"3": "cam1"}, {"3": "rig_cam1"}, False, ) sa.add_rig_instance( "4", - # pyre-fixme[6]: For 1st param expected `ndarray` but got `List[float]`. - # pyre-fixme[6]: For 2nd param expected `ndarray` but got `List[int]`. - pygeometry.Pose([0.0, 0, 0], [0, 0, 0]), + pygeometry.Pose(np.array([0, 0, 0]), np.array([0, 0, 0])), {"4": "cam1"}, {"4": "rig_cam1"}, False, @@ -510,23 +458,17 @@ def test_four_cams_one_fixed(bundle_adjuster) -> None: sa.add_reconstruction_instance("1234", 1, "4") sa.set_scale_sharing("1234", True) sa.add_relative_motion( - # pyre-fixme[6]: For 5th param expected `ndarray` but got `List[int]`. - # pyre-fixme[6]: For 6th param expected `ndarray` but got `List[int]`. - pybundle.RelativeMotion("1234", "1", "1234", "2", [0, 0, 0], [-1, 0, 0], 1) + pybundle.RelativeMotion("1234", "1", "1234", "2", np.array([0, 0, 0]), np.array([-1, 0, 0]), 1) ) sa.add_relative_motion( - # pyre-fixme[6]: For 5th param expected `ndarray` but got `List[int]`. - # pyre-fixme[6]: For 6th param expected `ndarray` but got `List[int]`. 
- pybundle.RelativeMotion("1234", "1", "1234", "3", [0, 0, 0], [0, -1, 0], 1) + pybundle.RelativeMotion("1234", "1", "1234", "3", np.array([0, 0, 0]), np.array([0, -1, 0]), 1) ) sa.add_relative_motion( - # pyre-fixme[6]: For 5th param expected `ndarray` but got `List[int]`. - # pyre-fixme[6]: For 6th param expected `ndarray` but got `List[int]`. - pybundle.RelativeMotion("1234", "1", "1234", "4", [0, 0, 0], [0, 0, -1], 1) + pybundle.RelativeMotion("1234", "1", "1234", "4", np.array([0, 0, 0]), np.array([0, 0, -1]), 1) ) - sa.add_rig_instance_position_prior("1", [100, 0, 0], [1, 1, 1], "") - sa.add_rig_instance_position_prior("2", [2, 0, 0], [1, 1, 1], "") - sa.add_rig_instance_position_prior("3", [0, 2, 0], [1, 1, 1], "") + sa.add_rig_instance_position_prior("1", np.array([100, 0, 0]), np.array([1, 1, 1]), "") + sa.add_rig_instance_position_prior("2", np.array([2, 0, 0]), np.array([1, 1, 1]), "") + sa.add_rig_instance_position_prior("3", np.array([0, 2, 0]), np.array([1, 1, 1]), "") sa.run() s1 = sa.get_rig_instance_pose("1") @@ -540,32 +482,26 @@ def test_four_cams_one_fixed(bundle_adjuster) -> None: assert np.allclose(s4.translation, [0, 0, -2], atol=1e-6) -def test_linear_motion_prior_position(bundle_adjuster) -> None: +def test_linear_motion_prior_position(bundle_adjuster: pybundle.BundleAdjuster) -> None: """Three rigs, middle has no gps info. Translation only""" sa = bundle_adjuster sa.add_rig_instance( "1", - # pyre-fixme[6]: For 1st param expected `ndarray` but got `List[int]`. - # pyre-fixme[6]: For 2nd param expected `ndarray` but got `List[int]`. - pygeometry.Pose([0, 0, 0], [0, 0, 0]), + pygeometry.Pose(np.array([0, 0, 0]), np.array([0, 0, 0])), {"1": "cam1"}, {"1": "rig_cam1"}, True, ) sa.add_rig_instance( "2", - # pyre-fixme[6]: For 1st param expected `ndarray` but got `List[int]`. - # pyre-fixme[6]: For 2nd param expected `ndarray` but got `List[int]`. 
- pygeometry.Pose([0, 0, 0], [0, 0, 0]), + pygeometry.Pose(np.array([0, 0, 0]), np.array([0, 0, 0])), {"2": "cam1"}, {"2": "rig_cam1"}, False, ) sa.add_rig_instance( "3", - # pyre-fixme[6]: For 1st param expected `ndarray` but got `List[int]`. - # pyre-fixme[6]: For 2nd param expected `ndarray` but got `List[int]`. - pygeometry.Pose([0, 0, 0], [0, 0, 0]), + pygeometry.Pose(np.array([0, 0, 0]), np.array([0, 0, 0])), {"3": "cam1"}, {"3": "rig_cam1"}, False, @@ -575,8 +511,8 @@ def test_linear_motion_prior_position(bundle_adjuster) -> None: sa.add_reconstruction_instance("123", 1, "2") sa.add_reconstruction_instance("123", 1, "3") sa.set_scale_sharing("123", True) - sa.add_rig_instance_position_prior("1", [0, 0, 0], [1, 1, 1], "") - sa.add_rig_instance_position_prior("3", [2, 0, 0], [1, 1, 1], "") + sa.add_rig_instance_position_prior("1", np.array([0, 0, 0]), np.array([1, 1, 1]), "") + sa.add_rig_instance_position_prior("3", np.array([2, 0, 0]), np.array([1, 1, 1]), "") sa.add_linear_motion("1", "2", "3", 0.5, 0.1, 0.1) sa.run() @@ -589,32 +525,26 @@ def test_linear_motion_prior_position(bundle_adjuster) -> None: assert np.allclose(s3.translation, [-2, 0, 0], atol=1e-6) -def test_linear_motion_prior_rotation(bundle_adjuster) -> None: +def test_linear_motion_prior_rotation(bundle_adjuster: pybundle.BundleAdjuster) -> None: """Three rigs, middle has no gps or orientation info""" sa = bundle_adjuster sa.add_rig_instance( "1", - # pyre-fixme[6]: For 1st param expected `ndarray` but got `List[int]`. - # pyre-fixme[6]: For 2nd param expected `ndarray` but got `List[int]`. - pygeometry.Pose([0, 0, 0], [0, 0, 0]), + pygeometry.Pose(np.array([0, 0, 0]), np.array([0, 0, 0])), {"1": "cam1"}, {"1": "rig_cam1"}, True, ) sa.add_rig_instance( "2", - # pyre-fixme[6]: For 1st param expected `ndarray` but got `List[int]`. - # pyre-fixme[6]: For 2nd param expected `ndarray` but got `List[int]`. 
- pygeometry.Pose([0, 0, 0], [0, 0, 0]), + pygeometry.Pose(np.array([0, 0, 0]), np.array([0, 0, 0])), {"2": "cam1"}, {"2": "rig_cam1"}, False, ) sa.add_rig_instance( "3", - # pyre-fixme[6]: For 1st param expected `ndarray` but got `List[int]`. - # pyre-fixme[6]: For 2nd param expected `ndarray` but got `List[int]`. - pygeometry.Pose([0, 1, 0], [0, 0, 0]), + pygeometry.Pose(np.array([0, 1, 0]), np.array([0, 0, 0])), {"3": "cam1"}, {"3": "rig_cam1"}, True, @@ -682,8 +612,7 @@ def test_bundle_alignment_prior() -> None: shot = r.create_shot( "1", camera.id, pygeometry.Pose(np.random.rand(3), np.random.rand(3)) ) - # pyre-fixme[8]: Attribute has type `ndarray`; used as `List[int]`. - shot.metadata.gps_position.value = [0, 0, 0] + shot.metadata.gps_position.value = np.array([0, 0, 0]) shot.metadata.gps_accuracy.value = 1 camera_priors = {camera.id: camera} @@ -699,32 +628,26 @@ def test_bundle_alignment_prior() -> None: assert np.allclose(shot.pose.transform([0, 0, 1]), [0, -1, 0], atol=1e-7) -def test_heatmaps_position(bundle_adjuster) -> None: +def test_heatmaps_position(bundle_adjuster: pybundle.BundleAdjuster) -> None: """Three cameras. Same heatmap different offsets""" sa = bundle_adjuster sa.add_rig_instance( "1", - # pyre-fixme[6]: For 1st param expected `ndarray` but got `List[int]`. - # pyre-fixme[6]: For 2nd param expected `ndarray` but got `List[int]`. - pygeometry.Pose([0, 0, 0], [0, 0, 0]), + pygeometry.Pose(np.array([0, 0, 0]), np.array([0, 0, 0])), {"1": "cam1"}, {"1": "rig_cam1"}, False, ) sa.add_rig_instance( "2", - # pyre-fixme[6]: For 1st param expected `ndarray` but got `List[int]`. - # pyre-fixme[6]: For 2nd param expected `ndarray` but got `List[int]`. - pygeometry.Pose([0, 0, 0], [0, 0, 0]), + pygeometry.Pose(np.array([0, 0, 0]), np.array([0, 0, 0])), {"2": "cam1"}, {"2": "rig_cam1"}, False, ) sa.add_rig_instance( "3", - # pyre-fixme[6]: For 1st param expected `ndarray` but got `List[int]`. 
- # pyre-fixme[6]: For 2nd param expected `ndarray` but got `List[int]`. - pygeometry.Pose([0, 0, 0], [0, 0, 0]), + pygeometry.Pose(np.array([0, 0, 0]), np.array([0, 0, 0])), {"3": "cam1"}, {"3": "rig_cam1"}, False, diff --git a/opensfm/test/test_datastructures.py b/opensfm/test/test_datastructures.py index dbba54f8f..1c8ef983d 100644 --- a/opensfm/test/test_datastructures.py +++ b/opensfm/test/test_datastructures.py @@ -20,7 +20,7 @@ def _create_reconstruction( n_points: int=0, dist_to_shots: bool=False, dist_to_pano_shots: bool=False, -): +) -> types.Reconstruction: """Creates a reconstruction with n_cameras random cameras and shots, where n_shots_cam is a dictionary, containing the camera_id and the number of shots. @@ -161,9 +161,7 @@ def test_brown_camera() -> None: p2 = 0.002 k3 = 0.01 cam_cpp = pygeometry.Camera.create_brown( - # pyre-fixme[6]: For 3rd param expected `ndarray` but got `List[float]`. - # pyre-fixme[6]: For 4th param expected `ndarray` but got `List[float]`. - focal_x, focal_y / focal_x, [c_x, c_y], [k1, k2, k3, p1, p2] + focal_x, focal_y / focal_x, np.array([c_x, c_y]), np.array([k1, k2, k3, p1, p2]) ) cam_cpp.width = 800 cam_cpp.height = 600 @@ -204,10 +202,8 @@ def test_fisheye_opencv_camera() -> None: rec = types.Reconstruction() focal = 0.6 aspect_ratio = 0.7 - ppoint = [0.51, 0.52] - dist = [-0.1, 0.09, 0.08, 0.01] - # pyre-fixme[6]: For 3rd param expected `ndarray` but got `List[float]`. - # pyre-fixme[6]: For 4th param expected `ndarray` but got `List[float]`. 
+ ppoint = np.array([0.51, 0.52]) + dist = np.array([-0.1, 0.09, 0.08, 0.01]) cam_cpp = pygeometry.Camera.create_fisheye_opencv(focal, aspect_ratio, ppoint, dist) cam_cpp.width = 800 cam_cpp.height = 600 @@ -228,10 +224,8 @@ def test_fisheye62_camera() -> None: rec = types.Reconstruction() focal = 0.6 aspect_ratio = 0.7 - ppoint = [0.51, 0.52] - dist = [-0.1, 0.09, 0.08, 0.01, 0.02, 0.05, 0.1, 0.2] # [k1-k6, p1, p2] - # pyre-fixme[6]: For 3rd param expected `ndarray` but got `List[float]`. - # pyre-fixme[6]: For 4th param expected `ndarray` but got `List[float]`. + ppoint = np.array([0.51, 0.52]) + dist = np.array([-0.1, 0.09, 0.08, 0.01, 0.02, 0.05, 0.1, 0.2]) # [k1-k6, p1, p2] cam_cpp = pygeometry.Camera.create_fisheye62(focal, aspect_ratio, ppoint, dist) cam_cpp.width = 800 cam_cpp.height = 600 @@ -254,10 +248,8 @@ def test_fisheye624_camera() -> None: rec = types.Reconstruction() focal = 0.6 aspect_ratio = 0.7 - ppoint = [0.51, 0.52] - dist = [-0.1, 0.09, 0.08, 0.01, 0.02, 0.05, 0.1, 0.2, 0.01, -0.003, 0.005, -0.007] # [k1-k6, p1, p2, s0-s3] - # pyre-fixme[6]: For 3rd param expected `ndarray` but got `List[float]`. - # pyre-fixme[6]: For 4th param expected `ndarray` but got `List[float]`. + ppoint = np.array([0.51, 0.52]) + dist = np.array([-0.1, 0.09, 0.08, 0.01, 0.02, 0.05, 0.1, 0.2, 0.01, -0.003, 0.005, -0.007]) # [k1-k6, p1, p2, s0-s3] cam_cpp = pygeometry.Camera.create_fisheye624(focal, aspect_ratio, ppoint, dist) cam_cpp.width = 800 cam_cpp.height = 600 diff --git a/opensfm/test/test_dense.py b/opensfm/test/test_dense.py index cc83e7e55..5f2bb9789 100644 --- a/opensfm/test/test_dense.py +++ b/opensfm/test/test_dense.py @@ -32,9 +32,7 @@ def test_depthmap_to_ply() -> None: r = types.Reconstruction() r.add_camera(camera) shot = r.create_shot( - # pyre-fixme[6]: For 1st param expected `ndarray` but got `List[float]`. - # pyre-fixme[6]: For 2nd param expected `ndarray` but got `List[float]`. 
- "shot1", camera.id, pygeometry.Pose([0.0, 0.0, 0.0], [0.0, 0.0, 0.0]) + "shot1", camera.id, pygeometry.Pose(np.array([0.0, 0.0, 0.0]), np.array([0.0, 0.0, 0.0])) ) image = np.zeros((height, width, 3)) diff --git a/opensfm/test/test_io.py b/opensfm/test/test_io.py index 49fac0f55..4775e98ac 100644 --- a/opensfm/test/test_io.py +++ b/opensfm/test/test_io.py @@ -3,7 +3,7 @@ from io import StringIO import numpy as np -from opensfm import pygeometry, geo, io, types +from opensfm import pygeometry, io, types from opensfm.test import data_generation, utils @@ -128,7 +128,7 @@ def test_read_write_ground_control_points() -> None: } """ - def check_points(points): + def check_points(points) -> None: assert len(points) == 2 p1, p2 = points if p1.id != "1": diff --git a/opensfm/test/test_matching.py b/opensfm/test/test_matching.py index dc2256bb4..c397b7ee1 100644 --- a/opensfm/test/test_matching.py +++ b/opensfm/test/test_matching.py @@ -1,3 +1,5 @@ +from typing import Any, Dict, List, Set, Tuple + import numpy as np from opensfm import bow from opensfm import config @@ -7,7 +9,7 @@ from opensfm.synthetic_data import synthetic_dataset -def compute_words(features, bag_of_words, num_words, bow_matcher_type): +def compute_words(features: np.ndarray, bag_of_words, num_words, bow_matcher_type) -> np.ndarray: closest_words = bag_of_words.map_to_words(features, num_words, bow_matcher_type) if closest_words is None: return np.array([], dtype=np.int32) @@ -15,7 +17,9 @@ def compute_words(features, bag_of_words, num_words, bow_matcher_type): return closest_words.astype(np.int32) -def example_features(nfeatures, config): +def example_features( + nfeatures: int, config: Dict[str, Any] +) -> Tuple[List[np.ndarray], List[np.ndarray]]: words, frequencies = bow.load_bow_words_and_frequencies(config) bag_of_words = bow.BagOfWords(words, frequencies) @@ -110,25 +114,27 @@ def test_match_images(scene_synthetic) -> None: matches = pairs.get(pair) if matches is None or len(matches) == 1: 
matches = pairs.get(pair[::-1]) - # pyre-fixme[6]: For 1st param expected `Sized` but got - # `Optional[List[Tuple[int, int]]]`. + assert matches is not None assert len(matches) > 25 def test_ordered_pairs() -> None: - neighbors = [ - [1, 3], - [1, 2], - [2, 5], - [3, 2], - [4, 5], - ] - images = [1, 2, 3] - # pyre-fixme[6]: For 1st param expected `Set[Tuple[str, str]]` but got - # `List[List[int]]`. - # pyre-fixme[6]: For 2nd param expected `List[str]` but got `List[int]`. + neighbors: Set[Tuple[str, str]] = { + ("1", "3"), + ("1", "2"), + ("2", "5"), + ("3", "2"), + ("4", "5"), + } + images = ["1", "2", "3"] pairs = pairs_selection.ordered_pairs(neighbors, images) - assert set(pairs) == {(1, 2), (1, 3), (2, 5), (3, 2)} + + assert {tuple(sorted(p)) for p in pairs} == { + ("1", "2"), + ("1", "3"), + ("2", "5"), + ("2", "3"), + } def test_triangulation_inliers(pairs_and_their_E) -> None: diff --git a/opensfm/test/test_multiview.py b/opensfm/test/test_multiview.py index df85065c3..ab943fe41 100644 --- a/opensfm/test/test_multiview.py +++ b/opensfm/test/test_multiview.py @@ -1,14 +1,12 @@ import copy -import random import numpy as np from opensfm import multiview from opensfm import pygeometry from opensfm import transformations as tf -from opensfm.synthetic_data import synthetic_examples -def normalized(x): +def normalized(x: np.ndarray) -> np.ndarray: return x / np.linalg.norm(x) @@ -21,9 +19,8 @@ def test_motion_from_plane_homography() -> None: H = scale * (d * R - np.outer(t, n)) motions = multiview.motion_from_plane_homography(H) - + assert motions is not None goodness = [] - # pyre-fixme[16]: Optional type has no attribute `__iter__`. 
for Re, te, ne, de in motions: scalee = np.linalg.norm(te) good_R = np.allclose(R, Re) @@ -125,9 +122,8 @@ def test_relative_rotation(pairs_and_their_E) -> None: rotation = np.array([vec_x, vec_y, vec_z]) f1 /= np.linalg.norm(f1, axis=1)[:, None] - f2 = [rotation.dot(x) for x in f1] + f2 = np.array([rotation.dot(x) for x in f1]) - # pyre-fixme[6]: For 2nd param expected `ndarray` but got `List[typing.Any]`. result = pygeometry.relative_rotation_n_points(f1, f2) assert np.allclose(rotation, result, rtol=1e-10) diff --git a/opensfm/test/test_pairs_selection.py b/opensfm/test/test_pairs_selection.py index 77d3d1219..4f3c10c47 100644 --- a/opensfm/test/test_pairs_selection.py +++ b/opensfm/test/test_pairs_selection.py @@ -1,4 +1,6 @@ +import argparse import os.path +from typing import Any, Dict import numpy as np import pytest @@ -9,11 +11,6 @@ NEIGHBORS = 6 -class Args: - def __init__(self, dataset) -> None: - self.dataset = dataset - - @pytest.fixture(scope="module", autouse=True) def clear_cache(): """ @@ -27,7 +24,7 @@ def clear_cache(): @pytest.fixture(scope="module", autouse=True) -def lund_path(tmpdir_factory): +def lund_path(tmpdir_factory) -> str: """ Precompute exif and features to avoid doing it for every test which is time consuming. 
@@ -39,7 +36,9 @@ def lund_path(tmpdir_factory): # Use words matcher type to support the bow retrieval test data_generation.save_config({"matcher_type": "WORDS"}, path) - args = Args(path) + args = argparse.Namespace() + args.dataset = path + data = dataset.DataSet(path) commands.extract_metadata.Command().run(data, args) commands.detect_features.Command().run(data, args) @@ -47,7 +46,9 @@ def lund_path(tmpdir_factory): return path -def match_candidates_from_metadata(data, neighbors: int=NEIGHBORS, assert_count: int=NEIGHBORS) -> None: +def match_candidates_from_metadata( + data, neighbors: int = NEIGHBORS, assert_count: int = NEIGHBORS +) -> None: assert neighbors >= assert_count ims = sorted(data.images()) @@ -74,7 +75,7 @@ def match_candidates_from_metadata(data, neighbors: int=NEIGHBORS, assert_count: assert count >= assert_count -def create_match_candidates_config(**kwargs): +def create_match_candidates_config(**kwargs) -> Dict[str, Any]: config = { "matcher_type": "BRUTEFORCE", "matching_gps_distance": 0, @@ -161,24 +162,20 @@ def test_get_gps_opk_point() -> None: def test_find_best_altitude_convergent() -> None: - origins = {"0": [2.0, 0.0, 8.0], "1": [-2.0, 0.0, 8.0]} + origins = {"0": np.array([2.0, 0.0, 8.0]), "1": np.array([-2.0, 0.0, 8.0])} directions = { "0": np.array([-1.0, 0.0, -1.0]), "1": np.array([1.0, 0.0, -1.0]), } - # pyre-fixme[6]: For 1st param expected `Dict[str, ndarray]` but got `Dict[str, - # List[float]]`. altitude = pairs_selection.find_best_altitude(origins, directions) assert np.allclose([altitude], [2.0], atol=1e-2) def test_find_best_altitude_divergent() -> None: - origins = {"0": [2.0, 0.0, 8.0], "1": [-2.0, 0.0, 8.0]} + origins = {"0": np.array([2.0, 0.0, 8.0]), "1": np.array([-2.0, 0.0, 8.0])} directions = { "0": np.array([1.0, 0.0, -1.0]), "1": np.array([-1.0, 0.0, -1.0]), } - # pyre-fixme[6]: For 1st param expected `Dict[str, ndarray]` but got `Dict[str, - # List[float]]`. 
altitude = pairs_selection.find_best_altitude(origins, directions) assert np.allclose([altitude], pairs_selection.DEFAULT_Z, atol=1e-2) diff --git a/opensfm/test/test_reconstruction_alignment.py b/opensfm/test/test_reconstruction_alignment.py index f8c31fb77..fb2f156c4 100644 --- a/opensfm/test/test_reconstruction_alignment.py +++ b/opensfm/test/test_reconstruction_alignment.py @@ -3,16 +3,16 @@ from opensfm import pygeometry -def get_shot_origin(shot): +def get_shot_origin(shot) -> np.ndarray: """Compute the origin of a shot.""" pose = pygeometry.Pose([shot.rx, shot.ry, shot.rz], [shot.tx, shot.ty, shot.tz]) return pose.get_origin() -def get_reconstruction_origin(r): +def get_reconstruction_origin(r) -> np.ndarray: """Compute the origin of a reconstruction.""" s = r.scale - pose = pygeometry.Pose([r.rx, r.ry, r.rz], [r.tx / s, r.ty / s, r.tz / s]) + pose = pygeometry.Pose(np.array([r.rx, r.ry, r.rz]), np.array([r.tx / s, r.ty / s, r.tz / s])) return pose.get_origin() diff --git a/opensfm/test/test_reconstruction_resect.py b/opensfm/test/test_reconstruction_resect.py index 8bd5f605a..3a5e1f4dc 100644 --- a/opensfm/test/test_reconstruction_resect.py +++ b/opensfm/test/test_reconstruction_resect.py @@ -1,67 +1,53 @@ import numpy as np from opensfm import config, multiview, pymap, reconstruction, types - +from typing import Tuple def test_corresponding_tracks() -> None: - t1 = {1: pymap.Observation(1.0, 1.0, 1.0, 0, 0, 0, 1, 1, 1)} - t2 = {1: pymap.Observation(1.0, 1.0, 1.0, 0, 0, 0, 2, 2, 2)} + t1 = {"1": pymap.Observation(1.0, 1.0, 1.0, 0, 0, 0, 1, 1, 1)} + t2 = {"1": pymap.Observation(1.0, 1.0, 1.0, 0, 0, 0, 2, 2, 2)} - # pyre-fixme[6]: For 1st param expected `Dict[str, Observation]` but got - # `Dict[int, Observation]`. - # pyre-fixme[6]: For 2nd param expected `Dict[str, Observation]` but got - # `Dict[int, Observation]`. 
correspondences = reconstruction.corresponding_tracks(t1, t2) assert len(correspondences) == 0 - t1 = {1: pymap.Observation(1.0, 1.0, 1.0, 0, 0, 0, 3, 3, 3)} - t2 = {2: pymap.Observation(1.0, 1.0, 1.0, 0, 0, 0, 3, 3, 3)} + t1 = {"1": pymap.Observation(1.0, 1.0, 1.0, 0, 0, 0, 3, 3, 3)} + t2 = {"2": pymap.Observation(1.0, 1.0, 1.0, 0, 0, 0, 3, 3, 3)} - # pyre-fixme[6]: For 1st param expected `Dict[str, Observation]` but got - # `Dict[int, Observation]`. - # pyre-fixme[6]: For 2nd param expected `Dict[str, Observation]` but got - # `Dict[int, Observation]`. correspondences = reconstruction.corresponding_tracks(t1, t2) assert len(correspondences) == 1 - assert correspondences[0] == (1, 2) + assert correspondences[0] == ("1", "2") t1 = { - 1: pymap.Observation(1.0, 1.0, 1.0, 0, 0, 0, 3, 3, 3), - 2: pymap.Observation(1.0, 1.0, 1.0, 0, 0, 0, 4, 4, 4), + "1": pymap.Observation(1.0, 1.0, 1.0, 0, 0, 0, 3, 3, 3), + "2": pymap.Observation(1.0, 1.0, 1.0, 0, 0, 0, 4, 4, 4), } t2 = { - 1: pymap.Observation(1.0, 1.0, 1.0, 0, 0, 0, 4, 4, 4), - 2: pymap.Observation(1.0, 1.0, 1.0, 0, 0, 0, 5, 5, 5), + "1": pymap.Observation(1.0, 1.0, 1.0, 0, 0, 0, 4, 4, 4), + "2": pymap.Observation(1.0, 1.0, 1.0, 0, 0, 0, 5, 5, 5), } - # pyre-fixme[6]: For 1st param expected `Dict[str, Observation]` but got - # `Dict[int, Observation]`. - # pyre-fixme[6]: For 2nd param expected `Dict[str, Observation]` but got - # `Dict[int, Observation]`. 
correspondences = reconstruction.corresponding_tracks(t1, t2) assert len(correspondences) == 1 - assert correspondences[0] == (2, 1) + assert correspondences[0] == ("2", "1") t1 = { - 1: pymap.Observation(1.0, 1.0, 1.0, 0, 0, 0, 5, 5, 5), - 2: pymap.Observation(1.0, 1.0, 1.0, 0, 0, 0, 6, 6, 6), + "1": pymap.Observation(1.0, 1.0, 1.0, 0, 0, 0, 5, 5, 5), + "2": pymap.Observation(1.0, 1.0, 1.0, 0, 0, 0, 6, 6, 6), } t2 = { - 3: pymap.Observation(1.0, 1.0, 1.0, 0, 0, 0, 5, 5, 5), - 4: pymap.Observation(1.0, 1.0, 1.0, 0, 0, 0, 6, 6, 6), + "3": pymap.Observation(1.0, 1.0, 1.0, 0, 0, 0, 5, 5, 5), + "4": pymap.Observation(1.0, 1.0, 1.0, 0, 0, 0, 6, 6, 6), } - # pyre-fixme[6]: For 1st param expected `Dict[str, Observation]` but got - # `Dict[int, Observation]`. - # pyre-fixme[6]: For 2nd param expected `Dict[str, Observation]` but got - # `Dict[int, Observation]`. correspondences = reconstruction.corresponding_tracks(t1, t2) correspondences.sort(key=lambda c: c[0] + c[1]) assert len(correspondences) == 2 - assert correspondences[0] == (1, 3) - assert correspondences[1] == (2, 4) + assert correspondences[0] == ("1", "3") + assert correspondences[1] == ("2", "4") -def copy_cluster_points(cluster, tracks_manager, points, noise): +def copy_cluster_points( + cluster: types.Reconstruction, tracks_manager: pymap.TracksManager, points, noise +) -> types.Reconstruction: for shot in cluster.shots: for point in tracks_manager.get_shot_observations(shot): base = points[point] @@ -71,7 +57,9 @@ def copy_cluster_points(cluster, tracks_manager, points, noise): return cluster -def split_synthetic_reconstruction(scene, tracks_manager, cluster_size, point_noise): +def split_synthetic_reconstruction( + scene, tracks_manager: pymap.TracksManager, cluster_size, point_noise +) -> Tuple[types.Reconstruction, types.Reconstruction]: cluster1 = types.Reconstruction() cluster2 = types.Reconstruction() cluster1.cameras = scene.cameras @@ -87,7 +75,7 @@ def split_synthetic_reconstruction(scene, 
tracks_manager, cluster_size, point_no return cluster1, cluster2 -def move_and_scale_cluster(cluster): +def move_and_scale_cluster(cluster: types.Reconstruction)->Tuple[types.Reconstruction, np.ndarray, float]: scale = np.random.rand(1) translation = np.random.rand(3) for point in cluster.points.values(): diff --git a/opensfm/test/test_triangulation.py b/opensfm/test/test_triangulation.py index cc6a2ddb3..a0b16a5d9 100644 --- a/opensfm/test/test_triangulation.py +++ b/opensfm/test/test_triangulation.py @@ -48,7 +48,7 @@ def test_track_triangulator_spherical() -> None: assert len(rec.points["1"].get_observations()) == 2 -def unit_vector(x: object): +def unit_vector(x: object) -> np.ndarray: return np.array(x) / np.linalg.norm(x) @@ -60,8 +60,7 @@ def test_triangulate_bearings_dlt() -> None: max_reprojection = 0.01 min_ray_angle = np.radians(2.0) res, X = pygeometry.triangulate_bearings_dlt( - # pyre-fixme[6]: For 2nd param expected `ndarray` but got `List[typing.Any]`. - [rt1, rt2], [b1, b2], max_reprojection, min_ray_angle + [rt1, rt2], np.asarray([b1, b2]), max_reprojection, min_ray_angle ) assert np.allclose(X, [0, 0, 1.0]) assert res is True @@ -75,9 +74,10 @@ def test_triangulate_bearings_midpoint() -> None: max_reprojection = 0.01 min_ray_angle = np.radians(2.0) valid_triangulation, X = pygeometry.triangulate_bearings_midpoint( - # pyre-fixme[6]: For 1st param expected `ndarray` but got `List[ndarray]`. - # pyre-fixme[6]: For 2nd param expected `ndarray` but got `List[typing.Any]`. - [o1, o2], [b1, b2], 2 * [max_reprojection], min_ray_angle + np.asarray([o1, o2]), + np.asarray([b1, b2]), + 2 * [max_reprojection], + min_ray_angle, ) assert np.allclose(X, [0, 0, 1.0]) assert valid_triangulation is True @@ -88,9 +88,9 @@ def test_triangulate_two_bearings_midpoint() -> None: b1 = unit_vector([0.0, 0, 1]) o2 = np.array([1.0, 0, 0]) b2 = unit_vector([-1.0, 0, 1]) - # pyre-fixme[6]: For 1st param expected `ndarray` but got `List[ndarray]`. 
- # pyre-fixme[6]: For 2nd param expected `ndarray` but got `List[typing.Any]`. - ok, X = pygeometry.triangulate_two_bearings_midpoint([o1, o2], [b1, b2]) + ok, X = pygeometry.triangulate_two_bearings_midpoint( + np.asarray([o1, o2]), np.asarray([b1, b2]) + ) assert ok is True assert np.allclose(X, [0, 0, 1.0]) @@ -103,7 +103,7 @@ def test_triangulate_two_bearings_midpoint_failed() -> None: # almost parralel. 1e-5 will make it triangulate again. b2 = b1 + np.array([-1e-10, 0, 0]) - # pyre-fixme[6]: For 1st param expected `ndarray` but got `List[ndarray]`. - # pyre-fixme[6]: For 2nd param expected `ndarray` but got `List[typing.Any]`. - ok, X = pygeometry.triangulate_two_bearings_midpoint([o1, o2], [b1, b2]) + ok, X = pygeometry.triangulate_two_bearings_midpoint( + np.asarray([o1, o2]), np.asarray([b1, b2]) + ) assert ok is False diff --git a/opensfm/test/test_types.py b/opensfm/test/test_types.py index 91c69e98f..739cd70b1 100644 --- a/opensfm/test/test_types.py +++ b/opensfm/test/test_types.py @@ -26,28 +26,22 @@ def test_reconstruction_class_initialization() -> None: metadata.orientation.value = 1 metadata.capture_time.value = 0.0 metadata.gps_accuracy.value = 5.0 - # pyre-fixme[8]: Attribute has type `ndarray`; used as `List[float]`. - metadata.gps_position.value = [ + metadata.gps_position.value = np.array([ 1.0815875281451939, -0.96510451436708888, 1.2042133903991235, - ] - # pyre-fixme[8]: Attribute has type `ndarray`; used as `List[float]`. - metadata.accelerometer.value = [0.1, 0.9, 0.0] + ]) + metadata.accelerometer.value = np.array([0.1, 0.9, 0.0]) metadata.compass_angle.value = 270.0 metadata.compass_accuracy.value = 15.0 metadata.sequence_key.value = "a_sequence_key" # Instantiate shots - # pyre-fixme[6]: For 1st param expected `ndarray` but got `List[float]`. - # pyre-fixme[6]: For 2nd param expected `ndarray` but got `List[float]`. 
- pose0 = pygeometry.Pose([0.0, 0.0, 0.0], [0.0, 0.0, 0.0]) + pose0 = pygeometry.Pose(np.array([0.0, 0.0, 0.0]), np.array([0.0, 0.0, 0.0])) shot0 = reconstruction.create_shot("0", camera.id, pose0) shot0.metadata = metadata - # pyre-fixme[6]: For 1st param expected `ndarray` but got `List[float]`. - # pyre-fixme[6]: For 2nd param expected `ndarray` but got `List[float]`. - pose1 = pygeometry.Pose([0.0, 0.0, 0.0], [-1.0, 0.0, 0.0]) + pose1 = pygeometry.Pose(np.array([0.0, 0.0, 0.0]), np.array([-1.0, 0.0, 0.0])) shot1 = reconstruction.create_shot("1", camera.id, pose1) shot1.metadata = metadata diff --git a/opensfm/test/test_undistort.py b/opensfm/test/test_undistort.py index ae516c738..3ea656967 100644 --- a/opensfm/test/test_undistort.py +++ b/opensfm/test/test_undistort.py @@ -11,9 +11,7 @@ def test_perspective_views_of_a_panorama() -> None: camera.width = 8000 camera.height = 4000 reconstruction.add_camera(camera) - # pyre-fixme[6]: For 1st param expected `ndarray` but got `List[int]`. - # pyre-fixme[6]: For 2nd param expected `ndarray` but got `List[int]`. - pose = pygeometry.Pose([1, 2, 3], [4, 5, 6]) + pose = pygeometry.Pose(np.array([1, 2, 3]), np.array([4, 5, 6])) spherical_shot = reconstruction.create_shot("shot1", camera.id, pose=pose) urec = types.Reconstruction() diff --git a/opensfm/test/test_vlad.py b/opensfm/test/test_vlad.py index bbf5b278e..c406195c0 100644 --- a/opensfm/test/test_vlad.py +++ b/opensfm/test/test_vlad.py @@ -25,8 +25,7 @@ def test_vlad_distances_order() -> None: def test_signed_square_root_normalize() -> None: - v = [1, 0.01] - # pyre-fixme[6]: For 1st param expected `ndarray` but got `List[float]`. + v = np.array([1, 0.01]) res = vlad.signed_square_root_normalize(v) assert pytest.approx(np.linalg.norm(res), 1e-6) == 1 @@ -43,7 +42,6 @@ def test_unnormalized_vlad() -> None: ) res = vlad.unnormalized_vlad(features, centers) - - # pyre-fixme[16]: Optional type has no attribute `__getitem__`. 
+ assert res is not None assert res[0] == res[1] == res[2] == 0 assert pytest.approx(res[3], 1e-6) == 0.1 diff --git a/opensfm/video.py b/opensfm/video.py index 49f5c17d1..17f1acbdd 100644 --- a/opensfm/video.py +++ b/opensfm/video.py @@ -25,9 +25,10 @@ def video_orientation(video_file) -> int: orientation = 3 elif rotation == 270: orientation = 8 + else: + raise RuntimeError(f"rotation {rotation} has no valid orientation!") else: orientation = 1 - # pyre-fixme[61]: `orientation` is undefined, or not always defined. return orientation From fdea40fa373ece313571abde85c8c86d1117cfab Mon Sep 17 00:00:00 2001 From: Pau Gargallo Date: Fri, 4 Mar 2022 07:15:47 -0800 Subject: [PATCH 36/81] Make config a dataclass Summary: For now, only the definition changes. The actual config object that the Dataset contains is still a Dict[str, Any] Reviewed By: fabianschenk Differential Revision: D34641779 fbshipit-source-id: fd0edbf084066ed638dc4e46725603c0b12233d4 --- opensfm/config.py | 546 ++++++++++++++++++++++++++++++---------------- 1 file changed, 359 insertions(+), 187 deletions(-) diff --git a/opensfm/config.py b/opensfm/config.py index 6be7be9ef..956a3c419 100644 --- a/opensfm/config.py +++ b/opensfm/config.py @@ -1,201 +1,373 @@ import os +from dataclasses import dataclass, asdict import yaml -default_config_yaml = """ -# Metadata -use_exif_size: yes -unknown_camera_models_are_different: no # Treat images from unknown camera models as comming from different cameras -default_focal_prior: 0.85 - -# Params for features -feature_type: HAHOG # Feature type (AKAZE, SURF, SIFT, HAHOG, ORB) -feature_root: 1 # If 1, apply square root mapping to features -feature_min_frames: 4000 # If fewer frames are detected, sift_peak_threshold/surf_hessian_threshold is reduced. -feature_min_frames_panorama: 16000 # Same as above but for panorama images -feature_process_size: 2048 # Resize the image if its size is larger than specified. 
Set to -1 for original size -feature_process_size_panorama: 4096 # Same as above but for panorama images -feature_use_adaptive_suppression: no -features_bake_segmentation: no # Bake segmentation info (class and instance) in the feature data. Thus it is done once for all at extraction time. - -# Params for SIFT -sift_peak_threshold: 0.1 # Smaller value -> more features -sift_edge_threshold: 10 # See OpenCV doc - -# Params for SURF -surf_hessian_threshold: 3000 # Smaller value -> more features -surf_n_octaves: 4 # See OpenCV doc -surf_n_octavelayers: 2 # See OpenCV doc -surf_upright: 0 # See OpenCV doc - -# Params for AKAZE (See details in lib/src/third_party/akaze/AKAZEConfig.h) -akaze_omax: 4 # Maximum octave evolution of the image 2^sigma (coarsest scale sigma units) -akaze_dthreshold: 0.001 # Detector response threshold to accept point -akaze_descriptor: MSURF # Feature type -akaze_descriptor_size: 0 # Size of the descriptor in bits. 0->Full size -akaze_descriptor_channels: 3 # Number of feature channels (1,2,3) -akaze_kcontrast_percentile: 0.7 -akaze_use_isotropic_diffusion: no - -# Params for HAHOG -hahog_peak_threshold: 0.00001 -hahog_edge_threshold: 10 -hahog_normalize_to_uchar: yes - -# Params for general matching -lowes_ratio: 0.8 # Ratio test for matches -matcher_type: FLANN # FLANN, BRUTEFORCE, or WORDS -symmetric_matching: yes # Match symmetricly or one-way - -# Params for FLANN matching -flann_algorithm: KMEANS # Algorithm type (KMEANS, KDTREE) -flann_branching: 8 # See OpenCV doc -flann_iterations: 10 # See OpenCV doc -flann_tree: 8 # See OpenCV doc -flann_checks: 20 # Smaller -> Faster (but might lose good matches) - -# Params for BoW matching -bow_file: bow_hahog_root_uchar_10000.npz -bow_words_to_match: 50 # Number of words to explore per feature. -bow_num_checks: 20 # Number of matching features to check. 
-bow_matcher_type: FLANN # Matcher type to assign words to features - -# Params for VLAD matching -vlad_file: bow_hahog_root_uchar_64.npz - -# Params for guided matching -guided_spanning_trees: 5 # Number of randomized spanning-trees to samples over the tracks-graph -guided_spanning_trees_random: 0.5 # Random ratio higher bound edges are multiplied with -guided_matching_threshold: 0.006 # Threshold for epipolar distance for accepting a match in radians -guided_min_length_initial: 3 # Minimum track length for initial triangulation -guided_min_length_final: 3 # Minimum track length for final triangulation -guided_extend_threshold: 0.002 # Threshold of reprojection for extending a track within a new image (in radians) -guided_extend_image_neighbors: 50 # Number of images considered as neighbors of another one -guided_extend_feature_neighbors: 10 # Maximum number of reprojected neighbors (in the tracks-graph) to check when extending a track within a new image - -# Params for matching -matching_gps_distance: 150 # Maximum gps distance between two images for matching -matching_gps_neighbors: 0 # Number of images to match selected by GPS distance. Set to 0 to use no limit (or disable if matching_gps_distance is also 0) -matching_time_neighbors: 0 # Number of images to match selected by time taken. Set to 0 to disable -matching_order_neighbors: 0 # Number of images to match selected by image name. Set to 0 to disable -matching_bow_neighbors: 0 # Number of images to match selected by BoW distance. Set to 0 to disable -matching_bow_gps_distance: 0 # Maximum GPS distance for preempting images before using selection by BoW distance. Set to 0 to disable -matching_bow_gps_neighbors: 0 # Number of images (selected by GPS distance) to preempt before using selection by BoW distance. 
Set to 0 to use no limit (or disable if matching_bow_gps_distance is also 0) -matching_bow_other_cameras: False # If True, BoW image selection will use N neighbors from the same camera + N neighbors from any different camera. If False, the selection will take the nearest neighbors from all cameras. -matching_vlad_neighbors: 0 # Number of images to match selected by VLAD distance. Set to 0 to disable -matching_vlad_gps_distance: 0 # Maximum GPS distance for preempting images before using selection by VLAD distance. Set to 0 to disable -matching_vlad_gps_neighbors: 0 # Number of images (selected by GPS distance) to preempt before using selection by VLAD distance. Set to 0 to use no limit (or disable if matching_vlad_gps_distance is also 0) -matching_vlad_other_cameras: False # If True, VLAD image selection will use N neighbors from the same camera + N neighbors from any different camera. If False, the selection will take the nearest neighbors from all cameras. -matching_graph_rounds: 0 # Number of rounds to run when running triangulation-based pair selection -matching_use_filters: False # If True, removes static matches using ad-hoc heuristics -matching_use_segmentation: no # Use segmentation information (if available) to improve matching - -# Params for geometric estimation -robust_matching_threshold: 0.004 # Outlier threshold for fundamental matrix estimation as portion of image width -robust_matching_calib_threshold: 0.004 # Outlier threshold for essential matrix estimation during matching in radians -robust_matching_min_match: 20 # Minimum number of matches to accept matches between two images -five_point_algo_threshold: 0.004 # Outlier threshold for essential matrix estimation during incremental reconstruction in radians -five_point_algo_min_inliers: 20 # Minimum number of inliers for considering a two view reconstruction valid -five_point_refine_match_iterations: 10 # Number of LM iterations to run when refining relative pose during matching 
-five_point_refine_rec_iterations: 1000 # Number of LM iterations to run when refining relative pose during reconstruction -five_point_reversal_check: False # Check for Necker reversal ambiguities. Useful for long focal length with long distance capture (aerial manned) -five_point_reversal_ratio: 0.95 # Ratio of triangulated points non-reversed/reversed when checking for Necker reversal ambiguities -triangulation_threshold: 0.006 # Outlier threshold for accepting a triangulated point in radians -triangulation_min_ray_angle: 1.0 # Minimum angle between views to accept a triangulated point -triangulation_type: FULL # Triangulation type : either considering all rays (FULL), or sing a RANSAC variant (ROBUST) -triangulation_refinement_iterations: 10 # Number of LM iterations to run when refining a point -resection_threshold: 0.004 # Outlier threshold for resection in radians -resection_min_inliers: 10 # Minimum number of resection inliers to accept it - -# Params for track creation -min_track_length: 2 # Minimum number of features/images per track - -# Params for bundle adjustment -loss_function: SoftLOneLoss # Loss function for the ceres problem (see: http://ceres-solver.org/modeling.html#lossfunction) -loss_function_threshold: 1 # Threshold on the squared residuals. Usually cost is quadratic for smaller residuals and sub-quadratic above. 
-reprojection_error_sd: 0.004 # The standard deviation of the reprojection error -exif_focal_sd: 0.01 # The standard deviation of the exif focal length in log-scale -principal_point_sd: 0.01 # The standard deviation of the principal point coordinates -radial_distortion_k1_sd: 0.01 # The standard deviation of the first radial distortion parameter -radial_distortion_k2_sd: 0.01 # The standard deviation of the second radial distortion parameter -radial_distortion_k3_sd: 0.01 # The standard deviation of the third radial distortion parameter -radial_distortion_k4_sd: 0.01 # The standard deviation of the fourth radial distortion parameter -tangential_distortion_p1_sd: 0.01 # The standard deviation of the first tangential distortion parameter -tangential_distortion_p2_sd: 0.01 # The standard deviation of the second tangential distortion parameter -gcp_horizontal_sd: 0.01 # The default horizontal standard deviation of the GCPs (in meters) -gcp_vertical_sd: 0.1 # The default vertical standard deviation of the GCPs (in meters) -rig_translation_sd: 0.1 # The standard deviation of the rig translation -rig_rotation_sd: 0.1 # The standard deviation of the rig rotation -bundle_outlier_filtering_type: FIXED # Type of threshold for filtering outlier : either fixed value (FIXED) or based on actual distribution (AUTO) -bundle_outlier_auto_ratio: 3.0 # For AUTO filtering type, projections with larger reprojection than ratio-times-mean, are removed -bundle_outlier_fixed_threshold: 0.006 # For FIXED filtering type, projections with larger reprojection error after bundle adjustment are removed -optimize_camera_parameters: yes # Optimize internal camera parameters during bundle -bundle_max_iterations: 100 # Maximum optimizer iterations. 
- -retriangulation: yes # Retriangulate all points from time to time -retriangulation_ratio: 1.2 # Retriangulate when the number of points grows by this ratio -bundle_analytic_derivatives: yes # Use analytic derivatives or auto-differentiated ones during bundle adjustment -bundle_interval: 999999 # Bundle after adding 'bundle_interval' cameras -bundle_new_points_ratio: 1.2 # Bundle when the number of points grows by this ratio -local_bundle_radius: 3 # Max image graph distance for images to be included in local bundle adjustment -local_bundle_min_common_points: 20 # Minimum number of common points betwenn images to be considered neighbors -local_bundle_max_shots: 30 # Max number of shots to optimize during local bundle adjustment - -save_partial_reconstructions: no # Save reconstructions at every iteration - -# Params for GPS alignment -use_altitude_tag: no # Use or ignore EXIF altitude tag -align_method: auto # orientation_prior or naive -align_orientation_prior: horizontal # horizontal, vertical or no_roll -bundle_use_gps: yes # Enforce GPS position in bundle adjustment -bundle_use_gcp: no # Enforce Ground Control Point position in bundle adjustment -bundle_compensate_gps_bias: no # Compensate GPS with a per-camera similarity transform - - -# Params for rigs -rig_calibration_subset_size: 15 # Number of rig instances to use when calibration rigs -rig_calibration_completeness: 0.85 # Ratio of reconstructed images needed to consider a reconstruction for rig calibration -rig_calibration_max_rounds: 10 # Number of SfM tentatives to run until we get a satisfying reconstruction - -# Params for image undistortion -undistorted_image_format: jpg # Format in which to save the undistorted images -undistorted_image_max_size: 100000 # Max width and height of the undistorted image - -# Params for depth estimation -depthmap_method: PATCH_MATCH_SAMPLE # Raw depthmap computation algorithm (PATCH_MATCH, BRUTE_FORCE, PATCH_MATCH_SAMPLE) -depthmap_resolution: 640 # Resolution of the 
depth maps -depthmap_num_neighbors: 10 # Number of neighboring views -depthmap_num_matching_views: 6 # Number of neighboring views used for each depthmaps -depthmap_min_depth: 0 # Minimum depth in meters. Set to 0 to auto-infer from the reconstruction. -depthmap_max_depth: 0 # Maximum depth in meters. Set to 0 to auto-infer from the reconstruction. -depthmap_patchmatch_iterations: 3 # Number of PatchMatch iterations to run -depthmap_patch_size: 7 # Size of the correlation patch -depthmap_min_patch_sd: 1.0 # Patches with lower standard deviation are ignored -depthmap_min_correlation_score: 0.1 # Minimum correlation score to accept a depth value -depthmap_same_depth_threshold: 0.01 # Threshold to measure depth closeness -depthmap_min_consistent_views: 3 # Min number of views that should reconstruct a point for it to be valid -depthmap_save_debug_files: no # Save debug files with partial reconstruction results - -# Other params -processes: 1 # Number of threads to use -read_processes: 4 # When processes > 1, number of threads used for reading images - -# Params for submodel split and merge -submodel_size: 80 # Average number of images per submodel -submodel_overlap: 30.0 # Radius of the overlapping region between submodels -submodels_relpath: "submodels" # Relative path to the submodels directory -submodel_relpath_template: "submodels/submodel_%04d" # Template to generate the relative path to a submodel directory -submodel_images_relpath_template: "submodels/submodel_%04d/images" # Template to generate the relative path to a submodel images directory -""" + +@dataclass +class OpenSfMConfig: + ################################## + # Params for metadata + ################################## + use_exif_size: bool = True + # Treat images from unknown camera models as comming from different cameras + unknown_camera_models_are_different: bool = False + default_focal_prior: float = 0.85 + + ################################## + # Params for features + 
################################## + # Feature type (AKAZE, SURF, SIFT, HAHOG, ORB) + feature_type: str = "HAHOG" + # If true, apply square root mapping to features + feature_root: bool = True + # If fewer frames are detected, sift_peak_threshold/surf_hessian_threshold is reduced. + feature_min_frames: int = 4000 + # Same as above but for panorama images + feature_min_frames_panorama: int = 16000 + # Resize the image if its size is larger than specified. Set to -1 for original size + feature_process_size: int = 2048 + # Same as above but for panorama images + feature_process_size_panorama: int = 4096 + feature_use_adaptive_suppression: bool = False + # Bake segmentation info (class and instance) in the feature data. Thus it is done once for all at extraction time. + features_bake_segmentation: bool = False + + ################################## + # Params for SIFT + ################################## + # Smaller value -> more features + sift_peak_threshold: float = 0.1 + # See OpenCV doc + sift_edge_threshold: int = 10 + + ################################## + # Params for SURF + ################################## + # Smaller value -> more features + surf_hessian_threshold: float = 3000 + # See OpenCV doc + surf_n_octaves: int = 4 + # See OpenCV doc + surf_n_octavelayers: int = 2 + # See OpenCV doc + surf_upright: int = 0 + + ################################## + # Params for AKAZE (See details in lib/src/third_party/akaze/AKAZEConfig.h) + ################################## + # Maximum octave evolution of the image 2^sigma (coarsest scale sigma units) + akaze_omax: int = 4 + # Detector response threshold to accept point + akaze_dthreshold: float = 0.001 + # Feature type + akaze_descriptor: str = "MSURF" + # Size of the descriptor in bits. 
0->Full size + akaze_descriptor_size: int = 0 + # Number of feature channels (1,2,3) + akaze_descriptor_channels: int = 3 + akaze_kcontrast_percentile: float = 0.7 + akaze_use_isotropic_diffusion: bool = False + + ################################## + # Params for HAHOG + ################################## + hahog_peak_threshold: float = 0.00001 + hahog_edge_threshold: float = 10 + hahog_normalize_to_uchar: bool = True + + ################################## + # Params for general matching + ################################## + # Ratio test for matches + lowes_ratio: float = 0.8 + # FLANN, BRUTEFORCE, or WORDS + matcher_type: str = "FLANN" + # Match symmetricly or one-way + symmetric_matching: bool = True + + ################################## + # Params for FLANN matching + ################################## + # Algorithm type (KMEANS, KDTREE) + flann_algorithm: str = "KMEANS" + # See OpenCV doc + flann_branching: int = 8 + # See OpenCV doc + flann_iterations: int = 10 + # See OpenCV doc + flann_tree: int = 8 + # Smaller -> Faster (but might lose good matches) + flann_checks: int = 20 + + ################################## + # Params for BoW matching + ################################## + bow_file: str = "bow_hahog_root_uchar_10000.npz" + # Number of words to explore per feature. + bow_words_to_match: int = 50 + # Number of matching features to check. 
+ bow_num_checks: int = 20 + # Matcher type to assign words to features + bow_matcher_type: str = "FLANN" + + ################################## + # Params for VLAD matching + ################################## + vlad_file: str = "bow_hahog_root_uchar_64.npz" + + ################################## + # Params for guided matching + ################################## + # Number of randomized spanning-trees to samples over the tracks-graph + guided_spanning_trees: int = 5 + # Random ratio higher bound edges are multiplied with + guided_spanning_trees_random: float = 0.5 + # Threshold for epipolar distance for accepting a match in radians + guided_matching_threshold: float = 0.006 + # Minimum track length for initial triangulation + guided_min_length_initial: int = 3 + # Minimum track length for final triangulation + guided_min_length_final: int = 3 + # Threshold of reprojection for extending a track within a new image (in radians) + guided_extend_threshold: float = 0.002 + # Number of images considered as neighbors of another one + guided_extend_image_neighbors: int = 50 + # Maximum number of reprojected neighbors (in the tracks-graph) to check when extending a track within a new image + guided_extend_feature_neighbors: int = 10 + + ################################## + # Params for matching + ################################## + # Maximum gps distance between two images for matching + matching_gps_distance: float = 150 + # Number of images to match selected by GPS distance. Set to 0 to use no limit (or disable if matching_gps_distance is also 0) + matching_gps_neighbors: int = 0 + # Number of images to match selected by time taken. Set to 0 to disable + matching_time_neighbors: int = 0 + # Number of images to match selected by image name. Set to 0 to disable + matching_order_neighbors: int = 0 + # Number of images to match selected by BoW distance. 
Set to 0 to disable + matching_bow_neighbors: int = 0 + # Maximum GPS distance for preempting images before using selection by BoW distance. Set to 0 to disable + matching_bow_gps_distance: float = 0 + # Number of images (selected by GPS distance) to preempt before using selection by BoW distance. Set to 0 to use no limit (or disable if matching_bow_gps_distance is also 0) + matching_bow_gps_neighbors: int = 0 + # If True, BoW image selection will use N neighbors from the same camera + N neighbors from any different camera. If False, the selection will take the nearest neighbors from all cameras. + matching_bow_other_cameras: bool = False + # Number of images to match selected by VLAD distance. Set to 0 to disable + matching_vlad_neighbors: int = 0 + # Maximum GPS distance for preempting images before using selection by VLAD distance. Set to 0 to disable + matching_vlad_gps_distance: float = 0 + # Number of images (selected by GPS distance) to preempt before using selection by VLAD distance. Set to 0 to use no limit (or disable if matching_vlad_gps_distance is also 0) + matching_vlad_gps_neighbors: int = 0 + # If True, VLAD image selection will use N neighbors from the same camera + N neighbors from any different camera. If False, the selection will take the nearest neighbors from all cameras. 
+ matching_vlad_other_cameras: bool = False + # Number of rounds to run when running triangulation-based pair selection + matching_graph_rounds: int = 0 + # If True, removes static matches using ad-hoc heuristics + matching_use_filters: bool = False + # Use segmentation information (if available) to improve matching + matching_use_segmentation: bool = False + + ################################## + # Params for geometric estimation + ################################## + # Outlier threshold for fundamental matrix estimation as portion of image width + robust_matching_threshold: float = 0.004 + # Outlier threshold for essential matrix estimation during matching in radians + robust_matching_calib_threshold: float = 0.004 + # Minimum number of matches to accept matches between two images + robust_matching_min_match: int = 20 + # Outlier threshold for essential matrix estimation during incremental reconstruction in radians + five_point_algo_threshold: float = 0.004 + # Minimum number of inliers for considering a two view reconstruction valid + five_point_algo_min_inliers: int = 20 + # Number of LM iterations to run when refining relative pose during matching + five_point_refine_match_iterations: int = 10 + # Number of LM iterations to run when refining relative pose during reconstruction + five_point_refine_rec_iterations: int = 1000 + # Check for Necker reversal ambiguities. 
Useful for long focal length with long distance capture (aerial manned) + five_point_reversal_check: bool = False + # Ratio of triangulated points non-reversed/reversed when checking for Necker reversal ambiguities + five_point_reversal_ratio: float = 0.95 + # Outlier threshold for accepting a triangulated point in radians + triangulation_threshold: float = 0.006 + # Minimum angle between views to accept a triangulated point + triangulation_min_ray_angle: float = 1.0 + # Triangulation type : either considering all rays (FULL), or using a RANSAC variant (ROBUST) + triangulation_type: str = "FULL" + # Number of LM iterations to run when refining a point + triangulation_refinement_iterations: int = 10 + # Outlier threshold for resection in radians + resection_threshold: float = 0.004 + # Minimum number of resection inliers to accept it + resection_min_inliers: int = 10 + + ################################## + # Params for track creation + ################################## + # Minimum number of features/images per track + min_track_length: int = 2 + + ################################## + # Params for bundle adjustment + ################################## + # Loss function for the ceres problem (see: http://ceres-solver.org/modeling.html#lossfunction) + loss_function: str = "SoftLOneLoss" + # Threshold on the squared residuals. Usually cost is quadratic for smaller residuals and sub-quadratic above.
+ loss_function_threshold: float = 1 + # The standard deviation of the reprojection error + reprojection_error_sd: float = 0.004 + # The standard deviation of the exif focal length in log-scale + exif_focal_sd: float = 0.01 + # The standard deviation of the principal point coordinates + principal_point_sd: float = 0.01 + # The standard deviation of the first radial distortion parameter + radial_distortion_k1_sd: float = 0.01 + # The standard deviation of the second radial distortion parameter + radial_distortion_k2_sd: float = 0.01 + # The standard deviation of the third radial distortion parameter + radial_distortion_k3_sd: float = 0.01 + # The standard deviation of the fourth radial distortion parameter + radial_distortion_k4_sd: float = 0.01 + # The standard deviation of the first tangential distortion parameter + tangential_distortion_p1_sd: float = 0.01 + # The standard deviation of the second tangential distortion parameter + tangential_distortion_p2_sd: float = 0.01 + # The default horizontal standard deviation of the GCPs (in meters) + gcp_horizontal_sd: float = 0.01 + # The default vertical standard deviation of the GCPs (in meters) + gcp_vertical_sd: float = 0.1 + # The standard deviation of the rig translation + rig_translation_sd: float = 0.1 + # The standard deviation of the rig rotation + rig_rotation_sd: float = 0.1 + # Type of threshold for filtering outlier : either fixed value (FIXED) or based on actual distribution (AUTO) + bundle_outlier_filtering_type: str = "FIXED" + # For AUTO filtering type, projections with larger reprojection than ratio-times-mean, are removed + bundle_outlier_auto_ratio: float = 3.0 + # For FIXED filtering type, projections with larger reprojection error after bundle adjustment are removed + bundle_outlier_fixed_threshold: float = 0.006 + # Optimize internal camera parameters during bundle + optimize_camera_parameters: bool = True + # Maximum optimizer iterations. 
+ bundle_max_iterations: int = 100 + + # Retriangulate all points from time to time + retriangulation: bool = True + # Retriangulate when the number of points grows by this ratio + retriangulation_ratio: float = 1.2 + # Use analytic derivatives or auto-differentiated ones during bundle adjustment + bundle_analytic_derivatives: bool = True + # Bundle after adding 'bundle_interval' cameras + bundle_interval: int = 999999 + # Bundle when the number of points grows by this ratio + bundle_new_points_ratio: float = 1.2 + # Max image graph distance for images to be included in local bundle adjustment + local_bundle_radius: int = 3 + # Minimum number of common points betwenn images to be considered neighbors + local_bundle_min_common_points: int = 20 + # Max number of shots to optimize during local bundle adjustment + local_bundle_max_shots: int = 30 + + # Save reconstructions at every iteration + save_partial_reconstructions: bool = False + + ################################## + # Params for GPS alignment + ################################## + # Use or ignore EXIF altitude tag + use_altitude_tag: bool = False + # orientation_prior or naive + align_method: str = "auto" + # horizontal, vertical or no_roll + align_orientation_prior: str = "horizontal" + # Enforce GPS position in bundle adjustment + bundle_use_gps: bool = True + # Enforce Ground Control Point position in bundle adjustment + bundle_use_gcp: bool = False + # Compensate GPS with a per-camera similarity transform + bundle_compensate_gps_bias: bool = False + + ################################## + # Params for rigs + ################################## + # Number of rig instances to use when calibration rigs + rig_calibration_subset_size: int = 15 + # Ratio of reconstructed images needed to consider a reconstruction for rig calibration + rig_calibration_completeness: float = 0.85 + # Number of SfM tentatives to run until we get a satisfying reconstruction + rig_calibration_max_rounds: int = 10 + + 
################################## + # Params for image undistortion + ################################## + # Format in which to save the undistorted images + undistorted_image_format: str = "jpg" + # Max width and height of the undistorted image + undistorted_image_max_size: int = 100000 + + ################################## + # Params for depth estimation + ################################## + # Raw depthmap computation algorithm (PATCH_MATCH, BRUTE_FORCE, PATCH_MATCH_SAMPLE) + depthmap_method: str = "PATCH_MATCH_SAMPLE" + # Resolution of the depth maps + depthmap_resolution: int = 640 + # Number of neighboring views + depthmap_num_neighbors: int = 10 + # Number of neighboring views used for each depthmaps + depthmap_num_matching_views: int = 6 + # Minimum depth in meters. Set to 0 to auto-infer from the reconstruction. + depthmap_min_depth: float = 0 + # Maximum depth in meters. Set to 0 to auto-infer from the reconstruction. + depthmap_max_depth: float = 0 + # Number of PatchMatch iterations to run + depthmap_patchmatch_iterations: int = 3 + # Size of the correlation patch + depthmap_patch_size: int = 7 + # Patches with lower standard deviation are ignored + depthmap_min_patch_sd: float = 1.0 + # Minimum correlation score to accept a depth value + depthmap_min_correlation_score: float = 0.1 + # Threshold to measure depth closeness + depthmap_same_depth_threshold: float = 0.01 + # Min number of views that should reconstruct a point for it to be valid + depthmap_min_consistent_views: int = 3 + # Save debug files with partial reconstruction results + depthmap_save_debug_files: bool = False + + ################################## + # Params for multi-processing/threading + ################################## + # Number of threads to use + processes: int = 1 + # When processes > 1, number of threads used for reading images + read_processes: int = 4 + + ################################## + # Params for submodel split and merge + ################################## + # 
Average number of images per submodel + submodel_size: int = 80 + # Radius of the overlapping region between submodels + submodel_overlap: float = 30.0 + # Relative path to the submodels directory + submodels_relpath: str = "submodels" + # Template to generate the relative path to a submodel directory + submodel_relpath_template: str = "submodels/submodel_%04d" + # Template to generate the relative path to a submodel images directory + submodel_images_relpath_template: str = "submodels/submodel_%04d/images" def default_config(): """Return default configuration""" - return yaml.safe_load(default_config_yaml) + return asdict(OpenSfMConfig()) def load_config(filepath): - """DEPRECATED: Load config from a config.yaml filepath""" if not os.path.isfile(filepath): return default_config() From 536b6e1414c8a93f0815dbae85d03749daaa5432 Mon Sep 17 00:00:00 2001 From: Someone Serge Date: Thu, 10 Mar 2022 05:57:53 -0800 Subject: [PATCH 37/81] CMake: target_link_libraries() the native exported targets of Eigen and Ceres (#872) Summary: - [x] Upstream documentation suggests eigen be used through the `Eigen3::Eigen` target: https://eigen.tuxfamily.org/dox/TopicCMakeGuide.html - [x] Eliminates the need for custom FindEigen.cmake and hard-coded paths - [x] Makes it easier to package opensfm for distributions - [x] Closes: https://github.com/mapillary/OpenSfM/issues/802 - [x] Checked `docker build -f Dockerfile.ceres2 .` and `nix build` - [ ] `target_link_libraries` vs `target_include_directories` propagation rules: cf.
bottom of the section https://cmake.org/cmake/help/latest/manual/cmake-buildsystem.7.html#transitive-usage-requirements I needed these changes in the process of packaging OpenSfM for nixpkgs, which significantly simplifies the building and deployment compared to plain docker: https://github.com/NixOS/nixpkgs/pull/152957 You can see the build status, including the tests (currently failing to build for macos) at: https://github.com/SomeoneSerge/pkgs/actions I'd hope to follow up with a few more changes that I found I needed, but I'd need your advice on how to go about them: - Discovering system-distributed gtest? https://github.com/mapillary/OpenSfM/issues/870 - Discovering system-distributed pybind? https://github.com/mapillary/OpenSfM/issues/871 Thank you! Pull Request resolved: https://github.com/mapillary/OpenSfM/pull/872 Reviewed By: YanNoun Differential Revision: D34521618 Pulled By: fabianschenk fbshipit-source-id: 462b5c63f3c32c4d5fb9f105b7a52dd3b12635e9 --- opensfm/src/CMakeLists.txt | 14 ++- opensfm/src/bundle/CMakeLists.txt | 9 +- opensfm/src/cmake/FindCeres.cmake | 60 ---------- opensfm/src/cmake/FindEigen.cmake | 160 -------------------------- opensfm/src/foundation/CMakeLists.txt | 2 +- opensfm/src/geo/CMakeLists.txt | 3 +- opensfm/src/geometry/CMakeLists.txt | 20 ++-- opensfm/src/map/CMakeLists.txt | 2 +- opensfm/src/sfm/CMakeLists.txt | 4 +- 9 files changed, 37 insertions(+), 237 deletions(-) delete mode 100644 opensfm/src/cmake/FindCeres.cmake delete mode 100644 opensfm/src/cmake/FindEigen.cmake diff --git a/opensfm/src/CMakeLists.txt b/opensfm/src/CMakeLists.txt index 8f67492e9..c85aa6fb4 100644 --- a/opensfm/src/CMakeLists.txt +++ b/opensfm/src/CMakeLists.txt @@ -54,11 +54,21 @@ endif() find_package(LAPACK) find_package(SuiteSparse) -find_package(Eigen REQUIRED) -find_package(Ceres REQUIRED) +find_package(Eigen3 REQUIRED) +find_package(Ceres) find_package(Gflags REQUIRED) find_package(Glog REQUIRED) +# Ceres2 exposes Ceres::ceres target. 
+# Ceres1 exposes just ceres. +# - if there's no such target, cmake will convert it into -lceres +# and the linker will fail +if(Ceres_ceres_FOUND) + set(CERES_LIBRARIES Ceres::ceres) +else() + set(CERES_LIBRARIES ceres) +endif() + find_package(OpenCV) # OpenCV's OpenCVConfig will enforce imgcodecs for < 3.0 # (even if OPTIONAL_COMPONENTS) so we remove it as we don't need it diff --git a/opensfm/src/bundle/CMakeLists.txt b/opensfm/src/bundle/CMakeLists.txt index 8d8c272c5..994e871da 100644 --- a/opensfm/src/bundle/CMakeLists.txt +++ b/opensfm/src/bundle/CMakeLists.txt @@ -22,13 +22,14 @@ target_link_libraries(bundle ${SUITESPARSE_LIBRARIES} foundation ) + if (LAPACK_FOUND) target_include_directories(bundle PRIVATE ${LAPACK_INCLUDE_DIRS}) endif() if (SUITESPARSE_FOUND) target_include_directories(bundle PRIVATE ${SUITESPARSE_INCLUDE_DIRS}) endif() -target_include_directories(bundle PRIVATE ${CERES_INCLUDE_DIR} ${CMAKE_SOURCE_DIR}) +target_include_directories(bundle PRIVATE ${CMAKE_SOURCE_DIR}) if (OPENSFM_BUILD_TESTS) set(BUNDLE_TEST_FILES @@ -36,11 +37,15 @@ if (OPENSFM_BUILD_TESTS) test/bundle_data_test.cc ) add_executable(bundle_test ${BUNDLE_TEST_FILES}) - target_include_directories(bundle_test PRIVATE ${CMAKE_SOURCE_DIR} ${EIGEN_INCLUDE_DIRS} ${GMOCK_INCLUDE_DIRS}) + target_include_directories(bundle_test + PRIVATE + ${CMAKE_SOURCE_DIR} + ${GMOCK_INCLUDE_DIRS}) target_link_libraries(bundle_test PUBLIC bundle geometry + Eigen3::Eigen ${TEST_MAIN}) add_test(bundle_test bundle_test) endif() diff --git a/opensfm/src/cmake/FindCeres.cmake b/opensfm/src/cmake/FindCeres.cmake deleted file mode 100644 index 74e2a7973..000000000 --- a/opensfm/src/cmake/FindCeres.cmake +++ /dev/null @@ -1,60 +0,0 @@ -# - Find Ceres library -# Find the native Ceres includes and library -# This module defines -# CERES_INCLUDE_DIRS, where to find ceres.h, Set when -# CERES_INCLUDE_DIR is found. -# CERES_LIBRARIES, libraries to link against to use Ceres. 
-# CERES_ROOT_DIR, The base directory to search for Ceres. -# This can also be an environment variable. -# CERES_FOUND, If false, do not try to use Ceres. -# -# also defined, but not for general use are -# CERES_LIBRARY, where to find the Ceres library. - -# If CERES_ROOT_DIR was defined in the environment, use it. -IF(NOT CERES_ROOT_DIR AND NOT $ENV{CERES_ROOT_DIR} STREQUAL "") - SET(CERES_ROOT_DIR $ENV{CERES_ROOT_DIR}) -ENDIF() - -SET(_ceres_SEARCH_DIRS - ${CERES_ROOT_DIR} - /usr/local - /sw # Fink - /opt/local # DarwinPorts - /opt/csw # Blastwave - /opt/lib/ceres -) - -FIND_PATH(CERES_INCLUDE_DIR - NAMES - ceres/ceres.h - HINTS - ${_ceres_SEARCH_DIRS} - PATH_SUFFIXES - include -) - -FIND_LIBRARY(CERES_LIBRARY - NAMES - ceres - HINTS - ${_ceres_SEARCH_DIRS} - PATH_SUFFIXES - lib64 lib - ) - -# handle the QUIETLY and REQUIRED arguments and set CERES_FOUND to TRUE if -# all listed variables are TRUE -INCLUDE(FindPackageHandleStandardArgs) -FIND_PACKAGE_HANDLE_STANDARD_ARGS(ceres DEFAULT_MSG - CERES_LIBRARY CERES_INCLUDE_DIR) - -IF(CERES_FOUND) - SET(CERES_LIBRARIES ${CERES_LIBRARY}) - SET(CERES_INCLUDE_DIRS ${CERES_INCLUDE_DIR}) -ENDIF(CERES_FOUND) - -MARK_AS_ADVANCED( - CERES_INCLUDE_DIR - CERES_LIBRARY -) diff --git a/opensfm/src/cmake/FindEigen.cmake b/opensfm/src/cmake/FindEigen.cmake deleted file mode 100644 index 2cd3e12c0..000000000 --- a/opensfm/src/cmake/FindEigen.cmake +++ /dev/null @@ -1,160 +0,0 @@ -# Ceres Solver - A fast non-linear least squares minimizer -# Copyright 2013 Google Inc. All rights reserved. -# http://code.google.com/p/ceres-solver/ -# -# Redistribution and use in source and binary forms, with or without -# modification, are permitted provided that the following conditions are met: -# -# * Redistributions of source code must retain the above copyright notice, -# this list of conditions and the following disclaimer. 
-# * Redistributions in binary form must reproduce the above copyright notice, -# this list of conditions and the following disclaimer in the documentation -# and/or other materials provided with the distribution. -# * Neither the name of Google Inc. nor the names of its contributors may be -# used to endorse or promote products derived from this software without -# specific prior written permission. -# -# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" -# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE -# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE -# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE -# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR -# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF -# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS -# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN -# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) -# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE -# POSSIBILITY OF SUCH DAMAGE. -# -# Author: alexs.mac@gmail.com (Alex Stewart) -# - -# FindEigen.cmake - Find Eigen library, version >= 3. -# -# This module defines the following variables: -# -# EIGEN_FOUND: TRUE iff Eigen is found. -# EIGEN_INCLUDE_DIRS: Include directories for Eigen. -# -# EIGEN_VERSION: Extracted from Eigen/src/Core/util/Macros.h -# EIGEN_WORLD_VERSION: Equal to 3 if EIGEN_VERSION = 3.2.0 -# EIGEN_MAJOR_VERSION: Equal to 2 if EIGEN_VERSION = 3.2.0 -# EIGEN_MINOR_VERSION: Equal to 0 if EIGEN_VERSION = 3.2.0 -# -# The following variables control the behaviour of this module: -# -# EIGEN_INCLUDE_DIR_HINTS: List of additional directories in which to -# search for eigen includes, e.g: /timbuktu/eigen3. 
-# -# The following variables are also defined by this module, but in line with -# CMake recommended FindPackage() module style should NOT be referenced directly -# by callers (use the plural variables detailed above instead). These variables -# do however affect the behaviour of the module via FIND_[PATH/LIBRARY]() which -# are NOT re-called (i.e. search for library is not repeated) if these variables -# are set with valid values _in the CMake cache_. This means that if these -# variables are set directly in the cache, either by the user in the CMake GUI, -# or by the user passing -DVAR=VALUE directives to CMake when called (which -# explicitly defines a cache variable), then they will be used verbatim, -# bypassing the HINTS variables and other hard-coded search locations. -# -# EIGEN_INCLUDE_DIR: Include directory for CXSparse, not including the -# include directory of any dependencies. - -# Called if we failed to find Eigen or any of it's required dependencies, -# unsets all public (designed to be used externally) variables and reports -# error message at priority depending upon [REQUIRED/QUIET/] argument. -MACRO(EIGEN_REPORT_NOT_FOUND REASON_MSG) - UNSET(EIGEN_FOUND) - UNSET(EIGEN_INCLUDE_DIRS) - # Make results of search visible in the CMake GUI if Eigen has not - # been found so that user does not have to toggle to advanced view. - MARK_AS_ADVANCED(CLEAR EIGEN_INCLUDE_DIR) - # Note _FIND_[REQUIRED/QUIETLY] variables defined by FindPackage() - # use the camelcase library name, not uppercase. - IF (Eigen_FIND_QUIETLY) - MESSAGE(STATUS "Failed to find Eigen - " ${REASON_MSG} ${ARGN}) - ELSEIF (Eigen_FIND_REQUIRED) - MESSAGE(FATAL_ERROR "Failed to find Eigen - " ${REASON_MSG} ${ARGN}) - ELSE() - # Neither QUIETLY nor REQUIRED, use no priority which emits a message - # but continues configuration and allows generation. 
- MESSAGE("-- Failed to find Eigen - " ${REASON_MSG} ${ARGN}) - ENDIF () -ENDMACRO(EIGEN_REPORT_NOT_FOUND) - -# Search user-installed locations first, so that we prefer user installs -# to system installs where both exist. -# -# TODO: Add standard Windows search locations for Eigen. -LIST(APPEND EIGEN_CHECK_INCLUDE_DIRS - /usr/local/include/eigen3 - /usr/local/homebrew/include/eigen3 # Mac OS X - /opt/local/var/macports/software/eigen3 # Mac OS X. - /opt/local/include/eigen3 - /usr/include/eigen3) - -# Search supplied hint directories first if supplied. -FIND_PATH(EIGEN_INCLUDE_DIR - NAMES Eigen/Core - PATHS ${EIGEN_INCLUDE_DIR_HINTS} - ${EIGEN_CHECK_INCLUDE_DIRS}) -IF (NOT EIGEN_INCLUDE_DIR OR - NOT EXISTS ${EIGEN_INCLUDE_DIR}) - EIGEN_REPORT_NOT_FOUND( - "Could not find eigen3 include directory, set EIGEN_INCLUDE_DIR to " - "path to eigen3 include directory, e.g. /usr/local/include/eigen3.") -ENDIF (NOT EIGEN_INCLUDE_DIR OR - NOT EXISTS ${EIGEN_INCLUDE_DIR}) - -# Mark internally as found, then verify. EIGEN_REPORT_NOT_FOUND() unsets -# if called. 
-SET(EIGEN_FOUND TRUE) - -# Extract Eigen version from Eigen/src/Core/util/Macros.h -IF (EIGEN_INCLUDE_DIR) - SET(EIGEN_VERSION_FILE ${EIGEN_INCLUDE_DIR}/Eigen/src/Core/util/Macros.h) - IF (NOT EXISTS ${EIGEN_VERSION_FILE}) - EIGEN_REPORT_NOT_FOUND( - "Could not find file: ${EIGEN_VERSION_FILE} " - "containing version information in Eigen install located at: " - "${EIGEN_INCLUDE_DIR}.") - ELSE (NOT EXISTS ${EIGEN_VERSION_FILE}) - FILE(READ ${EIGEN_VERSION_FILE} EIGEN_VERSION_FILE_CONTENTS) - - STRING(REGEX MATCH "#define EIGEN_WORLD_VERSION [0-9]+" - EIGEN_WORLD_VERSION "${EIGEN_VERSION_FILE_CONTENTS}") - STRING(REGEX REPLACE "#define EIGEN_WORLD_VERSION ([0-9]+)" "\\1" - EIGEN_WORLD_VERSION "${EIGEN_WORLD_VERSION}") - - STRING(REGEX MATCH "#define EIGEN_MAJOR_VERSION [0-9]+" - EIGEN_MAJOR_VERSION "${EIGEN_VERSION_FILE_CONTENTS}") - STRING(REGEX REPLACE "#define EIGEN_MAJOR_VERSION ([0-9]+)" "\\1" - EIGEN_MAJOR_VERSION "${EIGEN_MAJOR_VERSION}") - - STRING(REGEX MATCH "#define EIGEN_MINOR_VERSION [0-9]+" - EIGEN_MINOR_VERSION "${EIGEN_VERSION_FILE_CONTENTS}") - STRING(REGEX REPLACE "#define EIGEN_MINOR_VERSION ([0-9]+)" "\\1" - EIGEN_MINOR_VERSION "${EIGEN_MINOR_VERSION}") - - # This is on a single line s/t CMake does not interpret it as a list of - # elements and insert ';' separators which would result in 3.;2.;0 nonsense. - SET(EIGEN_VERSION "${EIGEN_WORLD_VERSION}.${EIGEN_MAJOR_VERSION}.${EIGEN_MINOR_VERSION}") - ENDIF (NOT EXISTS ${EIGEN_VERSION_FILE}) -ENDIF (EIGEN_INCLUDE_DIR) - -# Set standard CMake FindPackage variables if found. -IF (EIGEN_FOUND) - SET(EIGEN_INCLUDE_DIRS ${EIGEN_INCLUDE_DIR}) -ENDIF (EIGEN_FOUND) - -# Handle REQUIRED / QUIET optional arguments and version. 
-INCLUDE(FindPackageHandleStandardArgs) -FIND_PACKAGE_HANDLE_STANDARD_ARGS(Eigen - REQUIRED_VARS EIGEN_INCLUDE_DIRS - VERSION_VAR EIGEN_VERSION) - -# Only mark internal variables as advanced if we found Eigen, otherwise -# leave it visible in the standard GUI for the user to set manually. -IF (EIGEN_FOUND) - MARK_AS_ADVANCED(FORCE EIGEN_INCLUDE_DIR) -ENDIF (EIGEN_FOUND) diff --git a/opensfm/src/foundation/CMakeLists.txt b/opensfm/src/foundation/CMakeLists.txt index 8da92eafb..401852277 100644 --- a/opensfm/src/foundation/CMakeLists.txt +++ b/opensfm/src/foundation/CMakeLists.txt @@ -15,13 +15,13 @@ target_link_libraries(foundation pybind11 ${OpenCV_LIBS} ${OpenMP_libomp_LIBRARY} + Eigen3::Eigen PRIVATE ${GFLAGS_LIBRARY} ${GLOG_LIBRARY} ) target_include_directories(foundation PUBLIC - ${EIGEN_INCLUDE_DIRS} ${PYTHON_INCLUDE_DIRS} ${CMAKE_SOURCE_DIR} ${OpenMP_CXX_INCLUDE_DIR} diff --git a/opensfm/src/geo/CMakeLists.txt b/opensfm/src/geo/CMakeLists.txt index c14494343..a9cbae02a 100644 --- a/opensfm/src/geo/CMakeLists.txt +++ b/opensfm/src/geo/CMakeLists.txt @@ -15,10 +15,11 @@ if (OPENSFM_BUILD_TESTS) test/geo_test.cc ) add_executable(geo_test ${GEO_TEST_FILES}) - target_include_directories(geo_test PRIVATE ${CMAKE_SOURCE_DIR} ${EIGEN_INCLUDE_DIRS}) + target_include_directories(geo_test PRIVATE ${CMAKE_SOURCE_DIR}) target_link_libraries(geo_test PUBLIC geo + Eigen3::Eigen ${TEST_MAIN}) add_test(geo_test geo_test) endif() diff --git a/opensfm/src/geometry/CMakeLists.txt b/opensfm/src/geometry/CMakeLists.txt index b407444c8..e6dda2c26 100644 --- a/opensfm/src/geometry/CMakeLists.txt +++ b/opensfm/src/geometry/CMakeLists.txt @@ -17,13 +17,14 @@ set(GEOMETRY_FILES src/triangulation.cc src/absolute_pose.cc src/relative_pose.cc -) + ) add_library(geometry ${GEOMETRY_FILES}) target_link_libraries(geometry - PRIVATE + PRIVATE foundation -) -target_include_directories(geometry PUBLIC ${CMAKE_SOURCE_DIR} ${CERES_INCLUDE_DIR}) + ${CERES_LIBRARIES} + ) 
+target_include_directories(geometry PUBLIC ${CMAKE_SOURCE_DIR}) if (OPENSFM_BUILD_TESTS) set(GEOMETRY_TEST_FILES @@ -31,13 +32,14 @@ if (OPENSFM_BUILD_TESTS) test/camera_functions_test.cc test/covariance_test.cc test/point_test.cc - ) + ) add_executable(geometry_test ${GEOMETRY_TEST_FILES}) - target_include_directories(geometry_test PRIVATE ${CMAKE_SOURCE_DIR} ${EIGEN_INCLUDE_DIRS}) + target_include_directories(geometry_test PRIVATE ${CMAKE_SOURCE_DIR}) target_link_libraries(geometry_test - PUBLIC - geometry - ${TEST_MAIN}) + PUBLIC + geometry + Eigen3::Eigen + ${TEST_MAIN}) add_test(geometry_test geometry_test) endif() diff --git a/opensfm/src/map/CMakeLists.txt b/opensfm/src/map/CMakeLists.txt index 3e31be2e4..b6f67bcdb 100644 --- a/opensfm/src/map/CMakeLists.txt +++ b/opensfm/src/map/CMakeLists.txt @@ -21,6 +21,7 @@ add_library(map ${MAP_FILES}) target_link_libraries(map PUBLIC pybind11 + Eigen3::Eigen PRIVATE geo geometry @@ -28,7 +29,6 @@ target_link_libraries(map target_include_directories(map PUBLIC - ${EIGEN_INCLUDE_DIRS} ${PYTHON_INCLUDE_DIRS} ${CMAKE_SOURCE_DIR} ) diff --git a/opensfm/src/sfm/CMakeLists.txt b/opensfm/src/sfm/CMakeLists.txt index cc7223a46..98c28f413 100644 --- a/opensfm/src/sfm/CMakeLists.txt +++ b/opensfm/src/sfm/CMakeLists.txt @@ -8,12 +8,14 @@ set(SFM_FILES ) add_library(sfm ${SFM_FILES}) target_link_libraries(sfm + PUBLIC + Eigen3::Eigen PRIVATE foundation map bundle ) -target_include_directories(sfm PUBLIC ${EIGEN_INCLUDE_DIRS} ${CMAKE_SOURCE_DIR}) +target_include_directories(sfm PUBLIC ${CMAKE_SOURCE_DIR}) if (OPENSFM_BUILD_TESTS) set(SFM_TEST_FILES From 9268047c9763a58a4985b51a01b516c8823591db Mon Sep 17 00:00:00 2001 From: Yann Noutary Date: Fri, 11 Mar 2022 07:34:59 -0800 Subject: [PATCH 38/81] feat: calibration database as YAML file Summary: This Diff refactors/featurize the hard-coded calibration as a standalone YAML file Reviewed By: paulinus Differential Revision: D34550711 fbshipit-source-id: 
8e6157244c9fb5ac21417c961045c39ca32b3a44 --- doc/source/index.rst | 1 + doc/source/sensor_database.rst | 38 ++++++++++++ opensfm/context.py | 3 +- opensfm/data/camera_calibration.yaml | 89 ++++++++++++++++++++++++++++ opensfm/data/sensor_data.readme.txt | 18 ++---- opensfm/exif.py | 63 ++++++-------------- opensfm/sensors.py | 8 +++ setup.py | 19 +++--- 8 files changed, 174 insertions(+), 65 deletions(-) create mode 100644 doc/source/sensor_database.rst create mode 100644 opensfm/data/camera_calibration.yaml diff --git a/doc/source/index.rst b/doc/source/index.rst index a769445f7..5c0101f7c 100644 --- a/doc/source/index.rst +++ b/doc/source/index.rst @@ -18,6 +18,7 @@ OpenSfM rig annotation_tool api + sensor_database Indices and tables ================== diff --git a/doc/source/sensor_database.rst b/doc/source/sensor_database.rst new file mode 100644 index 000000000..63fdc1eb4 --- /dev/null +++ b/doc/source/sensor_database.rst @@ -0,0 +1,38 @@ +.. Doc on rig + +.. _sensors_database: + +Calibration Database +==================== + + +Overview +-------- + +In order to produce accurate geometry, structure-from-motion (SfM) needs to have correct estimates of the imaging sensor geometry, such as : lens type (fisheye, perspective, spherical), focal, distorsion, principal point. Please refer to the `Geometric Models`_ section for a comprehensive list of camera internal parameters (calibration). + +While reconstructing the scene (using incremental SfM), OpenSfM will adjust for the camera calibration values that best explain the seen geometry. However, in order to get optimal and failsafe results, it is recommended to have a first good guess of the calibration values. By default, OpenSfM will try to get these values by reading the image EXIFs, where the focal length can be red, and is one of the most important of the calibration values. 
However, sometimes, the EXIFs do not contain such a value, or it is erroneous, and/or it is better to have other values than just the focal length. + +Here come the sensor databases to the rescue. These are files stored under ``opensfm/data`` : + - ``sensor_data_detailed.json`` + - ``sensor_data.json`` + - ``camera_calibration.yaml`` + +sensor_data_detailed.json +------------------------- + +This file contains physical sensor's width and height, in millimeters, for a given ``model make`` sensor (see `extract_metadata`_). It means that if only the focal length is available in the EXIFs, since we also have the sensor physical size, we know the full sensor geometry. + + +sensor_data.json +---------------- + +This file contains a multiplicative factor for a given ``model make`` sensor (see `extract_metadata`_). When applied to the EXIFs focal length, this factor gives the focal 35mm equivalent. Since we know the dimensions of 35mm equivalent (36x24 mm), we again know the full sensor geometry. + +camera_calibration.yaml +------------------------ + +This file contains the full definition (in OpenSfM format) of camera calibrations.
Calibrations are for a given ``make`` (see `extract_metadata`_), and then, they're further refined : + - If ``ALL`` is specified, then the calibration is valid for all ``make model`` camera independent of their ``model`` value + - If ``MODEL`` is specified, then calibrations are per actual ``model`` + - If ``FOCAL`` is specified, then calibrations are per focal length read from the EXIFs diff --git a/opensfm/context.py b/opensfm/context.py index 5246a76c1..03d886488 100644 --- a/opensfm/context.py +++ b/opensfm/context.py @@ -17,7 +17,8 @@ abspath = os.path.dirname(os.path.realpath(__file__)) -SENSOR = os.path.join(abspath, "data", "sensor_data.json") +SENSOR_DATA = os.path.join(abspath, "data", "sensor_data.json") +CAMERA_CALIBRATION = os.path.join(abspath, "data", "camera_calibration.yaml") BOW_PATH = os.path.join(abspath, "data", "bow") diff --git a/opensfm/data/camera_calibration.yaml b/opensfm/data/camera_calibration.yaml new file mode 100644 index 000000000..2cf333d99 --- /dev/null +++ b/opensfm/data/camera_calibration.yaml @@ -0,0 +1,89 @@ +[{ + "gopro":{ + "MODEL":{ + "hero8 black":{ + "focal_x": 0.47720253548498875, + "focal_y": 0.4776476227437118, + "k1": -0.004417272934201867, + "k2": 0.0026456102958567146, + "c_x": -0.0017664444590190724, + "c_y": 0.006393052002726844, + "p1": 0.0, "p2": 0.0, "k3": 0.0, + "projection_type": "brown" + } + }, + "FOCAL":{ + # GoPro Hero 3, 7MP medium + 20: {"focal": 0.625, "k1": -0.37, "k2": 0.28}, + # GoPro Hero 3, 7MP wide + 15: {"focal": 0.466, "k1": -0.195, "k2": 0.030}, + # GoPro Hero 2, 5MP medium + 23: {"focal": 0.71875, "k1": -0.38, "k2": 0.24}, + # GoPro Hero 2, 5MP wide + 16: {"focal": 0.5, "k1": -0.39, "k2": 0.22}, + } + }, + "garmin":{ + "MODEL":{ + # "v2 garmin virb 4608 3456 perspective 0" + "virb": {"focal": 0.57, "k1": -0.30, "k2": 0.06}, + # "v2 garmin virbxe 3477 1950 perspective 0.3888" + # "v2 garmin virbxe 1600 1200 perspective 0.3888" + # "v2 garmin virbxe 4000 3000 perspective 0.3888" + # Calibration
when using camera's undistortion + "virbxe": {"focal": 0.466, "k1": -0.08, "k2": 0.0} + } + }, + "drift":{ + "MODEL":{ + "ghost s": {"focal": 0.47, "k1": -0.22, "k2": 0.03} + } + }, + "xiaoyi":{ + "ALL": {"focal": 0.5, "k1": -0.19, "k2": 0.028} + }, + "geo":{ + "MODEL":{ + "frames": {"focal": 0.5, "k1": -0.24, "k2": 0.04} + } + }, + "bullet5s":{ + "ALL": {"focal": 0.57, "k1": -0.30, "k2": 0.06} + }, + "sony":{ + "MODEL":{ + "hdr-as200v": {"focal": 0.55, "k1": -0.30, "k2": 0.08}, + "hdr-as300": {"focal": 0.3958, "k1": -0.1496, "k2": 0.0201} + } + }, + "PARROT":{ + "MODEL":{ + "Bebop 2": {"focal": 0.36666666666666666, "projection_type": "fisheye"} + } + }, + "dji":{ + "MODEL":{ + "fc300s": { + "focal": 0.5930666527479901, + "k1": -0.012137318698010527, + "k2": 0.016199087342953698 + } + } + }, + "skydio":{ + "MODEL":{ + "2": { + "focal_x": 0.5890650637205782, + "focal_y": 0.5893009528604379, + "c_x": 0.006414544008217537, + "c_y": 0.007804076177596079, + "k1": 0.007400489742572139, + "k2": -0.01564249925934293, + "p1": -0.007885887470989788, + "p2": 0.007514503550147163, + "k3": -0.0019150459181816895, + "projection_type": "brown" + } + } + } +}] diff --git a/opensfm/data/sensor_data.readme.txt b/opensfm/data/sensor_data.readme.txt index c41e50574..8c3d230d0 100644 --- a/opensfm/data/sensor_data.readme.txt +++ b/opensfm/data/sensor_data.readme.txt @@ -2,28 +2,22 @@ Sensor Database for Cameras ____________________________________________ -This is a database of camera sensors (mainly CCD sensor size now). The first version of the database (~3600 digital cameras) is contributed by Gregor Brdnik, the creator of http://www.digicamdb.com/. +This is a database of camera sensors (mainly CCD sensor size now). The first version of the database (~3600 digital cameras) is contributed by Gregor Brdnik, the creator of http://www.digicamdb.com/. -Any further contributions to the database are encouraged and welcome. 
+Any further contributions to the database are encouraged and welcome. -------------------------------------------- License -------------------------------------------- -This database is licensed under the same license as OpenSfM which is Simplified BSD license. +This database is licensed under the same license as OpenSfM which is Simplified BSD license. -------------------------------------------- Contents -------------------------------------------- -The database contains two json files, 'sensor_data.json' and 'sensor_data_detailed.json'. +The database contains two json files, 'sensor_data.json', 'sensor_data_detailed.json' and 'sensor_calibration.json'. -sensor_data.json : A slim version of the database - a dictionary that contains only the camera model and the sensor size in mm. Each item in the dictionary is in the form of 'MAKE MODEL: SENSOR_SIZE in mm'. In general, 'MAKE' and 'MODEL' are available in the EXIF of an image. For example, given MAKE='Canon', MODEL='EOS 1000D', one will be able to query the sensor size from the dictionary {"Canon EOS 1000D": 22.2}. +sensor_data.json : A slim version of the database - a dictionary that contains only the camera model and the sensor size in mm. Each item in the dictionary is in the form of 'MAKE MODEL: SENSOR_SIZE in mm'. In general, 'MAKE' and 'MODEL' are available in the EXIF of an image. For example, given MAKE='Canon', MODEL='EOS 1000D', one will be able to query the sensor size from the dictionary {"Canon EOS 1000D": 22.2}. sensor_data_detailed.json : A detailed version of database that contains more complete information about the sensors. - - - - - - - +camera_calibration.json : OpenSfM version of full calibration of sensors. 
diff --git a/opensfm/exif.py b/opensfm/exif.py index f837b94c2..aa1dc01ce 100644 --- a/opensfm/exif.py +++ b/opensfm/exif.py @@ -9,7 +9,7 @@ from opensfm import pygeometry from opensfm.dataset_base import DataSetBase from opensfm.geo import ecef_from_lla -from opensfm.sensors import sensor_data +from opensfm.sensors import sensor_data, camera_calibration logger = logging.getLogger(__name__) @@ -607,49 +607,21 @@ def hard_coded_calibration(exif): fmm35 = int(round(focal * 36.0)) make = exif["make"].strip().lower() model = exif["model"].strip().lower() - if "gopro" in make: - if fmm35 == 20: - # GoPro Hero 3, 7MP medium - return {"focal": focal, "k1": -0.37, "k2": 0.28} - elif fmm35 == 15: - # GoPro Hero 3, 7MP wide - # "v2 gopro hero3+ black edition 3000 2250 perspective 0.4166" - return {"focal": 0.466, "k1": -0.195, "k2": 0.030} - elif fmm35 == 23: - # GoPro Hero 2, 5MP medium - return {"focal": focal, "k1": -0.38, "k2": 0.24} - elif fmm35 == 16: - # GoPro Hero 2, 5MP wide - return {"focal": focal, "k1": -0.39, "k2": 0.22} - elif "bullet5s" in make: - return {"focal": 0.57, "k1": -0.30, "k2": 0.06} - elif "garmin" == make: - if "virb" == model: - # "v2 garmin virb 4608 3456 perspective 0" - return {"focal": 0.5, "k1": -0.08, "k2": 0.005} - elif "virbxe" == model: - # "v2 garmin virbxe 3477 1950 perspective 0.3888" - # "v2 garmin virbxe 1600 1200 perspective 0.3888" - # "v2 garmin virbxe 4000 3000 perspective 0.3888" - # Calibration when using camera's undistortion - return {"focal": 0.466, "k1": -0.08, "k2": 0.0} - # Calibration when not using camera's undistortion - # return {'focal': 0.466, 'k1': -0.195, 'k2'; 0.030} - elif "drift" == make: - if "ghost s" == model: - return {"focal": 0.47, "k1": -0.22, "k2": 0.03} - elif "xiaoyi" in make: - return {"focal": 0.5, "k1": -0.19, "k2": 0.028} - elif "geo" == make and "frames" == model: - return {"focal": 0.5, "k1": -0.24, "k2": 0.04} - elif "sony" == make: - if "hdr-as200v" == model: - return {"focal": 0.55, "k1": 
-0.30, "k2": 0.08} - elif "hdr-as300" in model: - return {"focal": 0.3958, "k1": -0.1496, "k2": 0.0201} - elif "PARROT" == make: - if "Bebop 2" == model: - return {"focal": 0.36666666666666666} + raw_calibrations = camera_calibration()[0] + if make not in raw_calibrations: + return None + models = raw_calibrations[make] + if "ALL" in models: + return models["ALL"] + if "MODEL" in models: + if model not in models["MODEL"]: + return None + return models["MODEL"][model] + if "FOCAL" in models: + if fmm35 not in models["FOCAL"]: + return None + return models["FOCAL"][fmm35] + return None def focal_ratio_calibration(exif): @@ -731,7 +703,8 @@ def calibration_from_metadata(metadata, data: DataSetBase): or focal_ratio_calibration(metadata) or default_calibration(data) ) - calib["projection_type"] = pt + if "projection_type" not in calib: + calib["projection_type"] = pt return calib diff --git a/opensfm/sensors.py b/opensfm/sensors.py index 61225c496..14c629082 100644 --- a/opensfm/sensors.py +++ b/opensfm/sensors.py @@ -1,5 +1,6 @@ from functools import lru_cache +import yaml from opensfm import context from opensfm import io @@ -11,3 +12,10 @@ def sensor_data(): # Convert model types to lower cases for easier query return {k.lower(): v for k, v in data.items()} + + +@lru_cache(1) +def camera_calibration(): + with io.open_rt(context.CAMERA_CALIBRATION) as f: + data = yaml.safe_load(f) + return data diff --git a/setup.py b/setup.py index 1120717f9..96620d3b2 100644 --- a/setup.py +++ b/setup.py @@ -1,9 +1,9 @@ #!/usr/bin/env python3 +import multiprocessing import os import subprocess import sys -import multiprocessing import setuptools from sphinx.setup_command import BuildDoc @@ -35,10 +35,10 @@ def configure_c_extension(): "../opensfm/src", "-DPYTHON_EXECUTABLE=" + sys.executable, ] - if sys.platform == 'win32': + if sys.platform == "win32": cmake_command += [ - '-DVCPKG_TARGET_TRIPLET=x64-windows', - '-DCMAKE_TOOLCHAIN_FILE=../vcpkg/scripts/buildsystems/vcpkg.cmake' + 
"-DVCPKG_TARGET_TRIPLET=x64-windows", + "-DCMAKE_TOOLCHAIN_FILE=../vcpkg/scripts/buildsystems/vcpkg.cmake", ] subprocess.check_call(cmake_command, cwd="cmake_build") @@ -46,10 +46,14 @@ def configure_c_extension(): def build_c_extension(): """Compile C extension.""" print("Compiling extension...") - if sys.platform == 'win32': - subprocess.check_call(['cmake', '--build', '.', '--config', 'Release'], cwd='cmake_build') + if sys.platform == "win32": + subprocess.check_call( + ["cmake", "--build", ".", "--config", "Release"], cwd="cmake_build" + ) else: - subprocess.check_call(['make', '-j' + str(multiprocessing.cpu_count())], cwd='cmake_build') + subprocess.check_call( + ["make", "-j" + str(multiprocessing.cpu_count())], cwd="cmake_build" + ) configure_c_extension() @@ -84,6 +88,7 @@ def build_c_extension(): "pyfoundation.*", "pymap.*", "data/sensor_data.json", + "data/camera_calibration.yaml", "data/bow/bow_hahog_root_uchar_10000.npz", "data/bow/bow_hahog_root_uchar_64.npz", ] From 8630af903198d7a628b3484c573a67ce96b9a6c5 Mon Sep 17 00:00:00 2001 From: Yann Noutary Date: Mon, 14 Mar 2022 04:35:28 -0700 Subject: [PATCH 39/81] fix: fix SENSOR_DATA Summary: This Diff fixes the broken `context.SENSOR_DATA` Reviewed By: mlopezantequera, DodgySpaniard Differential Revision: D34859022 fbshipit-source-id: fcbddc472963228a4437fa80afde5476696310c1 --- opensfm/sensors.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/opensfm/sensors.py b/opensfm/sensors.py index 14c629082..54c737f0b 100644 --- a/opensfm/sensors.py +++ b/opensfm/sensors.py @@ -7,7 +7,7 @@ @lru_cache(1) def sensor_data(): - with io.open_rt(context.SENSOR) as f: + with io.open_rt(context.SENSOR_DATA) as f: data = io.json_load(f) # Convert model types to lower cases for easier query From d09037ae4a327a1e991bda0914a42b673409a373 Mon Sep 17 00:00:00 2001 From: Yann Noutary Date: Mon, 21 Mar 2022 10:27:50 -0700 Subject: [PATCH 40/81] fix: continuous shots for proper split Reviewed By: paulinus 
Differential Revision: D34582041 fbshipit-source-id: 43879f162f94befbb98bde44506dd31b1e1d66d5 --- opensfm/synthetic_data/synthetic_scene.py | 14 +++++++++----- 1 file changed, 9 insertions(+), 5 deletions(-) diff --git a/opensfm/synthetic_data/synthetic_scene.py b/opensfm/synthetic_data/synthetic_scene.py index c97c93cc7..3c572d802 100644 --- a/opensfm/synthetic_data/synthetic_scene.py +++ b/opensfm/synthetic_data/synthetic_scene.py @@ -339,8 +339,11 @@ def add_rig_camera_sequence( sg.perturb_points(instances_positions, position_noise) sg.perturb_rotations(instances_rotations, rotation_noise) + shift = sum(len(s) for s in self.shot_ids) shots_ids_per_camera = [] - for rig_camera_p, rig_camera_r in zip(relative_positions, relative_rotations): + for j, (rig_camera_p, rig_camera_r) in enumerate( + zip(relative_positions, relative_rotations) + ): pose_rig_camera = pygeometry.Pose(rig_camera_r) pose_rig_camera.set_origin(rig_camera_p) @@ -353,10 +356,11 @@ def add_rig_camera_sequence( rotations.append(composed.rotation) positions.append(composed.get_origin()) - shift = sum(len(s) for s in shots_ids_per_camera) - shots_ids_per_camera.append( - [f"Shot {shift+i:04d}" for i in range(len(positions))] - ) + camera_shot_ids = [] + for i in range(len(positions)): + shot_index = i * len(relative_positions) + j + camera_shot_ids.append(f"Shot {shift+shot_index:04d}") + shots_ids_per_camera.append(camera_shot_ids) self.cameras.append(cameras) self.shot_ids += shots_ids_per_camera From fcad213233791e83335b6bb493001f47dc782dab Mon Sep 17 00:00:00 2001 From: Yann Noutary Date: Mon, 21 Mar 2022 10:27:50 -0700 Subject: [PATCH 41/81] feat: support EPSG in GCP list Summary: This Diff makes OpenSfM support EPSG definition and upgrade pyproj calls to support newer Transform(CS In, CS Out) call convention. We also add a little tweak to better support ODM GCP list files directly. 
Reviewed By: mlopezantequera Differential Revision: D34380711 fbshipit-source-id: 30edc1fcd4b243e12092fcf9b64198c9a2b0a731 --- opensfm/io.py | 20 ++++++++++++++------ opensfm/test/test_io.py | 2 +- 2 files changed, 15 insertions(+), 7 deletions(-) diff --git a/opensfm/io.py b/opensfm/io.py index 26086e132..c6efb0608 100644 --- a/opensfm/io.py +++ b/opensfm/io.py @@ -821,15 +821,20 @@ def camera_to_vector(camera: pygeometry.Camera) -> List[float]: def _read_gcp_list_lines( lines: Iterable[str], projection, - exif: Dict[str, Dict[str, Any]], + exifs: Dict[str, Dict[str, Any]], ) -> List[pymap.GroundControlPoint]: points = {} for line in lines: words = line.split(None, 5) easting, northing, alt, pixel_x, pixel_y = map(float, words[:5]) - shot_id = words[5].strip() key = (easting, northing, alt) + shot_tokens = words[5].split(None) + shot_id = shot_tokens[0] + if shot_id not in exifs: + continue + + if key in points: point = points[key] else: @@ -840,7 +845,7 @@ def _read_gcp_list_lines( else: has_altitude = True if projection is not None: - lon, lat = projection(easting, northing, inverse=True) + lat, lon = projection.transform(easting, northing) else: lon, lat = easting, northing @@ -852,7 +857,7 @@ def _read_gcp_list_lines( points[key] = point # Convert 2D coordinates - d = exif[shot_id] + d = exifs[shot_id] coordinates = features.normalized_image_coordinates( np.array([[pixel_x, pixel_y]]), d["width"], d["height"] )[0] @@ -885,12 +890,15 @@ def _parse_utm_projection_string(line: str) -> str: def _parse_projection(line: str): """Build a proj4 from the GCP format line.""" + crs_4326 = pyproj.CRS.from_epsg(4326) if line.strip() == "WGS84": return None elif line.upper().startswith("WGS84 UTM"): - return pyproj.Proj(_parse_utm_projection_string(line)) + return pyproj.Transformer.from_proj(pyproj.CRS(_parse_utm_projection_string(line)), crs_4326) elif "+proj" in line: - return pyproj.Proj(line) + return pyproj.Transformer.from_proj(pyproj.CRS(line), crs_4326) + elif 
line.upper().startswith("EPSG:"): + return pyproj.Transformer.from_proj(pyproj.CRS.from_epsg(int(line.split(":")[1])), crs_4326) else: raise ValueError("Un-supported geo system definition: {}".format(line)) diff --git a/opensfm/test/test_io.py b/opensfm/test/test_io.py index 4775e98ac..56852bd90 100644 --- a/opensfm/test/test_io.py +++ b/opensfm/test/test_io.py @@ -71,7 +71,7 @@ def test_parse_projection() -> None: proj = io._parse_projection("WGS84 UTM 31N") easting, northing = 431760, 4582313.7 lat, lon = 41.38946, 2.18378 - plon, plat = proj(easting, northing, inverse=True) + plat, plon = proj.transform(easting, northing) assert np.allclose((lat, lon), (plat, plon)) From 221083fcfbdd4837221cc743522466209f5a2fbc Mon Sep 17 00:00:00 2001 From: Yann Noutary Date: Mon, 21 Mar 2022 10:27:50 -0700 Subject: [PATCH 42/81] refactor: support combination of merges Reviewed By: paulinus Differential Revision: D34414106 fbshipit-source-id: e2d44dbee367c982c0c690d2a68040bebf6e54a7 --- opensfm/types.py | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/opensfm/types.py b/opensfm/types.py index a86db365f..396d410b5 100644 --- a/opensfm/types.py +++ b/opensfm/types.py @@ -334,6 +334,8 @@ def remove_observation(self, shot_id: str, lm_id: str) -> None: def __deepcopy__(self, d): # create new reconstruction rec_cpy = Reconstruction() + rec_cpy.reference = self.reference + copy_observations = False # Check if we also need the observations if "copy_observations" in d: @@ -358,6 +360,10 @@ def __deepcopy__(self, d): obs = shot.get_observation(obs_id) rec_cpy.add_observation(shot.id, point.id, obs) + # Copy the biases + for bias_id, bias in self.biases.items(): + rec_cpy.set_bias(bias_id, bias) + return rec_cpy def add_correspondences_from_tracks_manager( From a678af0b8f9eaef24304811541db765e9ded5594 Mon Sep 17 00:00:00 2001 From: Yann Noutary Date: Mon, 21 Mar 2022 10:27:50 -0700 Subject: [PATCH 43/81] feat: split clusters command Reviewed By: paulinus Differential Revision: 
D34820307 fbshipit-source-id: befc21232fa73c0b32b05c4db2aa919fc20cd433 --- opensfm/align.py | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/opensfm/align.py b/opensfm/align.py index fe8981306..57b5cbf70 100644 --- a/opensfm/align.py +++ b/opensfm/align.py @@ -68,6 +68,10 @@ def apply_similarity( for rig_instance in reconstruction.rig_instances.values(): apply_similarity_pose(rig_instance.pose, s, A, b) + # Scale rig cameras + for rig_camera in reconstruction.rig_cameras.values(): + apply_similarity_pose(rig_camera.pose, s, np.eye(3), np.array([0, 0, 0])) + def compute_reconstruction_similarity( reconstruction: types.Reconstruction, From 3c2c524501936cbbdd6ee4407b883bd9a64acbea Mon Sep 17 00:00:00 2001 From: Yann Noutary Date: Mon, 21 Mar 2022 10:27:50 -0700 Subject: [PATCH 44/81] feat: ready for no-GPS Reviewed By: paulinus Differential Revision: D34820306 fbshipit-source-id: 0258d4ede97113690b201c84471f358449e3d7a6 --- opensfm/reconstruction.py | 5 +++- opensfm/reconstruction_helpers.py | 13 +++++----- opensfm/src/bundle/bundle_adjuster.h | 6 +++++ .../src/bundle/error/absolute_motion_errors.h | 16 ++++++++++++ opensfm/src/bundle/pybundle.pyi | 1 + opensfm/src/bundle/python/pybind.cc | 1 + opensfm/src/bundle/src/bundle_adjuster.cc | 25 +++++++++++++++++++ 7 files changed, 60 insertions(+), 7 deletions(-) diff --git a/opensfm/reconstruction.py b/opensfm/reconstruction.py index 0ca43a85f..468d9c277 100644 --- a/opensfm/reconstruction.py +++ b/opensfm/reconstruction.py @@ -54,8 +54,9 @@ def _add_gcp_to_bundle( shots: Dict[str, pymap.Shot], gcp_horizontal_sd: float, gcp_vertical_sd: float, -) -> None: +) -> int: """Add Ground Control Points constraints to the bundle problem.""" + added_gcps = 0 gcp_sd = np.array([gcp_horizontal_sd, gcp_horizontal_sd, gcp_vertical_sd]) for point in gcp: point_id = "gcp-" + point.id @@ -91,6 +92,8 @@ def _add_gcp_to_bundle( observation.projection, scale, ) + added_gcps += 1 + return added_gcps def bundle( diff --git 
a/opensfm/reconstruction_helpers.py b/opensfm/reconstruction_helpers.py index 90b29b916..657703b00 100644 --- a/opensfm/reconstruction_helpers.py +++ b/opensfm/reconstruction_helpers.py @@ -44,7 +44,7 @@ def guess_acceleration_from_orientation_tag(orientation: int) -> List[float]: raise RuntimeError(f"Error: Unknown orientation tag: {orientation}") -def orientation_from_acceleration_in_image_axis(x:float, y:float) -> int: +def orientation_from_acceleration_in_image_axis(x: float, y: float) -> int: """Return the orientation tag corresponding to an acceleration""" if y <= -(np.fabs(x)): return 1 @@ -58,7 +58,9 @@ def orientation_from_acceleration_in_image_axis(x:float, y:float) -> int: raise RuntimeError(f"Error: Invalid acceleration {x}, {y}!") -def transform_acceleration_from_phone_to_image_axis(x:float, y:float, z:float, orientation: int) -> List[float]: +def transform_acceleration_from_phone_to_image_axis( + x: float, y: float, z: float, orientation: int +) -> List[float]: """Compute acceleration in image axis. 
Orientation tag is used to ensure that the resulting acceleration points @@ -125,7 +127,9 @@ def rotation_from_angles(shot: pymap.Shot) -> Optional[np.ndarray]: return geometry.rotation_from_opk(*opk_rad) -def reconstruction_from_metadata(data: DataSetBase, images: Iterable[str]) -> types.Reconstruction: +def reconstruction_from_metadata( + data: DataSetBase, images: Iterable[str] +) -> types.Reconstruction: """Initialize a reconstruction by using EXIF data for constructing shot poses and cameras.""" data.init_reference() rig_assignments = rig.rig_assignments_per_image(data.load_rig_assignments()) @@ -182,9 +186,6 @@ def exif_to_metadata( metadata.gps_accuracy.value = gps.get("dop", 15.0) if metadata.gps_accuracy.value == 0.0: metadata.gps_accuracy.value = 15.0 - else: - metadata.gps_position.value = np.array([0.0, 0.0, 0.0]) - metadata.gps_accuracy.value = 999999.0 opk = exif.get("opk") if opk and "omega" in opk and "phi" in opk and "kappa" in opk: diff --git a/opensfm/src/bundle/bundle_adjuster.h b/opensfm/src/bundle/bundle_adjuster.h index c434b4577..1f9403ea6 100644 --- a/opensfm/src/bundle/bundle_adjuster.h +++ b/opensfm/src/bundle/bundle_adjuster.h @@ -265,6 +265,10 @@ class BundleAdjuster { double position_std_deviation, double orientation_std_deviation); + // Gauge fixing + void SetGaugeFixShots(const std::string &shot_origin, + const std::string &shot_scale); + // Minimization setup void SetPointProjectionLossFunction(std::string name, double threshold); void SetRelativeMotionLossFunction(std::string name, double threshold); @@ -363,6 +367,8 @@ class BundleAdjuster { std::string relative_motion_loss_name_; double relative_motion_loss_threshold_; bool adjust_absolute_position_std_; + foundation::OptionalValue> + gauge_fix_shots_; bool compute_covariances_; bool covariance_estimation_valid_; diff --git a/opensfm/src/bundle/error/absolute_motion_errors.h b/opensfm/src/bundle/error/absolute_motion_errors.h index 4d4e770a3..7a75ece12 100644 --- 
a/opensfm/src/bundle/error/absolute_motion_errors.h +++ b/opensfm/src/bundle/error/absolute_motion_errors.h @@ -170,4 +170,20 @@ struct HeatmapdCostFunctor { const double resolution_; const double scale_; }; + +struct TranslationPriorError { + explicit TranslationPriorError(const double prior_norm) + : prior_norm_(prior_norm) {} + + template + bool operator()(const T* const rig_instance1, const T* const rig_instance2, + T* residuals) const { + auto t1 = Eigen::Map>(rig_instance1 + Pose::Parameter::TX); + auto t2 = Eigen::Map>(rig_instance2 + Pose::Parameter::TX); + residuals[0] = log((t1 - t2).norm() / T(prior_norm_)); + return true; + } + + double prior_norm_; +}; } // namespace bundle diff --git a/opensfm/src/bundle/pybundle.pyi b/opensfm/src/bundle/pybundle.pyi index fa1fbae9a..dd5142feb 100644 --- a/opensfm/src/bundle/pybundle.pyi +++ b/opensfm/src/bundle/pybundle.pyi @@ -54,6 +54,7 @@ class BundleAdjuster: def set_adjust_absolute_position_std(self, arg0: bool) -> None: ... def set_compute_covariances(self, arg0: bool) -> None: ... def set_compute_reprojection_errors(self, arg0: bool) -> None: ... + def set_gauge_fix_shots(self, arg0: str, arg1: str) -> None: ... def set_internal_parameters_prior_sd(self, arg0: float, arg1: float, arg2: float, arg3: float, arg4: float, arg5: float, arg6: float, arg7: float) -> None: ... def set_linear_solver_type(self, arg0: str) -> None: ... def set_max_num_iterations(self, arg0: int) -> None: ... 
diff --git a/opensfm/src/bundle/python/pybind.cc b/opensfm/src/bundle/python/pybind.cc index c486c6217..20fb74f12 100644 --- a/opensfm/src/bundle/python/pybind.cc +++ b/opensfm/src/bundle/python/pybind.cc @@ -105,6 +105,7 @@ PYBIND11_MODULE(pybundle, m) { .def("add_absolute_tilt", &bundle::BundleAdjuster::AddAbsoluteTilt) .def("add_absolute_roll", &bundle::BundleAdjuster::AddAbsoluteRoll) .def("add_linear_motion", &bundle::BundleAdjuster::AddLinearMotion) + .def("set_gauge_fix_shots", &bundle::BundleAdjuster::SetGaugeFixShots) .def("set_internal_parameters_prior_sd", &bundle::BundleAdjuster::SetInternalParametersPriorSD) .def("set_compute_covariances", diff --git a/opensfm/src/bundle/src/bundle_adjuster.cc b/opensfm/src/bundle/src/bundle_adjuster.cc index bce5939f1..513e01d30 100644 --- a/opensfm/src/bundle/src/bundle_adjuster.cc +++ b/opensfm/src/bundle/src/bundle_adjuster.cc @@ -336,6 +336,13 @@ void BundleAdjuster::AddAbsoluteRoll(const std::string &shot_id, double angle, absolute_rolls_.push_back(a); } +void BundleAdjuster::SetGaugeFixShots(const std::string &shot_origin, + const std::string &shot_scale) { + Shot *shot = &shots_.at(shot_origin); + shot->GetRigInstance()->SetParametersToOptimize({}); + gauge_fix_shots_.SetValue(std::make_pair(shot_origin, shot_scale)); +} + void BundleAdjuster::SetPointProjectionLossFunction(std::string name, double threshold) { point_projection_loss_name_ = name; @@ -1055,6 +1062,24 @@ void BundleAdjuster::Run() { parameter_blocks); } + // Gauge fix + if (gauge_fix_shots_.HasValue()) { + const auto &gauge_shots = gauge_fix_shots_.Value(); + auto instance1 = shots_.at(gauge_shots.first).GetRigInstance(); + auto instance2 = shots_.at(gauge_shots.second).GetRigInstance(); + const double norm = + (instance1->GetValue().GetOrigin() - instance2->GetValue().GetOrigin()) + .norm(); + + ceres::CostFunction *cost_function = + new ceres::AutoDiffCostFunction( + new TranslationPriorError(norm)); + + problem.AddResidualBlock(cost_function, 
nullptr, + instance1->GetValueData().data(), + instance2->GetValueData().data()); + } + // Solve ceres::Solver::Options options; if (!ceres::StringToLinearSolverType(linear_solver_type_, From b43f5faca309676dcb6bcd9225d7ac46b98c2f96 Mon Sep 17 00:00:00 2001 From: Yann Noutary Date: Wed, 23 Mar 2022 03:47:41 -0700 Subject: [PATCH 45/81] =?UTF-8?q?fix:=20build=20missing=20OpenMP=20include?= =?UTF-8?q?=20through=20the=20use=20of=20foundation=20lib's=E2=80=A6=20(#8?= =?UTF-8?q?84)?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Summary: This PR fixes the MacOS build missing OpenMP include through the use of foundation lib's dependancies Pull Request resolved: https://github.com/mapillary/OpenSfM/pull/884 Reviewed By: fabianschenk Differential Revision: D35006993 Pulled By: YanNoun fbshipit-source-id: 513050371101b1521e54f0546e0c4c537c698457 --- opensfm/src/map/CMakeLists.txt | 2 ++ 1 file changed, 2 insertions(+) diff --git a/opensfm/src/map/CMakeLists.txt b/opensfm/src/map/CMakeLists.txt index b6f67bcdb..804ffd7d0 100644 --- a/opensfm/src/map/CMakeLists.txt +++ b/opensfm/src/map/CMakeLists.txt @@ -25,6 +25,7 @@ target_link_libraries(map PRIVATE geo geometry + foundation ) target_include_directories(map @@ -38,6 +39,7 @@ target_link_libraries(pymap PRIVATE map geometry + foundation bundle pybind11 ) From 271338e28bb0a8cdd3158c1b53148d44e7be2405 Mon Sep 17 00:00:00 2001 From: Yann Noutary Date: Thu, 24 Mar 2022 07:08:22 -0700 Subject: [PATCH 46/81] fix: strictly use datasets images when constructing merged shots metadata Summary: This Diff makes sure that only shots that are in the dataset are being used when constructing the merged's shots metadata : - We added checks in `SyntheticDataset` to repro the issue (we were using a subset already but no checking in most accessors) - We added a presence check in the assigning function Reviewed By: paulinus, tobias-o Differential Revision: D35081796 fbshipit-source-id: 
3a6fc87b74d0a86ff2007a6faff755d1ee206ccc --- opensfm/synthetic_data/synthetic_dataset.py | 14 +++++++++++++- 1 file changed, 13 insertions(+), 1 deletion(-) diff --git a/opensfm/synthetic_data/synthetic_dataset.py b/opensfm/synthetic_data/synthetic_dataset.py index a7f237c9a..78b642a52 100644 --- a/opensfm/synthetic_data/synthetic_dataset.py +++ b/opensfm/synthetic_data/synthetic_dataset.py @@ -81,6 +81,10 @@ def __init__( def images(self) -> List[str]: return self.image_list + def _raise_if_absent_image(self, image: str): + if image not in self.image_list: + raise RuntimeError("Image isn't present in the synthetic dataset") + def load_camera_models(self) -> Dict[str, pygeometry.Camera]: return self.reconstruction.cameras @@ -100,12 +104,15 @@ def load_rig_assignments(self) -> Dict[str, List[Tuple[str, str]]]: return rig_assignments def load_exif(self, image: str) -> Dict[str, Any]: + self._raise_if_absent_image(image) return self.exifs[image] def exif_exists(self, image: str) -> bool: - return True + return image in self.image_list def features_exist(self, image: str) -> bool: + if image not in self.image_list: + return False if self.features is None: return False feat = self.features @@ -114,10 +121,12 @@ def features_exist(self, image: str) -> bool: return image in feat def load_words(self, image: str): + self._raise_if_absent_image(image) n_closest = 50 return [image] * n_closest def load_features(self, image: str) -> Optional[oft.FeaturesData]: + self._raise_if_absent_image(image) if not self.features: return None feat = self.features @@ -129,12 +138,15 @@ def save_features(self, image: str, features_data: oft.FeaturesData) -> None: pass def matches_exists(self, image: str) -> bool: + if image not in self.image_list: + return False self._check_and_create_matches() if self.matches is None: return False return True def load_matches(self, image: str) -> Dict[str, np.ndarray]: + self._raise_if_absent_image(image) self._check_and_create_matches() if self.matches is 
not None: return self.matches[image] From 9f9074344b6fc940853948dd77b15b9397a5e964 Mon Sep 17 00:00:00 2001 From: Yann Noutary Date: Mon, 28 Mar 2022 07:07:58 -0700 Subject: [PATCH 47/81] refactor & feat: simplify bundle tests and toy example Summary: This Diff refactors a bit the bundle tests for removing redundant content. We also refactor `test_four_cams_single_reconstruction_non_rigid` in a test with two reconstructions `test_four_cams_double_reconstruction` Reviewed By: DodgySpaniard Differential Revision: D35142471 fbshipit-source-id: cbf98d51e960418b5b49684c31cadd6af3dd3d90 --- opensfm/test/test_bundle.py | 389 +++++++++++++++--------------------- 1 file changed, 160 insertions(+), 229 deletions(-) diff --git a/opensfm/test/test_bundle.py b/opensfm/test/test_bundle.py index 5e64cfb83..6e4019555 100644 --- a/opensfm/test/test_bundle.py +++ b/opensfm/test/test_bundle.py @@ -50,7 +50,9 @@ def test_sigleton(bundle_adjuster: pybundle.BundleAdjuster) -> None: {"1": "rig_cam1"}, False, ) - sa.add_rig_instance_position_prior("1", np.array([1, 0, 0]), np.array([1, 1, 1]), "") + sa.add_rig_instance_position_prior( + "1", np.array([1, 0, 0]), np.array([1, 1, 1]), "" + ) sa.add_absolute_up_vector("1", np.array([0, -1, 0]), 1) sa.add_absolute_pan("1", np.radians(180), 1) @@ -70,7 +72,9 @@ def test_singleton_pan_tilt_roll(bundle_adjuster: pybundle.BundleAdjuster) -> No {"1": "rig_cam1"}, False, ) - sa.add_rig_instance_position_prior("1", np.array([1, 0, 0]), np.array([1, 1, 1]), "") + sa.add_rig_instance_position_prior( + "1", np.array([1, 0, 0]), np.array([1, 1, 1]), "" + ) sa.add_absolute_pan("1", pan, 1) sa.add_absolute_tilt("1", tilt, 1) sa.add_absolute_roll("1", roll, 1) @@ -128,32 +132,36 @@ def test_bundle_projection_fixed_internals(scene_synthetic) -> None: assert reference.cameras["1"].k2 == orig_camera.k2 +def create_shots(bundle_adjuster: pybundle.BundleAdjuster, num_shots: int) -> None: + for i in range(num_shots): + instance_id = str(i + 1) + 
bundle_adjuster.add_rig_instance( + instance_id, + pygeometry.Pose(np.array([0, 0, 0]), np.array([0, 0, 0])), + {instance_id: "cam1"}, + {instance_id: "rig_cam1"}, + False, + ) + + def test_pair(bundle_adjuster: pybundle.BundleAdjuster) -> None: """Simple two camera test""" sa = bundle_adjuster - sa.add_rig_instance( - "1", - pygeometry.Pose(np.array([0, 0, 0]), np.array([0, 0, 0])), - {"1": "cam1"}, - {"1": "rig_cam1"}, - False, - ) - sa.add_rig_instance( - "2", - pygeometry.Pose(np.array([0, 0, 0]), np.array([0, 0, 0])), - {"2": "cam1"}, - {"2": "rig_cam1"}, - False, - ) + create_shots(sa, 2) + sa.add_reconstruction("12", False) sa.add_reconstruction_instance("12", 4, "1") sa.add_reconstruction_instance("12", 4, "2") sa.set_scale_sharing("12", True) sa.add_relative_motion( - pybundle.RelativeMotion("12", "1", "12", "2", np.array([0, 0, 0]), np.array([-1, 0, 0]), 1) + pybundle.RelativeMotion( + "12", "1", "12", "2", np.array([0, 0, 0]), np.array([-1, 0, 0]), 1 + ) ) - sa.add_rig_instance_position_prior("1", np.array([0, 0, 0]), np.array([1, 1, 1]), "") - sa.add_rig_instance_position_prior("2", np.array([2, 0, 0]), np.array([1, 1, 1]), "") + + std_dev = np.array([1, 1, 1]) + sa.add_rig_instance_position_prior("1", np.array([0, 0, 0]), std_dev, "") + sa.add_rig_instance_position_prior("2", np.array([2, 0, 0]), std_dev, "") sa.run() s1 = sa.get_rig_instance_pose("1") @@ -169,20 +177,16 @@ def test_pair(bundle_adjuster: pybundle.BundleAdjuster) -> None: def test_pair_with_points_priors(bundle_adjuster: pybundle.BundleAdjuster) -> None: """Simple two rigs test with a point constraint for anchoring""" sa = bundle_adjuster - sa.add_rig_instance( - "1", - pygeometry.Pose(np.array([1e-3, 1e-3, 1e-3]), np.array([1e-3, 1e-3, 1e-3])), - {"1": "cam1"}, - {"1": "rig_cam1"}, - False, - ) - sa.add_rig_instance( - "2", - pygeometry.Pose(np.array([1e-3, 1e-3, 1e-3]), np.array([1e-3, 1e-3, 1e-3])), - {"2": "cam1"}, - {"2": "rig_cam1"}, - False, - ) + for i in range(2): + instance_id 
= str(i + 1) + sa.add_rig_instance( + instance_id, + pygeometry.Pose(np.array([1e-3, 1e-3, 1e-3]), np.array([1e-3, 1e-3, 1e-3])), + {instance_id: "cam1"}, + {instance_id: "rig_cam1"}, + False, + ) + sa.add_point("p1", np.array([0, 0, 0]), False) sa.add_point("p2", np.array([0, 0, 0]), False) @@ -197,16 +201,19 @@ def test_pair_with_points_priors(bundle_adjuster: pybundle.BundleAdjuster) -> No sa.set_scale_sharing("12", True) sa.add_relative_motion( - pybundle.RelativeMotion("12", "1", "12", "2", np.array([0, 0, 0]), np.array([-1, 0, 0]), 1) + pybundle.RelativeMotion( + "12", "1", "12", "2", np.array([0, 0, 0]), np.array([-1, 0, 0]), 1 + ) ) + std_dev = np.array([1, 1, 1]) sa.add_point_projection_observation("1", "p1", np.array([0, 0]), 1) sa.add_point_projection_observation("2", "p1", np.array([-0.5, 0]), 1) - sa.add_point_prior("p1", np.array([-0.5, 2, 2]), np.array([1, 1, 1]), True) + sa.add_point_prior("p1", np.array([-0.5, 2, 2]), std_dev, True) sa.add_point_projection_observation("2", "p2", np.array([0, 0]), 1) sa.add_point_projection_observation("1", "p2", np.array([0.5, 0]), 1) - sa.add_point_prior("p2", np.array([1.5, 2, 2]), np.array([1, 1, 1]), True) + sa.add_point_prior("p2", np.array([1.5, 2, 2]), std_dev, True) sa.run() s1 = sa.get_rig_instance_pose("1") @@ -226,29 +233,21 @@ def test_pair_with_points_priors(bundle_adjuster: pybundle.BundleAdjuster) -> No def test_pair_non_rigid(bundle_adjuster: pybundle.BundleAdjuster) -> None: """Simple two rigs test""" sa = bundle_adjuster - sa.add_rig_instance( - "1", - pygeometry.Pose(np.array([0, 0, 0]), np.array([0, 0, 0])), - {"1": "cam1"}, - {"1": "rig_cam1"}, - False, - ) - sa.add_rig_instance( - "2", - pygeometry.Pose(np.array([0, 0, 0]), np.array([0, 0, 0])), - {"2": "cam1"}, - {"2": "rig_cam1"}, - False, - ) + create_shots(sa, 2) + sa.add_reconstruction("12", False) sa.add_reconstruction_instance("12", 4, "1") sa.add_reconstruction_instance("12", 4, "2") sa.set_scale_sharing("12", False) 
sa.add_relative_similarity( - pybundle.RelativeSimilarity("12", "1", "12", "2", np.array([0, 0, 0]), np.array([-1, 0, 0]), 1, 1) + pybundle.RelativeSimilarity( + "12", "1", "12", "2", np.array([0, 0, 0]), np.array([-1, 0, 0]), 1, 1 + ) ) - sa.add_rig_instance_position_prior("1", np.array([0, 0, 0]), np.array([1, 1, 1]), "") - sa.add_rig_instance_position_prior("2", np.array([2, 0, 0]), np.array([1, 1, 1]), "") + + std_dev = np.array([1, 1, 1]) + sa.add_rig_instance_position_prior("1", np.array([0, 0, 0]), std_dev, "") + sa.add_rig_instance_position_prior("2", np.array([2, 0, 0]), std_dev, "") sa.run() s1 = sa.get_rig_instance_pose("1") @@ -261,37 +260,13 @@ def test_pair_non_rigid(bundle_adjuster: pybundle.BundleAdjuster) -> None: assert np.allclose(r12.get_scale("2"), 0.5) -def test_four_cams_single_reconstruction(bundle_adjuster: pybundle.BundleAdjuster) -> None: +def test_four_cams_single_reconstruction( + bundle_adjuster: pybundle.BundleAdjuster, +) -> None: """Four rigs, one reconstruction""" sa = bundle_adjuster - sa.add_rig_instance( - "1", - pygeometry.Pose(np.array([0, 0, 0]), np.array([0, 0, 0])), - {"1": "cam1"}, - {"1": "rig_cam1"}, - False, - ) - sa.add_rig_instance( - "2", - pygeometry.Pose(np.array([0, 0, 0]), np.array([0, 0, 0])), - {"2": "cam1"}, - {"2": "rig_cam1"}, - False, - ) - sa.add_rig_instance( - "3", - pygeometry.Pose(np.array([0, 0, 0]), np.array([0, 0, 0])), - {"3": "cam1"}, - {"3": "rig_cam1"}, - False, - ) - sa.add_rig_instance( - "4", - pygeometry.Pose(np.array([0, 0, 0]), np.array([0, 0, 0])), - {"4": "cam1"}, - {"4": "rig_cam1"}, - False, - ) + create_shots(sa, 4) + sa.add_reconstruction("1234", False) sa.add_reconstruction_instance("1234", 1, "1") sa.add_reconstruction_instance("1234", 1, "2") @@ -299,17 +274,25 @@ def test_four_cams_single_reconstruction(bundle_adjuster: pybundle.BundleAdjuste sa.add_reconstruction_instance("1234", 1, "4") sa.set_scale_sharing("1234", True) sa.add_relative_motion( - pybundle.RelativeMotion("1234", 
"1", "1234", "2", np.array([0, 0, 0]), np.array([-1, 0, 0]), 1) + pybundle.RelativeMotion( + "1234", "1", "1234", "2", np.array([0, 0, 0]), np.array([-1, 0, 0]), 1 + ) ) sa.add_relative_motion( - pybundle.RelativeMotion("1234", "1", "1234", "3", np.array([0, 0, 0]), np.array([0, -1, 0]), 1) + pybundle.RelativeMotion( + "1234", "1", "1234", "3", np.array([0, 0, 0]), np.array([0, -1, 0]), 1 + ) ) sa.add_relative_motion( - pybundle.RelativeMotion("1234", "1", "1234", "4", np.array([0, 0, 0]), np.array([0, 0, -1]), 1) + pybundle.RelativeMotion( + "1234", "1", "1234", "4", np.array([0, 0, 0]), np.array([0, 0, -1]), 1 + ) ) - sa.add_rig_instance_position_prior("1", np.array([0, 0, 0]), np.array([1, 1, 1]), "") - sa.add_rig_instance_position_prior("2", np.array([2, 0, 0]), np.array([1, 1, 1]), "") - sa.add_rig_instance_position_prior("3", np.array([0, 2, 0]), np.array([1, 1, 1]), "") + + std_dev = np.array([1, 1, 1]) + sa.add_rig_instance_position_prior("1", np.array([0, 0, 0]), std_dev, "") + sa.add_rig_instance_position_prior("2", np.array([2, 0, 0]), std_dev, "") + sa.add_rig_instance_position_prior("3", np.array([0, 2, 0]), std_dev, "") sa.run() s1 = sa.get_rig_instance_pose("1") @@ -323,134 +306,102 @@ def test_four_cams_single_reconstruction(bundle_adjuster: pybundle.BundleAdjuste assert np.allclose(s4.translation, [0, 0, -2], atol=1e-6) -def test_four_cams_single_reconstruction_non_rigid(bundle_adjuster: pybundle.BundleAdjuster) -> None: - """Four rigs, one reconstruction""" +def test_four_cams_double_reconstruction( + bundle_adjuster: pybundle.BundleAdjuster, +) -> None: + """Four rigs, two reconstruction""" sa = bundle_adjuster - sa.add_rig_instance( - "1", - pygeometry.Pose(np.array([0, 0, 0]), np.array([0, 0, 0])), - {"1": "cam1"}, - {"1": "rig_cam1"}, - False, - ) - sa.add_rig_instance( - "2", - pygeometry.Pose(np.array([0, 0, 0]), np.array([0, 0, 0])), - {"2": "cam1"}, - {"2": "rig_cam1"}, - False, - ) - sa.add_rig_instance( - "3", - 
pygeometry.Pose(np.array([0, 0, 0]), np.array([0, 0, 0])), - {"3": "cam1"}, - {"3": "rig_cam1"}, - False, - ) - sa.add_rig_instance( - "4", - pygeometry.Pose(np.array([0, 0, 0]), np.array([0, 0, 0])), - {"4": "cam1"}, - {"4": "rig_cam1"}, - False, - ) - sa.add_reconstruction("1234", False) - sa.add_reconstruction_instance("1234", 1, "1") - sa.add_reconstruction_instance("1234", 1, "2") - sa.add_reconstruction_instance("1234", 1, "3") - sa.add_reconstruction_instance("1234", 1, "4") - sa.set_scale_sharing("1234", False) + create_shots(sa, 4) + + sa.add_reconstruction("12", False) + sa.add_reconstruction_instance("12", 1, "1") + sa.add_reconstruction_instance("12", 1, "2") + sa.add_reconstruction_instance("12", 1, "3") + sa.set_scale_sharing("12", False) + + sa.add_reconstruction("34", False) + sa.add_reconstruction_instance("34", 1, "2") + sa.add_reconstruction_instance("34", 1, "3") + sa.add_reconstruction_instance("34", 1, "4") + sa.set_scale_sharing("34", False) sa.add_relative_similarity( pybundle.RelativeSimilarity( - "1234", + "12", "1", - "1234", + "12", "2", np.array([0, 0, 0]), - np.array([-1, 0, 0]), + np.array([-0.5, -0.5, -0.5]), 1, 1, ) ) sa.add_relative_similarity( pybundle.RelativeSimilarity( - "1234", + "12", "2", - "1234", + "12", "3", np.array([0, 0, 0]), - np.array([-1, -1, 0]), + np.array([-0.5, -0.5, -0.5]), 1, 1, ) ) + sa.add_relative_similarity( pybundle.RelativeSimilarity( - "1234", - "3", - "1234", - "4", - np.array([0, 0, 0]), - np.array([0, -1, 0]), - 1, - 1, + "34", "3", "34", "4", np.array([0, 0, 0]), np.array([-2, -2, -2]), 1, 1 ) ) - sa.add_rig_instance_position_prior("1", np.array([0, 0, 0]), np.array([1, 1, 1]), "") - sa.add_rig_instance_position_prior("2", np.array([2, 0, 0]), np.array([1, 1, 1]), "") - sa.add_rig_instance_position_prior("3", np.array([4, 2, 0]), np.array([1, 1, 1]), "") - sa.add_rig_instance_position_prior("4", np.array([4, 4, 0]), np.array([1, 1, 1]), "") + sa.add_relative_similarity( + 
pybundle.RelativeSimilarity( + "34", "2", "34", "3", np.array([0, 0, 0]), np.array([-2, -2, -2]), 1, 1 + ) + ) + + std_dev = np.array([1, 1, 1]) + sa.add_rig_instance_position_prior("1", np.array([0, 0, 0]), std_dev, "") + sa.add_rig_instance_position_prior("4", np.array([3, 3, 3]), std_dev, "") sa.run() s1 = sa.get_rig_instance_pose("1") s2 = sa.get_rig_instance_pose("2") s3 = sa.get_rig_instance_pose("3") s4 = sa.get_rig_instance_pose("4") + r12 = sa.get_reconstruction("12") + r34 = sa.get_reconstruction("34") - r1234 = sa.get_reconstruction("1234") + assert np.allclose(s1.get_origin(), [0, 0, 0], atol=1e-6) + assert np.allclose(s2.get_origin(), [1, 1, 1], atol=1e-6) + assert np.allclose(s3.get_origin(), [2, 2, 2], atol=1e-6) + assert np.allclose(s4.get_origin(), [3, 3, 3], atol=1e-6) - assert np.allclose(s1.translation, [0, 0, 0], atol=1e-6) - assert np.allclose(s2.translation, [-2, 0, 0], atol=1e-6) - assert np.allclose(s3.translation, [-4, -2, 0], atol=1e-6) - assert np.allclose(s4.translation, [-4, -4, 0], atol=1e-6) - assert np.allclose(r1234.get_scale("1"), 0.5) - assert np.allclose(r1234.get_scale("2"), 0.5) - assert np.allclose(r1234.get_scale("3"), 0.5) - assert np.allclose(r1234.get_scale("4"), 0.5) + r12 = sa.get_reconstruction("12") + assert np.allclose(r12.get_scale("1"), 0.5) + assert np.allclose(r12.get_scale("2"), 0.5) + assert np.allclose(r12.get_scale("3"), 0.5) + + r34 = sa.get_reconstruction("34") + assert np.allclose(r34.get_scale("2"), 2.0) + assert np.allclose(r34.get_scale("3"), 2.0) + assert np.allclose(r34.get_scale("4"), 2.0) def test_four_cams_one_fixed(bundle_adjuster: pybundle.BundleAdjuster) -> None: """Four rigs, one reconstruction""" sa = bundle_adjuster - sa.add_rig_instance( - "1", - pygeometry.Pose(np.array([0, 0, 0]), np.array([0, 0, 0])), - {"1": "cam1"}, - {"1": "rig_cam1"}, - True, - ) - sa.add_rig_instance( - "2", - pygeometry.Pose(np.array([0, 0, 0]), np.array([0, 0, 0])), - {"2": "cam1"}, - {"2": "rig_cam1"}, - False, - 
) - sa.add_rig_instance( - "3", - pygeometry.Pose(np.array([0, 0, 0]), np.array([0, 0, 0])), - {"3": "cam1"}, - {"3": "rig_cam1"}, - False, - ) - sa.add_rig_instance( - "4", - pygeometry.Pose(np.array([0, 0, 0]), np.array([0, 0, 0])), - {"4": "cam1"}, - {"4": "rig_cam1"}, - False, - ) + for i in range(4): + instance_id = str(i + 1) + sa.add_rig_instance( + instance_id, + pygeometry.Pose(np.array([0, 0, 0]), np.array([0, 0, 0])), + {instance_id: "cam1"}, + {instance_id: "rig_cam1"}, + i == 0, + ) + sa.add_reconstruction("1234", False) sa.add_reconstruction_instance("1234", 1, "1") sa.add_reconstruction_instance("1234", 1, "2") @@ -458,17 +409,25 @@ def test_four_cams_one_fixed(bundle_adjuster: pybundle.BundleAdjuster) -> None: sa.add_reconstruction_instance("1234", 1, "4") sa.set_scale_sharing("1234", True) sa.add_relative_motion( - pybundle.RelativeMotion("1234", "1", "1234", "2", np.array([0, 0, 0]), np.array([-1, 0, 0]), 1) + pybundle.RelativeMotion( + "1234", "1", "1234", "2", np.array([0, 0, 0]), np.array([-1, 0, 0]), 1 + ) ) sa.add_relative_motion( - pybundle.RelativeMotion("1234", "1", "1234", "3", np.array([0, 0, 0]), np.array([0, -1, 0]), 1) + pybundle.RelativeMotion( + "1234", "1", "1234", "3", np.array([0, 0, 0]), np.array([0, -1, 0]), 1 + ) ) sa.add_relative_motion( - pybundle.RelativeMotion("1234", "1", "1234", "4", np.array([0, 0, 0]), np.array([0, 0, -1]), 1) + pybundle.RelativeMotion( + "1234", "1", "1234", "4", np.array([0, 0, 0]), np.array([0, 0, -1]), 1 + ) ) - sa.add_rig_instance_position_prior("1", np.array([100, 0, 0]), np.array([1, 1, 1]), "") - sa.add_rig_instance_position_prior("2", np.array([2, 0, 0]), np.array([1, 1, 1]), "") - sa.add_rig_instance_position_prior("3", np.array([0, 2, 0]), np.array([1, 1, 1]), "") + + std_dev = np.array([1, 1, 1]) + sa.add_rig_instance_position_prior("1", np.array([100, 0, 0]), std_dev, "") + sa.add_rig_instance_position_prior("2", np.array([2, 0, 0]), std_dev, "") + sa.add_rig_instance_position_prior("3", 
np.array([0, 2, 0]), std_dev, "") sa.run() s1 = sa.get_rig_instance_pose("1") @@ -485,34 +444,17 @@ def test_four_cams_one_fixed(bundle_adjuster: pybundle.BundleAdjuster) -> None: def test_linear_motion_prior_position(bundle_adjuster: pybundle.BundleAdjuster) -> None: """Three rigs, middle has no gps info. Translation only""" sa = bundle_adjuster - sa.add_rig_instance( - "1", - pygeometry.Pose(np.array([0, 0, 0]), np.array([0, 0, 0])), - {"1": "cam1"}, - {"1": "rig_cam1"}, - True, - ) - sa.add_rig_instance( - "2", - pygeometry.Pose(np.array([0, 0, 0]), np.array([0, 0, 0])), - {"2": "cam1"}, - {"2": "rig_cam1"}, - False, - ) - sa.add_rig_instance( - "3", - pygeometry.Pose(np.array([0, 0, 0]), np.array([0, 0, 0])), - {"3": "cam1"}, - {"3": "rig_cam1"}, - False, - ) + create_shots(sa, 3) + sa.add_reconstruction("123", False) sa.add_reconstruction_instance("123", 1, "1") sa.add_reconstruction_instance("123", 1, "2") sa.add_reconstruction_instance("123", 1, "3") sa.set_scale_sharing("123", True) - sa.add_rig_instance_position_prior("1", np.array([0, 0, 0]), np.array([1, 1, 1]), "") - sa.add_rig_instance_position_prior("3", np.array([2, 0, 0]), np.array([1, 1, 1]), "") + + std_dev = np.array([1, 1, 1]) + sa.add_rig_instance_position_prior("1", np.array([0, 0, 0]), std_dev, "") + sa.add_rig_instance_position_prior("3", np.array([2, 0, 0]), std_dev, "") sa.add_linear_motion("1", "2", "3", 0.5, 0.1, 0.1) sa.run() @@ -631,27 +573,16 @@ def test_bundle_alignment_prior() -> None: def test_heatmaps_position(bundle_adjuster: pybundle.BundleAdjuster) -> None: """Three cameras. 
Same heatmap different offsets""" sa = bundle_adjuster - sa.add_rig_instance( - "1", - pygeometry.Pose(np.array([0, 0, 0]), np.array([0, 0, 0])), - {"1": "cam1"}, - {"1": "rig_cam1"}, - False, - ) - sa.add_rig_instance( - "2", - pygeometry.Pose(np.array([0, 0, 0]), np.array([0, 0, 0])), - {"2": "cam1"}, - {"2": "rig_cam1"}, - False, - ) - sa.add_rig_instance( - "3", - pygeometry.Pose(np.array([0, 0, 0]), np.array([0, 0, 0])), - {"3": "cam1"}, - {"3": "rig_cam1"}, - False, - ) + for i in range(3): + instance_id = str(i + 1) + sa.add_rig_instance( + instance_id, + pygeometry.Pose(np.array([0, 0, 0]), np.array([0, 0, 0])), + {instance_id: "cam1"}, + {instance_id: "rig_cam1"}, + False, + ) + sa.add_reconstruction("123", True) sa.add_reconstruction_instance("123", 1, "1") sa.add_reconstruction_instance("123", 1, "2") From 70ffef3c899b3bdc4ad59155bb0a73b8d47400a7 Mon Sep 17 00:00:00 2001 From: Yann Noutary Date: Mon, 28 Mar 2022 07:07:58 -0700 Subject: [PATCH 48/81] refactor & fix: unified and correct handling of +/- scale in relative constraints Reviewed By: paulinus Differential Revision: D35043814 fbshipit-source-id: 164e4f40e1b2d3dce4a4a16368aa4605fc9dc237 --- opensfm/src/bundle/bundle_adjuster.h | 61 ++------- opensfm/src/bundle/data/bias.h | 8 +- .../src/bundle/error/relative_motion_errors.h | 82 +++++------ opensfm/src/bundle/pybundle.pyi | 29 +--- opensfm/src/bundle/python/pybind.cc | 22 +-- opensfm/src/bundle/src/bundle_adjuster.cc | 70 ++++------ opensfm/test/test_bundle.py | 128 +++++++++++++----- 7 files changed, 172 insertions(+), 228 deletions(-) diff --git a/opensfm/src/bundle/bundle_adjuster.h b/opensfm/src/bundle/bundle_adjuster.h index 1f9403ea6..8fa00da41 100644 --- a/opensfm/src/bundle/bundle_adjuster.h +++ b/opensfm/src/bundle/bundle_adjuster.h @@ -64,64 +64,34 @@ struct PointProjectionObservation { }; struct RelativeMotion { - RelativeMotion(const std::string &reconstruction_i, - const std::string &rig_instance_i, - const std::string 
&reconstruction_j, + RelativeMotion(const std::string &rig_instance_i, const std::string &rig_instance_j, const Vec3d &rotation, - const Vec3d &translation, double robust_multiplier) { - reconstruction_id_i = reconstruction_i; + const Vec3d &translation, double scale, + double robust_multiplier, bool observed_scale) { rig_instance_id_i = rig_instance_i; - reconstruction_id_j = reconstruction_j; rig_instance_id_j = rig_instance_j; - parameters.resize(Pose::Parameter::NUM_PARAMS); - parameters.segment(Pose::Parameter::RX, 3) = rotation; - parameters.segment(Pose::Parameter::TX, 3) = translation; - scale_matrix.resize(Pose::Parameter::NUM_PARAMS, - Pose::Parameter::NUM_PARAMS); + + const int num_parameters = Similarity::Parameter::NUM_PARAMS; + parameters.resize(num_parameters); + scale_matrix.resize(num_parameters, num_parameters); + + parameters.segment(Similarity::Parameter::RX, 3) = rotation; + parameters.segment(Similarity::Parameter::TX, 3) = translation; + parameters(Similarity::Parameter::SCALE) = scale; scale_matrix.setIdentity(); this->robust_multiplier = robust_multiplier; + this->observed_scale = observed_scale; } - Vec3d GetRotation() const { - return parameters.segment(Pose::Parameter::RX, 3); - } - Vec3d GetTranslation() const { - return parameters.segment(Pose::Parameter::TX, 3); - } - void SetRotation(const Vec3d &r) { - parameters.segment(Pose::Parameter::RX, 3) = r; - } - void SetTranslation(const Vec3d &t) { - parameters.segment(Pose::Parameter::TX, 3) = t; - } void SetScaleMatrix(const MatXd &s) { scale_matrix = s; } - std::string reconstruction_id_i; std::string rig_instance_id_i; - std::string reconstruction_id_j; std::string rig_instance_id_j; VecXd parameters; MatXd scale_matrix; double robust_multiplier; -}; - -struct RelativeSimilarity : public RelativeMotion { - RelativeSimilarity(const std::string &reconstruction_i, - const std::string &rig_instance_i, - const std::string &reconstruction_j, - const std::string &rig_instance_j, const Vec3d 
&rotation, - const Vec3d &translation, double s, - double robust_multiplier) - : RelativeMotion(reconstruction_i, rig_instance_i, reconstruction_j, - rig_instance_j, rotation, translation, - robust_multiplier), - scale(s) { - scale_matrix.resize(Pose::Parameter::NUM_PARAMS + 1, - Pose::Parameter::NUM_PARAMS + 1); - scale_matrix.setIdentity(); - } - double scale; + bool observed_scale; }; struct RelativeRotation { @@ -234,7 +204,6 @@ class BundleAdjuster { // Relative motion constraints void AddRelativeMotion(const RelativeMotion &rm); - void AddRelativeSimilarity(const RelativeSimilarity &rm); void AddRelativeRotation(const RelativeRotation &rr); // Absolute motion constraints @@ -316,9 +285,10 @@ class BundleAdjuster { // minimized data std::map cameras_; - std::map bias_; + std::map bias_; std::map shots_; std::map reconstructions_; + std::map reconstructions_assignments_; std::map points_; std::map rig_cameras_; std::map rig_instances_; @@ -333,7 +303,6 @@ class BundleAdjuster { // relative motion between shots std::vector relative_motions_; - std::vector relative_similarity_; std::vector relative_rotations_; std::vector common_positions_; diff --git a/opensfm/src/bundle/data/bias.h b/opensfm/src/bundle/data/bias.h index d8612f5ed..8c27633a7 100644 --- a/opensfm/src/bundle/data/bias.h +++ b/opensfm/src/bundle/data/bias.h @@ -7,10 +7,10 @@ namespace bundle { -struct Bias : public Data { +struct Similarity : public Data { enum Parameter { RX, RY, RZ, TX, TY, TZ, SCALE, NUM_PARAMS }; - Bias(const std::string &id, const geometry::Similarity &value) + Similarity(const std::string &id, const geometry::Similarity &value) : Data(id, value) { Init(); } @@ -31,14 +31,14 @@ struct Bias : public Data { }; struct SimilarityPriorTransform - : public geometry::Functor { template VecN operator()(T const *parameters, T const *data) const { Vec3 R = ShotRotationFunctor(0, FUNCTOR_NOT_SET)(¶meters); Vec3 t = ShotPositionFunctor(0, FUNCTOR_NOT_SET)(¶meters); - const T *const scale 
= parameters + Bias::Parameter::SCALE; + const T *const scale = parameters + Similarity::Parameter::SCALE; VecN transformed = Eigen::Map>(data).eval(); diff --git a/opensfm/src/bundle/error/relative_motion_errors.h b/opensfm/src/bundle/error/relative_motion_errors.h index 968f71039..6e7cffa13 100644 --- a/opensfm/src/bundle/error/relative_motion_errors.h +++ b/opensfm/src/bundle/error/relative_motion_errors.h @@ -1,20 +1,24 @@ #pragma once +#include #include #include #include +#include #include namespace bundle { struct RelativeMotionError { - RelativeMotionError(const Eigen::VectorXd& Rtij, - const Eigen::MatrixXd& scale_matrix) - : Rtij_(Rtij), scale_matrix_(scale_matrix) {} + RelativeMotionError(const Eigen::VectorXd& observed_Rts, + const Eigen::MatrixXd& scale_matrix, bool observed_scale) + : observed_Rts_(observed_Rts), + scale_matrix_(scale_matrix), + observed_scale_(observed_scale) {} template - Eigen::Matrix Error(T const* const* p) const { + bool operator()(T const* const* p, T* r) const { // Get rotation and translation values. 
Vec3 Ri = ShotRotationFunctor(shot_i_rig_instance_index_, FUNCTOR_NOT_SET)(p); @@ -24,59 +28,41 @@ struct RelativeMotionError { ShotRotationFunctor(shot_j_rig_instance_index_, FUNCTOR_NOT_SET)(p); Vec3 tj = ShotPositionFunctor(shot_j_rig_instance_index_, FUNCTOR_NOT_SET)(p); - Eigen::Matrix residual; - - // Compute rotation residual: log( Rij Ri Rj^t ) -> log( Rij Ri^t Rj) - const Eigen::Matrix Rij = - Rtij_.segment<3>(Pose::Parameter::RX).cast(); - residual.segment(0, 3) = MultRotations(Rij, (-Ri).eval(), Rj.eval()); - - // Compute translation residual: tij - scale * ( tj - Rj Ri^t ti ) -> tij - // - scale * Rj^t * (ti - tj) - const auto scale = p[scale_index_]; - const auto tij = Rtij_.segment<3>(Pose::Parameter::TX).cast(); - residual.segment(3, 3) = - tij - scale[0] * RotatePoint((-Rj).eval(), (ti - tj).eval()); - return residual; - } + Eigen::Map > residual(r, Similarity::Parameter::NUM_PARAMS); - template - bool operator()(T const* const* p, T* r) const { - Eigen::Map > residual(r); - residual = scale_matrix_.cast() * Error(p); - return true; - } + // Compute rotation residual: log( Rij Ri^t Rj) + const Vec3 Rij = observed_Rts_.segment<3>(Pose::Parameter::RX).cast(); + residual.segment(Pose::Parameter::RX, 3) = + MultRotations(Rij, (-Ri).eval(), Rj.eval()); - Eigen::VectorXd Rtij_; - Eigen::MatrixXd scale_matrix_; - static constexpr int shot_i_rig_instance_index_ = 0; - static constexpr int shot_j_rig_instance_index_ = 1; - static constexpr int scale_index_ = 2; -}; + // Compute translation residual: tij - sj * Rj^t * (ti - tj) + const auto scale_i = p[scale_i_index_]; + const auto scale_j = p[scale_j_index_]; + const auto tij = observed_Rts_.segment<3>(Pose::Parameter::TX).cast(); + residual.segment(Pose::Parameter::TX, 3) = + tij - scale_j[0] * RotatePoint((-Rj).eval(), (ti - tj).eval()); -struct RelativeSimilarityError : public RelativeMotionError { - RelativeSimilarityError(const Eigen::VectorXd& Rtij, double Sij, - const Eigen::MatrixXd& scale_matrix) - 
: RelativeMotionError(Rtij, scale_matrix), Sij_(Sij) {} - - template - bool operator()(T const* const* p, T* r) const { - auto scale_i = p[scale_i_index_]; - auto scale_j = p[scale_j_index_]; - - Eigen::Map > residual(r); - residual.segment(0, 6) = RelativeMotionError::Error(p); - if (scale_i[0] == T(0.0)) { + if (scale_i[0] == T(0.0) || scale_j[0] == T(0.0)) { return false; } - residual(6) = (T(Sij_) - scale_j[0] / scale_i[0]); + + if (observed_scale_) { + const auto Sij = T(observed_Rts_(Similarity::Parameter::SCALE)); + residual(Similarity::Parameter::SCALE) = Sij - scale_j[0] / scale_i[0]; + } else { + residual(Similarity::Parameter::SCALE) = T(0.); + } residual = scale_matrix_.cast() * residual; return true; } - double Sij_; - static constexpr int scale_i_index_ = 2; - static constexpr int scale_j_index_ = 3; + Eigen::VectorXd observed_Rts_; + Eigen::MatrixXd scale_matrix_; + static constexpr int shot_i_rig_instance_index_ = 0; + static constexpr int shot_j_rig_instance_index_ = 1; + bool observed_scale_; + int scale_i_index_{2}; + int scale_j_index_{2}; }; struct RelativeRotationError { diff --git a/opensfm/src/bundle/pybundle.pyi b/opensfm/src/bundle/pybundle.pyi index dd5142feb..5eaa6067a 100644 --- a/opensfm/src/bundle/pybundle.pyi +++ b/opensfm/src/bundle/pybundle.pyi @@ -17,8 +17,7 @@ __all__ = [ "Reconstruction", "ReconstructionAlignment", "RelativeMotion", -"RelativeRotation", -"RelativeSimilarity" +"RelativeRotation" ] class BundleAdjuster: def __init__(self) -> None: ... @@ -38,7 +37,6 @@ class BundleAdjuster: def add_reconstruction_instance(self, arg0: str, arg1: float, arg2: str) -> None: ... def add_relative_motion(self, arg0: RelativeMotion) -> None: ... def add_relative_rotation(self, arg0: RelativeRotation) -> None: ... - def add_relative_similarity(self, arg0: RelativeSimilarity) -> None: ... def add_rig_camera(self, arg0: str, arg1: opensfm.pygeometry.Pose, arg2: opensfm.pygeometry.Pose, arg3: bool) -> None: ... 
def add_rig_instance(self, arg0: str, arg1: opensfm.pygeometry.Pose, arg2: Dict[str, str], arg3: Dict[str, str], arg4: bool) -> None: ... def add_rig_instance_position_prior(self, arg0: str, arg1: numpy.ndarray, arg2: numpy.ndarray, arg3: str) -> None: ... @@ -194,21 +192,9 @@ class ReconstructionAlignment: def get_shot(self, arg0: str) -> RAShot: ... def run(self) -> None: ... class RelativeMotion: - def __init__(self, arg0: str, arg1: str, arg2: str, arg3: str, arg4: numpy.ndarray, arg5: numpy.ndarray, arg6: float) -> None: ... + def __init__(self, arg0: str, arg1: str, arg2: numpy.ndarray, arg3: numpy.ndarray, arg4: float, arg5: float, arg6: bool) -> None: ... def set_scale_matrix(self, arg0: numpy.ndarray) -> None: ... @property - def r(self) -> numpy.ndarray:... - @r.setter - def r(self, arg1: numpy.ndarray) -> None:... - @property - def reconstruction_i(self) -> str:... - @reconstruction_i.setter - def reconstruction_i(self, arg0: str) -> None:... - @property - def reconstruction_j(self) -> str:... - @reconstruction_j.setter - def reconstruction_j(self, arg0: str) -> None:... - @property def rig_instance_i(self) -> str:... @rig_instance_i.setter def rig_instance_i(self, arg0: str) -> None:... @@ -216,10 +202,6 @@ class RelativeMotion: def rig_instance_j(self) -> str:... @rig_instance_j.setter def rig_instance_j(self, arg0: str) -> None:... - @property - def t(self) -> numpy.ndarray:... - @t.setter - def t(self, arg1: numpy.ndarray) -> None:... class RelativeRotation: def __init__(self, arg0: str, arg1: str, arg2: numpy.ndarray) -> None: ... def set_scale_matrix(self, arg0: numpy.ndarray) -> None: ... @@ -235,10 +217,3 @@ class RelativeRotation: def shot_j(self) -> str:... @shot_j.setter def shot_j(self, arg0: str) -> None:... -class RelativeSimilarity: - def __init__(self, arg0: str, arg1: str, arg2: str, arg3: str, arg4: numpy.ndarray, arg5: numpy.ndarray, arg6: float, arg7: float) -> None: ... - def set_scale_matrix(self, arg0: numpy.ndarray) -> None: ... 
- @property - def scale(self) -> float:... - @scale.setter - def scale(self, arg0: float) -> None:... diff --git a/opensfm/src/bundle/python/pybind.cc b/opensfm/src/bundle/python/pybind.cc index 20fb74f12..2075a48e5 100644 --- a/opensfm/src/bundle/python/pybind.cc +++ b/opensfm/src/bundle/python/pybind.cc @@ -10,30 +10,14 @@ PYBIND11_MODULE(pybundle, m) { py::class_(m, "RelativeMotion") .def(py::init()) - .def_readwrite("reconstruction_i", - &bundle::RelativeMotion::reconstruction_id_i) + const Eigen::Vector3d &, const Eigen::Vector3d &, double, + double, bool>()) .def_readwrite("rig_instance_i", &bundle::RelativeMotion::rig_instance_id_i) - .def_readwrite("reconstruction_j", - &bundle::RelativeMotion::reconstruction_id_j) .def_readwrite("rig_instance_j", &bundle::RelativeMotion::rig_instance_id_j) - .def_property("r", &bundle::RelativeMotion::GetRotation, - &bundle::RelativeMotion::SetRotation) - .def_property("t", &bundle::RelativeMotion::GetTranslation, - &bundle::RelativeMotion::SetTranslation) .def("set_scale_matrix", &bundle::RelativeMotion::SetScaleMatrix); - py::class_(m, "RelativeSimilarity") - .def(py::init()) - .def_readwrite("scale", &bundle::RelativeSimilarity::scale) - .def("set_scale_matrix", &bundle::RelativeSimilarity::SetScaleMatrix); - py::class_(m, "RelativeRotation") .def(py::init()) @@ -91,8 +75,6 @@ PYBIND11_MODULE(pybundle, m) { .def("add_point_projection_observation", &bundle::BundleAdjuster::AddPointProjectionObservation) .def("add_relative_motion", &bundle::BundleAdjuster::AddRelativeMotion) - .def("add_relative_similarity", - &bundle::BundleAdjuster::AddRelativeSimilarity) .def("add_relative_rotation", &bundle::BundleAdjuster::AddRelativeRotation) .def("add_common_position", &bundle::BundleAdjuster::AddCommonPosition) diff --git a/opensfm/src/bundle/src/bundle_adjuster.cc b/opensfm/src/bundle/src/bundle_adjuster.cc index 513e01d30..171dad2ab 100644 --- a/opensfm/src/bundle/src/bundle_adjuster.cc +++ 
b/opensfm/src/bundle/src/bundle_adjuster.cc @@ -11,6 +11,8 @@ #include #include +#include "bundle/data/bias.h" + namespace { bool IsRigCameraUseful(bundle::RigCamera &rig_camera) { return !(rig_camera.GetParametersToOptimize().empty() && @@ -100,7 +102,7 @@ void BundleAdjuster::SetCameraBias(const std::string &camera_id, if (bias_exists == bias_.end()) { throw std::runtime_error("Camera " + camera_id + " doesn't exist."); } - bias_exists->second = Bias(camera_id, bias); + bias_exists->second = Similarity(camera_id, bias); } void BundleAdjuster::AddRigInstance( @@ -204,6 +206,7 @@ void BundleAdjuster::AddReconstructionInstance( return; } find->second.scales[instance_id] = scale; + reconstructions_assignments_[instance_id] = reconstruction_id; } void BundleAdjuster::AddPoint(const std::string &id, const Vec3d &position, @@ -250,10 +253,6 @@ void BundleAdjuster::AddRelativeMotion(const RelativeMotion &rm) { relative_motions_.push_back(rm); } -void BundleAdjuster::AddRelativeSimilarity(const RelativeSimilarity &rm) { - relative_similarity_.push_back(rm); -} - void BundleAdjuster::AddRelativeRotation(const RelativeRotation &rr) { relative_rotations_.push_back(rr); } @@ -729,7 +728,7 @@ void BundleAdjuster::Run() { new ceres::DynamicAutoDiffCostFunction(position_prior); cost_function->SetNumResiduals(3); cost_function->AddParameterBlock(Pose::Parameter::NUM_PARAMS); - cost_function->AddParameterBlock(Bias::Parameter::NUM_PARAMS); + cost_function->AddParameterBlock(Similarity::Parameter::NUM_PARAMS); cost_function->AddParameterBlock(1); problem.AddResidualBlock( cost_function, nullptr, i.second.GetValueData().data(), @@ -776,59 +775,36 @@ void BundleAdjuster::Run() { ceres::LossFunction *relative_motion_loss = CreateLossFunction(relative_motion_loss_name_, robust_threshold); - auto *relative_motion = - new RelativeMotionError(rp.parameters, rp.scale_matrix); + auto *relative_motion = new RelativeMotionError( + rp.parameters, rp.scale_matrix, rp.observed_scale); auto 
*cost_function = new ceres::DynamicAutoDiffCostFunction( relative_motion); cost_function->AddParameterBlock(6); cost_function->AddParameterBlock(6); cost_function->AddParameterBlock(1); - cost_function->SetNumResiduals(6); + cost_function->SetNumResiduals(Similarity::Parameter::NUM_PARAMS); auto &rig_instance_i = rig_instances_.at(rp.rig_instance_id_i); auto &rig_instance_j = rig_instances_.at(rp.rig_instance_id_j); - auto parameter_blocks = std::vector( - {rig_instance_i.GetValueData().data(), - rig_instance_j.GetValueData().data(), - reconstructions_[rp.reconstruction_id_i].GetScalePtr( - rp.rig_instance_id_i)}); - - problem.AddResidualBlock(cost_function, relative_motion_loss, - parameter_blocks); - } - - // Add relative similarity errors - for (auto &rp : relative_similarity_) { - double robust_threshold = - relative_motion_loss_threshold_ * rp.robust_multiplier; - ceres::LossFunction *relative_similarity_loss = - CreateLossFunction(relative_motion_loss_name_, robust_threshold); - - auto *relative_similarity = - new RelativeSimilarityError(rp.parameters, rp.scale, rp.scale_matrix); - auto *cost_function = - new ceres::DynamicAutoDiffCostFunction( - relative_similarity); - cost_function->AddParameterBlock(6); - cost_function->AddParameterBlock(6); - cost_function->AddParameterBlock(1); - cost_function->AddParameterBlock(1); - cost_function->SetNumResiduals(7); - - auto &rig_instance_i = rig_instances_.at(rp.rig_instance_id_i); - auto &rig_instance_j = rig_instances_.at(rp.rig_instance_id_j); + auto *scale_i = + reconstructions_[reconstructions_assignments_.at(rp.rig_instance_id_i)] + .GetScalePtr(rp.rig_instance_id_i); + auto *scale_j = + reconstructions_[reconstructions_assignments_.at(rp.rig_instance_id_j)] + .GetScalePtr(rp.rig_instance_id_j); + auto parameter_blocks = + std::vector({rig_instance_i.GetValueData().data(), + rig_instance_j.GetValueData().data(), scale_i}); - auto parameter_blocks = std::vector( - {rig_instance_i.GetValueData().data(), - 
rig_instance_j.GetValueData().data(), - reconstructions_[rp.reconstruction_id_i].GetScalePtr( - rp.rig_instance_id_i), - reconstructions_[rp.reconstruction_id_j].GetScalePtr( - rp.rig_instance_id_j)}); + if (scale_i != scale_j) { + cost_function->AddParameterBlock(1); + relative_motion->scale_j_index_ = parameter_blocks.size(); + parameter_blocks.push_back(scale_j); + } - problem.AddResidualBlock(cost_function, relative_similarity_loss, + problem.AddResidualBlock(cost_function, relative_motion_loss, parameter_blocks); } diff --git a/opensfm/test/test_bundle.py b/opensfm/test/test_bundle.py index 6e4019555..eb613833d 100644 --- a/opensfm/test/test_bundle.py +++ b/opensfm/test/test_bundle.py @@ -155,7 +155,7 @@ def test_pair(bundle_adjuster: pybundle.BundleAdjuster) -> None: sa.set_scale_sharing("12", True) sa.add_relative_motion( pybundle.RelativeMotion( - "12", "1", "12", "2", np.array([0, 0, 0]), np.array([-1, 0, 0]), 1 + "1", "2", np.array([0, 0, 0]), np.array([-1, 0, 0]), 1, 1, False ) ) @@ -202,7 +202,13 @@ def test_pair_with_points_priors(bundle_adjuster: pybundle.BundleAdjuster) -> No sa.set_scale_sharing("12", True) sa.add_relative_motion( pybundle.RelativeMotion( - "12", "1", "12", "2", np.array([0, 0, 0]), np.array([-1, 0, 0]), 1 + "1", + "2", + np.array([0, 0, 0]), + np.array([-1, 0, 0]), + 1, + 1, + False, ) ) @@ -239,9 +245,9 @@ def test_pair_non_rigid(bundle_adjuster: pybundle.BundleAdjuster) -> None: sa.add_reconstruction_instance("12", 4, "1") sa.add_reconstruction_instance("12", 4, "2") sa.set_scale_sharing("12", False) - sa.add_relative_similarity( - pybundle.RelativeSimilarity( - "12", "1", "12", "2", np.array([0, 0, 0]), np.array([-1, 0, 0]), 1, 1 + sa.add_relative_motion( + pybundle.RelativeMotion( + "1", "2", np.array([0, 0, 0]), np.array([-1, 0, 0]), 1, 1, False ) ) @@ -256,7 +262,7 @@ def test_pair_non_rigid(bundle_adjuster: pybundle.BundleAdjuster) -> None: assert np.allclose(s1.translation, [0, 0, 0], atol=1e-6) assert 
np.allclose(s2.translation, [-2, 0, 0], atol=1e-6) - assert np.allclose(r12.get_scale("1"), 0.5) + assert np.allclose(r12.get_scale("1"), 4.0) assert np.allclose(r12.get_scale("2"), 0.5) @@ -273,19 +279,40 @@ def test_four_cams_single_reconstruction( sa.add_reconstruction_instance("1234", 1, "3") sa.add_reconstruction_instance("1234", 1, "4") sa.set_scale_sharing("1234", True) + + relative_scale = 1 + robust = 1 sa.add_relative_motion( pybundle.RelativeMotion( - "1234", "1", "1234", "2", np.array([0, 0, 0]), np.array([-1, 0, 0]), 1 + "1", + "2", + np.array([0, 0, 0]), + np.array([-1, 0, 0]), + relative_scale, + robust, + False, ) ) sa.add_relative_motion( pybundle.RelativeMotion( - "1234", "1", "1234", "3", np.array([0, 0, 0]), np.array([0, -1, 0]), 1 + "1", + "3", + np.array([0, 0, 0]), + np.array([0, -1, 0]), + relative_scale, + robust, + False, ) ) sa.add_relative_motion( pybundle.RelativeMotion( - "1234", "1", "1234", "4", np.array([0, 0, 0]), np.array([0, 0, -1]), 1 + "1", + "4", + np.array([0, 0, 0]), + np.array([0, 0, -1]), + relative_scale, + robust, + False, ) ) @@ -316,48 +343,58 @@ def test_four_cams_double_reconstruction( sa.add_reconstruction("12", False) sa.add_reconstruction_instance("12", 1, "1") sa.add_reconstruction_instance("12", 1, "2") - sa.add_reconstruction_instance("12", 1, "3") sa.set_scale_sharing("12", False) sa.add_reconstruction("34", False) - sa.add_reconstruction_instance("34", 1, "2") sa.add_reconstruction_instance("34", 1, "3") sa.add_reconstruction_instance("34", 1, "4") sa.set_scale_sharing("34", False) - sa.add_relative_similarity( - pybundle.RelativeSimilarity( - "12", + relative_scale = 1 + robust = 1 + sa.add_relative_motion( + pybundle.RelativeMotion( "1", - "12", "2", np.array([0, 0, 0]), np.array([-0.5, -0.5, -0.5]), - 1, - 1, + relative_scale, + robust, + True, ) ) - sa.add_relative_similarity( - pybundle.RelativeSimilarity( - "12", - "2", - "12", + sa.add_relative_motion( + pybundle.RelativeMotion( "3", + "2", 
np.array([0, 0, 0]), - np.array([-0.5, -0.5, -0.5]), - 1, - 1, + np.array([0.5, 0.5, 0.5]), + relative_scale, + robust, + False, ) ) - sa.add_relative_similarity( - pybundle.RelativeSimilarity( - "34", "3", "34", "4", np.array([0, 0, 0]), np.array([-2, -2, -2]), 1, 1 + sa.add_relative_motion( + pybundle.RelativeMotion( + "3", + "4", + np.array([0, 0, 0]), + np.array([-2, -2, -2]), + relative_scale, + robust, + True, ) ) - sa.add_relative_similarity( - pybundle.RelativeSimilarity( - "34", "2", "34", "3", np.array([0, 0, 0]), np.array([-2, -2, -2]), 1, 1 + sa.add_relative_motion( + pybundle.RelativeMotion( + "2", + "3", + np.array([0, 0, 0]), + np.array([-2, -2, -2]), + relative_scale, + robust, + False, ) ) @@ -381,10 +418,8 @@ def test_four_cams_double_reconstruction( r12 = sa.get_reconstruction("12") assert np.allclose(r12.get_scale("1"), 0.5) assert np.allclose(r12.get_scale("2"), 0.5) - assert np.allclose(r12.get_scale("3"), 0.5) r34 = sa.get_reconstruction("34") - assert np.allclose(r34.get_scale("2"), 2.0) assert np.allclose(r34.get_scale("3"), 2.0) assert np.allclose(r34.get_scale("4"), 2.0) @@ -408,19 +443,40 @@ def test_four_cams_one_fixed(bundle_adjuster: pybundle.BundleAdjuster) -> None: sa.add_reconstruction_instance("1234", 1, "3") sa.add_reconstruction_instance("1234", 1, "4") sa.set_scale_sharing("1234", True) + + relative_scale = 1 + robust = 1 sa.add_relative_motion( pybundle.RelativeMotion( - "1234", "1", "1234", "2", np.array([0, 0, 0]), np.array([-1, 0, 0]), 1 + "1", + "2", + np.array([0, 0, 0]), + np.array([-1, 0, 0]), + relative_scale, + robust, + False, ) ) sa.add_relative_motion( pybundle.RelativeMotion( - "1234", "1", "1234", "3", np.array([0, 0, 0]), np.array([0, -1, 0]), 1 + "1", + "3", + np.array([0, 0, 0]), + np.array([0, -1, 0]), + relative_scale, + robust, + False, ) ) sa.add_relative_motion( pybundle.RelativeMotion( - "1234", "1", "1234", "4", np.array([0, 0, 0]), np.array([0, 0, -1]), 1 + "1", + "4", + np.array([0, 0, 0]), + 
np.array([0, 0, -1]), + relative_scale, + robust, + False, ) ) From 0214dc3fd3ef347a866620ff3193bff0f83c8eef Mon Sep 17 00:00:00 2001 From: Saijin-Naib <19295950+Saijin-Naib@users.noreply.github.com> Date: Tue, 29 Mar 2022 03:09:12 -0700 Subject: [PATCH 49/81] Update opensfm/pair_selection.py (#879) Summary: Fix "altide" for "altitude". Pull Request resolved: https://github.com/mapillary/OpenSfM/pull/879 Reviewed By: YanNoun Differential Revision: D35209170 Pulled By: fabianschenk fbshipit-source-id: 91226e7989ca34ce0360b4d1154da253271cab63 --- opensfm/pairs_selection.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/opensfm/pairs_selection.py b/opensfm/pairs_selection.py index 819345b8f..af13d3297 100644 --- a/opensfm/pairs_selection.py +++ b/opensfm/pairs_selection.py @@ -96,7 +96,7 @@ def find_best_altitude( extrema = -coeffs[1] / (2 * coeffs[0]) if extrema < 0: logger.info( - f"Altiude is negative ({extrema}) : viewing directions are probably divergent. Using default altide of {DEFAULT_Z}" + f"Altitude is negative ({extrema}) : viewing directions are probably divergent. 
Using default altitude of {DEFAULT_Z}" ) extrema = DEFAULT_Z return extrema @@ -105,7 +105,7 @@ def find_best_altitude( def get_representative_points( images: List[str], exifs: Dict[str, Any], reference: geo.TopocentricConverter ) -> Dict[str, np.ndarray]: - """Return a topiocentric point for each image, that is suited to run distance-based pair selection.""" + """Return a topocentric point for each image, that is suited to run distance-based pair selection.""" origin = {} directions = {} From 5d9d6c5d9fc9ac4ed532d1e796aa9a181f1fc8d0 Mon Sep 17 00:00:00 2001 From: Manuel Lopez Antequera Date: Tue, 5 Apr 2022 01:06:39 -0700 Subject: [PATCH 50/81] Not all control points are ground control points / have LLA Reviewed By: fabianschenk Differential Revision: D35310844 fbshipit-source-id: 0471b47ca8447e186b5f1be29eff6fd2b06fe4e7 --- opensfm/dataset.py | 22 ++++++++++------------ 1 file changed, 10 insertions(+), 12 deletions(-) diff --git a/opensfm/dataset.py b/opensfm/dataset.py index e0215a577..9be9f512f 100644 --- a/opensfm/dataset.py +++ b/opensfm/dataset.py @@ -79,10 +79,7 @@ def load_image_list(self) -> None: self._set_image_path(image_list_path) if self.data_path and not self.image_list: - raise IOError( - "No Images found in {}" - .format(image_list_path) - ) + raise IOError("No Images found in {}".format(image_list_path)) def images(self) -> List[str]: """List of file names of all images in the dataset.""" @@ -986,14 +983,15 @@ def invent_reference_from_gps_and_gcp( if not wlat and not wlon: for gcp in data.load_ground_control_points(): - lat += gcp.lla["latitude"] - lon += gcp.lla["longitude"] - wlat += 1 - wlon += 1 - - if gcp.has_altitude: - alt += gcp.lla["altitude"] - walt += 1 + if gcp.lla: + lat += gcp.lla["latitude"] + lon += gcp.lla["longitude"] + wlat += 1 + wlon += 1 + + if gcp.has_altitude: + alt += gcp.lla["altitude"] + walt += 1 if wlat: lat /= wlat From 318a77020a4692f65c126387462be5b942f86034 Mon Sep 17 00:00:00 2001 From: Fabian Schenk Date: Tue, 
5 Apr 2022 06:26:32 -0700 Subject: [PATCH 51/81] Add pyre annotations for more OpenSfM stuff Summary: Infer more annotations for OpenSfM Reviewed By: YanNoun Differential Revision: D35112003 fbshipit-source-id: d0cf143eb6b614e79cc9cb31c9cd357ed16611d8 --- annotation_gui_gcp/lib/geometry.py | 6 +- annotation_gui_gcp/lib/views/cad_view.py | 37 +++--- annotation_gui_gcp/main.py | 24 ++-- bin/plot_matches.py | 12 +- opensfm/actions/bundle.py | 3 +- opensfm/actions/compute_statistics.py | 4 +- opensfm/actions/create_rig.py | 8 +- opensfm/actions/create_submodels.py | 16 +-- opensfm/actions/detect_features.py | 6 +- opensfm/actions/export_colmap.py | 37 +++--- opensfm/actions/export_geocoords.py | 2 +- opensfm/actions/export_pmvs.py | 2 +- opensfm/actions/export_visualsfm.py | 10 +- opensfm/actions/extend_reconstruction.py | 5 +- opensfm/actions/extract_metadata.py | 8 +- opensfm/align.py | 2 +- opensfm/config.py | 7 +- opensfm/context.py | 18 +-- opensfm/dataset.py | 2 +- opensfm/dataset_base.py | 2 +- opensfm/exif.py | 113 ++++++++++-------- opensfm/feature_loading.py | 2 +- opensfm/features.py | 2 +- opensfm/features_processing.py | 8 +- opensfm/geo.py | 8 +- opensfm/geotag_from_gpx.py | 38 +++--- opensfm/io.py | 55 +++++---- opensfm/large/tools.py | 60 ++++++---- opensfm/masking.py | 2 +- opensfm/matching.py | 14 +-- opensfm/mesh.py | 6 +- opensfm/multiview.py | 22 ++-- opensfm/pairs_selection.py | 6 +- opensfm/reconstruction.py | 6 +- opensfm/reconstruction_helpers.py | 2 +- opensfm/report.py | 2 +- opensfm/rig.py | 21 +--- opensfm/sensors.py | 6 +- opensfm/synthetic_data/synthetic_dataset.py | 14 +-- opensfm/synthetic_data/synthetic_generator.py | 13 +- opensfm/test/conftest.py | 35 ++++-- opensfm/test/data_generation.py | 2 +- opensfm/test/test_dataset.py | 8 +- opensfm/test/test_datastructures.py | 8 +- opensfm/test/test_io.py | 1 + opensfm/test/test_pairs_selection.py | 7 +- opensfm/tracking.py | 5 +- opensfm/transformations.py | 12 +- opensfm/undistort.py | 2 
+- opensfm/video.py | 7 +- viewer/server.py | 2 +- 51 files changed, 378 insertions(+), 322 deletions(-) diff --git a/annotation_gui_gcp/lib/geometry.py b/annotation_gui_gcp/lib/geometry.py index cdd572c75..c10b2daa5 100644 --- a/annotation_gui_gcp/lib/geometry.py +++ b/annotation_gui_gcp/lib/geometry.py @@ -1,7 +1,9 @@ from opensfm import dataset +from numpy import ndarray +from typing import Dict, Tuple -def get_all_track_observations(gcp_database, track_id: str): +def get_all_track_observations(gcp_database, track_id: str) -> Dict[str, ndarray]: print(f"Getting all observations of track {track_id}") data = dataset.DataSet(gcp_database.path) tracks_manager = data.load_tracks_manager() @@ -9,7 +11,7 @@ def get_all_track_observations(gcp_database, track_id: str): return {shot_id: obs.point for shot_id, obs in track_obs.items()} -def get_tracks_visible_in_image(gcp_database, image_key, min_len: int=5): +def get_tracks_visible_in_image(gcp_database, image_key, min_len: int=5) -> Dict[str, Tuple[ndarray, int]]: print(f"Getting track observations visible in {image_key}") data = dataset.DataSet(gcp_database.path) tracks_manager = data.load_tracks_manager() diff --git a/annotation_gui_gcp/lib/views/cad_view.py b/annotation_gui_gcp/lib/views/cad_view.py index 116256a97..fb3e46c79 100644 --- a/annotation_gui_gcp/lib/views/cad_view.py +++ b/annotation_gui_gcp/lib/views/cad_view.py @@ -1,17 +1,17 @@ import json import logging from pathlib import Path -from typing import Dict, Any +from typing import Any, Dict, Tuple import rasterio from annotation_gui_gcp.lib.views.web_view import WebView, distinct_colors from flask import send_file from PIL import ImageColor -logger = logging.getLogger(__name__) +logger: logging.Logger = logging.getLogger(__name__) -def _load_georeference_metadata(path_cad_model): +def _load_georeference_metadata(path_cad_model) -> Dict[str, Any]: path_metadata = path_cad_model.with_suffix(".json") if not path_metadata.is_file(): @@ -30,7 +30,7 @@ def 
__init__( route_prefix, path_cad_file, is_geo_reference=False, - ): + )-> None: super().__init__(main_ui, web_app, route_prefix) self.main_ui = main_ui @@ -47,7 +47,7 @@ def __init__( view_func=self.get_model, ) - def get_model(self): + def get_model(self) -> Any: return send_file(self.cad_path, mimetype="application/octet-stream") def process_client_message(self, data: Dict[str, Any]) -> None: @@ -59,7 +59,7 @@ def process_client_message(self, data: Dict[str, Any]) -> None: else: raise ValueError(f"Unknown event {event}") - def add_remove_update_point_observation(self, point_coordinates=None): + def add_remove_update_point_observation(self, point_coordinates=None)->None: gcp_manager = self.main_ui.gcp_manager active_gcp = self.main_ui.curr_point if active_gcp is None: @@ -72,12 +72,9 @@ def add_remove_update_point_observation(self, point_coordinates=None): ) # Add the new observation - if point_coordinates is not None: - lla = ( - self.xyz_to_latlon(*point_coordinates) - if self.is_geo_reference - else None - ) + if point_coordinates is not None and self.is_geo_reference is not None: + lla = self.xyz_to_latlon(*point_coordinates) + geo = { "latitude": lla[0], "longitude": lla[1], @@ -97,23 +94,23 @@ def add_remove_update_point_observation(self, point_coordinates=None): ) self.main_ui.populate_gcp_list() - def display_points(self): + def display_points(self) -> None: pass - def refocus(self, lat, lon): + def refocus(self, lat, lon)->None: x, y, z = self.latlon_to_xyz(lat, lon) self.send_sse_message( {"x": x, "y": y, "z": z}, event_type="move_camera", ) - def highlight_gcp_reprojection(self, *args, **kwargs): + def highlight_gcp_reprojection(self, *args, **kwargs)->None: pass - def populate_image_list(self, *args, **kwargs): + def populate_image_list(self, *args, **kwargs)->None: pass - def latlon_to_xyz(self, lat, lon): + def latlon_to_xyz(self, lat, lon) -> Tuple[float, float, float]: xs, ys, zs = rasterio.warp.transform("EPSG:4326", self.crs, [lon], [lat], [0]) x 
= xs[0] * self.scale - self.offset[0] y = ys[0] * self.scale - self.offset[1] @@ -121,7 +118,7 @@ def latlon_to_xyz(self, lat, lon): y, z = z, -y return x, y, z - def xyz_to_latlon(self, x, y, z): + def xyz_to_latlon(self, x, y, z) -> Tuple[float, float, float]: y, z = -z, y # Add offset (cm) and transform to m @@ -131,13 +128,13 @@ def xyz_to_latlon(self, x, y, z): lons, lats, alts = rasterio.warp.transform(self.crs, "EPSG:4326", [x], [y], [z]) return lats[0], lons[0], alts[0] - def load_georeference_metadata(self, path_cad_model): + def load_georeference_metadata(self, path_cad_model)->None: metadata = _load_georeference_metadata(path_cad_model) self.scale = metadata["scale"] self.crs = metadata["crs"] self.offset = metadata["offset"] - def sync_to_client(self): + def sync_to_client(self)->None: """ Sends all the data required to initialize or sync the CAD view """ diff --git a/annotation_gui_gcp/main.py b/annotation_gui_gcp/main.py index 8710cba4f..df33f4925 100644 --- a/annotation_gui_gcp/main.py +++ b/annotation_gui_gcp/main.py @@ -2,7 +2,9 @@ import json import typing as t from collections import OrderedDict, defaultdict +from os import PathLike from pathlib import Path +from typing import Union import numpy as np from annotation_gui_gcp.lib import GUI @@ -12,7 +14,7 @@ from opensfm import dataset, io -def get_parser(): +def get_parser() -> argparse.ArgumentParser: parser = argparse.ArgumentParser(description=__doc__) parser.add_argument("dataset", help="dataset") parser.add_argument( @@ -52,7 +54,7 @@ def get_parser(): return parser -def file_sanity_check(root, seq_dict, fname): +def file_sanity_check(root, seq_dict, fname) -> t.Set[str]: # Images available under ./images for a sanity check available_images = {p.name for p in (root / "images").iterdir()} keys_in_seq_dict = {im_key for seq_keys in seq_dict.values() for im_key in seq_keys} @@ -88,7 +90,9 @@ def load_rig_assignments(root: Path) -> t.Dict[str, t.List[str]]: def load_sequence_database_from_file( 
- root, fname="sequence_database.json", skip_missing: bool=False + root: Path, + fname: Union[PathLike[str], str] = "sequence_database.json", + skip_missing: bool = False, ): """ Simply loads a sequence file and returns it. @@ -119,7 +123,7 @@ def load_sequence_database_from_file( return seq_dict -def load_shots_from_reconstructions(path, min_ims): +def load_shots_from_reconstructions(path, min_ims) -> t.List[t.List[str]]: data = dataset.DataSet(path) reconstructions = data.load_reconstruction() @@ -150,7 +154,9 @@ def load_shots_from_reconstructions(path, min_ims): return output -def group_by_reconstruction(args, groups_from_sequence_database): +def group_by_reconstruction( + args, groups_from_sequence_database +) -> t.Dict[str, t.List[str]]: all_recs_shots = load_shots_from_reconstructions( args.dataset, min_ims=args.min_images_in_reconstruction ) @@ -173,7 +179,7 @@ def group_by_reconstruction(args, groups_from_sequence_database): return groups -def group_images(args): +def group_images(args) -> t.Dict[str, t.List[str]]: """ Groups the images to be shown in different windows/views @@ -203,11 +209,11 @@ def group_images(args): return groups_from_sequence_database -def find_suitable_cad_paths(path_cad_files, path_dataset, n_paths: int=6): +def find_suitable_cad_paths(path_cad_files: Path, path_dataset, n_paths: int = 6): if path_cad_files is None: return [] - def latlon_from_meta(path_cad): + def latlon_from_meta(path_cad) -> t.Tuple[float, float]: path_meta = path_cad.with_suffix(".json") with open(path_meta) as f: meta = json.load(f) @@ -231,7 +237,7 @@ def latlon_from_meta(path_cad): return [cad_files[i] for i in ixs_sort] -def init_ui(): +def init_ui() -> t.Tuple[Flask, argparse.Namespace]: app = Flask(__name__) parser = get_parser() args = parser.parse_args() diff --git a/bin/plot_matches.py b/bin/plot_matches.py index 9e7b955f7..94f9d7fbf 100755 --- a/bin/plot_matches.py +++ b/bin/plot_matches.py @@ -10,9 +10,11 @@ from opensfm import dataset from opensfm 
import features from opensfm import io +from numpy import ndarray +from typing import List -def plot_matches(im1, im2, p1, p2): +def plot_matches(im1, im2, p1: ndarray, p2: ndarray) -> None: h1, w1, c = im1.shape h2, w2, c = im2.shape image = np.zeros((max(h1, h2), w1 + w2, 3), dtype=im1.dtype) @@ -29,7 +31,7 @@ def plot_matches(im1, im2, p1, p2): pl.plot(p2[:, 0] + w1, p2[:, 1], "ob") -def plot_graph(data): +def plot_graph(data) -> None: cmap = cm.get_cmap("viridis") connectivity = {} for im1 in images: @@ -70,7 +72,7 @@ def plot_graph(data): pl.savefig(os.path.join(data.data_path, "matchgraph.png")) -def plot_matches_for_images(data, image, images): +def plot_matches_for_images(data, image, images) -> None: if image: pairs = [(image, o) for o in images if o != image] elif images: @@ -122,10 +124,10 @@ def plot_matches_for_images(data, image, images): parser.add_argument( "--save_figs", help="save figures instead of showing them", action="store_true" ) - args = parser.parse_args() + args: argparse.Namespace = parser.parse_args() data = dataset.DataSet(args.dataset) - images = data.images() + images: List[str] = data.images() if args.graph: plot_graph(data) diff --git a/opensfm/actions/bundle.py b/opensfm/actions/bundle.py index dea614365..fa4afdc48 100644 --- a/opensfm/actions/bundle.py +++ b/opensfm/actions/bundle.py @@ -1,8 +1,9 @@ import opensfm.reconstruction as orec from opensfm.dataset_base import DataSetBase +from typing import Optional -def run_dataset(dataset: DataSetBase, input, output) -> None: +def run_dataset(dataset: DataSetBase, input: Optional[str], output: Optional[str]) -> None: """Bundle a reconstructions. 
Args: diff --git a/opensfm/actions/compute_statistics.py b/opensfm/actions/compute_statistics.py index f5b11ec17..736c11706 100644 --- a/opensfm/actions/compute_statistics.py +++ b/opensfm/actions/compute_statistics.py @@ -5,10 +5,10 @@ from opensfm import stats from opensfm.dataset import DataSet -logger = logging.getLogger(__name__) +logger: logging.Logger = logging.getLogger(__name__) -def run_dataset(data: DataSet, diagram_max_points=-1): +def run_dataset(data: DataSet, diagram_max_points: int=-1) -> None: """Compute various staistics of a datasets and write them to 'stats' folder Args: diff --git a/opensfm/actions/create_rig.py b/opensfm/actions/create_rig.py index bc5bb2b8d..823f9f019 100644 --- a/opensfm/actions/create_rig.py +++ b/opensfm/actions/create_rig.py @@ -2,12 +2,14 @@ from opensfm import pymap, rig, reconstruction_helpers as helpers, types from opensfm.dataset import DataSet, DataSetBase +from opensfm.types import Reconstruction +from typing import Dict, List -logger = logging.getLogger(__name__) +logger: logging.Logger = logging.getLogger(__name__) -def run_dataset(data: DataSet, method, definition, output_debug): +def run_dataset(data: DataSet, method, definition: Dict[str, str], output_debug) -> None: """Given a dataset that contains rigs, construct rig data files. 
Args: @@ -27,7 +29,7 @@ def run_dataset(data: DataSet, method, definition, output_debug): data.save_reconstruction(reconstructions, "rig_instances.json") -def _reconstruction_from_rigs_and_assignments(data: DataSetBase): +def _reconstruction_from_rigs_and_assignments(data: DataSetBase) -> List[Reconstruction]: assignments = data.load_rig_assignments() rig_cameras = data.load_rig_cameras() diff --git a/opensfm/actions/create_submodels.py b/opensfm/actions/create_submodels.py index b69641376..08ef8e205 100644 --- a/opensfm/actions/create_submodels.py +++ b/opensfm/actions/create_submodels.py @@ -7,10 +7,10 @@ from opensfm.large import tools -logger = logging.getLogger(__name__) +logger: logging.Logger = logging.getLogger(__name__) -def run_dataset(data: DataSet): +def run_dataset(data: DataSet) -> None: """ Split the dataset into smaller submodels. """ meta_data = MetaDataSet(data.data_path) @@ -31,7 +31,7 @@ def run_dataset(data: DataSet): meta_data.create_submodels(meta_data.load_clusters_with_neighbors()) -def _create_image_list(data: DataSet, meta_data): +def _create_image_list(data: DataSet, meta_data) -> None: ills = [] for image in data.images(): exif = data.load_exif(image) @@ -50,7 +50,7 @@ def _create_image_list(data: DataSet, meta_data): meta_data.create_image_list(ills) -def _read_image_groups(meta_data: MetaDataSet): +def _read_image_groups(meta_data: MetaDataSet) -> None: image_cluster = {} cluster_images = defaultdict(list) for image, cluster in meta_data.load_image_groups(): @@ -82,7 +82,7 @@ def _read_image_groups(meta_data: MetaDataSet): meta_data.save_clusters(images, positions, labels, centers) -def _cluster_images(meta_data: MetaDataSet, cluster_size): +def _cluster_images(meta_data: MetaDataSet, cluster_size: float) -> None: images = [] positions = [] for image, lat, lon in meta_data.images_with_gps(): @@ -103,7 +103,7 @@ def _cluster_images(meta_data: MetaDataSet, cluster_size): meta_data.save_clusters(images, positions, labels, centers) -def 
_add_cluster_neighbors(meta_data: MetaDataSet, max_distance): +def _add_cluster_neighbors(meta_data: MetaDataSet, max_distance) -> None: images, positions, labels, centers = meta_data.load_clusters() clusters = tools.add_cluster_neighbors(positions, labels, centers, max_distance) @@ -114,7 +114,7 @@ def _add_cluster_neighbors(meta_data: MetaDataSet, max_distance): meta_data.save_clusters_with_neighbors(image_clusters) -def _save_cluster_neighbors_geojson(meta_data: MetaDataSet): +def _save_cluster_neighbors_geojson(meta_data: MetaDataSet) -> None: image_coordinates = {} for image, lat, lon in meta_data.images_with_gps(): image_coordinates[image] = [lon, lat] @@ -137,7 +137,7 @@ def _save_cluster_neighbors_geojson(meta_data: MetaDataSet): meta_data.save_cluster_with_neighbors_geojson(geojson) -def _save_clusters_geojson(meta_data: MetaDataSet): +def _save_clusters_geojson(meta_data: MetaDataSet) -> None: image_coordinates = {} for image, lat, lon in meta_data.images_with_gps(): image_coordinates[image] = [lon, lat] diff --git a/opensfm/actions/detect_features.py b/opensfm/actions/detect_features.py index 7cf974e20..a84f6791b 100644 --- a/opensfm/actions/detect_features.py +++ b/opensfm/actions/detect_features.py @@ -5,10 +5,10 @@ from opensfm.dataset_base import DataSetBase -logger = logging.getLogger(__name__) +logger: logging.Logger = logging.getLogger(__name__) -def run_dataset(data: DataSetBase): +def run_dataset(data: DataSetBase) -> None: """Compute features for all images.""" start = timer() @@ -17,7 +17,7 @@ def run_dataset(data: DataSetBase): write_report(data, end - start) -def write_report(data: DataSetBase, wall_time: float): +def write_report(data: DataSetBase, wall_time: float) -> None: image_reports = [] for image in data.images(): try: diff --git a/opensfm/actions/export_colmap.py b/opensfm/actions/export_colmap.py index b7d0bfea2..ab275e074 100644 --- a/opensfm/actions/export_colmap.py +++ b/opensfm/actions/export_colmap.py @@ -38,6 +38,7 @@ import 
tempfile import typing as t from struct import pack +from typing import Tuple import numpy as np from opensfm import features @@ -166,26 +167,26 @@ def run_dataset(data: DataSet, binary: bool) -> None: ) -def image_ids_to_pair_id(image_id1, image_id2): +def image_ids_to_pair_id(image_id1, image_id2) -> int: if image_id1 > image_id2: image_id1, image_id2 = image_id2, image_id1 return image_id1 * MAX_IMAGE_ID + image_id2 -def pair_id_to_image_ids(pair_id): +def pair_id_to_image_ids(pair_id) -> Tuple[int, int]: image_id2 = pair_id % MAX_IMAGE_ID image_id1 = (pair_id - image_id2) // MAX_IMAGE_ID return image_id1, image_id2 -def array_to_blob(array): +def array_to_blob(array) -> bytes: if IS_PYTHON3: return array.tobytes() else: return np.getbuffer(array) -def blob_to_array(blob, dtype, shape=(-1,)): +def blob_to_array(blob, dtype, shape: Tuple[int] = (-1,)): if IS_PYTHON3: return np.fromstring(blob, dtype=dtype).reshape(*shape) else: @@ -194,10 +195,10 @@ def blob_to_array(blob, dtype, shape=(-1,)): class COLMAPDatabase(sqlite3.Connection): @staticmethod - def connect(database_path): + def connect(database_path) -> t.Any: return sqlite3.connect(database_path, factory=COLMAPDatabase) - def __init__(self, *args, **kwargs): + def __init__(self, *args, **kwargs) -> None: super(COLMAPDatabase, self).__init__(*args, **kwargs) self.create_tables = lambda: self.executescript(CREATE_ALL) @@ -215,7 +216,7 @@ def __init__(self, *args, **kwargs): def add_camera( self, model, width, height, params, prior_focal_length=False, camera_id=None - ): + ) -> t.Any: params = np.asarray(params, np.float64) cursor = self.execute( "INSERT INTO cameras VALUES (?, ?, ?, ?, ?, ?)", @@ -232,7 +233,7 @@ def add_camera( def add_image( self, name, camera_id, prior_q=(0, 0, 0, 0), prior_t=(0, 0, 0), image_id=None - ): + ) -> t.Any: cursor = self.execute( "INSERT INTO images VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?)", ( @@ -250,7 +251,7 @@ def add_image( ) return cursor.lastrowid - def add_keypoints(self, 
image_id, keypoints): + def add_keypoints(self, image_id, keypoints) -> None: assert len(keypoints.shape) == 2 assert keypoints.shape[1] in [2, 4, 6] @@ -260,14 +261,14 @@ def add_keypoints(self, image_id, keypoints): (image_id,) + keypoints.shape + (array_to_blob(keypoints),), ) - def add_descriptors(self, image_id, descriptors): + def add_descriptors(self, image_id, descriptors) -> None: descriptors = np.ascontiguousarray(descriptors, np.uint8) self.execute( "INSERT INTO descriptors VALUES (?, ?, ?, ?)", (image_id,) + descriptors.shape + (array_to_blob(descriptors),), ) - def add_matches(self, image_id1, image_id2, matches): + def add_matches(self, image_id1, image_id2, matches) -> None: assert len(matches.shape) == 2 assert matches.shape[1] == 2 @@ -283,7 +284,7 @@ def add_matches(self, image_id1, image_id2, matches): def add_two_view_geometry( self, image_id1, image_id2, matches, F=I_3, E=I_3, H=I_3, config=2 - ): + ) -> None: assert len(matches.shape) == 2 assert matches.shape[1] == 2 @@ -352,7 +353,7 @@ def camera_to_colmap_params(camera) -> t.Tuple[float, ...]: raise ValueError("Can't convert {camera.projection_type} to COLMAP") -def export_cameras(data, db): +def export_cameras(data, db) -> t.Tuple[t.Dict[str, int], t.Dict[str, int]]: camera_map = {} for camera_model, camera in data.load_camera_models().items(): if data.camera_models_overrides_exists(): @@ -378,7 +379,7 @@ def export_cameras(data, db): return images_map, camera_map -def export_features(data, db, images_map): +def export_features(data, db, images_map) -> t.Dict[str, np.ndarray]: features_map = {} for image in data.images(): width = data.load_exif(image)["width"] @@ -422,7 +423,7 @@ def export_matches(data, db, features_map, images_map) -> None: db.add_matches(images_map[pair[0]], images_map[pair[1]], inliers) -def export_cameras_reconstruction(data, path, camera_map, binary: bool=False) -> None: +def export_cameras_reconstruction(data, path, camera_map, binary: bool = False) -> None: 
reconstructions = data.load_reconstruction() cameras = {} for reconstruction in reconstructions: @@ -455,7 +456,7 @@ def export_cameras_reconstruction(data, path, camera_map, binary: bool=False) -> def export_images_reconstruction( - data, path, camera_map, images_map, features_map, points_map, binary: bool=False + data, path, camera_map, images_map, features_map, points_map, binary: bool = False ) -> None: reconstructions = data.load_reconstruction() tracks_manager = data.load_tracks_manager() @@ -529,7 +530,7 @@ def export_images_reconstruction( fout.close() -def export_points_reconstruction(data, path, images_map, binary: bool=False): +def export_points_reconstruction(data, path, images_map, binary: bool = False): reconstructions = data.load_reconstruction() tracks_manager = data.load_tracks_manager() @@ -587,7 +588,7 @@ def export_points_reconstruction(data, path, images_map, binary: bool=False): return points_map -def angle_axis_to_quaternion(angle_axis): +def angle_axis_to_quaternion(angle_axis: np.ndarray) -> t.List[float]: angle = np.linalg.norm(angle_axis) x = angle_axis[0] / angle diff --git a/opensfm/actions/export_geocoords.py b/opensfm/actions/export_geocoords.py index a1b64ddc8..67b1f7233 100644 --- a/opensfm/actions/export_geocoords.py +++ b/opensfm/actions/export_geocoords.py @@ -9,7 +9,7 @@ from opensfm.geo import TopocentricConverter from typing import List, Sequence -logger = logging.getLogger(__name__) +logger: logging.Logger = logging.getLogger(__name__) def run_dataset( diff --git a/opensfm/actions/export_pmvs.py b/opensfm/actions/export_pmvs.py index 69f9232e0..264fb2e4a 100644 --- a/opensfm/actions/export_pmvs.py +++ b/opensfm/actions/export_pmvs.py @@ -9,7 +9,7 @@ from opensfm.dataset import DataSet, UndistortedDataSet -logger = logging.getLogger(__name__) +logger: logging.Logger = logging.getLogger(__name__) def run_dataset(data: DataSet, points, image_list, output, undistorted) -> None: diff --git a/opensfm/actions/export_visualsfm.py 
b/opensfm/actions/export_visualsfm.py index fcb41a900..1b1ce0846 100644 --- a/opensfm/actions/export_visualsfm.py +++ b/opensfm/actions/export_visualsfm.py @@ -7,10 +7,10 @@ from opensfm.dataset import DataSet, UndistortedDataSet -logger = logging.getLogger(__name__) +logger: logging.Logger = logging.getLogger(__name__) -def run_dataset(data: DataSet, points, image_list): +def run_dataset(data: DataSet, points, image_list) -> None: udata = data.undistorted_dataset() validate_image_names(data, udata) @@ -31,7 +31,7 @@ def run_dataset(data: DataSet, points, image_list): def export( reconstruction, tracks_manager, udata: UndistortedDataSet, with_points, export_only -): +) -> None: lines = ["NVM_V3", "", str(len(reconstruction.shots))] shot_size_cache = {} shot_index = {} @@ -124,13 +124,13 @@ def export( fout.write("\n".join(lines)) -def image_path(image, udata: UndistortedDataSet): +def image_path(image: str, udata: UndistortedDataSet) -> str: """Path to the undistorted image relative to the dataset path.""" path = udata._undistorted_image_file(image) return os.path.relpath(path, udata.data_path) -def validate_image_names(data: DataSet, udata: UndistortedDataSet): +def validate_image_names(data: DataSet, udata: UndistortedDataSet) -> None: """Check that image files do not have spaces.""" for image in data.images(): filename = image_path(image, udata) diff --git a/opensfm/actions/extend_reconstruction.py b/opensfm/actions/extend_reconstruction.py index 9b1da7b97..2d9bd303d 100644 --- a/opensfm/actions/extend_reconstruction.py +++ b/opensfm/actions/extend_reconstruction.py @@ -1,8 +1,9 @@ -from opensfm import io, reconstruction, types +from opensfm import io, reconstruction from opensfm.dataset_base import DataSetBase +from typing import Optional -def run_dataset(data: DataSetBase, input, output) -> None: +def run_dataset(data: DataSetBase, input: Optional[str], output: Optional[str]) -> None: recs_base = data.load_reconstruction(input) if len(recs_base) == 0: return 
diff --git a/opensfm/actions/extract_metadata.py b/opensfm/actions/extract_metadata.py index 8120def0d..b7f5fcbae 100644 --- a/opensfm/actions/extract_metadata.py +++ b/opensfm/actions/extract_metadata.py @@ -1,16 +1,16 @@ import copy import logging from functools import partial - +from typing import Any, Dict from opensfm import exif from opensfm.dataset_base import DataSetBase -logger = logging.getLogger(__name__) +logger: logging.Logger = logging.getLogger(__name__) logging.getLogger("exifread").setLevel(logging.WARNING) -def run_dataset(data: DataSetBase): +def run_dataset(data: DataSetBase) -> None: """Extract metadata from images' EXIF tag.""" exif_overrides = {} @@ -48,7 +48,7 @@ def run_dataset(data: DataSetBase): data.save_camera_models(camera_models) -def _extract_exif(image, data: DataSetBase): +def _extract_exif(image: str, data: DataSetBase) -> Dict[str, Any]: with data.open_image_file(image) as fp: d = exif.extract_exif_from_file( fp, diff --git a/opensfm/align.py b/opensfm/align.py index 57b5cbf70..171510c1e 100644 --- a/opensfm/align.py +++ b/opensfm/align.py @@ -10,7 +10,7 @@ from opensfm import multiview, transformations as tf, types, pygeometry, pymap -logger = logging.getLogger(__name__) +logger: logging.Logger = logging.getLogger(__name__) def align_reconstruction( diff --git a/opensfm/config.py b/opensfm/config.py index 956a3c419..8815dbc66 100644 --- a/opensfm/config.py +++ b/opensfm/config.py @@ -2,6 +2,7 @@ from dataclasses import dataclass, asdict import yaml +from typing import Any, Dict, IO, Union @dataclass @@ -361,12 +362,12 @@ class OpenSfMConfig: submodel_images_relpath_template: str = "submodels/submodel_%04d/images" -def default_config(): +def default_config() -> Dict[str, Any]: """Return default configuration""" return asdict(OpenSfMConfig()) -def load_config(filepath): +def load_config(filepath) -> Dict[str, Any]: """DEPRECATED: = Load config from a config.yaml filepath""" if not os.path.isfile(filepath): return default_config() 
@@ -375,7 +376,7 @@ def load_config(filepath): return load_config_from_fileobject(fin) -def load_config_from_fileobject(f): +def load_config_from_fileobject(f: Union[IO[bytes], IO[str], bytes, str]) -> Dict[str, Any]: """Load config from a config.yaml fileobject""" config = default_config() diff --git a/opensfm/context.py b/opensfm/context.py index 03d886488..58fa25296 100644 --- a/opensfm/context.py +++ b/opensfm/context.py @@ -13,7 +13,7 @@ from joblib import Parallel, delayed, parallel_backend -logger = logging.getLogger(__name__) +logger: logging.Logger = logging.getLogger(__name__) abspath = os.path.dirname(os.path.realpath(__file__)) @@ -23,12 +23,12 @@ # Handle different OpenCV versions -OPENCV5 = int(cv2.__version__.split(".")[0]) >= 5 -OPENCV4 = int(cv2.__version__.split(".")[0]) >= 4 -OPENCV44 = ( +OPENCV5: bool = int(cv2.__version__.split(".")[0]) >= 5 +OPENCV4: bool = int(cv2.__version__.split(".")[0]) >= 4 +OPENCV44: bool = ( int(cv2.__version__.split(".")[0]) == 4 and int(cv2.__version__.split(".")[1]) >= 4 ) -OPENCV3 = int(cv2.__version__.split(".")[0]) >= 3 +OPENCV3: bool = int(cv2.__version__.split(".")[0]) >= 3 if hasattr(cv2, "flann_Index"): flann_Index = cv2.flann_Index @@ -40,7 +40,7 @@ # Parallel processes -def parallel_map(func, args, num_proc, max_batch_size=1): +def parallel_map(func, args, num_proc: int, max_batch_size: int = 1): """Run function for all arguments using multiple processes.""" # De-activate/Restore any inner OpenCV threading threads_used = cv2.getNumThreads() @@ -78,7 +78,7 @@ class MEMORYSTATUSEX(ctypes.Structure): ("sullAvailExtendedVirtual", ctypes.c_ulonglong), ] - def __init__(self): + def __init__(self) -> None: # have to initialize this to the size of MEMORYSTATUSEX self.dwLength = ctypes.sizeof(self) super(MEMORYSTATUSEX, self).__init__() @@ -92,7 +92,7 @@ def memory_available() -> Optional[int]: ctypes.windll.kernel32.GlobalMemoryStatusEx(ctypes.byref(stat)) return stat.ullAvailPhys / 1024 / 1024 - def 
current_memory_usage(): + def current_memory_usage() -> int: stat = MEMORYSTATUSEX() ctypes.windll.kernel32.GlobalMemoryStatusEx(ctypes.byref(stat)) return (stat.ullTotalPhys - stat.ullAvailPhys) / 1024 @@ -116,7 +116,7 @@ def memory_available() -> Optional[int]: available_mem = int(lines[1].split()[6]) return available_mem - def current_memory_usage(): + def current_memory_usage() -> int: return resource.getrusage(resource.RUSAGE_SELF).ru_maxrss * rusage_unit diff --git a/opensfm/dataset.py b/opensfm/dataset.py index 9be9f512f..a2ade713a 100644 --- a/opensfm/dataset.py +++ b/opensfm/dataset.py @@ -21,7 +21,7 @@ from opensfm.dataset_base import DataSetBase from PIL.PngImagePlugin import PngImageFile -logger = logging.getLogger(__name__) +logger: logging.Logger = logging.getLogger(__name__) class DataSet(DataSetBase): diff --git a/opensfm/dataset_base.py b/opensfm/dataset_base.py index 0d64df1cf..9f3c64068 100644 --- a/opensfm/dataset_base.py +++ b/opensfm/dataset_base.py @@ -12,7 +12,7 @@ pymap, ) -logger = logging.getLogger(__name__) +logger: logging.Logger = logging.getLogger(__name__) class DataSetBase(ABC): diff --git a/opensfm/exif.py b/opensfm/exif.py index aa1dc01ce..d46ead2d8 100644 --- a/opensfm/exif.py +++ b/opensfm/exif.py @@ -1,7 +1,7 @@ import datetime import logging from codecs import encode, decode -from typing import Tuple +from typing import Any, Dict, List, Optional, Tuple import exifread import numpy as np @@ -9,9 +9,10 @@ from opensfm import pygeometry from opensfm.dataset_base import DataSetBase from opensfm.geo import ecef_from_lla +from opensfm.pygeometry import Camera from opensfm.sensors import sensor_data, camera_calibration -logger = logging.getLogger(__name__) +logger: logging.Logger = logging.getLogger(__name__) inch_in_mm = 25.4 cm_in_mm = 10 @@ -20,14 +21,14 @@ maximum_altitude = 1e4 -def eval_frac(value): +def eval_frac(value) -> Optional[float]: try: return float(value.num) / float(value.den) except ZeroDivisionError: return None 
-def gps_to_decimal(values, reference): +def gps_to_decimal(values, reference) -> Optional[float]: sign = 1 if reference in "NE" else -1 degrees = eval_frac(values[0]) minutes = eval_frac(values[1]) @@ -37,7 +38,7 @@ def gps_to_decimal(values, reference): return None -def get_tag_as_float(tags, key, index=0): +def get_tag_as_float(tags, key, index: int = 0) -> Optional[float]: if key in tags: val = tags[key].values[index] if isinstance(val, exifread.utils.Ratio): @@ -54,7 +55,9 @@ def get_tag_as_float(tags, key, index=0): return None -def compute_focal(focal_35, focal, sensor_width, sensor_string) -> Tuple[float, float]: +def compute_focal( + focal_35: Optional[float], focal: Optional[float], sensor_width, sensor_string +) -> Tuple[float, float]: if focal_35 is not None and focal_35 > 0: focal_ratio = focal_35 / 36.0 # 35mm film produces 36x24mm pictures. else: @@ -69,14 +72,14 @@ def compute_focal(focal_35, focal, sensor_width, sensor_string) -> Tuple[float, return focal_35, focal_ratio -def sensor_string(make, model): +def sensor_string(make: str, model: str) -> str: if make != "unknown": # remove duplicate 'make' information in 'model' model = model.replace(make, "") return (make.strip() + " " + model.strip()).strip().lower() -def camera_id(exif): +def camera_id(exif) -> str: return camera_id_( exif["make"], exif["model"], @@ -87,7 +90,7 @@ def camera_id(exif): ) -def camera_id_(make, model, width, height, projection_type, focal): +def camera_id_(make, model, width, height, projection_type, focal) -> str: if make != "unknown": # remove duplicate 'make' information in 'model' model = model.replace(make, "") @@ -104,17 +107,19 @@ def camera_id_(make, model, width, height, projection_type, focal): ).lower() -def extract_exif_from_file(fileobj, image_size_loader, use_exif_size, name=None): +def extract_exif_from_file( + fileobj, image_size_loader, use_exif_size, name=None +) -> Dict[str, Any]: exif_data = EXIF(fileobj, image_size_loader, use_exif_size, name=name) d 
= exif_data.extract_exif() return d -def unescape_string(s): +def unescape_string(s) -> str: return decode(encode(s, "latin-1", "backslashreplace"), "unicode-escape") -def parse_xmp_string(xmp_str): +def parse_xmp_string(xmp_str: str): for _ in range(2): try: return x2d.parse(xmp_str) @@ -123,7 +128,7 @@ def parse_xmp_string(xmp_str): return None -def get_xmp(fileobj): +def get_xmp(fileobj) -> List[str]: """Extracts XMP metadata from and image fileobj""" img_str = str(fileobj.read()) xmp_start = img_str.find(" Dict[str, Any]: for i in xmp: for k in i: if "GPano" in k: @@ -154,7 +159,9 @@ def get_gpano_from_xmp(xmp): class EXIF: - def __init__(self, fileobj, image_size_loader, use_exif_size=True, name=None): + def __init__( + self, fileobj, image_size_loader, use_exif_size=True, name=None + ) -> None: self.image_size_loader = image_size_loader self.use_exif_size = use_exif_size self.fileobj = fileobj @@ -163,7 +170,7 @@ def __init__(self, fileobj, image_size_loader, use_exif_size=True, name=None): self.xmp = get_xmp(fileobj) self.fileobj_name = self.fileobj.name if name is None else name - def extract_image_size(self): + def extract_image_size(self) -> Tuple[int, int]: if ( self.use_exif_size and "EXIF ExifImageWidth" in self.tags @@ -186,7 +193,7 @@ def extract_image_size(self): height, width = self.image_size_loader() return width, height - def _decode_make_model(self, value): + def _decode_make_model(self, value) -> str: """Python 2/3 compatible decoding of make/model field.""" if hasattr(value, "decode"): try: @@ -196,7 +203,7 @@ def _decode_make_model(self, value): else: return value - def extract_make(self): + def extract_make(self) -> str: # Camera make and model if "EXIF LensMake" in self.tags: make = self.tags["EXIF LensMake"].values @@ -206,7 +213,7 @@ def extract_make(self): make = "unknown" return self._decode_make_model(make) - def extract_model(self): + def extract_model(self) -> str: if "EXIF LensModel" in self.tags: model = self.tags["EXIF 
LensModel"].values elif "Image Model" in self.tags: @@ -215,11 +222,11 @@ def extract_model(self): model = "unknown" return self._decode_make_model(model) - def extract_projection_type(self): + def extract_projection_type(self) -> str: gpano = get_gpano_from_xmp(self.xmp) return gpano.get("GPano:ProjectionType", "perspective") - def extract_focal(self): + def extract_focal(self) -> Tuple[float, float]: make, model = self.extract_make(), self.extract_model() focal_35, focal_ratio = compute_focal( get_tag_as_float(self.tags, "EXIF FocalLengthIn35mmFilm"), @@ -229,7 +236,7 @@ def extract_focal(self): ) return focal_35, focal_ratio - def extract_sensor_width(self): + def extract_sensor_width(self) -> Optional[float]: """Compute sensor with from width and resolution.""" if ( "EXIF FocalPlaneResolutionUnit" not in self.tags @@ -241,15 +248,17 @@ def extract_sensor_width(self): if not mm_per_unit: return None pixels_per_unit = get_tag_as_float(self.tags, "EXIF FocalPlaneXResolution") - if pixels_per_unit <= 0: + if pixels_per_unit is None: + return None + if pixels_per_unit <= 0.0: pixels_per_unit = get_tag_as_float(self.tags, "EXIF FocalPlaneYResolution") - if pixels_per_unit <= 0: + if pixels_per_unit is None or pixels_per_unit <= 0.0: return None units_per_pixel = 1 / pixels_per_unit width_in_pixels = self.extract_image_size()[0] return width_in_pixels * units_per_pixel * mm_per_unit - def get_mm_per_unit(self, resolution_unit): + def get_mm_per_unit(self, resolution_unit) -> Optional[float]: """Length of a resolution unit in millimeters. 
Uses the values from the EXIF specs in @@ -272,7 +281,7 @@ def get_mm_per_unit(self, resolution_unit): ) return None - def extract_orientation(self): + def extract_orientation(self) -> int: orientation = 1 if "Image Orientation" in self.tags: value = self.tags.get("Image Orientation").values[0] @@ -280,7 +289,7 @@ def extract_orientation(self): orientation = value return orientation - def extract_ref_lon_lat(self): + def extract_ref_lon_lat(self) -> Tuple[str, str]: if "GPS GPSLatitudeRef" in self.tags: reflat = self.tags["GPS GPSLatitudeRef"].values else: @@ -291,7 +300,7 @@ def extract_ref_lon_lat(self): reflon = "E" return reflon, reflat - def extract_dji_lon_lat(self): + def extract_dji_lon_lat(self) -> Tuple[float, float]: lon = self.xmp[0]["@drone-dji:Longitude"] lat = self.xmp[0]["@drone-dji:Latitude"] lon_number = float(lon[1:]) @@ -300,23 +309,23 @@ def extract_dji_lon_lat(self): lat_number = lat_number if lat[0] == "+" else -lat_number return lon_number, lat_number - def extract_dji_altitude(self): + def extract_dji_altitude(self) -> float: return float(self.xmp[0]["@drone-dji:AbsoluteAltitude"]) - def has_xmp(self): + def has_xmp(self) -> bool: return len(self.xmp) > 0 - def has_dji_latlon(self): + def has_dji_latlon(self) -> bool: return ( self.has_xmp() and "@drone-dji:Latitude" in self.xmp[0] and "@drone-dji:Longitude" in self.xmp[0] ) - def has_dji_altitude(self): + def has_dji_altitude(self) -> bool: return self.has_xmp() and "@drone-dji:AbsoluteAltitude" in self.xmp[0] - def extract_lon_lat(self): + def extract_lon_lat(self) -> Tuple[Optional[float], Optional[float]]: if self.has_dji_latlon(): lon, lat = self.extract_dji_lon_lat() elif "GPS GPSLatitude" in self.tags: @@ -327,7 +336,7 @@ def extract_lon_lat(self): lon, lat = None, None return lon, lat - def extract_altitude(self): + def extract_altitude(self) -> Optional[float]: if self.has_dji_altitude(): altitude = self.extract_dji_altitude() elif "GPS GPSAltitude" in self.tags: @@ -350,14 +359,12 
@@ def extract_altitude(self): altitude = None return altitude - def extract_dop(self): + def extract_dop(self) -> Optional[float]: if "GPS GPSDOP" in self.tags: - dop = eval_frac(self.tags["GPS GPSDOP"].values[0]) - else: - dop = None - return dop + return eval_frac(self.tags["GPS GPSDOP"].values[0]) + return None - def extract_geo(self): + def extract_geo(self) -> Dict[str, Any]: altitude = self.extract_altitude() dop = self.extract_dop() lon, lat = self.extract_lon_lat() @@ -372,14 +379,18 @@ def extract_geo(self): d["dop"] = dop return d - def extract_capture_time(self): + def extract_capture_time(self) -> float: if ( "GPS GPSDate" in self.tags and "GPS GPSTimeStamp" in self.tags # Actually GPSDateStamp ): try: - hours = int(get_tag_as_float(self.tags, "GPS GPSTimeStamp", 0)) - minutes = int(get_tag_as_float(self.tags, "GPS GPSTimeStamp", 1)) + hours_f = get_tag_as_float(self.tags, "GPS GPSTimeStamp", 0) + minutes_f = get_tag_as_float(self.tags, "GPS GPSTimeStamp", 1) + if hours_f is None or minutes_f is None: + raise TypeError + hours = int(hours_f) + minutes = int(minutes_f) seconds = get_tag_as_float(self.tags, "GPS GPSTimeStamp", 2) gps_timestamp_string = "{0:s} {1:02d}:{2:02d}:{3:02f}".format( self.tags["GPS GPSDate"].values, hours, minutes, seconds @@ -451,7 +462,7 @@ def extract_capture_time(self): ) return 0.0 - def extract_opk(self, geo): + def extract_opk(self, geo) -> Optional[Dict[str, Any]]: opk = None if self.has_xmp() and geo and "latitude" in geo and "longitude" in geo: @@ -575,7 +586,7 @@ def extract_opk(self, geo): return opk - def extract_exif(self): + def extract_exif(self) -> Dict[str, Any]: width, height = self.extract_image_size() projection_type = self.extract_projection_type() focal_35, focal_ratio = self.extract_focal() @@ -602,7 +613,7 @@ def extract_exif(self): return d -def hard_coded_calibration(exif): +def hard_coded_calibration(exif) -> Optional[Dict[str, Any]]: focal = exif["focal_ratio"] fmm35 = int(round(focal * 36.0)) make = 
exif["make"].strip().lower() @@ -624,7 +635,7 @@ def hard_coded_calibration(exif): return None -def focal_ratio_calibration(exif): +def focal_ratio_calibration(exif) -> Optional[Dict[str, Any]]: if exif.get("focal_ratio"): return { "focal": exif["focal_ratio"], @@ -636,7 +647,7 @@ def focal_ratio_calibration(exif): } -def focal_xy_calibration(exif): +def focal_xy_calibration(exif) -> Optional[Dict[str, Any]]: focal = exif.get("focal_x", exif.get("focal_ratio")) if focal: return { @@ -659,7 +670,7 @@ def focal_xy_calibration(exif): } -def default_calibration(data: DataSetBase): +def default_calibration(data: DataSetBase) -> Dict[str, Any]: return { "focal": data.config["default_focal_prior"], "focal_x": data.config["default_focal_prior"], @@ -681,7 +692,7 @@ def default_calibration(data: DataSetBase): } -def calibration_from_metadata(metadata, data: DataSetBase): +def calibration_from_metadata(metadata, data: DataSetBase) -> Dict[str, Any]: """Finds the best calibration in one of the calibration sources.""" pt = metadata.get("projection_type", default_projection).lower() if ( @@ -710,7 +721,7 @@ def calibration_from_metadata(metadata, data: DataSetBase): def camera_from_exif_metadata( metadata, data: DataSetBase, calibration_func=calibration_from_metadata -): +) -> Camera: """ Create a camera object from exif metadata and the calibration function that turns metadata into usable calibration parameters. 
diff --git a/opensfm/feature_loading.py b/opensfm/feature_loading.py index 0277fc010..bd8baa022 100644 --- a/opensfm/feature_loading.py +++ b/opensfm/feature_loading.py @@ -7,7 +7,7 @@ from opensfm.dataset_base import DataSetBase -logger = logging.getLogger(__name__) +logger: logging.Logger = logging.getLogger(__name__) SEGMENTATION_IN_DESCRIPTOR_MULT = ( diff --git a/opensfm/features.py b/opensfm/features.py index 4aeb05105..d1c21e799 100644 --- a/opensfm/features.py +++ b/opensfm/features.py @@ -9,7 +9,7 @@ from opensfm import context, pyfeatures -logger = logging.getLogger(__name__) +logger: logging.Logger = logging.getLogger(__name__) class SemanticData: diff --git a/opensfm/features_processing.py b/opensfm/features_processing.py index 7e545687b..8a45a8b9b 100644 --- a/opensfm/features_processing.py +++ b/opensfm/features_processing.py @@ -12,7 +12,7 @@ from opensfm.dataset_base import DataSetBase -logger = logging.getLogger(__name__) +logger: logging.Logger = logging.getLogger(__name__) def run_features_processing(data: DataSetBase, images: List[str], force: bool) -> None: @@ -124,15 +124,15 @@ class Counter(object): some reason, joblib doesn't like a good old threading.Lock (everything is stuck) """ - def __init__(self): + def __init__(self) ->None: self.number_of_read = 0 self.counter = itertools.count() self.read_lock = threading.Lock() - def increment(self): + def increment(self) -> None: next(self.counter) - def value(self): + def value(self) -> int: with self.read_lock: value = next(self.counter) - self.number_of_read self.number_of_read += 1 diff --git a/opensfm/geo.py b/opensfm/geo.py index e2746cd2f..a0324f9b9 100644 --- a/opensfm/geo.py +++ b/opensfm/geo.py @@ -1,10 +1,12 @@ import numpy as np +from numpy import ndarray +from typing import Tuple WGS84_a = 6378137.0 WGS84_b = 6356752.314245 -def ecef_from_lla(lat, lon, alt: float): +def ecef_from_lla(lat, lon, alt: float) -> Tuple[float, ...]: """ Compute ECEF XYZ from latitude, longitude and 
altitude. @@ -50,7 +52,7 @@ def lla_from_ecef(x, y, z): return np.degrees(lat), np.degrees(lon), alt -def ecef_from_topocentric_transform(lat, lon, alt: float): +def ecef_from_topocentric_transform(lat, lon, alt: float) -> ndarray: """ Transformation from a topocentric frame at reference position to ECEF. @@ -77,7 +79,7 @@ def ecef_from_topocentric_transform(lat, lon, alt: float): ) -def ecef_from_topocentric_transform_finite_diff(lat, lon, alt: float): +def ecef_from_topocentric_transform_finite_diff(lat, lon, alt: float) -> ndarray: """ Transformation from a topocentric frame at reference position to ECEF. diff --git a/opensfm/geotag_from_gpx.py b/opensfm/geotag_from_gpx.py index 79c1dc7be..7bf29b851 100644 --- a/opensfm/geotag_from_gpx.py +++ b/opensfm/geotag_from_gpx.py @@ -1,6 +1,8 @@ #!/usr/bin/python3 import datetime +from typing import List, Union + import math import os import shutil @@ -55,7 +57,7 @@ def utc_to_localtime(utc_time): return utc_time - utc_offset_timedelta -def get_lat_lon_time(gpx_file, gpx_time="utc"): +def get_lat_lon_time(gpx_file, gpx_time: str="utc"): """ Read location and time stamps from a track in a GPX file. @@ -82,7 +84,7 @@ def get_lat_lon_time(gpx_file, gpx_time="utc"): return points -def compute_bearing(start_lat, start_lon, end_lat, end_lon): +def compute_bearing(start_lat: float, start_lon: float, end_lat: float, end_lon: float) -> float: """ Get the compass bearing from start to end. 
@@ -168,7 +170,7 @@ def to_deg(value, loc): return (deg, mint, sec, loc_value) -def gpx_lerp(alpha, a, b): +def gpx_lerp(alpha: int, a, b): """Interpolate gpx point as (1 - alpha) * a + alpha * b""" dt = alpha * (b[0] - a[0]).total_seconds() t = a[0] + datetime.timedelta(seconds=dt) @@ -209,15 +211,15 @@ def time_next_point(a, b, last, dt): return gpx_lerp(alpha, a, b) -def time_distance(a, b): +def time_distance(a, b) -> int: return (b[0] - a[0]).total_seconds() -def space_distance(a, b): +def space_distance(a, b) -> float: return geo.gps_distance(a[1:3], b[1:3]) -def sample_gpx(points, dx, dt=None): +def sample_gpx(points, dx: float, dt=None): if dt is not None: dx = float(dt) print("Sampling GPX file every {0} seconds".format(dx)) @@ -243,14 +245,14 @@ def sample_gpx(points, dx, dt=None): def add_gps_to_exif( - filename, + filename: Union[os.PathLike[str], str], lat, lon, bearing, elevation, - updated_filename=None, - remove_image_description=True, -): + updated_filename: Union[None, os.PathLike[str], str]=None, + remove_image_description: bool=True, +) -> None: """ Given lat, lon, bearing, elevation, write to EXIF """ @@ -302,11 +304,11 @@ def add_gps_to_exif( def add_exif_using_timestamp( filename, points, - offset_time=0, + offset_time: int=0, timestamp=None, - orientation=1, + orientation: int=1, image_description=None, -): +) -> None: """ Find lat, lon and bearing of filename and write to EXIF. 
""" @@ -385,8 +387,8 @@ def add_exif_using_timestamp( if len(sys.argv) > 4: print("Usage: python geotag_from_gpx.py path gpx_file time_offset") raise IOError("Bad input parameters.") - path = sys.argv[1] - gpx_filename = sys.argv[2] + path: str = sys.argv[1] + gpx_filename: str = sys.argv[2] if len(sys.argv) == 4: time_offset = int(sys.argv[3]) @@ -395,10 +397,10 @@ def add_exif_using_timestamp( if path.lower().endswith(".jpg"): # single file - file_list = [path] + file_list: List[str] = [path] else: # folder(s) - file_list = [] + file_list: List[str] = [] for root, _, files in os.walk(path): file_list += [ os.path.join(root, filename) @@ -407,7 +409,7 @@ def add_exif_using_timestamp( ] # start time - t = time.time() + t: float = time.time() # read gpx file to get track locations gpx = get_lat_lon_time(gpx_filename) diff --git a/opensfm/io.py b/opensfm/io.py index c6efb0608..140c024d5 100644 --- a/opensfm/io.py +++ b/opensfm/io.py @@ -3,15 +3,17 @@ import os import shutil from abc import ABC, abstractmethod -from typing import Dict, Any, Iterable, List, IO, Tuple, TextIO, Optional +from pathlib import Path +from typing import Union, Dict, Any, Iterable, List, IO, Tuple, TextIO, Optional import cv2 import numpy as np import pyproj +from numpy import ndarray from opensfm import context, features, geo, pygeometry, pymap, types from PIL import Image -logger = logging.getLogger(__name__) +logger: logging.Logger = logging.getLogger(__name__) def camera_from_json(key: str, obj: Dict[str, Any]) -> pygeometry.Camera: @@ -834,7 +836,6 @@ def _read_gcp_list_lines( if shot_id not in exifs: continue - if key in points: point = points[key] else: @@ -888,17 +889,21 @@ def _parse_utm_projection_string(line: str) -> str: return s.format(zone_number, zone_hemisphere) -def _parse_projection(line: str): +def _parse_projection(line: str) -> Optional[pyproj.Transformer]: """Build a proj4 from the GCP format line.""" crs_4326 = pyproj.CRS.from_epsg(4326) if line.strip() == "WGS84": 
return None elif line.upper().startswith("WGS84 UTM"): - return pyproj.Transformer.from_proj(pyproj.CRS(_parse_utm_projection_string(line)), crs_4326) + return pyproj.Transformer.from_proj( + pyproj.CRS(_parse_utm_projection_string(line)), crs_4326 + ) elif "+proj" in line: return pyproj.Transformer.from_proj(pyproj.CRS(line), crs_4326) elif line.upper().startswith("EPSG:"): - return pyproj.Transformer.from_proj(pyproj.CRS.from_epsg(int(line.split(":")[1])), crs_4326) + return pyproj.Transformer.from_proj( + pyproj.CRS.from_epsg(int(line.split(":")[1])), crs_4326 + ) else: raise ValueError("Un-supported geo system definition: {}".format(line)) @@ -994,21 +999,20 @@ def json_dump_kwargs(minify: bool = False) -> Dict[str, Any]: return {"indent": indent, "ensure_ascii": False, "separators": separators} -def json_dump(data, fout, minify=False): +def json_dump(data, fout: IO[str], minify: bool = False) -> None: kwargs = json_dump_kwargs(minify) return json.dump(data, fout, **kwargs) -def json_dumps(data, minify=False): +def json_dumps(data, minify: bool = False) -> str: kwargs = json_dump_kwargs(minify) return json.dumps(data, **kwargs) - -def json_load(fp): +def json_load(fp: Union[IO[str], IO[bytes]]) -> Any: return json.load(fp) -def json_loads(text): +def json_loads(text: Union[str, bytes]) -> Any: return json.loads(text) @@ -1054,7 +1058,7 @@ def ply_header( return header -def points_to_ply_string(vertices: List[str], point_num_views: bool = False): +def points_to_ply_string(vertices: List[str], point_num_views: bool = False) -> str: header = ply_header(len(vertices), point_num_views=point_num_views) return "\n".join(header + vertices + [""]) @@ -1065,7 +1069,7 @@ def reconstruction_to_ply( no_cameras: bool = False, no_points: bool = False, point_num_views: bool = False, -): +) -> str: """Export reconstruction points as a PLY string.""" vertices = [] @@ -1132,7 +1136,7 @@ def point_cloud_to_ply( colors: np.ndarray, labels: np.ndarray, fp: TextIO, -): +) -> None: 
fp.write("ply\n") fp.write("format ascii 1.0\n") fp.write("element vertex {}\n".format(len(points))) @@ -1185,7 +1189,7 @@ def open_rt(path: str) -> IO[Any]: def imread( path: str, grayscale: bool = False, unchanged: bool = False, anydepth: bool = False -): +) -> ndarray: with open(path, "rb") as fb: return imread_from_fileobject(fb, grayscale, unchanged, anydepth) @@ -1240,7 +1244,7 @@ def imwrite(cls, path: str, image: np.ndarray) -> None: imwrite(fwb, image, path) -def imwrite(path: str, image: np.ndarray): +def imwrite(path: str, image: np.ndarray) -> None: with open(path, "wb") as fwb: return imwrite_from_fileobject(fwb, image, path) @@ -1253,16 +1257,17 @@ def imwrite_from_fileobject(fwb, image: np.ndarray, ext: str) -> None: fwb.write(im_buffer) -def image_size_from_fileobject(fb): +def image_size_from_fileobject( + fb: Union[IO[bytes], bytes, Path, str, TextIO] +) -> Tuple[int, int]: """Height and width of an image.""" - try: + if isinstance(fb, TextIO): + image = imread(fb.name) + return image.shape[:2] + else: with Image.open(fb) as img: width, height = img.size return height, width - except Exception: - # Slower fallback - image = imread(fb.name) - return image.shape[:2] def image_size(path: str) -> Tuple[int, int]: @@ -1302,7 +1307,7 @@ def symlink(cls, src_path: str, dst_path: str, **kwargs): @classmethod @abstractmethod - def open(cls, *args, **kwargs): + def open(cls, *args, **kwargs) -> IO[Any]: pass @classmethod @@ -1342,7 +1347,7 @@ def timestamp(cls, path: str): class IoFilesystemDefault(IoFilesystemBase): - def __init__(self): + def __init__(self) -> None: self.type = "default" @classmethod @@ -1376,7 +1381,7 @@ def symlink(cls, src_path: str, dst_path: str, **kwargs): os.symlink(src_path, dst_path, **kwargs) @classmethod - def open(cls, *args, **kwargs): + def open(cls, *args, **kwargs) -> IO[Any]: return open(*args, **kwargs) @classmethod diff --git a/opensfm/large/tools.py b/opensfm/large/tools.py index 5fa1ff348..e66c0d35e 100644 --- 
a/opensfm/large/tools.py +++ b/opensfm/large/tools.py @@ -2,27 +2,35 @@ import logging from collections import namedtuple from functools import lru_cache +from typing import Dict, List, Tuple import cv2 import networkx as nx +from networkx.classes.reportviews import EdgeView import numpy as np import scipy.spatial as spatial from networkx.algorithms import bipartite -from opensfm import align -from opensfm import context -from opensfm import dataset -from opensfm import geo -from opensfm import multiview -from opensfm import pybundle -from opensfm import reconstruction +from opensfm import ( + align, + context, + dataset, + geo, + multiview, + pybundle, + reconstruction, + types, + pymap, +) -logger = logging.getLogger(__name__) +logger: logging.Logger = logging.getLogger(__name__) PartialReconstruction = namedtuple("PartialReconstruction", ["submodel_path", "index"]) -def kmeans(samples, nclusters, max_iter=100, attempts=20): +def kmeans( + samples, nclusters, max_iter: int = 100, attempts: int = 20 +) -> Tuple[np.ndarray, np.ndarray]: criteria = (cv2.TERM_CRITERIA_MAX_ITER, max_iter, 1.0) flags = cv2.KMEANS_PP_CENTERS @@ -32,7 +40,9 @@ def kmeans(samples, nclusters, max_iter=100, attempts=20): return cv2.kmeans(samples, nclusters, criteria, attempts, flags) -def add_cluster_neighbors(positions, labels, centers, max_distance): +def add_cluster_neighbors( + positions, labels, centers, max_distance +) -> List[List[np.ndarray]]: reflla = np.mean(positions, 0) reference = geo.TopocentricConverter(reflla[0], reflla[1], 0) @@ -58,7 +68,7 @@ def add_cluster_neighbors(positions, labels, centers, max_distance): return clusters -def connected_reconstructions(reconstruction_shots): +def connected_reconstructions(reconstruction_shots) -> EdgeView: g = nx.Graph() for r in reconstruction_shots: g.add_node(r, bipartite=0) @@ -71,10 +81,10 @@ def connected_reconstructions(reconstruction_shots): return p.edges() -def scale_matrix(covariance): +def scale_matrix(covariance: 
np.ndarray) -> np.ndarray: try: L = np.linalg.cholesky(covariance) - except Exception as e: + except Exception: logger.error( "Could not compute Cholesky of covariance matrix {}".format(covariance) ) @@ -85,7 +95,9 @@ def scale_matrix(covariance): return np.linalg.inv(L) -def invert_similarity(s, A, b): +def invert_similarity( + s: float, A: np.ndarray, b: np.ndarray +) -> Tuple[float, np.ndarray, float]: s_inv = 1 / s A_inv = A.T b_inv = -s_inv * A_inv.dot(b) @@ -93,11 +105,11 @@ def invert_similarity(s, A, b): return s_inv, A_inv, b_inv -def partial_reconstruction_name(key): +def partial_reconstruction_name(key) -> str: return str(key.submodel_path) + "_index" + str(key.index) -def add_camera_constraints_soft(ra, reconstruction_shots, reconstruction_name): +def add_camera_constraints_soft(ra, reconstruction_shots, reconstruction_name) -> None: added_shots = set() for key in reconstruction_shots: shots = reconstruction_shots[key] @@ -137,7 +149,7 @@ def add_camera_constraints_soft(ra, reconstruction_shots, reconstruction_name): def add_camera_constraints_hard( ra, reconstruction_shots, reconstruction_name, add_common_camera_constraint -): +) -> None: for key in reconstruction_shots: shots = reconstruction_shots[key] rec_name = reconstruction_name(key) @@ -178,14 +190,16 @@ def add_camera_constraints_hard( @lru_cache(25) -def load_reconstruction(path, index): +def load_reconstruction( + path, index +) -> Tuple[str, Tuple[types.Reconstruction, pymap.TracksManager]]: d1 = dataset.DataSet(path) r1 = d1.load_reconstruction()[index] g1 = d1.load_tracks_manager() return (path + ("_%s" % index)), (r1, g1) -def add_point_constraints(ra, reconstruction_shots, reconstruction_name): +def add_point_constraints(ra, reconstruction_shots, reconstruction_name) -> None: connections = connected_reconstructions(reconstruction_shots) for connection in connections: @@ -225,7 +239,7 @@ def add_point_constraints(ra, reconstruction_shots, reconstruction_name): ) -def 
load_reconstruction_shots(meta_data): +def load_reconstruction_shots(meta_data) -> Dict[str, pymap.Shot]: reconstruction_shots = {} for submodel_path in meta_data.get_submodel_paths(): data = dataset.DataSet(submodel_path) @@ -244,8 +258,8 @@ def align_reconstructions( reconstruction_shots, reconstruction_name, use_points_constraints, - camera_constraint_type="soft_camera_constraint", -): + camera_constraint_type: str = "soft_camera_constraint", +) -> Dict[str, Tuple[float, np.ndarray, float]]: ra = pybundle.ReconstructionAlignment() if camera_constraint_type == "soft_camera_constraint": @@ -271,7 +285,7 @@ def align_reconstructions( return transformations -def apply_transformations(transformations): +def apply_transformations(transformations) -> None: submodels = itertools.groupby( sorted(transformations.keys(), key=lambda key: key.submodel_path), lambda key: key.submodel_path, diff --git a/opensfm/masking.py b/opensfm/masking.py index 9c72d4cd6..37bfe0ec8 100644 --- a/opensfm/masking.py +++ b/opensfm/masking.py @@ -6,7 +6,7 @@ from opensfm import upright from opensfm.dataset_base import DataSetBase -logger = logging.getLogger(__name__) +logger: logging.Logger = logging.getLogger(__name__) def mask_from_segmentation( diff --git a/opensfm/matching.py b/opensfm/matching.py index ff0626282..8388e062f 100644 --- a/opensfm/matching.py +++ b/opensfm/matching.py @@ -1,6 +1,6 @@ import logging from timeit import default_timer as timer -from typing import Optional, Dict, Any, Tuple, List, Generator +from typing import Sized, Optional, Dict, Any, Tuple, List, Generator import cv2 import numpy as np @@ -16,7 +16,7 @@ from opensfm.dataset_base import DataSetBase -logger = logging.getLogger(__name__) +logger: logging.Logger = logging.getLogger(__name__) def clear_cache() -> None: @@ -448,7 +448,7 @@ def _match_descriptors_impl( if overriden_config["matching_use_filters"]: matches = apply_adhoc_filters( data, - matches, + list(matches), im1, camera1, features_data1.points, @@ 
-467,7 +467,7 @@ def _match_descriptors_impl( def match_robust( im1: str, im2: str, - matches, + matches: Sized, camera1: pygeometry.Camera, camera2: pygeometry.Camera, data: DataSetBase, @@ -797,7 +797,7 @@ def robust_match_fundamental( def compute_inliers_bearings( - b1: np.ndarray, b2: np.ndarray, R: np.ndarray, t: np.ndarray, threshold=0.01 + b1: np.ndarray, b2: np.ndarray, R: np.ndarray, t: np.ndarray, threshold: float=0.01 ) -> List[bool]: """Compute points that can be triangulated. @@ -925,7 +925,7 @@ def unfilter_matches(matches, m1, m2) -> np.ndarray: def apply_adhoc_filters( data: DataSetBase, - matches, + matches: List[Tuple[int, int]], im1: str, camera1: pygeometry.Camera, p1: np.ndarray, @@ -1023,7 +1023,7 @@ def _vermont_valid_mask(p: np.ndarray) -> bool: def _not_on_blackvue_watermark( - p1: np.ndarray, p2: np.ndarray, matches, im1: str, im2: str, data: DataSetBase + p1: np.ndarray, p2: np.ndarray, matches: List[Tuple[int, int]], im1: str, im2: str, data: DataSetBase ) -> List[Tuple[int, int]]: """Filter Blackvue's watermark.""" meta1 = data.load_exif(im1) diff --git a/opensfm/mesh.py b/opensfm/mesh.py index 016372b19..47447c275 100644 --- a/opensfm/mesh.py +++ b/opensfm/mesh.py @@ -8,12 +8,12 @@ from opensfm import pygeometry, pymap, types -logger = logging.getLogger(__name__) +logger: logging.Logger = logging.getLogger(__name__) def triangle_mesh( shot_id: str, r: types.Reconstruction, tracks_manager: pymap.TracksManager -): +) -> Tuple[List[Any], List[Any]]: """ Create triangle meshes in a list """ @@ -143,7 +143,7 @@ def triangle_mesh_fisheye( faces = tri.simplices.tolist() # Remove faces having only boundary vertices - def good_face(face): + def good_face(face: List[Any]) -> bool: return ( face[0] >= num_circle_points or face[1] >= num_circle_points diff --git a/opensfm/multiview.py b/opensfm/multiview.py index f53a1c5d5..d9877e61e 100644 --- a/opensfm/multiview.py +++ b/opensfm/multiview.py @@ -200,19 +200,19 @@ class TestLinearKernel: 
required_samples = 1 - def __init__(self, x, y): - self.x = x - self.y = y + def __init__(self, x: np.ndarray, y: np.ndarray) -> None: + self.x: np.ndarray = x + self.y: np.ndarray = y - def num_samples(self): + def num_samples(self) -> int: return len(self.x) - def fit(self, samples): + def fit(self, samples: np.ndarray)->List[float]: x = self.x[samples[0]] y = self.y[samples[0]] return [y / x] - def evaluate(self, model): + def evaluate(self, model: np.ndarray) -> np.ndarray: return self.y - model * self.x @@ -223,7 +223,7 @@ class PlaneKernel: def __init__( self, points, vectors, verticals, point_threshold=1.0, vector_threshold=5.0 - ): + ) -> None: self.points = points self.vectors = vectors self.verticals = verticals @@ -231,10 +231,10 @@ def __init__( self.point_threshold = point_threshold self.vector_threshold = vector_threshold - def num_samples(self): + def num_samples(self) -> int: return len(self.points) - def sampling(self): + def sampling(self) -> Dict[str, Any]: samples = {} if len(self.vectors) > 0: samples["points"] = self.points[ @@ -250,11 +250,11 @@ def sampling(self): samples["vectors"] = None return samples - def fit(self, samples): + def fit(self, samples: Dict[str, np.ndarray]) -> List[np.ndarray]: model = fit_plane(samples["points"], samples["vectors"], self.verticals) return [model] - def evaluate(self, model): + def evaluate(self, model) -> np.ndarray: # only evaluate on points normal = model[0:3] normal_norm = np.linalg.norm(normal) + 1e-10 diff --git a/opensfm/pairs_selection.py b/opensfm/pairs_selection.py index af13d3297..5c7367843 100644 --- a/opensfm/pairs_selection.py +++ b/opensfm/pairs_selection.py @@ -10,7 +10,7 @@ from opensfm import bow, context, feature_loader, vlad, geo, geometry from opensfm.dataset_base import DataSetBase -logger = logging.getLogger(__name__) +logger: logging.Logger = logging.getLogger(__name__) def has_gps_info(exif: Dict[str, Any]) -> bool: @@ -209,7 +209,7 @@ def match_candidates_by_distance( return 
pairs -def norm_2d(vec: np.ndarray): +def norm_2d(vec: np.ndarray) -> float: """Return the 2D norm of a vector.""" return math.sqrt(vec[0] ** 2 + vec[1] ** 2) @@ -220,7 +220,7 @@ def match_candidates_by_graph( exifs: Dict[str, Any], reference: geo.TopocentricConverter, rounds: int, -): +) -> Set[Tuple[str, str]]: """Find by triangulating the GPS points on X/Y axises""" if len(images_cand) == 0 or rounds < 1: return set() diff --git a/opensfm/reconstruction.py b/opensfm/reconstruction.py index 468d9c277..1e7ab54d5 100644 --- a/opensfm/reconstruction.py +++ b/opensfm/reconstruction.py @@ -8,7 +8,7 @@ from collections import defaultdict from itertools import combinations from timeit import default_timer as timer -from typing import Dict, Any, List, Tuple, Set, Optional +from typing import Dict, Any, List, Tuple, Set, Optional, Union import cv2 import numpy as np @@ -30,7 +30,7 @@ from opensfm.dataset_base import DataSetBase -logger = logging.getLogger(__name__) +logger: logging.Logger = logging.getLogger(__name__) class ReconstructionAlgorithm(str, enum.Enum): @@ -1476,7 +1476,7 @@ def grow_reconstruction( ) logger.info(f"Adding {' and '.join(new_shots)} to the reconstruction") - step = { + step: Dict[str, Union[List[int], List[str], int, List[int], Any]] = { "images": list(new_shots), "resection": resrep, "memory_usage": current_memory_usage(), diff --git a/opensfm/reconstruction_helpers.py b/opensfm/reconstruction_helpers.py index 657703b00..45a883396 100644 --- a/opensfm/reconstruction_helpers.py +++ b/opensfm/reconstruction_helpers.py @@ -15,7 +15,7 @@ from opensfm.dataset_base import DataSetBase -logger = logging.getLogger(__name__) +logger: logging.Logger = logging.getLogger(__name__) def guess_acceleration_from_orientation_tag(orientation: int) -> List[float]: diff --git a/opensfm/report.py b/opensfm/report.py index b57473cd6..b1bf5f859 100644 --- a/opensfm/report.py +++ b/opensfm/report.py @@ -9,7 +9,7 @@ from opensfm.dataset import DataSet from typing import 
Any, Dict -logger = logging.getLogger(__name__) +logger: logging.Logger = logging.getLogger(__name__) class Report: diff --git a/opensfm/rig.py b/opensfm/rig.py index 51447900c..4585dae55 100644 --- a/opensfm/rig.py +++ b/opensfm/rig.py @@ -15,7 +15,7 @@ from opensfm.dataset import DataSet -logger = logging.getLogger(__name__) +logger: logging.Logger = logging.getLogger(__name__) TRigPatterns = Dict[str, str] @@ -293,7 +293,7 @@ def create_rig_cameras_from_reconstruction( return rig_cameras -def create_rigs_with_pattern(data: "DataSet", patterns: TRigPatterns): +def create_rigs_with_pattern(data: "DataSet", patterns: TRigPatterns) -> None: """Create rig data (`rig_cameras.json` and `rig_assignments.json`) by performing pattern matching to group images belonging to the same instances, followed by a bit of ad-hoc SfM to find some initial relative poses. @@ -397,20 +397,3 @@ def count_reconstructed_instances( for s in reconstruction.shots: instances_count[instances_map[s]] -= 1 return len(instances) - sum(1 for i in instances_count.values() if i > 0) - - -def same_rig_shot(meta1, meta2): - """True if shots taken at the same time on a rig.""" - have_gps = ( - "gps" in meta1 - and "gps" in meta2 - and "latitude" in meta1["gps"] - and "latitude" in meta2["gps"] - ) - same_gps = ( - have_gps - and meta1["gps"]["latitude"] == meta2["gps"]["latitude"] - and meta1["gps"]["longitude"] == meta2["gps"]["longitude"] - ) - same_time = meta1["capture_time"] == meta2["capture_time"] - return same_gps and same_time diff --git a/opensfm/sensors.py b/opensfm/sensors.py index 54c737f0b..7ff93f13c 100644 --- a/opensfm/sensors.py +++ b/opensfm/sensors.py @@ -1,12 +1,12 @@ from functools import lru_cache - +from typing import Any, Dict, List import yaml from opensfm import context from opensfm import io @lru_cache(1) -def sensor_data(): +def sensor_data() -> Dict[str, Any]: with io.open_rt(context.SENSOR_DATA) as f: data = io.json_load(f) @@ -15,7 +15,7 @@ def sensor_data(): @lru_cache(1) 
-def camera_calibration(): +def camera_calibration()-> List[Dict[str, Any]]: with io.open_rt(context.CAMERA_CALIBRATION) as f: data = yaml.safe_load(f) return data diff --git a/opensfm/synthetic_data/synthetic_dataset.py b/opensfm/synthetic_data/synthetic_dataset.py index 78b642a52..a6023010d 100644 --- a/opensfm/synthetic_data/synthetic_dataset.py +++ b/opensfm/synthetic_data/synthetic_dataset.py @@ -2,13 +2,13 @@ import logging import os import shelve -from typing import Optional, Dict, Any, List, Tuple, Union +from typing import Any, Dict, Iterator, List, Optional, Tuple, Union import numpy as np from opensfm import tracking, features as oft, types, pymap, pygeometry, io, geo from opensfm.dataset import DataSet -logger = logging.getLogger(__name__) +logger: logging.Logger = logging.getLogger(__name__) class SyntheticFeatures(collections.abc.MutableMapping): @@ -30,19 +30,19 @@ def sync(self) -> None: else: database.sync() - def __getitem__(self, key): + def __getitem__(self, key) -> oft.FeaturesData: return self.database.__getitem__(key) - def __setitem__(self, key, item): + def __setitem__(self, key, item) -> None: return self.database.__setitem__(key, item) - def __delitem__(self, key): + def __delitem__(self, key) -> None: return self.database.__delitem__(key) - def __iter__(self): + def __iter__(self) -> Iterator[str]: return self.database.__iter__() - def __len__(self): + def __len__(self) -> int: return self.database.__len__() diff --git a/opensfm/synthetic_data/synthetic_generator.py b/opensfm/synthetic_data/synthetic_generator.py index 4de6f6c64..cea73f774 100644 --- a/opensfm/synthetic_data/synthetic_generator.py +++ b/opensfm/synthetic_data/synthetic_generator.py @@ -18,9 +18,10 @@ features as oft, geometry, ) +from opensfm.types import Reconstruction -logger = logging.getLogger(__name__) +logger: logging.Logger = logging.getLogger(__name__) def derivative(func: Callable, x: np.ndarray) -> np.ndarray: @@ -169,8 +170,8 @@ def generate_exifs( 
previous_time = 0 exifs = {} - def _gps_dop(shot): - gps_dop = 15 + def _gps_dop(shot: pymap.Shot) -> float: + gps_dop = 15.0 if isinstance(gps_noise, float): gps_dop = gps_noise if isinstance(gps_noise, dict): @@ -256,7 +257,7 @@ def perturb_rotations(rotations: np.ndarray, angle_sigma: float) -> None: def add_points_to_reconstruction( points: np.ndarray, color: np.ndarray, reconstruction: types.Reconstruction -): +) -> None: shift = len(reconstruction.points) for i in range(points.shape[0]): point = reconstruction.create_point(str(shift + i), points[i, :]) @@ -271,7 +272,7 @@ def add_shots_to_reconstruction( cameras: List[pygeometry.Camera], reconstruction: types.Reconstruction, sequence_key: str, -): +) -> None: for camera in cameras: reconstruction.add_camera(camera) @@ -307,7 +308,7 @@ def create_reconstruction( rig_rotations: List[np.ndarray], rig_cameras: List[List[pymap.RigCamera]], reference: Optional[geo.TopocentricConverter], -): +) -> Reconstruction: reconstruction = types.Reconstruction() if reference is not None: reconstruction.reference = reference diff --git a/opensfm/test/conftest.py b/opensfm/test/conftest.py index 4bedaad71..b8cc2ed9a 100644 --- a/opensfm/test/conftest.py +++ b/opensfm/test/conftest.py @@ -1,18 +1,22 @@ from collections import defaultdict from distutils.version import LooseVersion +from typing import Dict, List, Tuple import numpy as np import pytest -from opensfm import multiview, types, geo -from opensfm.synthetic_data import synthetic_examples -from opensfm.synthetic_data import synthetic_scene +from opensfm import multiview, types, geo, pygeometry, pymap +from opensfm.synthetic_data import ( + synthetic_examples, + synthetic_scene, + synthetic_dataset as sd, +) -def pytest_configure(config): +def pytest_configure(config) -> None: use_legacy_numpy_printoptions() -def use_legacy_numpy_printoptions(): +def use_legacy_numpy_printoptions() -> None: """Ensure numpy use legacy print formant.""" if 
LooseVersion(np.__version__).version[:2] > [1, 13]: np.set_printoptions(legacy="1.13") @@ -54,7 +58,7 @@ def scene_synthetic() -> synthetic_scene.SyntheticInputData: @pytest.fixture(scope="session") -def scene_synthetic_cube(): +def scene_synthetic_cube() -> Tuple[types.Reconstruction, pymap.TracksManager]: np.random.seed(42) data = synthetic_examples.synthetic_cube_scene() @@ -120,7 +124,14 @@ def scene_synthetic_triangulation() -> synthetic_scene.SyntheticInputData: @pytest.fixture(scope="module") -def pairs_and_poses(): +def pairs_and_poses() -> Tuple[ + Dict[Tuple[str, str], List[Tuple[List[np.ndarray]]]], + Dict[Tuple[str, str], List[Tuple[List[np.ndarray]]]], + pygeometry.Camera, + sd.SyntheticFeatures, + pymap.TracksManager, + types.Reconstruction, +]: np.random.seed(42) data = synthetic_examples.synthetic_cube_scene() @@ -147,10 +158,12 @@ def pairs_and_poses(): @pytest.fixture(scope="module") -def pairs_and_their_E(pairs_and_poses): +def pairs_and_their_E( + pairs_and_poses, +) -> List[Tuple[np.ndarray, np.ndarray, np.ndarray, np.ndarray]]: pairs, poses, camera, _, _, _ = pairs_and_poses - pairs = list(sorted(zip(pairs.values(), poses.values()), key=lambda x: -len(x[0]))) + pairs = sorted(zip(pairs.values(), poses.values()), key=lambda x: -len(x[0])) num_pairs = 20 indices = [np.random.randint(0, len(pairs) - 1) for i in range(num_pairs)] @@ -177,7 +190,9 @@ def pairs_and_their_E(pairs_and_poses): @pytest.fixture(scope="module") -def shots_and_their_points(pairs_and_poses): +def shots_and_their_points( + pairs_and_poses, +) -> List[Tuple[pygeometry.Pose, np.ndarray, np.ndarray]]: _, _, _, _, tracks_manager, reconstruction = pairs_and_poses ret_shots = [] diff --git a/opensfm/test/data_generation.py b/opensfm/test/data_generation.py index 04dc28f18..097d53c09 100644 --- a/opensfm/test/data_generation.py +++ b/opensfm/test/data_generation.py @@ -13,7 +13,7 @@ DATA_PATH = os.path.abspath("data") -def create_berlin_test_folder(tmpdir): +def 
create_berlin_test_folder(tmpdir) -> opensfm.dataset.DataSet: src = os.path.join(DATA_PATH, "berlin") dst = str(tmpdir.mkdir("berlin")) files = ["images", "masks", "config.yaml", "ground_control_points.json"] diff --git a/opensfm/test/test_dataset.py b/opensfm/test/test_dataset.py index d683aab5d..816bf2a2d 100644 --- a/opensfm/test/test_dataset.py +++ b/opensfm/test/test_dataset.py @@ -23,12 +23,14 @@ def test_dataset_load_features_sift(tmpdir) -> None: before = features.FeaturesData(points, descriptors, colors, semantic_data) data.save_features(image, before) after = data.load_features(image) - + assert after assert np.allclose(points, after.points) assert np.allclose(descriptors, after.descriptors) assert np.allclose(colors, after.colors) + semantic = after.semantic + assert semantic assert np.allclose( segmentations, - after.semantic.segmentation, + semantic.segmentation, ) - assert np.allclose(instances, after.semantic.instances) + assert np.allclose(instances, semantic.instances) diff --git a/opensfm/test/test_datastructures.py b/opensfm/test/test_datastructures.py index 1c8ef983d..2fd07afa2 100644 --- a/opensfm/test/test_datastructures.py +++ b/opensfm/test/test_datastructures.py @@ -1,4 +1,8 @@ import copy +from opensfm.pymap import RigCamera, RigInstance, Shot +from opensfm.types import Reconstruction +from typing import Tuple + import random import numpy as np @@ -638,7 +642,7 @@ def test_pano_shot_create_remove_create() -> None: assert len(rec.pano_shots) == n_shots -def _create_rig_camera(): +def _create_rig_camera() -> RigCamera: rig_camera = pymap.RigCamera() rig_camera.id = "rig_camera" rig_camera.pose = pygeometry.Pose( @@ -647,7 +651,7 @@ def _create_rig_camera(): return rig_camera -def _create_rig_instance(): +def _create_rig_instance() -> Tuple[Reconstruction, RigInstance, Shot]: rec = _create_reconstruction(1, {"0": 2}) rig_camera = rec.add_rig_camera(_create_rig_camera()) rig_instance = pymap.RigInstance("1") diff --git 
a/opensfm/test/test_io.py b/opensfm/test/test_io.py index 56852bd90..367629152 100644 --- a/opensfm/test/test_io.py +++ b/opensfm/test/test_io.py @@ -71,6 +71,7 @@ def test_parse_projection() -> None: proj = io._parse_projection("WGS84 UTM 31N") easting, northing = 431760, 4582313.7 lat, lon = 41.38946, 2.18378 + assert proj plat, plon = proj.transform(easting, northing) assert np.allclose((lat, lon), (plat, plon)) diff --git a/opensfm/test/test_pairs_selection.py b/opensfm/test/test_pairs_selection.py index 4f3c10c47..3fdc04145 100644 --- a/opensfm/test/test_pairs_selection.py +++ b/opensfm/test/test_pairs_selection.py @@ -1,18 +1,19 @@ import argparse import os.path -from typing import Any, Dict +from typing import Any, Dict, Generator import numpy as np import pytest from opensfm import commands, dataset, feature_loader, pairs_selection, geo from opensfm.test import data_generation +from opensfm.dataset_base import DataSetBase NEIGHBORS = 6 @pytest.fixture(scope="module", autouse=True) -def clear_cache(): +def clear_cache() -> Generator[None, Any, Any]: """ Clear feature loader cache to avoid using cached masks etc from berlin dataset which has the same @@ -47,7 +48,7 @@ def lund_path(tmpdir_factory) -> str: def match_candidates_from_metadata( - data, neighbors: int = NEIGHBORS, assert_count: int = NEIGHBORS + data: DataSetBase, neighbors: int = NEIGHBORS, assert_count: int = NEIGHBORS ) -> None: assert neighbors >= assert_count diff --git a/opensfm/tracking.py b/opensfm/tracking.py index 3122fd220..4cb699ee4 100644 --- a/opensfm/tracking.py +++ b/opensfm/tracking.py @@ -6,9 +6,10 @@ from opensfm import pymap from opensfm.dataset_base import DataSetBase from opensfm.unionfind import UnionFind +from opensfm.pymap import TracksManager -logger = logging.getLogger(__name__) +logger: logging.Logger = logging.getLogger(__name__) def load_features( @@ -64,7 +65,7 @@ def create_tracks_manager( instances: t.Dict[str, np.ndarray], matches: t.Dict[t.Tuple[str, str], 
t.List[t.Tuple[int, int]]], min_length: int, -): +) -> TracksManager: """Link matches into tracks.""" logger.debug("Merging features onto tracks") uf = UnionFind() diff --git a/opensfm/transformations.py b/opensfm/transformations.py index 1ce93c8f4..96bcd6bdc 100644 --- a/opensfm/transformations.py +++ b/opensfm/transformations.py @@ -186,7 +186,7 @@ """ import math -from typing import Optional, List, Tuple +from typing import Dict, Optional, List, Tuple import numpy @@ -195,7 +195,7 @@ __all__ = [] -def identity_matrix(): +def identity_matrix() -> numpy.ndarray: """Return 4x4 identity/unit matrix. >>> I = identity_matrix() @@ -1591,10 +1591,10 @@ def random_rotation_matrix(rand: Optional[numpy.ndarray] = None) -> numpy.ndarra # epsilon for testing whether a number is close to zero -_EPS = numpy.finfo(float).eps * 4.0 +_EPS: float = numpy.finfo(float).eps * 4.0 # axis sequences for Euler angles -_NEXT_AXIS = [1, 2, 0, 1] +_NEXT_AXIS: List[int] = [1, 2, 0, 1] # map axes strings to/from tuples of inner axis, parity, repetition, frame _AXES2TUPLE = { @@ -1624,7 +1624,7 @@ def random_rotation_matrix(rand: Optional[numpy.ndarray] = None) -> numpy.ndarra "rzyz": (2, 1, 1, 1), } -_TUPLE2AXES = dict((v, k) for k, v in _AXES2TUPLE.items()) +_TUPLE2AXES: Dict[Tuple[int, ...], str] = {v: k for k, v in _AXES2TUPLE.items()} def vector_norm( @@ -1833,7 +1833,7 @@ def is_same_transform(matrix0: numpy.ndarray, matrix1: numpy.ndarray) -> numpy.n return numpy.allclose(matrix0, matrix1) -def _import_module(name, package=None, warn=True, prefix="_py_", ignore="_"): +def _import_module(name: str, package: Optional[str]=None, warn: bool=True, prefix: str="_py_", ignore: str="_") -> Optional[bool]: """Try import all public attributes from module into global namespace. Existing attributes with name clashes are renamed with prefix. 
diff --git a/opensfm/undistort.py b/opensfm/undistort.py index 6e2053e8e..010752041 100644 --- a/opensfm/undistort.py +++ b/opensfm/undistort.py @@ -17,7 +17,7 @@ from opensfm.dataset import UndistortedDataSet from opensfm.dataset_base import DataSetBase -logger = logging.getLogger(__name__) +logger: logging.Logger = logging.getLogger(__name__) def undistort_reconstruction( diff --git a/opensfm/video.py b/opensfm/video.py index 17f1acbdd..903dda69b 100644 --- a/opensfm/video.py +++ b/opensfm/video.py @@ -1,6 +1,7 @@ import datetime import os from subprocess import Popen, PIPE +from typing import List import cv2 import dateutil.parser @@ -36,12 +37,12 @@ def import_video_with_gpx( video_file, gpx_file, output_path: str, - dx, + dx: float, dt=None, start_time=None, - visual: bool=False, + visual: bool = False, image_description=None, -): +) -> List[str]: points = geotag_from_gpx.get_lat_lon_time(gpx_file) diff --git a/viewer/server.py b/viewer/server.py index 78fd35795..30fdca95d 100644 --- a/viewer/server.py +++ b/viewer/server.py @@ -22,7 +22,7 @@ @app.route("/") -def index(): +def index() -> Response: return send_file(os.path.join(app.static_folder, "index.html")) From 26eeefa2cea934eab6cfe2b7a250089e17ffddfc Mon Sep 17 00:00:00 2001 From: Fabian Schenk Date: Tue, 5 Apr 2022 06:26:32 -0700 Subject: [PATCH 52/81] Remove unused code Summary: `video_end_time` was never used in the code, so I was able to remove the whole try/except block. Reviewed By: YanNoun Differential Revision: D35283729 fbshipit-source-id: c122354379058727a1e86dccee65549cc180b6ea --- opensfm/video.py | 11 ----------- 1 file changed, 11 deletions(-) diff --git a/opensfm/video.py b/opensfm/video.py index 903dda69b..a951240c2 100644 --- a/opensfm/video.py +++ b/opensfm/video.py @@ -60,17 +60,6 @@ def import_video_with_gpx( except Exception: print("Video recording timestamp not found. 
Using first GPS point time.") video_start_time = points[0][0] - try: - duration = Popen( - ["exiftool", "-MediaDuration", "-b", video_file], stdout=PIPE - ).stdout.read() - video_duration = float(duration) - video_end_time = video_start_time + datetime.timedelta( - seconds=video_duration - ) - except Exception: - print("Video end time not found. Using last GPS point time.") - video_end_time = points[-1][0] print("GPS track starts at: {}".format(points[0][0])) print("Video starts at: {}".format(video_start_time)) From 59e86685e609b6433b54033ffabadbe930ee98fd Mon Sep 17 00:00:00 2001 From: Yann Noutary Date: Wed, 6 Apr 2022 06:23:46 -0700 Subject: [PATCH 53/81] fat: log more GCP statistics before merge Summary: ThiS Diff adds a bit more statistics when using GCPs. Reviewed By: mlopezantequera Differential Revision: D35185660 fbshipit-source-id: 85b6122c77dee3d531af568a14a7030d60124a10 --- opensfm/reconstruction.py | 12 +++++++++--- 1 file changed, 9 insertions(+), 3 deletions(-) diff --git a/opensfm/reconstruction.py b/opensfm/reconstruction.py index 1e7ab54d5..8ec85a66c 100644 --- a/opensfm/reconstruction.py +++ b/opensfm/reconstruction.py @@ -56,7 +56,7 @@ def _add_gcp_to_bundle( gcp_vertical_sd: float, ) -> int: """Add Ground Control Points constraints to the bundle problem.""" - added_gcps = 0 + total_gcp_observations = 0 gcp_sd = np.array([gcp_horizontal_sd, gcp_horizontal_sd, gcp_vertical_sd]) for point in gcp: point_id = "gcp-" + point.id @@ -78,10 +78,13 @@ def _add_gcp_to_bundle( ba.add_point(point_id, coordinates, False) + current_error = 0 if point.lla: point_enu = reference.to_topocentric(*point.lla_vec) ba.add_point_prior(point_id, point_enu, gcp_sd, point.has_altitude) + current_error = np.linalg.norm(np.array(point_enu)-np.array(coordinates)) + gcp_observations = 0 for observation in point.observations: if observation.shot_id in shots: # TODO(pau): move this to a config or per point parameter. 
@@ -92,8 +95,11 @@ def _add_gcp_to_bundle( observation.projection, scale, ) - added_gcps += 1 - return added_gcps + gcp_observations += 1 + total_gcp_observations += gcp_observations + + logger.warning(f"Adding GCP {point_id} with {gcp_observations} observations with current error of {current_error} meters") + return total_gcp_observations def bundle( From 13a7470294fba0dff37272f5a5d7d70ee5ca1d81 Mon Sep 17 00:00:00 2001 From: Yann Noutary Date: Wed, 6 Apr 2022 06:23:46 -0700 Subject: [PATCH 54/81] fix: prevent reading of absent metadata Summary: This Diff adds check for avoiding reading absent `orientation` or `capture_time` metadata. Reviewed By: paulinus Differential Revision: D35289814 fbshipit-source-id: 99565cb3ebab6b7d36cecec4baeea6c57737305e --- opensfm/align.py | 3 +++ opensfm/reconstruction_helpers.py | 15 +++++++++++---- 2 files changed, 14 insertions(+), 4 deletions(-) diff --git a/opensfm/align.py b/opensfm/align.py index 171510c1e..0fa1fe1c5 100644 --- a/opensfm/align.py +++ b/opensfm/align.py @@ -375,6 +375,9 @@ def estimate_ground_plane( onplane, verticals = [], [] for shot in reconstruction.shots.values(): R = shot.pose.get_rotation_matrix() + if not shot.metadata.orientation.has_value: + continue + x, y, z = get_horizontal_and_vertical_directions( R, shot.metadata.orientation.value ) diff --git a/opensfm/reconstruction_helpers.py b/opensfm/reconstruction_helpers.py index 45a883396..07ccf5fae 100644 --- a/opensfm/reconstruction_helpers.py +++ b/opensfm/reconstruction_helpers.py @@ -87,8 +87,11 @@ def transform_acceleration_from_phone_to_image_axis( return [ix, iy, iz] -def shot_acceleration_in_image_axis(shot: pymap.Shot) -> List[float]: +def shot_acceleration_in_image_axis(shot: pymap.Shot) -> Optional[List[float]]: """Get or guess shot's acceleration.""" + if not shot.metadata.orientation.has_value: + return None + orientation = shot.metadata.orientation.value if not 1 <= orientation <= 8: logger.error( @@ -103,15 +106,17 @@ def 
shot_acceleration_in_image_axis(shot: pymap.Shot) -> List[float]: return guess_acceleration_from_orientation_tag(orientation) -def rotation_from_shot_metadata(shot: pymap.Shot) -> np.ndarray: +def rotation_from_shot_metadata(shot: pymap.Shot) -> Optional[np.ndarray]: rotation = rotation_from_angles(shot) if rotation is None: rotation = rotation_from_orientation_compass(shot) return rotation -def rotation_from_orientation_compass(shot: pymap.Shot) -> np.ndarray: +def rotation_from_orientation_compass(shot: pymap.Shot) -> Optional[np.ndarray]: up_vector = shot_acceleration_in_image_axis(shot) + if up_vector is None: + return None if shot.metadata.compass_angle.has_value: angle = shot.metadata.compass_angle.value else: @@ -162,7 +167,9 @@ def reconstruction_from_metadata( continue gps_pos = shot.metadata.gps_position.value - shot.pose.set_rotation_matrix(rotation_from_shot_metadata(shot)) + rotation = rotation_from_shot_metadata(shot) + if rotation is not None: + shot.pose.set_rotation_matrix(rotation) shot.pose.set_origin(gps_pos) shot.scale = 1.0 return reconstruction From cdcfe7fa82c6ecbe2cab91ae8eff3563b152c5b6 Mon Sep 17 00:00:00 2001 From: Yann Noutary Date: Wed, 6 Apr 2022 06:23:46 -0700 Subject: [PATCH 55/81] feat: add LLA to shift to cluster synthetic data Summary: This PR adds passing an optional LLA frame to image metadata construction. 
Reviewed By: fabianschenk Differential Revision: D35248983 fbshipit-source-id: be71122883cc269dce40c9551d0b23789b2eb1f8 --- opensfm/reconstruction_helpers.py | 1 + opensfm/src/bundle/src/bundle_adjuster.cc | 8 +- opensfm/src/geometry/similarity.h | 1 + opensfm/src/sfm/pysfm.pyi | 4 +- opensfm/src/sfm/python/pybind.cc | 2 +- opensfm/src/sfm/retriangulation.h | 4 +- opensfm/src/sfm/src/retriangulation.cc | 122 +++++++++++++++++----- 7 files changed, 107 insertions(+), 35 deletions(-) diff --git a/opensfm/reconstruction_helpers.py b/opensfm/reconstruction_helpers.py index 07ccf5fae..0373c4572 100644 --- a/opensfm/reconstruction_helpers.py +++ b/opensfm/reconstruction_helpers.py @@ -4,6 +4,7 @@ import numpy as np from opensfm import ( + geo, multiview, pygeometry, pymap, diff --git a/opensfm/src/bundle/src/bundle_adjuster.cc b/opensfm/src/bundle/src/bundle_adjuster.cc index 171dad2ab..a4b0ef38a 100644 --- a/opensfm/src/bundle/src/bundle_adjuster.cc +++ b/opensfm/src/bundle/src/bundle_adjuster.cc @@ -88,10 +88,10 @@ void BundleAdjuster::AddCamera(const std::string &id, // identity bias by default auto &bias_data = bias_ - .emplace( - std::piecewise_construct, std::forward_as_tuple(id), - std::forward_as_tuple( - id, geometry::Similarity(Vec3d::Zero(), Vec3d::Zero(), 1.0))) + .emplace(std::piecewise_construct, std::forward_as_tuple(id), + std::forward_as_tuple( + id, geometry::Similarity(Vec3d::Zero().eval(), + Vec3d::Zero().eval(), 1.0))) .first->second; bias_data.SetParametersToOptimize({}); } diff --git a/opensfm/src/geometry/similarity.h b/opensfm/src/geometry/similarity.h index 0e700e6df..43356a4a5 100644 --- a/opensfm/src/geometry/similarity.h +++ b/opensfm/src/geometry/similarity.h @@ -8,6 +8,7 @@ class Similarity { public: Similarity() = default; Similarity(const Vec3d& R, const Vec3d& t, double s) : Rt_(R, t), scale_(s) {} + Similarity(const Mat3d& R, const Vec3d& t, double s) : Rt_(R, t), scale_(s) {} Vec3d Translation() const { return 
Rt_.TranslationWorldToCamera(); } void SetTranslation(const Vec3d& t) { Rt_.SetWorldToCamTranslation(t); } diff --git a/opensfm/src/sfm/pysfm.pyi b/opensfm/src/sfm/pysfm.pyi index d957c13c2..43a45b978 100644 --- a/opensfm/src/sfm/pysfm.pyi +++ b/opensfm/src/sfm/pysfm.pyi @@ -13,7 +13,7 @@ __all__ = [ "BAHelpers", "add_connections", "count_tracks_per_shot", -"realign_points", +"realign_maps", "remove_connections" ] class BAHelpers: @@ -31,5 +31,5 @@ class BAHelpers: def shot_neighborhood_ids(arg0: opensfm.pymap.Map, arg1: str, arg2: int, arg3: int, arg4: int) -> Tuple[Set[str], Set[str]]: ... def add_connections(arg0: opensfm.pymap.TracksManager, arg1: str, arg2: List[str]) -> None:... def count_tracks_per_shot(arg0: opensfm.pymap.TracksManager, arg1: List[str], arg2: List[str]) -> Dict[str, int]:... -def realign_points(arg0: opensfm.pymap.Map, arg1: opensfm.pymap.Map) -> None:... +def realign_maps(arg0: opensfm.pymap.Map, arg1: opensfm.pymap.Map, arg2: bool) -> None:... def remove_connections(arg0: opensfm.pymap.TracksManager, arg1: str, arg2: List[str]) -> None:... 
diff --git a/opensfm/src/sfm/python/pybind.cc b/opensfm/src/sfm/python/pybind.cc index f22f387a7..6e6299282 100644 --- a/opensfm/src/sfm/python/pybind.cc +++ b/opensfm/src/sfm/python/pybind.cc @@ -30,6 +30,6 @@ PYBIND11_MODULE(pysfm, m) { .def_static("detect_alignment_constraints", &sfm::BAHelpers::DetectAlignmentConstraints); - m.def("realign_points", &sfm::retriangulation::RealignPoints, + m.def("realign_maps", &sfm::retriangulation::RealignMaps, py::call_guard()); } diff --git a/opensfm/src/sfm/retriangulation.h b/opensfm/src/sfm/retriangulation.h index 0c1d97d61..100a64ca0 100644 --- a/opensfm/src/sfm/retriangulation.h +++ b/opensfm/src/sfm/retriangulation.h @@ -4,7 +4,7 @@ namespace sfm { namespace retriangulation { -void RealignPoints(const map::Map& reference, - map::Map& to_align); +void RealignMaps(const map::Map& reference, map::Map& to_align, + bool update_points); } // namespace retriangulation } // namespace sfm diff --git a/opensfm/src/sfm/src/retriangulation.cc b/opensfm/src/sfm/src/retriangulation.cc index 495bb7df1..78c30c447 100644 --- a/opensfm/src/sfm/src/retriangulation.cc +++ b/opensfm/src/sfm/src/retriangulation.cc @@ -3,45 +3,115 @@ #include #include +#include namespace sfm { namespace retriangulation { -void RealignPoints(const map::Map& reference, map::Map& to_align) { - const auto& all_reference_shots = reference.GetShots(); - const auto& to_align_shots = to_align.GetShots(); - constexpr auto max_dbl = std::numeric_limits::max(); - - for (auto& lm : to_align.GetLandmarks()) { - const auto point = lm.second.GetGlobalPos(); - std::pair best_shot = std::make_pair(max_dbl, ""); - for (const auto& shot_n_obs : lm.second.GetObservations()) { - const auto shot = shot_n_obs.first; - if (all_reference_shots.find(shot->GetId()) == - all_reference_shots.end()) { +void RealignMaps(const map::Map& map_from, map::Map& map_to, + bool update_points) { + const auto& map_from_shots = map_from.GetShots(); + + const auto& from_ref = 
map_from.GetTopocentricConverter(); + const auto& to_ref = map_to.GetTopocentricConverter(); + const auto& from_to_offset = to_ref.ToTopocentric(from_ref.GetLlaRef()); + + // first, record transforms that remap points of 'to' + std::unordered_map from_to_transforms; + for (const auto& shot_to : map_to.GetShots()) { + if (!map_from.HasShot(shot_to.first)) { + continue; + } + const auto& shot_from = map_from.GetShot(shot_to.first); + auto shot_from_pose = *shot_from.GetPose(); + const auto shot_to_pose = *shot_to.second.GetPose(); + + // put 'from' in LLA of 'to' + shot_from_pose.SetOrigin(shot_from_pose.GetOrigin() + from_to_offset); + + // store the transform that map relative position in 'from' to 'to' : + // + // X_to' = 1 / scale_from * Rcw_from * Rwc_to * ( X_to - Oc_to ) + + // Oc_from_to + // + + const double scale = shot_from.scale != 0. ? (1.0 / shot_from.scale) : 1.0; + const Mat3d R_to_from = shot_from_pose.RotationCameraToWorld() * + shot_to_pose.RotationWorldToCamera(); + const Vec3d t_from_to = -scale * R_to_from * shot_to_pose.GetOrigin() + + shot_from_pose.GetOrigin(); + + from_to_transforms[shot_to.first] = + geometry::Similarity(R_to_from, t_from_to, scale); + } + + // remap points of 'to' using the computed transforms if needed + if (update_points) { + constexpr auto max_dbl = std::numeric_limits::max(); + for (auto& lm : map_to.GetLandmarks()) { + const auto point = lm.second.GetGlobalPos(); + std::pair best_shot = std::make_pair(max_dbl, ""); + for (const auto& shot_n_obs : lm.second.GetObservations()) { + const auto shot = shot_n_obs.first; + if (map_from_shots.find(shot->GetId()) == map_from_shots.end()) { + continue; + } + const Vec3d ray = point - shot->GetPose()->GetOrigin(); + const double dist2 = ray.squaredNorm(); + if (dist2 < best_shot.first) { + best_shot = std::make_pair(dist2, shot->GetId()); + } + } + + if (best_shot.first == max_dbl) { continue; } - const Vec3d ray = point - shot->GetPose()->GetOrigin(); - const double dist2 = 
ray.squaredNorm(); - if (dist2 < best_shot.first) { - best_shot = std::make_pair(dist2, shot->GetId()); + const auto reference_shot = best_shot.second; + if (from_to_transforms.find(reference_shot) == from_to_transforms.end()) { + continue; } + const auto transform = from_to_transforms.at(reference_shot); + lm.second.SetGlobalPos(transform.Transform(lm.second.GetGlobalPos())); } + } - if (best_shot.first == max_dbl) { + // finally, map shots and instances + std::unordered_set to_delete; + for (auto& shot_to : map_to.GetShots()) { + // remember any shot not in 'from' but in 'to' for further deletion + if (!map_from.HasShot(shot_to.first)) { + to_delete.insert(shot_to.first); continue; } - const auto reference_shot = best_shot.second; - const auto& shot_before = to_align_shots.at(reference_shot); - const auto& shot_after = all_reference_shots.at(reference_shot); + // copy cameras and some metadata + const auto& shot_from = map_from.GetShot(shot_to.first); + auto& camera_to = map_to.GetCamera(shot_to.second.GetCamera()->id); + camera_to.SetParametersValues(shot_from.GetCamera()->GetParametersValues()); + + shot_to.second.scale = shot_from.scale; + shot_to.second.merge_cc = shot_from.merge_cc; + } + + // only map rig instances (assuming rig camera didn't change) + for (auto& rig_instance_to : map_to.GetRigInstances()) { + for (const auto& any_shot : rig_instance_to.second.GetShots()) { + if (map_from_shots.find(any_shot.first) != map_from_shots.end()) { + const auto& any_shot_from = map_from_shots.at(any_shot.first); + auto& to_pose = rig_instance_to.second.GetPose(); + + // assign 'from' pose + to_pose = any_shot_from.GetRigInstance()->GetPose(); - const Mat3d R = shot_after.GetPose()->RotationCameraToWorld() * - shot_before.GetPose()->RotationWorldToCamera(); - const double s = shot_after.scale != 0. ? 
(1.0 / shot_after.scale) : 1.0; + // put 'from' to 'to' LLA + to_pose.SetOrigin(to_pose.GetOrigin() + from_to_offset); + break; + } + } + } - const auto oc_before = shot_before.GetPose()->GetOrigin(); - const auto oc_after = shot_after.GetPose()->GetOrigin(); - lm.second.SetGlobalPos(s * R * (point - oc_before) + oc_after); + // delete any shot not in 'from' but in 'to' + for (const auto& shot_id : to_delete) { + map_to.RemoveShot(shot_id); } } } // namespace retriangulation From 38a1d72e848be145cee63c357627149edddc7fd2 Mon Sep 17 00:00:00 2001 From: Manuel Lopez Antequera Date: Wed, 6 Apr 2022 08:00:19 -0700 Subject: [PATCH 56/81] Use type comment for PathLike[_] types Summary: https://github.com/python/mypy/issues/5667 https://github.com/python/typeshed/issues/3202 Reviewed By: fabianschenk Differential Revision: D35423993 fbshipit-source-id: 73e2027ea425eb629d6faa864aa0829d59997b07 --- annotation_gui_gcp/main.py | 2 +- opensfm/geotag_from_gpx.py | 20 +++++++++++--------- 2 files changed, 12 insertions(+), 10 deletions(-) diff --git a/annotation_gui_gcp/main.py b/annotation_gui_gcp/main.py index df33f4925..aad365118 100644 --- a/annotation_gui_gcp/main.py +++ b/annotation_gui_gcp/main.py @@ -91,7 +91,7 @@ def load_rig_assignments(root: Path) -> t.Dict[str, t.List[str]]: def load_sequence_database_from_file( root: Path, - fname: Union[PathLike[str], str] = "sequence_database.json", + fname: Union["PathLike[str]", str] = "sequence_database.json", skip_missing: bool = False, ): """ diff --git a/opensfm/geotag_from_gpx.py b/opensfm/geotag_from_gpx.py index 7bf29b851..69bf88385 100644 --- a/opensfm/geotag_from_gpx.py +++ b/opensfm/geotag_from_gpx.py @@ -1,13 +1,12 @@ #!/usr/bin/python3 import datetime -from typing import List, Union - import math import os import shutil import sys import time +from typing import List, Union import numpy as np from opensfm import geo @@ -16,6 +15,7 @@ try: # pyre-fixme[21]: Could not find module `pyexiv2`. 
import pyexiv2 + # pyre-fixme[21]: Could not find module `pyexiv2.utils`. from pyexiv2.utils import make_fraction except ImportError: @@ -57,7 +57,7 @@ def utc_to_localtime(utc_time): return utc_time - utc_offset_timedelta -def get_lat_lon_time(gpx_file, gpx_time: str="utc"): +def get_lat_lon_time(gpx_file, gpx_time: str = "utc"): """ Read location and time stamps from a track in a GPX file. @@ -84,7 +84,9 @@ def get_lat_lon_time(gpx_file, gpx_time: str="utc"): return points -def compute_bearing(start_lat: float, start_lon: float, end_lat: float, end_lon: float) -> float: +def compute_bearing( + start_lat: float, start_lon: float, end_lat: float, end_lon: float +) -> float: """ Get the compass bearing from start to end. @@ -245,13 +247,13 @@ def sample_gpx(points, dx: float, dt=None): def add_gps_to_exif( - filename: Union[os.PathLike[str], str], + filename: Union["os.PathLike[str]", str], lat, lon, bearing, elevation, - updated_filename: Union[None, os.PathLike[str], str]=None, - remove_image_description: bool=True, + updated_filename: Union[None, "os.PathLike[str]", str] = None, + remove_image_description: bool = True, ) -> None: """ Given lat, lon, bearing, elevation, write to EXIF @@ -304,9 +306,9 @@ def add_gps_to_exif( def add_exif_using_timestamp( filename, points, - offset_time: int=0, + offset_time: int = 0, timestamp=None, - orientation: int=1, + orientation: int = 1, image_description=None, ) -> None: """ From bb8f39031e2dd7c8bb20dcb08799514f09642ab3 Mon Sep 17 00:00:00 2001 From: Fabian Schenk Date: Thu, 7 Apr 2022 01:00:39 -0700 Subject: [PATCH 57/81] Fix return correctly by const & Summary: Return the `Shots` and `Cameras` as `const & const` to avoid an unnecessary copy. 
Reviewed By: YanNoun Differential Revision: D35398706 fbshipit-source-id: 83afff62f0a5e630242d73910d4051d7bc92d4d6 --- opensfm/src/map/rig.h | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/opensfm/src/map/rig.h b/opensfm/src/map/rig.h index aaede37c0..c355fd2bf 100644 --- a/opensfm/src/map/rig.h +++ b/opensfm/src/map/rig.h @@ -44,14 +44,14 @@ class RigInstance { explicit RigInstance(const RigInstanceId instance_id) : id(instance_id) {} // Getters - std::unordered_map GetShots() const { + const std::unordered_map& GetShots() const { return shots_; } std::unordered_map& GetShots() { return shots_; } std::unordered_map& GetRigCameras() { return shots_rig_cameras_; } - std::unordered_map GetRigCameras() const { + const std::unordered_map& GetRigCameras() const { return shots_rig_cameras_; } std::set GetShotIDs() const; From 151c3f5d31d611f2b6634a82247e7c414f4878e7 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Manuel=20L=C3=B3pez=20Antequera?= Date: Thu, 7 Apr 2022 05:16:13 -0700 Subject: [PATCH 58/81] Bump sphinx and networkx (#888) Summary: Pull Request resolved: https://github.com/mapillary/OpenSfM/pull/888 Reviewed By: YanNoun Differential Revision: D35459883 Pulled By: mlopezantequera fbshipit-source-id: 0e61c7f3f851428707462d1df9a3f68e3c14db12 --- requirements.txt | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/requirements.txt b/requirements.txt index 1ee418560..afc04e5cb 100644 --- a/requirements.txt +++ b/requirements.txt @@ -4,7 +4,7 @@ flask==1.1.2 fpdf2==2.4.6 joblib==0.14.1 matplotlib -networkx==1.11 +networkx==2.5 numpy Pillow>=8.1.1 pyproj>=1.9.5.1 @@ -12,7 +12,7 @@ pytest==3.0.7 python-dateutil>=2.7 pyyaml==5.4 scipy -Sphinx==3.4.3 +Sphinx==4.0.2 six xmltodict==0.10.2 wheel From 9ca7817de21462620c73f3de4ffd69d8511187c6 Mon Sep 17 00:00:00 2001 From: Pyre Bot Jr <> Date: Tue, 12 Apr 2022 14:05:53 -0700 Subject: [PATCH 59/81] suppress errors in `fbcode/mapillary` - batch 1 Differential Revision: D35591349 
fbshipit-source-id: 9a3e9410a4b61664bfb13383c0f737bdb9aafb6e --- opensfm/report.py | 1 + viewer/server.py | 1 + 2 files changed, 2 insertions(+) diff --git a/opensfm/report.py b/opensfm/report.py index b1bf5f859..31cb47186 100644 --- a/opensfm/report.py +++ b/opensfm/report.py @@ -39,6 +39,7 @@ def __init__(self, data: DataSet) -> None: self.stats = self._read_stats_file("stats.json") def save_report(self, filename: str) -> None: + # pyre-fixme[28]: Unexpected keyword argument `dest`. bytestring = self.pdf.output(dest="S") if isinstance(bytestring, str): bytestring = bytestring.encode("utf8") diff --git a/viewer/server.py b/viewer/server.py index 30fdca95d..bddd1ba13 100644 --- a/viewer/server.py +++ b/viewer/server.py @@ -83,6 +83,7 @@ def verified_send(file) -> Response: if os.path.isfile(file): return send_file(file) else: + # pyre-fixme[7]: Expected `Response` but got implicit return value of `None`. abort(404) From 3cadf32a49c9014093539310ab034e3a0aa05313 Mon Sep 17 00:00:00 2001 From: Manuel Lopez Antequera Date: Wed, 20 Apr 2022 08:16:17 -0700 Subject: [PATCH 60/81] Use control points by default Reviewed By: paulinus Differential Revision: D35748226 fbshipit-source-id: 78f8b89de0f3258a431cebe711464541a19008f1 --- opensfm/config.py | 8 +++++--- 1 file changed, 5 insertions(+), 3 deletions(-) diff --git a/opensfm/config.py b/opensfm/config.py index 8815dbc66..38d647163 100644 --- a/opensfm/config.py +++ b/opensfm/config.py @@ -1,8 +1,8 @@ import os from dataclasses import dataclass, asdict +from typing import Any, Dict, IO, Union import yaml -from typing import Any, Dict, IO, Union @dataclass @@ -287,7 +287,7 @@ class OpenSfMConfig: # Enforce GPS position in bundle adjustment bundle_use_gps: bool = True # Enforce Ground Control Point position in bundle adjustment - bundle_use_gcp: bool = False + bundle_use_gcp: bool = True # Compensate GPS with a per-camera similarity transform bundle_compensate_gps_bias: bool = False @@ -376,7 +376,9 @@ def 
load_config(filepath) -> Dict[str, Any]: return load_config_from_fileobject(fin) -def load_config_from_fileobject(f: Union[IO[bytes], IO[str], bytes, str]) -> Dict[str, Any]: +def load_config_from_fileobject( + f: Union[IO[bytes], IO[str], bytes, str] +) -> Dict[str, Any]: """Load config from a config.yaml fileobject""" config = default_config() From 6eeec43c261da2c3cba9eb9ef4fc7f0efc2dc8a8 Mon Sep 17 00:00:00 2001 From: Pyre Bot Jr <> Date: Tue, 3 May 2022 19:55:36 -0700 Subject: [PATCH 61/81] suppress errors in `fbcode/mapillary` - batch 1 Differential Revision: D36120723 fbshipit-source-id: d4b0aa0fde0b02b41874e65bfd252b660d8db016 --- opensfm/io.py | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/opensfm/io.py b/opensfm/io.py index 140c024d5..04d4f7112 100644 --- a/opensfm/io.py +++ b/opensfm/io.py @@ -1352,6 +1352,7 @@ def __init__(self) -> None: @classmethod def exists(cls, path: str) -> str: + # pyre-fixme[7]: Expected `str` but got `bool`. return os.path.exists(path) @classmethod @@ -1360,10 +1361,12 @@ def ls(cls, path: str) -> List[str]: @classmethod def isfile(cls, path: str) -> str: + # pyre-fixme[7]: Expected `str` but got `bool`. return os.path.isfile(path) @classmethod def isdir(cls, path: str) -> str: + # pyre-fixme[7]: Expected `str` but got `bool`. return os.path.isdir(path) @classmethod @@ -1419,4 +1422,5 @@ def image_size(cls, path: str) -> Tuple[int, int]: @classmethod def timestamp(cls, path: str) -> str: + # pyre-fixme[7]: Expected `str` but got `float`. return os.path.getmtime(path) From 969154c0ffca3296b6a1d8de0bf80769fb9c973d Mon Sep 17 00:00:00 2001 From: Yann Noutary Date: Thu, 5 May 2022 00:25:38 -0700 Subject: [PATCH 62/81] feat: add robust to common positions costs Summary: This Diff adds a Tukey robustifier to common position constraints. Common position constraints historically helped with rigs that were not parametrized a such (aka. 
soft rigs), but can occasionally mess up if for some reason the device returns identical timestamps/LLA for different images, which can also happen (happened with MapSpots, happened with a Rover chunk), resulting in the scene being "shrunk" (trivial solution to reduce the error is to shrink the scene to zero) One could also remove the common position constraint, as its use is questionable and ad-hoc, but adding a Tukey loss makes it work. Reviewed By: fabianschenk Differential Revision: D35965827 fbshipit-source-id: 069f93fee4f522f310d4fd7ad0ba858bfbe2681d --- opensfm/src/bundle/src/bundle_adjuster.cc | 7 ++++++- 1 file changed, 6 insertions(+), 1 deletion(-) diff --git a/opensfm/src/bundle/src/bundle_adjuster.cc b/opensfm/src/bundle/src/bundle_adjuster.cc index a4b0ef38a..f9a3684bd 100644 --- a/opensfm/src/bundle/src/bundle_adjuster.cc +++ b/opensfm/src/bundle/src/bundle_adjuster.cc @@ -854,7 +854,11 @@ void BundleAdjuster::Run() { } // Add common position errors + ceres::LossFunction *common_position_loss = nullptr; for (auto &c : common_positions_) { + if (common_position_loss == nullptr) { + common_position_loss = new ceres::TukeyLoss(1); + } auto *common_position = new CommonPositionError(c.margin, c.std_deviation); auto *cost_function = new ceres::DynamicAutoDiffCostFunction( @@ -888,7 +892,8 @@ void BundleAdjuster::Run() { common_position->shot_i_rig_camera_index_; } } - problem.AddResidualBlock(cost_function, nullptr, parameter_blocks); + problem.AddResidualBlock(cost_function, common_position_loss, + parameter_blocks); } // Add heatmap cost From 2dc6d97b1f5819296c8c142dd34f46793d265fdd Mon Sep 17 00:00:00 2001 From: Yann Noutary Date: Thu, 5 May 2022 00:25:38 -0700 Subject: [PATCH 63/81] fix: handle no altitude GCPs gracefully Summary: This diff makes the `align` triangulation handle GCPs without altitude well, by enforcing the same altitude (and expecting the subsequent computed transform to lie in the 2D subspace).
Reviewed By: mlopezantequera Differential Revision: D35818829 fbshipit-source-id: 6ca3f82db6f25f61b5183f9b8b97fa16020efcc6 --- opensfm/align.py | 8 ++++++-- 1 file changed, 6 insertions(+), 2 deletions(-) diff --git a/opensfm/align.py b/opensfm/align.py index 0fa1fe1c5..630da3d21 100644 --- a/opensfm/align.py +++ b/opensfm/align.py @@ -449,8 +449,12 @@ def triangulate_all_gcp( reproj_threshold=0.004, min_ray_angle_degrees=2.0, ) - if x is not None: + if x is not None and len(point.lla): + point_enu = np.array( + reconstruction.reference.to_topocentric(*point.lla_vec) + ) + if not point.has_altitude: + point_enu[2] = x[2] = 0.0 triangulated.append(x) - point_enu = reconstruction.reference.to_topocentric(*point.lla_vec) measured.append(point_enu) return triangulated, measured From 87c7cb6399083d3955639612f4646fbd6969a4f9 Mon Sep 17 00:00:00 2001 From: Yann Noutary Date: Thu, 5 May 2022 00:25:38 -0700 Subject: [PATCH 64/81] refactor: unify GCP triangulation Summary: This Diff unifies GCP triangulation by removing old Python code and using the C++ counterpart instead. We also had to rewrite/add some GCP error logging function. 
Reviewed By: fabianschenk Differential Revision: D35818830 fbshipit-source-id: b97b744415fd50f946ec3743be3dd36904ba6ecc --- opensfm/reconstruction.py | 55 ------------------------------- opensfm/src/sfm/ba_helpers.h | 10 +++--- opensfm/src/sfm/pysfm.pyi | 2 ++ opensfm/src/sfm/python/pybind.cc | 3 +- opensfm/src/sfm/src/ba_helpers.cc | 33 +++++++++---------- 5 files changed, 24 insertions(+), 79 deletions(-) diff --git a/opensfm/reconstruction.py b/opensfm/reconstruction.py index 8ec85a66c..05bd9bf51 100644 --- a/opensfm/reconstruction.py +++ b/opensfm/reconstruction.py @@ -47,61 +47,6 @@ def _get_camera_from_bundle( camera.set_parameter_value(k, v) -def _add_gcp_to_bundle( - ba: pybundle.BundleAdjuster, - reference: types.TopocentricConverter, - gcp: List[pymap.GroundControlPoint], - shots: Dict[str, pymap.Shot], - gcp_horizontal_sd: float, - gcp_vertical_sd: float, -) -> int: - """Add Ground Control Points constraints to the bundle problem.""" - total_gcp_observations = 0 - gcp_sd = np.array([gcp_horizontal_sd, gcp_horizontal_sd, gcp_vertical_sd]) - for point in gcp: - point_id = "gcp-" + point.id - - coordinates = multiview.triangulate_gcp( - point, - shots, - reproj_threshold=1, - min_ray_angle_degrees=0.1, - ) - if coordinates is None: - if point.lla: - coordinates = reference.to_topocentric(*point.lla_vec) - else: - logger.warning( - "Cannot initialize GCP '{}'." " Ignoring it".format(point.id) - ) - continue - - ba.add_point(point_id, coordinates, False) - - current_error = 0 - if point.lla: - point_enu = reference.to_topocentric(*point.lla_vec) - ba.add_point_prior(point_id, point_enu, gcp_sd, point.has_altitude) - current_error = np.linalg.norm(np.array(point_enu)-np.array(coordinates)) - - gcp_observations = 0 - for observation in point.observations: - if observation.shot_id in shots: - # TODO(pau): move this to a config or per point parameter. 
- scale = 0.0001 - ba.add_point_projection_observation( - observation.shot_id, - point_id, - observation.projection, - scale, - ) - gcp_observations += 1 - total_gcp_observations += gcp_observations - - logger.warning(f"Adding GCP {point_id} with {gcp_observations} observations with current error of {current_error} meters") - return total_gcp_observations - - def bundle( reconstruction: types.Reconstruction, camera_priors: Dict[str, pygeometry.Camera], diff --git a/opensfm/src/sfm/ba_helpers.h b/opensfm/src/sfm/ba_helpers.h index 467b8692f..e0c8552d1 100644 --- a/opensfm/src/sfm/ba_helpers.h +++ b/opensfm/src/sfm/ba_helpers.h @@ -52,15 +52,15 @@ class BAHelpers { const map::Map& map, const py::dict& config, const AlignedVector& gcp); + static size_t AddGCPToBundle( + bundle::BundleAdjuster& ba, const map::Map& map, + const AlignedVector& gcp, + const py::dict& config); + private: static std::unordered_set DirectShotNeighbors( map::Map& map, const std::unordered_set& shot_ids, const size_t min_common_points, const size_t max_neighbors); - static void AddGCPToBundle( - bundle::BundleAdjuster& ba, const geo::TopocentricConverter& reference, - const AlignedVector& gcp, - const std::unordered_map& shots, - const double& horizontal_sigma, const double& vertical_sigma); static bool TriangulateGCP( const map::GroundControlPoint& point, const std::unordered_map& shots, diff --git a/opensfm/src/sfm/pysfm.pyi b/opensfm/src/sfm/pysfm.pyi index 43a45b978..78b943472 100644 --- a/opensfm/src/sfm/pysfm.pyi +++ b/opensfm/src/sfm/pysfm.pyi @@ -17,6 +17,8 @@ __all__ = [ "remove_connections" ] class BAHelpers: + @staticmethod + def add_gcp_to_bundle(arg0: opensfm.pybundle.BundleAdjuster, arg1: opensfm.pymap.Map, arg2: List[opensfm.pymap.GroundControlPoint], arg3: dict) -> int: ... @staticmethod def bundle(arg0: opensfm.pymap.Map, arg1: Dict[str, opensfm.pygeometry.Camera], arg2: Dict[str, opensfm.pymap.RigCamera], arg3: List[opensfm.pymap.GroundControlPoint], arg4: dict) -> dict: ... 
@staticmethod diff --git a/opensfm/src/sfm/python/pybind.cc b/opensfm/src/sfm/python/pybind.cc index 6e6299282..77466e1dd 100644 --- a/opensfm/src/sfm/python/pybind.cc +++ b/opensfm/src/sfm/python/pybind.cc @@ -28,7 +28,8 @@ PYBIND11_MODULE(pysfm, m) { .def_static("bundle_to_map", &sfm::BAHelpers::BundleToMap) .def_static("shot_neighborhood_ids", &sfm::BAHelpers::ShotNeighborhoodIds) .def_static("detect_alignment_constraints", - &sfm::BAHelpers::DetectAlignmentConstraints); + &sfm::BAHelpers::DetectAlignmentConstraints) + .def_static("add_gcp_to_bundle", &sfm::BAHelpers::AddGCPToBundle); m.def("realign_maps", &sfm::retriangulation::RealignMaps, py::call_guard()); diff --git a/opensfm/src/sfm/src/ba_helpers.cc b/opensfm/src/sfm/src/ba_helpers.cc index e5d680086..04f2ab6fc 100644 --- a/opensfm/src/sfm/src/ba_helpers.cc +++ b/opensfm/src/sfm/src/ba_helpers.cc @@ -241,10 +241,7 @@ py::tuple BAHelpers::BundleLocal( } if (config["bundle_use_gcp"].cast() && !gcp.empty()) { - const auto& reference = map.GetTopocentricConverter(); - AddGCPToBundle(ba, reference, gcp, map.GetShots(), - config["gcp_horizontal_sd"].cast(), - config["gcp_vertical_sd"].cast()); + AddGCPToBundle(ba, map, gcp, config); } ba.SetPointProjectionLossFunction( @@ -343,12 +340,12 @@ bool BAHelpers::TriangulateGCP( } // Add Ground Control Points constraints to the bundle problem -void BAHelpers::AddGCPToBundle( - bundle::BundleAdjuster& ba, - const geo::TopocentricConverter& reference, - const AlignedVector& gcp, - const std::unordered_map& shots, - const double& horizontal_sigma, const double& vertical_sigma) { +size_t BAHelpers::AddGCPToBundle( + bundle::BundleAdjuster& ba, const map::Map& map, + const AlignedVector& gcp, const py::dict& config) { + const auto& reference = map.GetTopocentricConverter(); + const auto& shots = map.GetShots(); + size_t added_gcp_observations = 0; for (const auto& point : gcp) { const auto point_id = "gcp-" + point.id_; Vec3d coordinates; @@ -362,10 +359,11 @@ void 
BAHelpers::AddGCPToBundle( constexpr auto point_constant{false}; ba.AddPoint(point_id, coordinates, point_constant); if (!point.lla_.empty()) { - ba.AddPointPrior( - point_id, reference.ToTopocentric(point.GetLlaVec3d()), - Vec3d(horizontal_sigma, horizontal_sigma, vertical_sigma), - point.has_altitude_); + const auto point_std = Vec3d(config["gcp_horizontal_sd"].cast(), + config["gcp_horizontal_sd"].cast(), + config["gcp_vertical_sd"].cast()); + ba.AddPointPrior(point_id, reference.ToTopocentric(point.GetLlaVec3d()), + point_std, point.has_altitude_); } // Now iterate through the observations @@ -375,9 +373,11 @@ void BAHelpers::AddGCPToBundle( constexpr double scale{0.0001}; ba.AddPointProjectionObservation(shot_id, point_id, obs.projection_, scale); + ++added_gcp_observations; } } } + return added_gcp_observations; } py::dict BAHelpers::BundleShotPoses( @@ -669,11 +669,8 @@ py::dict BAHelpers::Bundle( } } - const auto& reference = map.GetTopocentricConverter(); if (config["bundle_use_gcp"].cast() && !gcp.empty()) { - AddGCPToBundle(ba, reference, gcp, map.GetShots(), - config["gcp_horizontal_sd"].cast(), - config["gcp_vertical_sd"].cast()); + AddGCPToBundle(ba, map, gcp, config); } if (config["bundle_compensate_gps_bias"].cast()) { From 0cbdab78be2deacf8ce1a2d95435364128c9a484 Mon Sep 17 00:00:00 2001 From: Yann Noutary Date: Thu, 5 May 2022 00:25:38 -0700 Subject: [PATCH 65/81] feat: add global weighting of GCPs Summary: This Diff adds a global weighting factor to GCP term so that they weigh 2% of the sum of other dominant terms (mostly reprojection, relatives and GPS). 
Reviewed By: mlopezantequera Differential Revision: D35818827 fbshipit-source-id: a3e61fc704119d3e3c1ab6224e3b0904b5c253fc --- opensfm/config.py | 2 ++ opensfm/src/bundle/bundle_adjuster.h | 2 ++ opensfm/src/bundle/src/bundle_adjuster.cc | 8 +++++++ opensfm/src/sfm/src/ba_helpers.cc | 26 ++++++++++++++++++++--- 4 files changed, 35 insertions(+), 3 deletions(-) diff --git a/opensfm/config.py b/opensfm/config.py index 38d647163..eb7d0af02 100644 --- a/opensfm/config.py +++ b/opensfm/config.py @@ -240,6 +240,8 @@ class OpenSfMConfig: gcp_horizontal_sd: float = 0.01 # The default vertical standard deviation of the GCPs (in meters) gcp_vertical_sd: float = 0.1 + # Global weight for GCPs, expressed a ratio of the sum of (# projections) + (# shots) + (# relative motions) + gcp_global_weight: float = 0.04 # The standard deviation of the rig translation rig_translation_sd: float = 0.1 # The standard deviation of the rig rotation diff --git a/opensfm/src/bundle/bundle_adjuster.h b/opensfm/src/bundle/bundle_adjuster.h index 8fa00da41..aea287a86 100644 --- a/opensfm/src/bundle/bundle_adjuster.h +++ b/opensfm/src/bundle/bundle_adjuster.h @@ -265,6 +265,8 @@ class BundleAdjuster { void ComputeReprojectionErrors(); // Getters + int GetProjectionsCount() const; + int GetRelativeMotionsCount() const; geometry::Camera GetCamera(const std::string &id) const; geometry::Similarity GetBias(const std::string &id) const; Reconstruction GetReconstruction(const std::string &reconstruction_id) const; diff --git a/opensfm/src/bundle/src/bundle_adjuster.cc b/opensfm/src/bundle/src/bundle_adjuster.cc index f9a3684bd..1f75a4151 100644 --- a/opensfm/src/bundle/src/bundle_adjuster.cc +++ b/opensfm/src/bundle/src/bundle_adjuster.cc @@ -1165,6 +1165,14 @@ void BundleAdjuster::ComputeReprojectionErrors() { } } +int BundleAdjuster::GetProjectionsCount() const { + return point_projection_observations_.size(); +} + +int BundleAdjuster::GetRelativeMotionsCount() const { + return relative_motions_.size(); 
+} + geometry::Camera BundleAdjuster::GetCamera(const std::string &id) const { if (cameras_.find(id) == cameras_.end()) { throw std::runtime_error("Camera " + id + " doesn't exists"); diff --git a/opensfm/src/sfm/src/ba_helpers.cc b/opensfm/src/sfm/src/ba_helpers.cc index 04f2ab6fc..498087b93 100644 --- a/opensfm/src/sfm/src/ba_helpers.cc +++ b/opensfm/src/sfm/src/ba_helpers.cc @@ -345,6 +345,26 @@ size_t BAHelpers::AddGCPToBundle( const AlignedVector& gcp, const py::dict& config) { const auto& reference = map.GetTopocentricConverter(); const auto& shots = map.GetShots(); + + const auto dominant_terms = ba.GetRigInstances().size() + + ba.GetProjectionsCount() + + ba.GetRelativeMotionsCount(); + + size_t total_terms = 0; + for (const auto& point : gcp) { + Vec3d coordinates; + if (TriangulateGCP(point, shots, coordinates) || !point.lla_.empty()) { + ++total_terms; + } + for (const auto& obs : point.observations_) { + total_terms += (shots.count(obs.shot_id_) > 0); + } + } + + const double global_weight = config["gcp_global_weight"].cast() * + dominant_terms / + std::max(1, total_terms); + size_t added_gcp_observations = 0; for (const auto& point : gcp) { const auto point_id = "gcp-" + point.id_; @@ -363,16 +383,16 @@ size_t BAHelpers::AddGCPToBundle( config["gcp_horizontal_sd"].cast(), config["gcp_vertical_sd"].cast()); ba.AddPointPrior(point_id, reference.ToTopocentric(point.GetLlaVec3d()), - point_std, point.has_altitude_); + point_std / global_weight, point.has_altitude_); } // Now iterate through the observations for (const auto& obs : point.observations_) { const auto& shot_id = obs.shot_id_; if (shots.count(shot_id) > 0) { - constexpr double scale{0.0001}; + constexpr double scale{0.001}; ba.AddPointProjectionObservation(shot_id, point_id, obs.projection_, - scale); + scale / global_weight); ++added_gcp_observations; } } From 6ea555acc1aeb579a5a4e34547a36a4756c71312 Mon Sep 17 00:00:00 2001 From: Yann Noutary Date: Thu, 5 May 2022 00:25:38 -0700 Subject: 
[PATCH 66/81] fix: unify GCP triangulation Summary: This Diff unifies GCP triangulation thresholds: 20 pixels for 1K side images (we want GCPs to push images, so we assume reprojection error might be big at initial triangulation), and 1.0 degrees for ray angles. Reviewed By: mlopezantequera Differential Revision: D35818828 fbshipit-source-id: 063ade703caeff3fc735402d5caa78c4c81b7c85 --- opensfm/align.py | 2 -- opensfm/multiview.py | 6 +++--- 2 files changed, 3 insertions(+), 5 deletions(-) diff --git a/opensfm/align.py b/opensfm/align.py index 630da3d21..de7dcab32 100644 --- a/opensfm/align.py +++ b/opensfm/align.py @@ -446,8 +446,6 @@ def triangulate_all_gcp( x = multiview.triangulate_gcp( point, reconstruction.shots, - reproj_threshold=0.004, - min_ray_angle_degrees=2.0, ) if x is not None and len(point.lla): point_enu = np.array( diff --git a/opensfm/multiview.py b/opensfm/multiview.py index d9877e61e..9e9fab9fe 100644 --- a/opensfm/multiview.py +++ b/opensfm/multiview.py @@ -207,7 +207,7 @@ def __init__(self, x: np.ndarray, y: np.ndarray) -> None: def num_samples(self) -> int: return len(self.x) - def fit(self, samples: np.ndarray)->List[float]: + def fit(self, samples: np.ndarray) -> List[float]: x = self.x[samples[0]] y = self.y[samples[0]] return [y / x] @@ -682,8 +682,8 @@ def relative_pose_optimize_nonlinear( def triangulate_gcp( point: pymap.GroundControlPoint, shots: Dict[str, pymap.Shot], - reproj_threshold: float, - min_ray_angle_degrees: float, + reproj_threshold: float = 0.02, + min_ray_angle_degrees: float = 1.0, ) -> Optional[np.ndarray]: """Compute the reconstructed position of a GCP from observations.""" From 55e84025e5886b706a637b8f679a5b74736014bb Mon Sep 17 00:00:00 2001 From: Dark Knight <> Date: Thu, 5 May 2022 11:29:35 -0700 Subject: [PATCH 67/81] Revert D35818828: Multisect successfully blamed D35818827 for test or build failures Summary: This diff is reverting D35818828
(https://github.com/mapillary/opensfm/commit/6ea555acc1aeb579a5a4e34547a36a4756c71312) D35818827 (https://github.com/mapillary/opensfm/commit/0cbdab78be2deacf8ce1a2d95435364128c9a484) has been identified to be causing the following test or build failures: Tests affected: - https://www.internalfb.com/intern/test/281475012228841/ Here's the Multisect link: https://www.internalfb.com/intern/testinfra/multisect/903691 Here are the tasks that are relevant to this breakage: T119313786: 1 test started failing for oncall mapillary_sfm_oncall in the last 2 weeks We're generating a revert to back out the changes in this diff, please note the backout may land if someone accepts it. Reviewed By: YanNoun Differential Revision: D36167541 fbshipit-source-id: f30f71281fd53668f76bee762c2bf4723c6defea --- opensfm/align.py | 2 ++ opensfm/multiview.py | 6 +++--- 2 files changed, 5 insertions(+), 3 deletions(-) diff --git a/opensfm/align.py b/opensfm/align.py index de7dcab32..630da3d21 100644 --- a/opensfm/align.py +++ b/opensfm/align.py @@ -446,6 +446,8 @@ def triangulate_all_gcp( x = multiview.triangulate_gcp( point, reconstruction.shots, + reproj_threshold=0.004, + min_ray_angle_degrees=2.0, ) if x is not None and len(point.lla): point_enu = np.array( diff --git a/opensfm/multiview.py b/opensfm/multiview.py index 9e9fab9fe..d9877e61e 100644 --- a/opensfm/multiview.py +++ b/opensfm/multiview.py @@ -207,7 +207,7 @@ def __init__(self, x: np.ndarray, y: np.ndarray) -> None: def num_samples(self) -> int: return len(self.x) - def fit(self, samples: np.ndarray) -> List[float]: + def fit(self, samples: np.ndarray)->List[float]: x = self.x[samples[0]] y = self.y[samples[0]] return [y / x] @@ -682,8 +682,8 @@ def relative_pose_optimize_nonlinear( def triangulate_gcp( point: pymap.GroundControlPoint, shots: Dict[str, pymap.Shot], - reproj_threshold: float = 0.02, - min_ray_angle_degrees: float = 1.0, + reproj_threshold: float, + min_ray_angle_degrees: float, ) -> Optional[np.ndarray]: 
"""Compute the reconstructed position of a GCP from observations.""" From 4a6d685936f3ff199d399721a1f7112f8bc20d8b Mon Sep 17 00:00:00 2001 From: Dark Knight <> Date: Fri, 6 May 2022 00:22:16 -0700 Subject: [PATCH 68/81] Revert D35818827: Multisect successfully blamed D35818827 for test or build failures Summary: This diff is reverting D35818827 (https://github.com/mapillary/opensfm/commit/0cbdab78be2deacf8ce1a2d95435364128c9a484) Depends on D36167541 (https://github.com/mapillary/opensfm/commit/55e84025e5886b706a637b8f679a5b74736014bb) D35818827 (https://github.com/mapillary/opensfm/commit/0cbdab78be2deacf8ce1a2d95435364128c9a484) has been identified to be causing the following test or build failures: Tests affected: - https://www.internalfb.com/intern/test/281475012228841/ Here's the Multisect link: https://www.internalfb.com/intern/testinfra/multisect/903691 Here are the tasks that are relevant to this breakage: T119313786: 1 test started failing for oncall mapillary_sfm_oncall in the last 2 weeks We're generating a revert to back out the changes in this diff, please note the backout may land if someone accepts it. 
Reviewed By: YanNoun Differential Revision: D36167550 fbshipit-source-id: 398fdc6043fcaf541c14e60cd07d2551719f2c80 --- opensfm/config.py | 2 -- opensfm/src/bundle/bundle_adjuster.h | 2 -- opensfm/src/bundle/src/bundle_adjuster.cc | 8 ------- opensfm/src/sfm/src/ba_helpers.cc | 26 +++-------------------- 4 files changed, 3 insertions(+), 35 deletions(-) diff --git a/opensfm/config.py b/opensfm/config.py index eb7d0af02..38d647163 100644 --- a/opensfm/config.py +++ b/opensfm/config.py @@ -240,8 +240,6 @@ class OpenSfMConfig: gcp_horizontal_sd: float = 0.01 # The default vertical standard deviation of the GCPs (in meters) gcp_vertical_sd: float = 0.1 - # Global weight for GCPs, expressed a ratio of the sum of (# projections) + (# shots) + (# relative motions) - gcp_global_weight: float = 0.04 # The standard deviation of the rig translation rig_translation_sd: float = 0.1 # The standard deviation of the rig rotation diff --git a/opensfm/src/bundle/bundle_adjuster.h b/opensfm/src/bundle/bundle_adjuster.h index aea287a86..8fa00da41 100644 --- a/opensfm/src/bundle/bundle_adjuster.h +++ b/opensfm/src/bundle/bundle_adjuster.h @@ -265,8 +265,6 @@ class BundleAdjuster { void ComputeReprojectionErrors(); // Getters - int GetProjectionsCount() const; - int GetRelativeMotionsCount() const; geometry::Camera GetCamera(const std::string &id) const; geometry::Similarity GetBias(const std::string &id) const; Reconstruction GetReconstruction(const std::string &reconstruction_id) const; diff --git a/opensfm/src/bundle/src/bundle_adjuster.cc b/opensfm/src/bundle/src/bundle_adjuster.cc index 1f75a4151..f9a3684bd 100644 --- a/opensfm/src/bundle/src/bundle_adjuster.cc +++ b/opensfm/src/bundle/src/bundle_adjuster.cc @@ -1165,14 +1165,6 @@ void BundleAdjuster::ComputeReprojectionErrors() { } } -int BundleAdjuster::GetProjectionsCount() const { - return point_projection_observations_.size(); -} - -int BundleAdjuster::GetRelativeMotionsCount() const { - return relative_motions_.size(); -} - 
geometry::Camera BundleAdjuster::GetCamera(const std::string &id) const { if (cameras_.find(id) == cameras_.end()) { throw std::runtime_error("Camera " + id + " doesn't exists"); diff --git a/opensfm/src/sfm/src/ba_helpers.cc b/opensfm/src/sfm/src/ba_helpers.cc index 498087b93..04f2ab6fc 100644 --- a/opensfm/src/sfm/src/ba_helpers.cc +++ b/opensfm/src/sfm/src/ba_helpers.cc @@ -345,26 +345,6 @@ size_t BAHelpers::AddGCPToBundle( const AlignedVector& gcp, const py::dict& config) { const auto& reference = map.GetTopocentricConverter(); const auto& shots = map.GetShots(); - - const auto dominant_terms = ba.GetRigInstances().size() + - ba.GetProjectionsCount() + - ba.GetRelativeMotionsCount(); - - size_t total_terms = 0; - for (const auto& point : gcp) { - Vec3d coordinates; - if (TriangulateGCP(point, shots, coordinates) || !point.lla_.empty()) { - ++total_terms; - } - for (const auto& obs : point.observations_) { - total_terms += (shots.count(obs.shot_id_) > 0); - } - } - - const double global_weight = config["gcp_global_weight"].cast() * - dominant_terms / - std::max(1, total_terms); - size_t added_gcp_observations = 0; for (const auto& point : gcp) { const auto point_id = "gcp-" + point.id_; @@ -383,16 +363,16 @@ size_t BAHelpers::AddGCPToBundle( config["gcp_horizontal_sd"].cast(), config["gcp_vertical_sd"].cast()); ba.AddPointPrior(point_id, reference.ToTopocentric(point.GetLlaVec3d()), - point_std / global_weight, point.has_altitude_); + point_std, point.has_altitude_); } // Now iterate through the observations for (const auto& obs : point.observations_) { const auto& shot_id = obs.shot_id_; if (shots.count(shot_id) > 0) { - constexpr double scale{0.001}; + constexpr double scale{0.0001}; ba.AddPointProjectionObservation(shot_id, point_id, obs.projection_, - scale / global_weight); + scale); ++added_gcp_observations; } } From 1fcce830411583dc30f71667c78289693cab4661 Mon Sep 17 00:00:00 2001 From: Yann Noutary Date: Tue, 10 May 2022 04:37:12 -0700 Subject: [PATCH 
69/81] fix: correctly generate EXIF synthetic timestamps and fix typo Reviewed By: tobias-o Differential Revision: D36275025 fbshipit-source-id: eddcf02bfee5e7cf0f851dde3663e657d3d11fae --- opensfm/synthetic_data/synthetic_generator.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/opensfm/synthetic_data/synthetic_generator.py b/opensfm/synthetic_data/synthetic_generator.py index cea73f774..68e54e219 100644 --- a/opensfm/synthetic_data/synthetic_generator.py +++ b/opensfm/synthetic_data/synthetic_generator.py @@ -195,7 +195,7 @@ def _gps_dop(shot: pymap.Shot) -> float: pose = shot.pose.get_origin() if previous_pose is not None: - previous_time += np.linalg.norm(pose - previous_pose) * speed_ms + previous_time += np.linalg.norm(pose - previous_pose) / speed_ms previous_pose = pose exif["capture_time"] = previous_time From 746c6c0f6ec167ccb655fd623a9b9ec831b37c26 Mon Sep 17 00:00:00 2001 From: Yann Noutary Date: Thu, 12 May 2022 23:32:59 -0700 Subject: [PATCH 70/81] fix: unflake test on platform 010 Summary: This Diff strengthens the test `mapillary/opensfm/opensfm/test:tests - test_robust.py::test_outliers_essential_ransac'` by decreasing the probability of getting only non-outlier free samples Reviewed By: paulinus, tobias-o Differential Revision: D36341076 fbshipit-source-id: 3d4989be2f99b8d69391250d0a383577691c89de --- opensfm/test/test_robust.py | 1 + 1 file changed, 1 insertion(+) diff --git a/opensfm/test/test_robust.py b/opensfm/test/test_robust.py index 39942b753..ed296dd4c 100644 --- a/opensfm/test/test_robust.py +++ b/opensfm/test/test_robust.py @@ -222,6 +222,7 @@ def test_outliers_essential_ransac(pairs_and_their_E) -> None: scale_eps_ratio = 0.5 params = pyrobust.RobustEstimatorParams() + params.probability = 1 - 1e-3 result = pyrobust.ransac_essential( f1, f2, scale * (1.0 + scale_eps_ratio), params, pyrobust.RansacType.RANSAC ) From 89570cd43de61f8a63774309bc1570b01b2bca02 Mon Sep 17 00:00:00 2001 From: Yann Noutary Date: Tue, 17
May 2022 07:41:26 -0700 Subject: [PATCH 71/81] fix: prevent pickling of any other objects than strictly allowed Summary: This Diff restricts pickling during matchings loading, by making a custom `Unpickler` that only allow pickling of strictly needed modules (`numpy` ones) Reviewed By: fabianschenk Differential Revision: D36251771 fbshipit-source-id: cc901352c5dc2f5dfd4d93b631cd75b372e13f60 --- opensfm/dataset.py | 22 +++++++++++++++++++++- 1 file changed, 21 insertions(+), 1 deletion(-) diff --git a/opensfm/dataset.py b/opensfm/dataset.py index a2ade713a..60fdd3d2e 100644 --- a/opensfm/dataset.py +++ b/opensfm/dataset.py @@ -347,8 +347,28 @@ def matches_exists(self, image: str) -> bool: return self.io_handler.isfile(self._matches_file(image)) def load_matches(self, image: str) -> Dict[str, np.ndarray]: + # Prevent pickling of anything except what we strictly need + # as 'pickle.load' is RCE-prone. Will raise on any class other + # than the numpy ones we allow. + class MatchingUnpickler(pickle.Unpickler): + modules_map = { + "numpy.core.multiarray._reconstruct": np.core.multiarray, + "numpy.core.multiarray.scalar": np.core.multiarray, + "numpy.ndarray": np, + "numpy.dtype": np, + } + + def find_class(self, module, name): + classname = f"{module}.{name}" + allowed_module = classname in self.modules_map + if not allowed_module: + raise pickle.UnpicklingError( + "global '%s.%s' is forbidden" % (module, name) + ) + return getattr(self.modules_map[classname], name) + with self.io_handler.open(self._matches_file(image), "rb") as fin: - matches = pickle.load(BytesIO(gzip.decompress(fin.read()))) + matches = MatchingUnpickler(BytesIO(gzip.decompress(fin.read()))).load() return matches def save_matches(self, image: str, matches: Dict[str, np.ndarray]) -> None: From 638f62d26755b0fa47df6e249b814569be88d7dc Mon Sep 17 00:00:00 2001 From: Yann Noutary Date: Tue, 17 May 2022 07:41:26 -0700 Subject: [PATCH 72/81] fix: prevent future pickle vulnerabilities Summary: This Diff aims 
at preventing future vulnerabilities by : - Removing `pickle` use in `numpy` serialization - Use only `numpy` arrays for serializing features data In order to have back-compatibility : - Features version has ben bumped to 3 - Use of empty arrays for no-semantic case - Lazy check of `segmentations and `instances` for version 2, so we can still load them (they've used to be stored as numpy arrays implicitely). But we can't retrieve `labels` from version 2 anymore (not a big deal since they were only descriptives) Reviewed By: fabianschenk Differential Revision: D36252131 fbshipit-source-id: de62ac32becfe0e23abd3d05dd77c36b7184f5ab --- opensfm/features.py | 81 +++++++++++++++++++++++++++++------- opensfm/test/test_dataset.py | 4 +- 2 files changed, 69 insertions(+), 16 deletions(-) diff --git a/opensfm/features.py b/opensfm/features.py index d1c21e799..9d4f5ea5a 100644 --- a/opensfm/features.py +++ b/opensfm/features.py @@ -51,7 +51,7 @@ class FeaturesData: colors: np.ndarray semantic: Optional[SemanticData] - FEATURES_VERSION: int = 2 + FEATURES_VERSION: int = 3 FEATURES_HEADER: str = "OPENSFM_FEATURES_VERSION" def __init__( @@ -111,16 +111,16 @@ def save(self, fileobject: Any, config: Dict[str, Any]): raise RuntimeError("No descriptors found, canot save features data.") semantic = self.semantic if semantic: + instances = semantic.instances np.savez_compressed( fileobject, points=self.points.astype(np.float32), descriptors=descriptors.astype(feature_data_type), colors=self.colors, - segmentations=semantic.segmentation, - instances=semantic.instances, - segmentation_labels=semantic.labels, + segmentations=semantic.segmentation.astype(np.uint8), + instances=instances.astype(np.int16) if instances is not None else [], + segmentation_labels=np.array(semantic.labels).astype(np.str), OPENSFM_FEATURES_VERSION=self.FEATURES_VERSION, - allow_pickle=True, ) else: np.savez_compressed( @@ -128,17 +128,16 @@ def save(self, fileobject: Any, config: Dict[str, Any]): 
points=self.points.astype(np.float32), descriptors=descriptors.astype(feature_data_type), colors=self.colors, - segmentations=None, - instances=None, - segmentation_labels=None, + segmentations=[], + instances=[], + segmentation_labels=[], OPENSFM_FEATURES_VERSION=self.FEATURES_VERSION, - allow_pickle=True, ) @classmethod def from_file(cls, fileobject: Any, config: Dict[str, Any]) -> "FeaturesData": """Load features from file (path like or file object like)""" - s = np.load(fileobject, allow_pickle=True) + s = np.load(fileobject, allow_pickle=False) version = cls._features_file_version(s) return getattr(cls, "_from_file_v%d" % version)(s, config) @@ -190,17 +189,71 @@ def _from_file_v2( data: Dict[str, Any], config: Dict[str, Any], ) -> "FeaturesData": - """Version 2 of features file + """ + Version 2 of features file - Added segmentation and segmentation labels. + Added segmentation, instances and segmentation labels. This version has been introduced at + e5da878bea455a1e4beac938cb30b796acfe3c8c, but has been superseded by version 3 as this version + uses 'allow_pickle=True' which isn't safe (RCE vulnerability) """ feature_type = config["feature_type"] if feature_type == "HAHOG" and config["hahog_normalize_to_uchar"]: descriptors = data["descriptors"].astype(np.float32) else: descriptors = data["descriptors"] - has_segmentation = (data["segmentations"] != None).all() - has_instances = (data["instances"] != None).all() + + # luckily, because os lazy loading, we can still load 'segmentations' and 'instances' ... + pickle_message = ( + "Cannot load {} as these were generated with " + "version 2 which isn't supported anymore because of RCE vulnerablity." 
+ "Please consider re-extracting features data for this dataset" + ) + try: + has_segmentation = (data["segmentations"] != None).all() + has_instances = (data["instances"] != None).all() + except ValueError: + logger.warning(pickle_message.format("segmentations and instances")) + has_segmentation, has_instances = False, False + + # ... whereas 'labels' can't be loaded anymore, as it is a plain 'list' object. Not an + # issue since these labels are used for description only and not actual filtering. + try: + labels = data["segmentation_labels"] + except ValueError: + logger.warning(pickle_message.format("labels")) + labels = [] + + if has_segmentation or has_instances: + semantic_data = SemanticData( + data["segmentations"] if has_segmentation else None, + data["instances"] if has_instances else None, + labels, + ) + else: + semantic_data = None + return FeaturesData( + data["points"], descriptors, data["colors"].astype(float), semantic_data + ) + + @classmethod + def _from_file_v3( + cls, + data: Dict[str, Any], + config: Dict[str, Any], + ) -> "FeaturesData": + """ + Version 3 of features file + + Same as version 2, except that + """ + feature_type = config["feature_type"] + if feature_type == "HAHOG" and config["hahog_normalize_to_uchar"]: + descriptors = data["descriptors"].astype(np.float32) + else: + descriptors = data["descriptors"] + + has_segmentation = len(data["segmentations"]) > 0 + has_instances = len(data["instances"]) > 0 if has_segmentation or has_instances: semantic_data = SemanticData( diff --git a/opensfm/test/test_dataset.py b/opensfm/test/test_dataset.py index 816bf2a2d..02202e45d 100644 --- a/opensfm/test/test_dataset.py +++ b/opensfm/test/test_dataset.py @@ -14,8 +14,8 @@ def test_dataset_load_features_sift(tmpdir) -> None: points = np.random.random((3, 4)) descriptors = np.random.random((128, 4)) colors = np.random.random((3, 4)) - segmentations = np.random.random((3, 4)) - instances = np.random.random((3, 4)) + segmentations = 
np.random.randint(low=0, high=255, size=(3, 4)) + instances = np.random.randint(low=0, high=255, size=(3, 4)) semantic_data = features.SemanticData( segmentations, instances, data.segmentation_labels() From c27088f0675d839d9c66066cfe70de512d2e0161 Mon Sep 17 00:00:00 2001 From: Yann Noutary Date: Tue, 17 May 2022 07:41:26 -0700 Subject: [PATCH 73/81] fix: reactivate GCP weighting Reviewed By: paulinus Differential Revision: D36200946 fbshipit-source-id: 4544b2a38859c2c8dda93ab6a04081955ab476c4 --- opensfm/align.py | 2 -- opensfm/config.py | 2 ++ opensfm/multiview.py | 6 ++--- opensfm/src/bundle/bundle_adjuster.h | 2 ++ opensfm/src/bundle/src/bundle_adjuster.cc | 8 ++++++ opensfm/src/sfm/src/ba_helpers.cc | 25 ++++++++++++++++--- .../test/test_reconstruction_incremental.py | 6 ++--- .../test/test_reconstruction_triangulation.py | 2 +- 8 files changed, 41 insertions(+), 12 deletions(-) diff --git a/opensfm/align.py b/opensfm/align.py index 630da3d21..de7dcab32 100644 --- a/opensfm/align.py +++ b/opensfm/align.py @@ -446,8 +446,6 @@ def triangulate_all_gcp( x = multiview.triangulate_gcp( point, reconstruction.shots, - reproj_threshold=0.004, - min_ray_angle_degrees=2.0, ) if x is not None and len(point.lla): point_enu = np.array( diff --git a/opensfm/config.py b/opensfm/config.py index 38d647163..56ea48c6d 100644 --- a/opensfm/config.py +++ b/opensfm/config.py @@ -240,6 +240,8 @@ class OpenSfMConfig: gcp_horizontal_sd: float = 0.01 # The default vertical standard deviation of the GCPs (in meters) gcp_vertical_sd: float = 0.1 + # Global weight for GCPs, expressed a ratio of the sum of (# projections) + (# shots) + (# relative motions) + gcp_global_weight: float = 0.01 # The standard deviation of the rig translation rig_translation_sd: float = 0.1 # The standard deviation of the rig rotation diff --git a/opensfm/multiview.py b/opensfm/multiview.py index d9877e61e..9e9fab9fe 100644 --- a/opensfm/multiview.py +++ b/opensfm/multiview.py @@ -207,7 +207,7 @@ def 
__init__(self, x: np.ndarray, y: np.ndarray) -> None: def num_samples(self) -> int: return len(self.x) - def fit(self, samples: np.ndarray)->List[float]: + def fit(self, samples: np.ndarray) -> List[float]: x = self.x[samples[0]] y = self.y[samples[0]] return [y / x] @@ -682,8 +682,8 @@ def relative_pose_optimize_nonlinear( def triangulate_gcp( point: pymap.GroundControlPoint, shots: Dict[str, pymap.Shot], - reproj_threshold: float, - min_ray_angle_degrees: float, + reproj_threshold: float = 0.02, + min_ray_angle_degrees: float = 1.0, ) -> Optional[np.ndarray]: """Compute the reconstructed position of a GCP from observations.""" diff --git a/opensfm/src/bundle/bundle_adjuster.h b/opensfm/src/bundle/bundle_adjuster.h index 8fa00da41..aea287a86 100644 --- a/opensfm/src/bundle/bundle_adjuster.h +++ b/opensfm/src/bundle/bundle_adjuster.h @@ -265,6 +265,8 @@ class BundleAdjuster { void ComputeReprojectionErrors(); // Getters + int GetProjectionsCount() const; + int GetRelativeMotionsCount() const; geometry::Camera GetCamera(const std::string &id) const; geometry::Similarity GetBias(const std::string &id) const; Reconstruction GetReconstruction(const std::string &reconstruction_id) const; diff --git a/opensfm/src/bundle/src/bundle_adjuster.cc b/opensfm/src/bundle/src/bundle_adjuster.cc index f9a3684bd..1f75a4151 100644 --- a/opensfm/src/bundle/src/bundle_adjuster.cc +++ b/opensfm/src/bundle/src/bundle_adjuster.cc @@ -1165,6 +1165,14 @@ void BundleAdjuster::ComputeReprojectionErrors() { } } +int BundleAdjuster::GetProjectionsCount() const { + return point_projection_observations_.size(); +} + +int BundleAdjuster::GetRelativeMotionsCount() const { + return relative_motions_.size(); +} + geometry::Camera BundleAdjuster::GetCamera(const std::string &id) const { if (cameras_.find(id) == cameras_.end()) { throw std::runtime_error("Camera " + id + " doesn't exists"); diff --git a/opensfm/src/sfm/src/ba_helpers.cc b/opensfm/src/sfm/src/ba_helpers.cc index 04f2ab6fc..1dd2e1b1c 
100644 --- a/opensfm/src/sfm/src/ba_helpers.cc +++ b/opensfm/src/sfm/src/ba_helpers.cc @@ -345,6 +345,25 @@ size_t BAHelpers::AddGCPToBundle( const AlignedVector& gcp, const py::dict& config) { const auto& reference = map.GetTopocentricConverter(); const auto& shots = map.GetShots(); + + const auto dominant_terms = ba.GetRigInstances().size() + + ba.GetProjectionsCount() + + ba.GetRelativeMotionsCount(); + + size_t total_terms = 0; + for (const auto& point : gcp) { + Vec3d coordinates; + if (TriangulateGCP(point, shots, coordinates) || !point.lla_.empty()) { + ++total_terms; + } + for (const auto& obs : point.observations_) { + total_terms += (shots.count(obs.shot_id_) > 0); + } + } + + double global_weight = config["gcp_global_weight"].cast() * + dominant_terms / std::max(1, total_terms); + size_t added_gcp_observations = 0; for (const auto& point : gcp) { const auto point_id = "gcp-" + point.id_; @@ -363,16 +382,16 @@ size_t BAHelpers::AddGCPToBundle( config["gcp_horizontal_sd"].cast(), config["gcp_vertical_sd"].cast()); ba.AddPointPrior(point_id, reference.ToTopocentric(point.GetLlaVec3d()), - point_std, point.has_altitude_); + point_std / global_weight, point.has_altitude_); } // Now iterate through the observations for (const auto& obs : point.observations_) { const auto& shot_id = obs.shot_id_; if (shots.count(shot_id) > 0) { - constexpr double scale{0.0001}; + constexpr double scale{0.001}; ba.AddPointProjectionObservation(shot_id, point_id, obs.projection_, - scale); + scale / global_weight); ++added_gcp_observations; } } diff --git a/opensfm/test/test_reconstruction_incremental.py b/opensfm/test/test_reconstruction_incremental.py index a20141359..7edfe4297 100644 --- a/opensfm/test/test_reconstruction_incremental.py +++ b/opensfm/test/test_reconstruction_incremental.py @@ -32,19 +32,19 @@ def test_reconstruction_incremental( assert 0.7 < errors["ratio_points"] < 1.0 assert 0 < errors["aligned_position_rmse"] < 0.03 - assert 0 < 
errors["aligned_rotation_rmse"] < 0.002 + assert 0 < errors["aligned_rotation_rmse"] < 0.0022 assert 0 < errors["aligned_points_rmse"] < 0.1 # Sanity check that GPS error is similar to the generated gps_noise assert 4.0 < errors["absolute_gps_rmse"] < 7.0 # Sanity check that GCP error is similar to the generated gcp_noise - assert 0.01 < errors["absolute_gcp_rmse_horizontal"] < 0.03 + assert 0.01 < errors["absolute_gcp_rmse_horizontal"] < 0.033 assert 0.08 < errors["absolute_gcp_rmse_vertical"] < 0.18 # Check that the GPS bias (only translation) is recovered translation = reconstructed_scene[0].biases["1"].translation - assert 9.9 < translation[0] < 10.31 + assert 9.9 < translation[0] < 10.312 assert 99.9 < translation[2] < 100.2 diff --git a/opensfm/test/test_reconstruction_triangulation.py b/opensfm/test/test_reconstruction_triangulation.py index a1e475be2..c7f0d0882 100644 --- a/opensfm/test/test_reconstruction_triangulation.py +++ b/opensfm/test/test_reconstruction_triangulation.py @@ -39,7 +39,7 @@ def test_reconstruction_triangulation( assert 0.01 < errors["absolute_gps_rmse"] < 0.1 # Sanity check that GCP error is similar to the generated gcp_noise - assert 0.01 < errors["absolute_gcp_rmse_horizontal"] < 0.03 + assert 0.01 < errors["absolute_gcp_rmse_horizontal"] < 0.031 assert 0.005 < errors["absolute_gcp_rmse_vertical"] < 0.04 # Check that the GPS bias (only translation) is recovered From faa80c2380ffced865c0685bb6041ff064dd1296 Mon Sep 17 00:00:00 2001 From: Yann Noutary Date: Tue, 17 May 2022 07:41:26 -0700 Subject: [PATCH 74/81] fix: correct gravity conversions MLY/MDC Summary: This Diff fixes the gravity conversions by taking into account the frame/view transformation Reviewed By: fabianschenk Differential Revision: D36313913 fbshipit-source-id: b48c3462fb250857de9f5b5ed6285463ee296b0b --- opensfm/features_processing.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/opensfm/features_processing.py b/opensfm/features_processing.py 
index 8a45a8b9b..888018d2e 100644 --- a/opensfm/features_processing.py +++ b/opensfm/features_processing.py @@ -197,7 +197,7 @@ def bake_segmentation( exif_height, exif_width, exif_orientation = ( exif["height"], exif["width"], - exif["orientation"], + exif.get("orientation", 1), ) height, width = image.shape[:2] if exif_height != height or exif_width != width: From bddbeab74665fa08f34f89761511b5e089cee6b5 Mon Sep 17 00:00:00 2001 From: Yann Noutary Date: Thu, 19 May 2022 06:59:10 -0700 Subject: [PATCH 75/81] fix: GCPs + synthetic test fixes Reviewed By: fabianschenk Differential Revision: D36478734 fbshipit-source-id: 3eb6f3c65d240b8ea94410efd4b82d101fd64c0b --- opensfm/src/bundle/bundle_adjuster.h | 1 + opensfm/src/bundle/pybundle.pyi | 1 + opensfm/src/bundle/python/pybind.cc | 1 + opensfm/src/bundle/src/bundle_adjuster.cc | 4 ++++ opensfm/test/test_reconstruction_incremental.py | 2 +- opensfm/test/test_reconstruction_triangulation.py | 2 +- 6 files changed, 9 insertions(+), 2 deletions(-) diff --git a/opensfm/src/bundle/bundle_adjuster.h b/opensfm/src/bundle/bundle_adjuster.h index aea287a86..b28a29e1e 100644 --- a/opensfm/src/bundle/bundle_adjuster.h +++ b/opensfm/src/bundle/bundle_adjuster.h @@ -271,6 +271,7 @@ class BundleAdjuster { geometry::Similarity GetBias(const std::string &id) const; Reconstruction GetReconstruction(const std::string &reconstruction_id) const; Point GetPoint(const std::string &id) const; + bool HasPoint(const std::string &id) const; RigCamera GetRigCamera(const std::string &rig_camera_id) const; RigInstance GetRigInstance(const std::string &instance_id) const; std::map GetRigCameras() const; diff --git a/opensfm/src/bundle/pybundle.pyi b/opensfm/src/bundle/pybundle.pyi index 5eaa6067a..4e53f78a8 100644 --- a/opensfm/src/bundle/pybundle.pyi +++ b/opensfm/src/bundle/pybundle.pyi @@ -48,6 +48,7 @@ class BundleAdjuster: def get_reconstruction(self, arg0: str) -> Reconstruction: ... 
def get_rig_camera_pose(self, arg0: str) -> opensfm.pygeometry.Pose: ... def get_rig_instance_pose(self, arg0: str) -> opensfm.pygeometry.Pose: ... + def has_point(self, arg0: str) -> bool: ... def run(self) -> None: ... def set_adjust_absolute_position_std(self, arg0: bool) -> None: ... def set_compute_covariances(self, arg0: bool) -> None: ... diff --git a/opensfm/src/bundle/python/pybind.cc b/opensfm/src/bundle/python/pybind.cc index 2075a48e5..36b146542 100644 --- a/opensfm/src/bundle/python/pybind.cc +++ b/opensfm/src/bundle/python/pybind.cc @@ -69,6 +69,7 @@ PYBIND11_MODULE(pybundle, m) { .def("add_point", &bundle::BundleAdjuster::AddPoint) .def("add_point_prior", &bundle::BundleAdjuster::AddPointPrior) .def("get_point", &bundle::BundleAdjuster::GetPoint) + .def("has_point", &bundle::BundleAdjuster::HasPoint) .def("add_reconstruction", &bundle::BundleAdjuster::AddReconstruction) .def("add_reconstruction_instance", &bundle::BundleAdjuster::AddReconstructionInstance) diff --git a/opensfm/src/bundle/src/bundle_adjuster.cc b/opensfm/src/bundle/src/bundle_adjuster.cc index 1f75a4151..3195c69c9 100644 --- a/opensfm/src/bundle/src/bundle_adjuster.cc +++ b/opensfm/src/bundle/src/bundle_adjuster.cc @@ -1194,6 +1194,10 @@ Point BundleAdjuster::GetPoint(const std::string &id) const { return points_.at(id); } +bool BundleAdjuster::HasPoint(const std::string &id) const { + return points_.find(id) != points_.end(); +} + Reconstruction BundleAdjuster::GetReconstruction( const std::string &reconstruction_id) const { const auto it = reconstructions_.find(reconstruction_id); diff --git a/opensfm/test/test_reconstruction_incremental.py b/opensfm/test/test_reconstruction_incremental.py index 7edfe4297..4ad885943 100644 --- a/opensfm/test/test_reconstruction_incremental.py +++ b/opensfm/test/test_reconstruction_incremental.py @@ -39,7 +39,7 @@ def test_reconstruction_incremental( assert 4.0 < errors["absolute_gps_rmse"] < 7.0 # Sanity check that GCP error is similar to the 
generated gcp_noise - assert 0.01 < errors["absolute_gcp_rmse_horizontal"] < 0.033 + assert 0.01 < errors["absolute_gcp_rmse_horizontal"] < 0.05 assert 0.08 < errors["absolute_gcp_rmse_vertical"] < 0.18 # Check that the GPS bias (only translation) is recovered diff --git a/opensfm/test/test_reconstruction_triangulation.py b/opensfm/test/test_reconstruction_triangulation.py index c7f0d0882..a05056681 100644 --- a/opensfm/test/test_reconstruction_triangulation.py +++ b/opensfm/test/test_reconstruction_triangulation.py @@ -39,7 +39,7 @@ def test_reconstruction_triangulation( assert 0.01 < errors["absolute_gps_rmse"] < 0.1 # Sanity check that GCP error is similar to the generated gcp_noise - assert 0.01 < errors["absolute_gcp_rmse_horizontal"] < 0.031 + assert 0.01 < errors["absolute_gcp_rmse_horizontal"] < 0.05 assert 0.005 < errors["absolute_gcp_rmse_vertical"] < 0.04 # Check that the GPS bias (only translation) is recovered From 18dd42a32876961aadeffbcca1b63a6ba9de7c36 Mon Sep 17 00:00:00 2001 From: Yann Noutary Date: Thu, 19 May 2022 06:59:10 -0700 Subject: [PATCH 76/81] fix: use rotation in case of missing GPS/GCPs Summary: This Diff loosen the alignment procedure with orientation priors, by relaxing the need of GPS/GCPs : in that case, we can still use the rotation for alignment (identity scale and translation). 
Reviewed By: fabianschenk Differential Revision: D36478909 fbshipit-source-id: 3a679a75cb15b0da0bbeda3b20f2993f658ff6fb --- opensfm/align.py | 14 +++++++------- 1 file changed, 7 insertions(+), 7 deletions(-) diff --git a/opensfm/align.py b/opensfm/align.py index de7dcab32..249e3e21b 100644 --- a/opensfm/align.py +++ b/opensfm/align.py @@ -250,19 +250,19 @@ def compute_orientation_prior_similarity( - horizontal: assumes cameras are looking towards the horizon - vertical: assumes cameras are looking down towards the ground """ - X, Xp = alignment_constraints(config, reconstruction, gcp, use_gps) - X = np.array(X) - Xp = np.array(Xp) - - if len(X) < 1: - return None - p = estimate_ground_plane(reconstruction, config) if p is None: return None Rplane = multiview.plane_horizontalling_rotation(p) if Rplane is None: return None + + X, Xp = alignment_constraints(config, reconstruction, gcp, use_gps) + X = np.array(X) + Xp = np.array(Xp) + if len(X) < 1: + return 1.0, Rplane, np.zeros(3) + X = Rplane.dot(X.T).T # Estimate 2d similarity to align to GPS From 8b4747e6961b30f0e2d637594d05f1f1f097db6e Mon Sep 17 00:00:00 2001 From: Yann Noutary Date: Wed, 25 May 2022 01:15:36 -0700 Subject: [PATCH 77/81] fix: fix edge case in SVD decomposition Summary: This Diff fixes some of the weird cases of failed numpy SVD. Reviewed By: tobias-o Differential Revision: D36629050 fbshipit-source-id: 45a6b0de8249d750e3b4ab4e10f50226da1e41c0 --- opensfm/multiview.py | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/opensfm/multiview.py b/opensfm/multiview.py index 9e9fab9fe..1da50ea5d 100644 --- a/opensfm/multiview.py +++ b/opensfm/multiview.py @@ -543,7 +543,11 @@ def motion_from_plane_homography( Report. INRIA, June 1988. 
https://hal.inria.fr/inria-00075698/document """ - u, l, vh = np.linalg.svd(H) + try: + u, l, vh = np.linalg.svd(H) + except ValueError: + return None + d1, d2, d3 = l s = np.linalg.det(u) * np.linalg.det(vh) From f86ec2913d0b305d79976aa135abd87c8f502d9a Mon Sep 17 00:00:00 2001 From: Manuel Lopez Antequera Date: Wed, 25 May 2022 03:06:00 -0700 Subject: [PATCH 78/81] Add optional unique id field for control point observations and control points Summary: The new numeric ids identify each control point observation on an image, as well as the surveyed point. The existing string id field is still used to identify the group of image observations + survey point (the pymap.GroundControlPoint) Reviewed By: paulinus Differential Revision: D36347954 fbshipit-source-id: 6e653b1263fb73ca4acff9db33c2bcf4a0da60c5 --- opensfm/src/map/ground_control_points.h | 7 ++++++- opensfm/src/map/pymap.pyi | 8 ++++++++ opensfm/src/map/python/pybind.cc | 2 ++ 3 files changed, 16 insertions(+), 1 deletion(-) diff --git a/opensfm/src/map/ground_control_points.h b/opensfm/src/map/ground_control_points.h index 3815ed3aa..c22c1f82c 100644 --- a/opensfm/src/map/ground_control_points.h +++ b/opensfm/src/map/ground_control_points.h @@ -9,23 +9,28 @@ struct GroundControlPointObservation { Attributes: shot_id: the shot where the point is observed projection: 2d coordinates of the observation + uid: a unique id for this observation **/ GroundControlPointObservation() = default; GroundControlPointObservation(const ShotId& shot_id, const Vec2d& proj) : shot_id_(shot_id), projection_(proj) {} ShotId shot_id_ = ""; Vec2d projection_ = Vec2d::Zero(); + LandmarkUniqueId uid_ = 0; }; struct GroundControlPoint { /**A ground control point with its observations. 
Attributes: - lla: latitue, longitude and altitude + lla: latitude, longitude and altitude has_altitude: true if z coordinate is known observations: list of observations of the point on images + id: a unique id for this point group (survey point + image observations) + survey_point_id: a unique id for the point on the ground */ GroundControlPoint() = default; LandmarkId id_ = ""; + LandmarkUniqueId survey_point_id_ = 0; bool has_altitude_ = false; AlignedVector observations_; std::map lla_; diff --git a/opensfm/src/map/pymap.pyi b/opensfm/src/map/pymap.pyi index 11f00531f..71c4e2124 100644 --- a/opensfm/src/map/pymap.pyi +++ b/opensfm/src/map/pymap.pyi @@ -87,6 +87,10 @@ class GroundControlPoint: def observations(self) -> List[GroundControlPointObservation]:... @observations.setter def observations(self, arg1: List[GroundControlPointObservation]) -> None:... + @property + def survey_point_id(self) -> int:... + @survey_point_id.setter + def survey_point_id(self, arg0: int) -> None:... class GroundControlPointObservation: @overload def __init__(self) -> None: ... @@ -100,6 +104,10 @@ class GroundControlPointObservation: def shot_id(self) -> str:... @shot_id.setter def shot_id(self, arg0: str) -> None:... + @property + def uid(self) -> int:... + @uid.setter + def uid(self, arg0: int) -> None:... class Landmark: def __init__(self, arg0: str, arg1: numpy.ndarray) -> None: ... def get_observations(self) -> Dict[Shot, int]: ... 
diff --git a/opensfm/src/map/python/pybind.cc b/opensfm/src/map/python/pybind.cc index 028bd5db1..9d3bea65b 100644 --- a/opensfm/src/map/python/pybind.cc +++ b/opensfm/src/map/python/pybind.cc @@ -247,12 +247,14 @@ PYBIND11_MODULE(pymap, m) { .def(py::init()) .def(py::init()) .def_readwrite("shot_id", &map::GroundControlPointObservation::shot_id_) + .def_readwrite("uid", &map::GroundControlPointObservation::uid_) .def_readwrite("projection", &map::GroundControlPointObservation::projection_); py::class_(m, "GroundControlPoint") .def(py::init()) .def_readwrite("id", &map::GroundControlPoint::id_) + .def_readwrite("survey_point_id", &map::GroundControlPoint::survey_point_id_) .def_readwrite("has_altitude", &map::GroundControlPoint::has_altitude_) .def_readwrite("lla", &map::GroundControlPoint::lla_) .def_property("lla_vec", &map::GroundControlPoint::GetLlaVec3d, From a66c27bc9860a1a8b777c4697bf3cecce19e9b5b Mon Sep 17 00:00:00 2001 From: Yann Noutary Date: Wed, 25 May 2022 03:35:09 -0700 Subject: [PATCH 79/81] fix: handle edge cases in absolute pose Summary: This Diff fixes the edge cases that could raise `assert` in `std::complex`. 
Reviewed By: tobias-o Differential Revision: D36627047 fbshipit-source-id: 66af79cfdbf334ed139cfdccfe1628aca5ee6910 --- opensfm/src/foundation/numeric.h | 4 +++- opensfm/src/foundation/src/numeric.cc | 14 ++++++++++---- opensfm/src/geometry/absolute_pose.h | 7 ++++++- 3 files changed, 19 insertions(+), 6 deletions(-) diff --git a/opensfm/src/foundation/numeric.h b/opensfm/src/foundation/numeric.h index 901906723..19cdb8bef 100644 --- a/opensfm/src/foundation/numeric.h +++ b/opensfm/src/foundation/numeric.h @@ -1,6 +1,7 @@ #pragma once #include + #include #include @@ -48,7 +49,8 @@ void SkewMatrixT(const V& v, M* matrix) { Eigen::Matrix3d ClosestRotationMatrix(const Eigen::Matrix3d& matrix); -std::array SolveQuartic(const std::array& coefficients); +bool SolveQuartic(const std::array& coefficients, + std::array& roots); std::array RefineQuarticRoots( const std::array& coefficients, const std::array& roots); diff --git a/opensfm/src/foundation/src/numeric.cc b/opensfm/src/foundation/src/numeric.cc index 6c0cb815d..509f73e40 100644 --- a/opensfm/src/foundation/src/numeric.cc +++ b/opensfm/src/foundation/src/numeric.cc @@ -1,5 +1,6 @@ -#include #include +#include + #include namespace foundation { @@ -28,7 +29,8 @@ static std::complex ComplexCbrt(const std::complex& z) { return pow(z, 1. / 3.); } -std::array SolveQuartic(const std::array& coefficients) { +bool SolveQuartic(const std::array& coefficients, + std::array& roots) { constexpr double eps = std::numeric_limits::epsilon(); const double a = std::abs(coefficients[4]) > eps ? coefficients[4] @@ -44,12 +46,16 @@ std::array SolveQuartic(const std::array& coefficients) { const double Q3 = 8. * b * c - 16. * d - 2. * b * b * b; const double Q4 = 3. * b * b - 8. * c; + if (std::abs(Q1) < eps && std::abs(Q2) < eps && std::abs(Q3) < eps && + std::abs(Q4) < eps) { + return false; + } + const std::complex Q5 = ComplexCbrt(Q2 / 2. + ComplexSqrt(Q2 * Q2 / 4. 
- Q1 * Q1 * Q1)); const std::complex Q6 = (Q1 / Q5 + Q5) / 3.; const std::complex Q7 = 2. * ComplexSqrt(Q4 / 12. + Q6); - std::array roots; roots[0] = (-b - Q7 - ComplexSqrt(4. * Q4 / 6. - 4. * Q6 - Q3 / Q7)).real() / 4.; roots[1] = @@ -58,7 +64,7 @@ std::array SolveQuartic(const std::array& coefficients) { (-b + Q7 - ComplexSqrt(4. * Q4 / 6. - 4. * Q6 + Q3 / Q7)).real() / 4.; roots[3] = (-b + Q7 + ComplexSqrt(4. * Q4 / 6. - 4. * Q6 + Q3 / Q7)).real() / 4.; - return roots; + return true; } std::array RefineQuarticRoots( diff --git a/opensfm/src/geometry/absolute_pose.h b/opensfm/src/geometry/absolute_pose.h index a14fdb3fa..faa7a2633 100644 --- a/opensfm/src/geometry/absolute_pose.h +++ b/opensfm/src/geometry/absolute_pose.h @@ -2,6 +2,7 @@ #include #include + #include #include #include @@ -76,7 +77,11 @@ std::vector> AbsolutePoseThreePoints(IT begin, const auto alpha0 = SQUARE(g7) - SQUARE(g2) - SQUARE(g4); std::array coefficients = {alpha0, alpha1, alpha2, alpha3, alpha4}; - std::array roots = foundation::SolveQuartic(coefficients); + std::array roots; + const auto solve_result = foundation::SolveQuartic(coefficients, roots); + if (!solve_result) { + return RTs; + } roots = foundation::RefineQuarticRoots(coefficients, roots); Eigen::Matrix3d c_barre, c_barre_barre; From d15ff564ac46f067298e4341aa512c140e2f190c Mon Sep 17 00:00:00 2001 From: Manuel Lopez Antequera Date: Thu, 26 May 2022 11:09:16 -0700 Subject: [PATCH 80/81] Populate GCP and observation uids in synthetic datasets Reviewed By: jbroll Differential Revision: D36704960 fbshipit-source-id: a12f230b2209fc04e58e42055e7391a59cb2e9e3 --- opensfm/synthetic_data/synthetic_generator.py | 12 +++++++----- 1 file changed, 7 insertions(+), 5 deletions(-) diff --git a/opensfm/synthetic_data/synthetic_generator.py b/opensfm/synthetic_data/synthetic_generator.py index 68e54e219..f4c7343a9 100644 --- a/opensfm/synthetic_data/synthetic_generator.py +++ b/opensfm/synthetic_data/synthetic_generator.py @@ -2,7 +2,7 @@ 
import math import time from collections import defaultdict -from typing import Callable, Tuple, List, Dict, Any, Optional, Union +from typing import Any, Callable, Dict, List, Optional, Tuple, Union import cv2 import numpy as np @@ -10,13 +10,13 @@ import scipy.signal as signal import scipy.spatial as spatial from opensfm import ( + features as oft, geo, + geometry, pygeometry, + pymap, reconstruction as rc, types, - pymap, - features as oft, - geometry, ) from opensfm.types import Reconstruction @@ -151,7 +151,7 @@ def generate_causal_noise( dims = [np.arange(-scale, scale) for _ in range(dimensions)] mesh = np.meshgrid(*dims) dist = np.linalg.norm(mesh, axis=0) - filter_kernel = np.exp(-(dist ** 2) / (2 * scale)) + filter_kernel = np.exp(-(dist**2) / (2 * scale)) noise = np.random.randn(dimensions, n) * sigma return signal.fftconvolve(noise, filter_kernel, mode="same") @@ -459,6 +459,7 @@ def generate_track_data( point = reconstruction.points[gcp_id] gcp = pymap.GroundControlPoint() gcp.id = f"gcp-{gcp_id}" + gcp.survey_point_id = int(gcp_id) enu = point.coordinates + gcp_shift + sigmas_gcp[i] lat, lon, alt = reconstruction.reference.to_lla(*enu) gcp.lla = {"latitude": lat, "longitude": lon, "altitude": alt} @@ -467,6 +468,7 @@ def generate_track_data( o = pymap.GroundControlPointObservation() o.shot_id = shot_id o.projection = obs.point + o.uid = obs.id gcp.add_observation(o) gcps[gcp.id] = gcp From 379962a4e048438a898c985a136e7ffe6e3891c8 Mon Sep 17 00:00:00 2001 From: Jonas Lindner Date: Wed, 1 Jun 2022 07:22:32 -0700 Subject: [PATCH 81/81] Fix some typos (#889) Summary: Fix some typos in Error Messages, Documentation and Comments. 
Pull Request resolved: https://github.com/mapillary/OpenSfM/pull/889 Reviewed By: paulinus Differential Revision: D36596448 Pulled By: fabianschenk fbshipit-source-id: 55188c931b435bbad1eb6bfa50b520cf904b6845 --- annotation_gui_gcp/js/WebView.js | 2 +- annotation_gui_gcp/lib/views/image_view.py | 2 +- bin/plot_features | 2 +- doc/source/annotation_tool.rst | 4 ++-- doc/source/dense.rst | 2 +- doc/source/gcp.rst | 2 +- doc/source/quality_report.rst | 8 ++++---- doc/source/reporting.rst | 2 +- doc/source/rig.rst | 2 +- doc/source/using.rst | 4 ++-- opensfm/actions/export_bundler.py | 2 +- opensfm/config.py | 4 ++-- opensfm/features.py | 2 +- opensfm/features_processing.py | 2 +- opensfm/geotag_from_gpx.py | 2 +- opensfm/matching.py | 2 +- opensfm/multiview.py | 2 +- opensfm/pairs_selection.py | 12 ++++++------ opensfm/reconstruction.py | 2 +- opensfm/rig.py | 4 ++-- opensfm/src/bundle/python/pybind.cc | 2 +- opensfm/src/bundle/reconstruction_alignment.h | 2 +- opensfm/src/bundle/test/reprojection_errors_test.cc | 2 +- opensfm/src/geometry/essential.h | 2 +- opensfm/src/geometry/pose.h | 2 +- opensfm/src/geometry/test/covariance_test.cc | 2 +- opensfm/src/geometry/transformations_functions.h | 2 +- opensfm/src/map/python/pybind.cc | 2 +- opensfm/src/map/shot.h | 2 +- opensfm/test/test_bundle.py | 2 +- opensfm/test/test_datastructures.py | 6 +++--- opensfm/test/test_triangulation.py | 2 +- opensfm/types.py | 2 +- 33 files changed, 47 insertions(+), 47 deletions(-) diff --git a/annotation_gui_gcp/js/WebView.js b/annotation_gui_gcp/js/WebView.js index 4d0aa2c23..bc7330741 100644 --- a/annotation_gui_gcp/js/WebView.js +++ b/annotation_gui_gcp/js/WebView.js @@ -8,7 +8,7 @@ function initialize_event_source(handlers) { if (this.readyState == EventSource.CONNECTING) { console.log(`Reconnecting (readyState=${this.readyState})...`); } else { - console.log("Error has occured.", err); + console.log("Error has occurred.", err); } }; diff --git 
a/annotation_gui_gcp/lib/views/image_view.py b/annotation_gui_gcp/lib/views/image_view.py index f0f9390ab..16dddbed8 100644 --- a/annotation_gui_gcp/lib/views/image_view.py +++ b/annotation_gui_gcp/lib/views/image_view.py @@ -104,7 +104,7 @@ def process_client_message(self, data: Dict[str, Any]) -> None: "add_or_update_point_observation", "remove_point_observation", ): - raise ValueError(f"Unknown commmand {command}") + raise ValueError(f"Unknown command {command}") if data["point_id"] != self.main_ui.curr_point: print(data["point_id"], self.main_ui.curr_point) diff --git a/bin/plot_features b/bin/plot_features index 16594418a..8cd55242b 100755 --- a/bin/plot_features +++ b/bin/plot_features @@ -48,7 +48,7 @@ if __name__ == "__main__": if not features_data: continue points = features_data.points - print("ploting {0} points".format(len(points))) + print("plotting {0} points".format(len(points))) plt.figure() plt.title('Image: ' + image + ', features: ' + str(len(points))) fig = plot_features(data.load_image(image), points) diff --git a/doc/source/annotation_tool.rst b/doc/source/annotation_tool.rst index a3f6d2995..1813f6186 100644 --- a/doc/source/annotation_tool.rst +++ b/doc/source/annotation_tool.rst @@ -138,7 +138,7 @@ Detailed documentation for this is not available as the feature is experimental, - Start from a dataset containing more than one reconstruction in ``reconstruction.json``. - Launch the tool: - - If the two reconstructions come from different sequences, lauch as usual. + - If the two reconstructions come from different sequences, launch as usual. - If the two reconstructions come from the same sequence, launch using the ``--group-by-reconstruction`` argument. This will split the images into two windows, one for each reconstruction. 
@@ -150,7 +150,7 @@ Use the 'Rigid', 'Flex' or 'Full' buttons to run the alignment using the annotat - The 'Rigid' option triangulates the control points in each reconstruction independently and finds a rigid transform to align them. - The 'Flex' option additionally re-runs bundle adjustment, allowing for some deformation of both reconstructions to fit the annotations. - The 'Full' option attempts to obtain positional covariances for each camera pose. - If succesful, the frame list on the image views is populated with the positional covariance norm. Lower is better. + If successful, the frame list on the image views is populated with the positional covariance norm. Lower is better. After running analysis, the reprojection errors are overlaid on the image views as shown in :ref:`running-alignment`. The aligned reconstructions are saved with new filenames in the root folder and can be viewed in 3D with the OpenSfM viewer. diff --git a/doc/source/dense.rst b/doc/source/dense.rst index 673419bbb..177548070 100644 --- a/doc/source/dense.rst +++ b/doc/source/dense.rst @@ -123,7 +123,7 @@ Undistortion The dense module assumes that images are taken with perspective projection and no radial distortion. For perspective images, undistorted versions can be generated by taking into account the computed distortion parameters, :math:`k1` and :math:`k2`. -Spherical images (360 panoramas) however can not be unwarped into a single persepective view. We need to generate multiple perspective views to cover the field of view of a panorama. +Spherical images (360 panoramas) however can not be unwarped into a single perspective view. We need to generate multiple perspective views to cover the field of view of a panorama. This means that the undistortion process will create new views of the reconstruction. Thus the undistortion process is one where a reconstruction is taken as input and a new reconstruction is produced as output. 
The input may contain radially distorted images and panoramas and the output reconstruction will only have undistorted perspective images. diff --git a/doc/source/gcp.rst b/doc/source/gcp.rst index e8253ff4a..ad92fb99f 100644 --- a/doc/source/gcp.rst +++ b/doc/source/gcp.rst @@ -6,7 +6,7 @@ When EXIF data contains GPS location, it is used by OpenSfM to georeference the Ground control points (GCP) are landmarks visible on the images for which the geospatial position (latitude, longitude and altitude) is known. A single GCP can be observed in one or more images. -OpenSfM uses GCP in two steps of the reconstruction process: alignment and bundle adjustment. In the alignment step, points are used to globaly move the reconstruction so that the observed GCP align with their GPS position. Two or more observations for each GCP are required for it to be used during the aligment step. +OpenSfM uses GCP in two steps of the reconstruction process: alignment and bundle adjustment. In the alignment step, points are used to globaly move the reconstruction so that the observed GCP align with their GPS position. Two or more observations for each GCP are required for it to be used during the alignment step. In the bundle adjustment step, GCP observations are used as a constraint to refine the reconstruction. In this step, all ground control points are used. No minimum number of observation is required. 
diff --git a/doc/source/quality_report.rst b/doc/source/quality_report.rst index c55f36ada..5faebacc0 100644 --- a/doc/source/quality_report.rst +++ b/doc/source/quality_report.rst @@ -26,9 +26,9 @@ Processing Summary - Reconstructed Images : reconstructed images over total number of images - Reconstructed Points : reconstructed points over total number of points in the `tracks.csv` file (`create_tracks`) - Reconstructed Components : number of continously reconstructed sets of images - - Detected Features : median number (accross images) of detected features - - Reconstructed Features : median number (accross images) of reconstructed features - - Geographic Reference : indicated wether GPS and/or GCP have been used for geo-alignment + - Detected Features : median number (across images) of detected features + - Reconstructed Features : median number (across images) of reconstructed features + - Geographic Reference : indicated whether GPS and/or GCP have been used for geo-alignment - GPS / GCP errors : GPS and/or GCP RMS errors |topview| @@ -45,7 +45,7 @@ The heatmap represent the density of detected features : the gradient goes from |feat2| -The table below lists minimum/maximum/mean and median detected and reconstructed features accross images. +The table below lists minimum/maximum/mean and median detected and reconstructed features across images. Reconstruction Details ~~~~~~~~~~~~~~~~~~~~~~ diff --git a/doc/source/reporting.rst b/doc/source/reporting.rst index acbb7877f..efdb036bf 100644 --- a/doc/source/reporting.rst +++ b/doc/source/reporting.rst @@ -4,7 +4,7 @@ Reporting ========= -OpenSfM commands write reports on the work done. Reports are stored in the ``reports`` folder in json format so that they can be loaded programatically. Here is the list of reports produced and the data included. +OpenSfM commands write reports on the work done. Reports are stored in the ``reports`` folder in json format so that they can be loaded programmatically. 
Here is the list of reports produced and the data included. Feature detection ----------------- diff --git a/doc/source/rig.rst b/doc/source/rig.rst index feed127df..c261ee906 100644 --- a/doc/source/rig.rst +++ b/doc/source/rig.rst @@ -23,7 +23,7 @@ The following terms define such assembly and capture in OpenSfM terminology : }, ... - - A `RigInstance` is a list of `Shots`, each of which correspond to a `RigCamera` of the `RigModel` and the actual pose of the `RigModel` in the world : it's indeed an instanciation of the `RigModel` by combining `Shots`. These instances are defined in the `rig_assignments.json` file as follows:: + - A `RigInstance` is a list of `Shots`, each of which correspond to a `RigCamera` of the `RigModel` and the actual pose of the `RigModel` in the world : it's indeed an instantiation of the `RigModel` by combining `Shots`. These instances are defined in the `rig_assignments.json` file as follows:: { "RIG_INSTANCE_ID1": { diff --git a/doc/source/using.rst b/doc/source/using.rst index 0bf5d541c..2dfb8e80d 100644 --- a/doc/source/using.rst +++ b/doc/source/using.rst @@ -148,13 +148,13 @@ The following data is extracted for each image: - ``camera``: The camera ID string. Used to identify a camera. When multiple images have the same camera ID string, they will be assumed to be taken with the same camera and will share its parameters. -Once the metadata for all images has been extracted, a list of camera models is created and stored in ``camera_models.json``. A camera model is created for each diferent camera ID string found on the images. +Once the metadata for all images has been extracted, a list of camera models is created and stored in ``camera_models.json``. A camera model is created for each different camera ID string found on the images. For each camera ID, the cammera model parameters are chosen using the following procedure. - If the camera ID exists in the ``camera_models_overrides.json`` then the parameters are taken from that file. 
- Otherwise, if the camera ID exists in an internal calibration database, then the camera parameters are taken from the database. -- Otherwise, the camera parameters are inferred from the avalable EXIF metadata. +- Otherwise, the camera parameters are inferred from the available EXIF metadata. Providing additional metadata diff --git a/opensfm/actions/export_bundler.py b/opensfm/actions/export_bundler.py index 53e893ad6..9a6002aa3 100644 --- a/opensfm/actions/export_bundler.py +++ b/opensfm/actions/export_bundler.py @@ -61,7 +61,7 @@ def export_bundler( shot = shots[shot_id] camera = shot.camera if shot.camera.projection_type == "brown": - # Will aproximate Brown model, not optimal + # Will approximate Brown model, not optimal focal_normalized = camera.focal_x else: focal_normalized = camera.focal diff --git a/opensfm/config.py b/opensfm/config.py index 56ea48c6d..55bf4a34f 100644 --- a/opensfm/config.py +++ b/opensfm/config.py @@ -11,7 +11,7 @@ class OpenSfMConfig: # Params for metadata ################################## use_exif_size: bool = True - # Treat images from unknown camera models as comming from different cameras + # Treat images from unknown camera models as coming from different cameras unknown_camera_models_are_different: bool = False default_focal_prior: float = 0.85 @@ -84,7 +84,7 @@ class OpenSfMConfig: lowes_ratio: float = 0.8 # FLANN, BRUTEFORCE, or WORDS matcher_type: str = "FLANN" - # Match symmetricly or one-way + # Match symmetrically or one-way symmetric_matching: bool = True ################################## diff --git a/opensfm/features.py b/opensfm/features.py index 9d4f5ea5a..56eac1500 100644 --- a/opensfm/features.py +++ b/opensfm/features.py @@ -108,7 +108,7 @@ def save(self, fileobject: Any, config: Dict[str, Any]): feature_data_type = np.float32 descriptors = self.descriptors if descriptors is None: - raise RuntimeError("No descriptors found, canot save features data.") + raise RuntimeError("No descriptors found, cannot save 
features data.") semantic = self.semantic if semantic: instances = semantic.instances diff --git a/opensfm/features_processing.py b/opensfm/features_processing.py index 888018d2e..78c757408 100644 --- a/opensfm/features_processing.py +++ b/opensfm/features_processing.py @@ -30,7 +30,7 @@ def run_features_processing(data: DataSetBase, images: List[str], force: bool) - f"Planning to use {mem_available} MB of RAM for both processing queue and parallel processing." ) - # 50% for the queue / 50% for parralel processing + # 50% for the queue / 50% for parallel processing expected_mb = mem_available / 2 expected_images = min( max_queue_size, int(expected_mb / average_image_size(data)) diff --git a/opensfm/geotag_from_gpx.py b/opensfm/geotag_from_gpx.py index 69bf88385..a206f907d 100644 --- a/opensfm/geotag_from_gpx.py +++ b/opensfm/geotag_from_gpx.py @@ -322,7 +322,7 @@ def add_exif_using_timestamp( t = metadata["Exif.Photo.DateTimeOriginal"].value - # subtract offset in s beween gpx time and exif time + # subtract offset in s between gpx time and exif time t = t - datetime.timedelta(seconds=offset_time) try: diff --git a/opensfm/matching.py b/opensfm/matching.py index 8388e062f..e85c6979e 100644 --- a/opensfm/matching.py +++ b/opensfm/matching.py @@ -171,7 +171,7 @@ def match_arguments( None, None, ]: - """Generate arguments for parralel processing of pair matching""" + """Generate arguments for parallel processing of pair matching""" for im1, im2 in pairs: yield im1, im2, cameras, exifs, data, config_override, poses diff --git a/opensfm/multiview.py b/opensfm/multiview.py index 1da50ea5d..7f1c51184 100644 --- a/opensfm/multiview.py +++ b/opensfm/multiview.py @@ -303,7 +303,7 @@ def fit_plane_ransac( def fit_plane( points: np.ndarray, vectors: Optional[np.ndarray], verticals: Optional[np.ndarray] ) -> np.ndarray: - """Estimate a plane fron on-plane points and vectors. + """Estimate a plane from on-plane points and vectors. 
>>> x = [[0,0,0], [1,0,0], [0,1,0]] >>> p = fit_plane(x, None, None) diff --git a/opensfm/pairs_selection.py b/opensfm/pairs_selection.py index 5c7367843..0aa3b5ea3 100644 --- a/opensfm/pairs_selection.py +++ b/opensfm/pairs_selection.py @@ -315,7 +315,7 @@ def compute_bow_affinity( max_gps_distance: float, max_gps_neighbors: int, ) -> List[Tuple[str, List[float], List[str]]]: - """Compute afinity scores between references and candidates + """Compute affinity scores between references and candidates images using BoW-based distance. """ preempted_candidates, need_load = preempt_candidates( @@ -388,7 +388,7 @@ def compute_vlad_affinity( max_gps_neighbors: int, histograms: Dict[str, np.ndarray], ) -> List[Tuple[str, List[float], List[str]]]: - """Compute afinity scores between references and candidates + """Compute affinity scores between references and candidates images using VLAD-based distance. """ preempted_candidates, need_load = preempt_candidates( @@ -397,7 +397,7 @@ def compute_vlad_affinity( if len(preempted_candidates) == 0: logger.warning( - f"Couln't preempt any candidate with GPS, using ALL {len(images_cand)} as candidates" + f"Couldn't preempt any candidate with GPS, using ALL {len(images_cand)} as candidates" ) preempted_candidates = {image: images_cand for image in images_ref} need_load = set(images_ref + images_cand) @@ -493,7 +493,7 @@ def create_parallel_matching_args( def match_bow_unwrap_args( args: Tuple[str, Iterable[str], Dict[str, np.ndarray]] ) -> Tuple[str, List[float], List[str]]: - """Wrapper for parralel processing of BoW""" + """Wrapper for parallel processing of BoW""" image, other_images, histograms = args return bow_distances(image, other_images, histograms) @@ -501,7 +501,7 @@ def match_bow_unwrap_args( def match_vlad_unwrap_args( args: Tuple[str, Iterable[str], Dict[str, np.ndarray]] ) -> Tuple[str, List[float], List[str]]: - """Wrapper for parralel processing of VLAD""" + """Wrapper for parallel processing of VLAD""" image, 
other_images, histograms = args return vlad.vlad_distances(image, other_images, histograms) @@ -725,7 +725,7 @@ def vlad_histogram_unwrap_args( if vlad_descriptor is not None: return image, vlad_descriptor else: - logger.warning(f"Couln't compute VLAD descriptor for image {image}") + logger.warning(f"Couldn't compute VLAD descriptor for image {image}") return None diff --git a/opensfm/reconstruction.py b/opensfm/reconstruction.py index 05bd9bf51..d31424492 100644 --- a/opensfm/reconstruction.py +++ b/opensfm/reconstruction.py @@ -231,7 +231,7 @@ def add_shot( shot_id: str, pose: pygeometry.Pose, ) -> Set[str]: - """Add a shot to the recontruction. + """Add a shot to the reconstruction. In case of a shot belonging to a rig instance, the pose of shot will drive the initial pose setup of the rig instance. diff --git a/opensfm/rig.py b/opensfm/rig.py index 4585dae55..e20003e42 100644 --- a/opensfm/rig.py +++ b/opensfm/rig.py @@ -338,7 +338,7 @@ def create_rigs_with_pattern(data: "DataSet", patterns: TRigPatterns) -> None: reconstructions = subset_data.load_reconstruction() if len(reconstructions) == 0: - logger.error("Couldn't run sucessful SfM on the subset of images.") + logger.error("Couldn't run successful SfM on the subset of images.") continue reconstruction = reconstructions[0] @@ -381,7 +381,7 @@ def create_rigs_with_pattern(data: "DataSet", patterns: TRigPatterns) -> None: data.save_rig_assignments(instances_per_rig) else: logger.error( - "Could not run any sucessful SfM on images subset for rig calibration" + "Could not run any successful SfM on images subset for rig calibration" ) diff --git a/opensfm/src/bundle/python/pybind.cc b/opensfm/src/bundle/python/pybind.cc index 36b146542..c8c63e4cd 100644 --- a/opensfm/src/bundle/python/pybind.cc +++ b/opensfm/src/bundle/python/pybind.cc @@ -110,7 +110,7 @@ PYBIND11_MODULE(pybundle, m) { .def("full_report", &bundle::BundleAdjuster::FullReport); /////////////////////////////////// - // Reconstruction Aligment + // 
Reconstruction Alignment // py::class_(m, "ReconstructionAlignment") .def(py::init()) diff --git a/opensfm/src/bundle/reconstruction_alignment.h b/opensfm/src/bundle/reconstruction_alignment.h index 75f20f7fc..8b108401a 100644 --- a/opensfm/src/bundle/reconstruction_alignment.h +++ b/opensfm/src/bundle/reconstruction_alignment.h @@ -479,7 +479,7 @@ class ReconstructionAlignment { void Run() { ceres::Problem problem; - // Init paramater blocks. + // Init parameter blocks. for (auto &i : shots_) { if (i.second.constant) { problem.AddParameterBlock(i.second.parameters, RA_SHOT_NUM_PARAMS); diff --git a/opensfm/src/bundle/test/reprojection_errors_test.cc b/opensfm/src/bundle/test/reprojection_errors_test.cc index 14074db9a..70a4ab7bf 100644 --- a/opensfm/src/bundle/test/reprojection_errors_test.cc +++ b/opensfm/src/bundle/test/reprojection_errors_test.cc @@ -169,7 +169,7 @@ TEST_F(ReprojectionError2DFixture, Fisheye624AnalyticErrorEvaluatesOK) { TEST_F(ReprojectionError2DFixture, DualAnalyticErrorEvaluatesOK) { constexpr int size = 4; - // transtion, focal, k1, k2 + // transition, focal, k1, k2 constexpr std::array camera{0.5, 0.3, 0.1, -0.03}; RunTest(geometry::ProjectionType::DUAL, &camera[0]); } diff --git a/opensfm/src/geometry/essential.h b/opensfm/src/geometry/essential.h index 3cbb2293e..a45aeb565 100644 --- a/opensfm/src/geometry/essential.h +++ b/opensfm/src/geometry/essential.h @@ -25,7 +25,7 @@ #include // In the following code, polynomials are expressed as vectors containing -// their coeficients in the basis of monomials: +// their coefficients in the basis of monomials: // // [xxx xxy xyy yyy xxz xyz yyz xzz yzz zzz xx xy yy xz yz zz x y z 1] // diff --git a/opensfm/src/geometry/pose.h b/opensfm/src/geometry/pose.h index 07bcbbc19..c015e7b7f 100644 --- a/opensfm/src/geometry/pose.h +++ b/opensfm/src/geometry/pose.h @@ -148,7 +148,7 @@ class Pose { Pose Compose(const Pose& base_pose) const { /* This is the C++ version of the original Python version - The 
relation to relativeTo ist the following + The relation to relativeTo is the following pose1.compose(pose2.inverse()) == pose1.RelativeTo(pose2) */ const Mat3d& selfR = RotationWorldToCamera(); diff --git a/opensfm/src/geometry/test/covariance_test.cc b/opensfm/src/geometry/test/covariance_test.cc index bab5f24b0..9a235bfac 100644 --- a/opensfm/src/geometry/test/covariance_test.cc +++ b/opensfm/src/geometry/test/covariance_test.cc @@ -114,7 +114,7 @@ TEST_F(CovarianceFixture, EvaluatesPointCovarianceOK) { {observation, observation}, point_tmp) .first; - // Two pose looking with an angle of 90 degres + // Two pose looking with an angle of 90 degrees ASSERT_TRUE(covariance.determinant() > 1e-12); } diff --git a/opensfm/src/geometry/transformations_functions.h b/opensfm/src/geometry/transformations_functions.h index 63ab3ac2b..d2d2b8261 100644 --- a/opensfm/src/geometry/transformations_functions.h +++ b/opensfm/src/geometry/transformations_functions.h @@ -172,7 +172,7 @@ struct PoseFunctor : Functor<3, 6, 3> { } } - /* R.(x-t) derivatives are pretty straightfoward : dR00, dR01, ... dR22, + /* R.(x-t) derivatives are pretty straightforward : dR00, dR01, ... 
dR22, * dx, dy, dz, dtx, dty, dtz */ const T xyz[] = {point[0] - rt[Tx], point[1] - rt[Ty], point[2] - rt[Tz]}; Eigen::Matrix point_rotation = diff --git a/opensfm/src/map/python/pybind.cc b/opensfm/src/map/python/pybind.cc index 9d3bea65b..c376ed08a 100644 --- a/opensfm/src/map/python/pybind.cc +++ b/opensfm/src/map/python/pybind.cc @@ -47,7 +47,7 @@ PYBIND11_MODULE(pymap, m) { py::module::import("opensfm.pygeometry"); py::module::import("opensfm.pygeo"); - // Some initial defintions to resolve cyclic dependencies + // Some initial definitions to resolve cyclic dependencies // Landmark <> Shot py::class_ shotCls(m, "Shot"); // Landmark/Shot/...View <> Map diff --git a/opensfm/src/map/shot.h b/opensfm/src/map/shot.h index 9d6ff13bc..498843a7f 100644 --- a/opensfm/src/map/shot.h +++ b/opensfm/src/map/shot.h @@ -167,7 +167,7 @@ class Shot { RigInstance* rig_instance_; RigCamera* rig_camera_; - // Camera pointer (can optionaly belong to the shot) + // Camera pointer (can optionally belong to the shot) foundation::OptionalValue own_camera_; const geometry::Camera* const shot_camera_; diff --git a/opensfm/test/test_bundle.py b/opensfm/test/test_bundle.py index eb613833d..a3e12afaf 100644 --- a/opensfm/test/test_bundle.py +++ b/opensfm/test/test_bundle.py @@ -100,7 +100,7 @@ def test_bundle_projection_fixed_internals(scene_synthetic) -> None: camera_priors = dict(reference.cameras.items()) rig_priors = dict(reference.rig_cameras.items()) graph = tracking.as_graph(scene_synthetic.tracks_manager) - # Create the connnections in the reference + # Create the connections in the reference for point_id in reference.points.keys(): if point_id in graph: for shot_id, g_obs in graph[point_id].items(): diff --git a/opensfm/test/test_datastructures.py b/opensfm/test/test_datastructures.py index 2fd07afa2..3e8f1e779 100644 --- a/opensfm/test/test_datastructures.py +++ b/opensfm/test/test_datastructures.py @@ -748,7 +748,7 @@ def test_shot_metadata_different() -> None: shot2 = 
rec.shots["1"] _helper_populate_metadata(shot1.metadata) - # When getting their metdata object, they should be different + # When getting their metadata object, they should be different assert shot1.metadata is not shot2.metadata @@ -823,7 +823,7 @@ def test_single_point_coordinates() -> None: rec = types.Reconstruction() pt = rec.create_point("0") - # When assiging coordinates + # When assigning coordinates coord = np.random.rand(3) pt.coordinates = coord @@ -836,7 +836,7 @@ def test_single_point_color() -> None: rec = types.Reconstruction() pt = rec.create_point("0") - # When assiging color + # When assigning color color = np.random.randint(low=0, high=255, size=(3,)) pt.color = color diff --git a/opensfm/test/test_triangulation.py b/opensfm/test/test_triangulation.py index a0b16a5d9..25c2b86a9 100644 --- a/opensfm/test/test_triangulation.py +++ b/opensfm/test/test_triangulation.py @@ -100,7 +100,7 @@ def test_triangulate_two_bearings_midpoint_failed() -> None: b1 = unit_vector([0.0, 0, 1]) o2 = np.array([1.0, 0, 0]) - # almost parralel. 1e-5 will make it triangulate again. + # almost parallel. 1e-5 will make it triangulate again. b2 = b1 + np.array([-1e-10, 0, 0]) ok, X = pygeometry.triangulate_two_bearings_midpoint( diff --git a/opensfm/types.py b/opensfm/types.py index 396d410b5..0687f18f2 100644 --- a/opensfm/types.py +++ b/opensfm/types.py @@ -34,7 +34,7 @@ class Reconstruction(object): """ def __init__(self) -> None: - """Defaut constructor""" + """Default constructor""" self.map = pymap.Map() self.camera_view = pymap.CameraView(self.map) self.bias_view = pymap.BiasView(self.map)