From 3daab3e0f76c6bf93b4157db09f0671f31323516 Mon Sep 17 00:00:00 2001
From: Justin Ruan
Date: Mon, 13 Nov 2023 18:55:09 +0800
Subject: [PATCH 1/2] Add fuse_first_association option to bot-sort

---
 boxmot/trackers/botsort/bot_sort.py | 4 ++++
 1 file changed, 4 insertions(+)

diff --git a/boxmot/trackers/botsort/bot_sort.py b/boxmot/trackers/botsort/bot_sort.py
index e76adfaf68..1f1ce74ad1 100644
--- a/boxmot/trackers/botsort/bot_sort.py
+++ b/boxmot/trackers/botsort/bot_sort.py
@@ -196,6 +196,7 @@ def __init__(
         appearance_thresh: float = 0.25,
         cmc_method: str = "sparseOptFlow",
         frame_rate=30,
+        fuse_first_associate: bool = False,
     ):
         self.tracked_stracks = []  # type: list[STrack]
         self.lost_stracks = []  # type: list[STrack]
@@ -222,6 +223,7 @@ def __init__(
         )
 
         self.cmc = SparseOptFlow()
+        self.fuse_first_associate = fuse_first_associate
 
     def update(self, dets, img):
         assert isinstance(
@@ -288,6 +290,8 @@ def update(self, dets, img):
         # Associate with high score detection boxes
         ious_dists = iou_distance(strack_pool, detections)
         ious_dists_mask = ious_dists > self.proximity_thresh
+        if self.fuse_first_associate:
+            ious_dists = fuse_score(ious_dists, detections)
 
         emb_dists = embedding_distance(strack_pool, detections) / 2.0
         emb_dists[emb_dists > self.appearance_thresh] = 1.0

From 296d8aea898363030f2e0abcaeaf4eb6df18d99a Mon Sep 17 00:00:00 2001
From: Justin Ruan
Date: Mon, 13 Nov 2023 18:57:06 +0800
Subject: [PATCH 2/2] Remove duplicated computation for camera motion

---
 boxmot/motion/cmc/sof.py | 10 +++-------
 1 file changed, 3 insertions(+), 7 deletions(-)

diff --git a/boxmot/motion/cmc/sof.py b/boxmot/motion/cmc/sof.py
index dd436172b4..bbebd75d5e 100644
--- a/boxmot/motion/cmc/sof.py
+++ b/boxmot/motion/cmc/sof.py
@@ -118,20 +118,16 @@ def apply(self, img, dets):
             return H
 
         # sparse otical flow for sparse features using Lucas-Kanade with pyramids
+        # calculate new positions of the keypoints between the previous frame (self.prev_img)
+        # and the current frame (img) using sparse optical flow (Lucas-Kanade with pyramids)
         try:
-            matchedKeypoints, status, err = cv2.calcOpticalFlowPyrLK(
+            next_keypoints, status, err = cv2.calcOpticalFlowPyrLK(
                 self.prev_img, img, self.prev_keypoints, None
             )
         except Exception as e:
             LOGGER.warning(f'calcOpticalFlowPyrLK failed: {e}')
             return H
 
-        # calculate new positions of the keypoints between the previous frame (self.prev_img)
-        # and the current frame (img) using sparse optical flow (Lucas-Kanade with pyramids)
-        next_keypoints, status, err = cv2.calcOpticalFlowPyrLK(
-            self.prev_img, img, self.prev_keypoints, None
-        )
-
         # for simplicity, if no keypoints are found, we discard the frame
         if next_keypoints is None:
             return H
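
Note on PATCH 1/2 (illustration only, not part of the series): the new fuse_first_associate flag only affects the first, high-confidence association stage, where the IoU cost is discounted by detection confidence before being blended with the appearance cost. The sketch below shows the conventional ByteTrack-style score fusion this relies on; the exact boxmot fuse_score implementation and the attribute holding each detection's confidence are assumptions, not confirmed by this diff.

    import numpy as np

    def fuse_score_sketch(ious_dists, det_confs):
        # ious_dists: (num_tracks, num_dets) IoU distance matrix, i.e. 1 - IoU
        # det_confs:  (num_dets,) detection confidences in [0, 1]
        if ious_dists.size == 0:
            return ious_dists
        iou_sim = 1.0 - ious_dists  # distance -> similarity
        fused_sim = iou_sim * np.broadcast_to(det_confs, ious_dists.shape)
        return 1.0 - fused_sim      # similarity -> distance

    # One track, two overlapping detections: fusing makes the low-confidence
    # detection (conf 0.3) noticeably more expensive to match.
    ious = np.array([[0.2, 0.4]])
    confs = np.array([0.9, 0.3])
    print(fuse_score_sketch(ious, confs))  # ~ [[0.28 0.82]]

Since fuse_first_associate defaults to False, the previous matching behaviour is preserved unless the caller opts in. PATCH 2/2 is a pure refactor: the Lucas-Kanade call that was previously executed twice per frame now runs once inside the try/except, and its result is reused for the keypoint check below.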