From a05882add1fa5a86141f16f924b6fdf8f25403e3 Mon Sep 17 00:00:00 2001 From: Grzegorz Klimaszewski <166530809+grzegorz-roboflow@users.noreply.github.com> Date: Thu, 19 Sep 2024 12:15:02 +0200 Subject: [PATCH 001/161] Make BaseTrack._count instance variable --- supervision/tracker/byte_tracker/basetrack.py | 25 ++++++++----------- 1 file changed, 11 insertions(+), 14 deletions(-) diff --git a/supervision/tracker/byte_tracker/basetrack.py b/supervision/tracker/byte_tracker/basetrack.py index 806f75384..d280274cd 100644 --- a/supervision/tracker/byte_tracker/basetrack.py +++ b/supervision/tracker/byte_tracker/basetrack.py @@ -12,9 +12,8 @@ class TrackState(Enum): class BaseTrack: - _count = 0 - def __init__(self): + self._count = 0 self.track_id = 0 self.is_activated = False self.state = TrackState.New @@ -34,18 +33,16 @@ def __init__(self): def end_frame(self) -> int: return self.frame_id - @staticmethod - def next_id() -> int: - BaseTrack._count += 1 - return BaseTrack._count - - @staticmethod - def reset_counter(): - BaseTrack._count = 0 - BaseTrack.track_id = 0 - BaseTrack.start_frame = 0 - BaseTrack.frame_id = 0 - BaseTrack.time_since_update = 0 + def next_id(self) -> int: + self._count += 1 + return self._count + + def reset_counter(self): + self._count = 0 + self.track_id = 0 + self.start_frame = 0 + self.frame_id = 0 + self.time_since_update = 0 def activate(self, *args): raise NotImplementedError From db2aa721f83a3f8b6a3ad80cb0e507bdc41360e1 Mon Sep 17 00:00:00 2001 From: Grzegorz Klimaszewski <166530809+grzegorz-roboflow@users.noreply.github.com> Date: Thu, 19 Sep 2024 13:38:16 +0200 Subject: [PATCH 002/161] Let upper layer to handle track_id generation --- supervision/tracker/byte_tracker/basetrack.py | 6 --- supervision/tracker/byte_tracker/core.py | 47 +++++++++---------- test/tracker/__init__.py | 0 test/tracker/test_byte_tracker.py | 37 +++++++++++++++ 4 files changed, 58 insertions(+), 32 deletions(-) create mode 100644 test/tracker/__init__.py create 
mode 100644 test/tracker/test_byte_tracker.py diff --git a/supervision/tracker/byte_tracker/basetrack.py b/supervision/tracker/byte_tracker/basetrack.py index d280274cd..b78bc5961 100644 --- a/supervision/tracker/byte_tracker/basetrack.py +++ b/supervision/tracker/byte_tracker/basetrack.py @@ -13,7 +13,6 @@ class TrackState(Enum): class BaseTrack: def __init__(self): - self._count = 0 self.track_id = 0 self.is_activated = False self.state = TrackState.New @@ -33,12 +32,7 @@ def __init__(self): def end_frame(self) -> int: return self.frame_id - def next_id(self) -> int: - self._count += 1 - return self._count - def reset_counter(self): - self._count = 0 self.track_id = 0 self.start_frame = 0 self.frame_id = 0 diff --git a/supervision/tracker/byte_tracker/core.py b/supervision/tracker/byte_tracker/core.py index 89e1e2f2c..cd8ea3e57 100644 --- a/supervision/tracker/byte_tracker/core.py +++ b/supervision/tracker/byte_tracker/core.py @@ -1,4 +1,4 @@ -from typing import List, Tuple +from typing import List, Optional, Tuple import numpy as np @@ -11,10 +11,11 @@ class STrack(BaseTrack): shared_kalman = KalmanFilter() - _external_count = 0 def __init__(self, tlwh, score, class_ids, minimum_consecutive_frames): + super().__init__() # wait activate + self._external_count = 0 self._tlwh = np.asarray(tlwh, dtype=np.float32) self.kalman_filter = None self.mean, self.covariance = None, None @@ -54,10 +55,10 @@ def multi_predict(stracks): stracks[i].mean = mean stracks[i].covariance = cov - def activate(self, kalman_filter, frame_id): + def activate(self, kalman_filter, frame_id, track_id): """Start a new tracklet""" self.kalman_filter = kalman_filter - self.internal_track_id = self.next_id() + self.internal_track_id = track_id self.mean, self.covariance = self.kalman_filter.initiate( self.tlwh_to_xyah(self._tlwh) ) @@ -68,12 +69,12 @@ def activate(self, kalman_filter, frame_id): self.is_activated = True if self.minimum_consecutive_frames == 1: - self.external_track_id = 
self.next_external_id() + self.external_track_id = track_id self.frame_id = frame_id self.start_frame = frame_id - def re_activate(self, new_track, frame_id, new_id=False): + def re_activate(self, new_track, frame_id, new_id: Optional[int] = None): self.mean, self.covariance = self.kalman_filter.update( self.mean, self.covariance, self.tlwh_to_xyah(new_track.tlwh) ) @@ -82,10 +83,10 @@ def re_activate(self, new_track, frame_id, new_id=False): self.frame_id = frame_id if new_id: - self.internal_track_id = self.next_id() + self.internal_track_id = new_id self.score = new_track.score - def update(self, new_track, frame_id): + def update(self, new_track, frame_id, track_id): """ Update a matched track :type new_track: STrack @@ -104,7 +105,7 @@ def update(self, new_track, frame_id): if self.tracklet_len == self.minimum_consecutive_frames: self.is_activated = True if self.external_track_id == -1: - self.external_track_id = self.next_external_id() + self.external_track_id = track_id self.score = new_track.score @@ -142,15 +143,6 @@ def tlwh_to_xyah(tlwh): def to_xyah(self): return self.tlwh_to_xyah(self.tlwh) - @staticmethod - def next_external_id(): - STrack._external_count += 1 - return STrack._external_count - - @staticmethod - def reset_external_counter(): - STrack._external_count = 0 - @staticmethod def tlbr_to_tlwh(tlbr): ret = np.asarray(tlbr).copy() @@ -225,6 +217,7 @@ def __init__( self.track_activation_threshold = track_activation_threshold self.minimum_matching_threshold = minimum_matching_threshold + self._count = 0 self.frame_id = 0 self.det_thresh = self.track_activation_threshold + 0.1 self.max_time_lost = int(frame_rate / 30.0 * lost_track_buffer) @@ -235,6 +228,10 @@ def __init__( self.lost_tracks: List[STrack] = [] self.removed_tracks: List[STrack] = [] + def _next_id(self) -> int: + self._count += 1 + return self._count + def update_with_detections(self, detections: Detections) -> Detections: """ Updates the tracker with the provided detections and 
returns the updated @@ -314,8 +311,6 @@ def reset(self): self.tracked_tracks: List[STrack] = [] self.lost_tracks: List[STrack] = [] self.removed_tracks: List[STrack] = [] - BaseTrack.reset_counter() - STrack.reset_external_counter() def update_with_tensors(self, tensors: np.ndarray) -> List[STrack]: """ @@ -384,10 +379,10 @@ def update_with_tensors(self, tensors: np.ndarray) -> List[STrack]: track = strack_pool[itracked] det = detections[idet] if track.state == TrackState.Tracked: - track.update(detections[idet], self.frame_id) + track.update(detections[idet], self.frame_id, self._next_id()) activated_starcks.append(track) else: - track.re_activate(det, self.frame_id, new_id=False) + track.re_activate(det, self.frame_id) refind_stracks.append(track) """ Step 3: Second association, with low score detection boxes""" @@ -413,10 +408,10 @@ def update_with_tensors(self, tensors: np.ndarray) -> List[STrack]: track = r_tracked_stracks[itracked] det = detections_second[idet] if track.state == TrackState.Tracked: - track.update(det, self.frame_id) + track.update(det, self.frame_id, self._next_id()) activated_starcks.append(track) else: - track.re_activate(det, self.frame_id, new_id=False) + track.re_activate(det, self.frame_id) refind_stracks.append(track) for it in u_track: @@ -434,7 +429,7 @@ def update_with_tensors(self, tensors: np.ndarray) -> List[STrack]: dists, thresh=0.7 ) for itracked, idet in matches: - unconfirmed[itracked].update(detections[idet], self.frame_id) + unconfirmed[itracked].update(detections[idet], self.frame_id, self._next_id()) activated_starcks.append(unconfirmed[itracked]) for it in u_unconfirmed: track = unconfirmed[it] @@ -446,7 +441,7 @@ def update_with_tensors(self, tensors: np.ndarray) -> List[STrack]: track = detections[inew] if track.score < self.det_thresh: continue - track.activate(self.kalman_filter, self.frame_id) + track.activate(self.kalman_filter, self.frame_id, self._next_id()) activated_starcks.append(track) """ Step 5: Update 
state""" for track in self.lost_tracks: diff --git a/test/tracker/__init__.py b/test/tracker/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/test/tracker/test_byte_tracker.py b/test/tracker/test_byte_tracker.py new file mode 100644 index 000000000..673cd90b9 --- /dev/null +++ b/test/tracker/test_byte_tracker.py @@ -0,0 +1,37 @@ +import numpy as np +import pytest +import supervision as sv + + +@pytest.mark.parametrize( + "detections, expected_results", + [ + ( + [ + sv.Detections( + xyxy=np.array([[10, 10, 20, 20], [30, 30, 40, 40]]), + class_id=np.array([1, 1]), + confidence=np.array([1, 1]), + ), + sv.Detections( + xyxy=np.array([[10, 10, 20, 20], [30, 30, 40, 40]]), + class_id=np.array([1, 1]), + confidence=np.array([1, 1]), + ), + ], + sv.Detections( + xyxy=np.array([[10, 10, 20, 20], [30, 30, 40, 40]]), + class_id=np.array([1, 1]), + confidence=np.array([1, 1]), + tracker_id=np.array([1, 2]), + ) + ), + ], +) +def test_byte_tracker( + detections: list[sv.Detections], + expected_results: sv.Detections, +) -> None: + byte_tracker = sv.ByteTrack() + tracked_detections = [byte_tracker.update_with_detections(d) for d in detections] + assert tracked_detections[-1] == expected_results From 3da10edb47ea601718cedec347156bc5b0140882 Mon Sep 17 00:00:00 2001 From: Grzegorz Klimaszewski <166530809+grzegorz-roboflow@users.noreply.github.com> Date: Thu, 19 Sep 2024 13:40:31 +0200 Subject: [PATCH 003/161] Remove unused variable --- supervision/tracker/byte_tracker/core.py | 1 - 1 file changed, 1 deletion(-) diff --git a/supervision/tracker/byte_tracker/core.py b/supervision/tracker/byte_tracker/core.py index cd8ea3e57..8f64c29da 100644 --- a/supervision/tracker/byte_tracker/core.py +++ b/supervision/tracker/byte_tracker/core.py @@ -15,7 +15,6 @@ class STrack(BaseTrack): def __init__(self, tlwh, score, class_ids, minimum_consecutive_frames): super().__init__() # wait activate - self._external_count = 0 self._tlwh = np.asarray(tlwh, dtype=np.float32) 
self.kalman_filter = None self.mean, self.covariance = None, None From 1e43de0a4179155123d007215acf27e4d05f5395 Mon Sep 17 00:00:00 2001 From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Thu, 19 Sep 2024 11:41:16 +0000 Subject: [PATCH 004/161] =?UTF-8?q?fix(pre=5Fcommit):=20=F0=9F=8E=A8=20aut?= =?UTF-8?q?o=20format=20pre-commit=20hooks?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- supervision/tracker/byte_tracker/core.py | 4 +++- test/tracker/test_byte_tracker.py | 3 ++- 2 files changed, 5 insertions(+), 2 deletions(-) diff --git a/supervision/tracker/byte_tracker/core.py b/supervision/tracker/byte_tracker/core.py index 8f64c29da..b3a324005 100644 --- a/supervision/tracker/byte_tracker/core.py +++ b/supervision/tracker/byte_tracker/core.py @@ -428,7 +428,9 @@ def update_with_tensors(self, tensors: np.ndarray) -> List[STrack]: dists, thresh=0.7 ) for itracked, idet in matches: - unconfirmed[itracked].update(detections[idet], self.frame_id, self._next_id()) + unconfirmed[itracked].update( + detections[idet], self.frame_id, self._next_id() + ) activated_starcks.append(unconfirmed[itracked]) for it in u_unconfirmed: track = unconfirmed[it] diff --git a/test/tracker/test_byte_tracker.py b/test/tracker/test_byte_tracker.py index 673cd90b9..7ca94bbfd 100644 --- a/test/tracker/test_byte_tracker.py +++ b/test/tracker/test_byte_tracker.py @@ -1,5 +1,6 @@ import numpy as np import pytest + import supervision as sv @@ -24,7 +25,7 @@ class_id=np.array([1, 1]), confidence=np.array([1, 1]), tracker_id=np.array([1, 2]), - ) + ), ), ], ) From a0655aed087c414bc1d4aa895a0fe17c4ab4915c Mon Sep 17 00:00:00 2001 From: Grzegorz Klimaszewski <166530809+grzegorz-roboflow@users.noreply.github.com> Date: Thu, 19 Sep 2024 13:44:26 +0200 Subject: [PATCH 005/161] fix test --- test/tracker/test_byte_tracker.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git 
a/test/tracker/test_byte_tracker.py b/test/tracker/test_byte_tracker.py index 7ca94bbfd..47ad1a9d0 100644 --- a/test/tracker/test_byte_tracker.py +++ b/test/tracker/test_byte_tracker.py @@ -1,3 +1,4 @@ +from typing import List import numpy as np import pytest @@ -30,7 +31,7 @@ ], ) def test_byte_tracker( - detections: list[sv.Detections], + detections: List[sv.Detections], expected_results: sv.Detections, ) -> None: byte_tracker = sv.ByteTrack() From df9257b8fc4e45f96b12d90228faa3f29705c4b8 Mon Sep 17 00:00:00 2001 From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Thu, 19 Sep 2024 11:45:31 +0000 Subject: [PATCH 006/161] =?UTF-8?q?fix(pre=5Fcommit):=20=F0=9F=8E=A8=20aut?= =?UTF-8?q?o=20format=20pre-commit=20hooks?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- test/tracker/test_byte_tracker.py | 1 + 1 file changed, 1 insertion(+) diff --git a/test/tracker/test_byte_tracker.py b/test/tracker/test_byte_tracker.py index 47ad1a9d0..98efeb093 100644 --- a/test/tracker/test_byte_tracker.py +++ b/test/tracker/test_byte_tracker.py @@ -1,4 +1,5 @@ from typing import List + import numpy as np import pytest From 266bf3f10c8141c3631f584df51b427f5ab12659 Mon Sep 17 00:00:00 2001 From: Grzegorz Klimaszewski <166530809+grzegorz-roboflow@users.noreply.github.com> Date: Thu, 19 Sep 2024 13:49:43 +0200 Subject: [PATCH 007/161] Reset _count --- supervision/tracker/byte_tracker/core.py | 1 + 1 file changed, 1 insertion(+) diff --git a/supervision/tracker/byte_tracker/core.py b/supervision/tracker/byte_tracker/core.py index b3a324005..013af2ee0 100644 --- a/supervision/tracker/byte_tracker/core.py +++ b/supervision/tracker/byte_tracker/core.py @@ -307,6 +307,7 @@ def reset(self): ensuring the tracker starts with a clean state for each new video. 
""" self.frame_id = 0 + self._count = 0 self.tracked_tracks: List[STrack] = [] self.lost_tracks: List[STrack] = [] self.removed_tracks: List[STrack] = [] From cc640dc8909d4bce93577305c8bbd6a1efc3599c Mon Sep 17 00:00:00 2001 From: Grzegorz Klimaszewski <166530809+grzegorz-roboflow@users.noreply.github.com> Date: Mon, 23 Sep 2024 17:35:44 +0200 Subject: [PATCH 008/161] Extend LineZone to filter out miscounts --- supervision/detection/line_zone.py | 52 +++++- test/detection/test_line_counter.py | 244 ++++++++++++++++++++++++++++ 2 files changed, 290 insertions(+), 6 deletions(-) diff --git a/supervision/detection/line_zone.py b/supervision/detection/line_zone.py index aa7f2d4e3..bc11a7703 100644 --- a/supervision/detection/line_zone.py +++ b/supervision/detection/line_zone.py @@ -1,5 +1,6 @@ +from collections import deque import warnings -from typing import Dict, Iterable, Optional, Tuple +from typing import Deque, Dict, Iterable, List, Optional, Tuple import cv2 import numpy as np @@ -67,6 +68,7 @@ def __init__( Position.BOTTOM_LEFT, Position.BOTTOM_RIGHT, ), + max_linger: int = 1, ): """ Args: @@ -77,10 +79,16 @@ def __init__( to consider when deciding on whether the detection has passed the line counter or not. By default, this contains the four corners of the detection's bounding box + max_linger: An integer indicating the number of consequtive frames + detections should stay away from the line after crossing it + to consider crossing completed. 
This configuration option + is useful when dealing with unstable bounding boxes or when + detections may linger on the line """ self.vector = Vector(start=start, end=end) self.limits = self.calculate_region_of_interest_limits(vector=self.vector) - self.tracker_state: Dict[str, bool] = {} + self.max_linger = max(1, max_linger) + self.crossing_state: Dict[str, Deque[bool]] = {} self.in_count: int = 0 self.out_count: int = 0 self.triggering_anchors = triggering_anchors @@ -178,14 +186,24 @@ def trigger(self, detections: Detections) -> Tuple[np.ndarray, np.ndarray]: continue tracker_state = has_any_left_trigger[i] - if tracker_id not in self.tracker_state: - self.tracker_state[tracker_id] = tracker_state + if tracker_id not in self.crossing_state: + self.crossing_state[tracker_id] = deque([tracker_state], maxlen=self.max_linger) continue - if self.tracker_state.get(tracker_id) == tracker_state: + crossing_state = self.crossing_state[tracker_id] + prev_frame_tracker_state = crossing_state[-1] + if self.max_linger == 1 and prev_frame_tracker_state == tracker_state: continue - self.tracker_state[tracker_id] = tracker_state + crossing_in_progress = crossing_state.count(True) != 0 and crossing_state.count(False) != 0 + crossing_state.appendleft(tracker_state) + all_on_same_side = crossing_state.count(not tracker_state) == 0 + if not all_on_same_side: + continue + else: + if self.max_linger > 1 and not crossing_in_progress: + continue + if tracker_state: self.in_count += 1 crossed_in[i] = True @@ -193,6 +211,28 @@ def trigger(self, detections: Detections) -> Tuple[np.ndarray, np.ndarray]: self.out_count += 1 crossed_out[i] = True + if self.max_linger == 1: + return crossed_in, crossed_out + + this_frame_trackers = set(detections.tracker_id) + for tracker_id in list(self.crossing_state.keys()): + if tracker_id in this_frame_trackers: + continue + crossing_state = self.crossing_state[tracker_id] + crossing_in_progress = crossing_state.count(True) != 0 and 
crossing_state.count(False) != 0 + if not crossing_in_progress: + continue + tracker_state = crossing_state[0] + crossing_state.appendleft(tracker_state) + all_on_same_side = crossing_state.count(not tracker_state) == 0 + if not all_on_same_side: + continue + + if tracker_state: + self.in_count += 1 + else: + self.out_count += 1 + return crossed_in, crossed_out diff --git a/test/detection/test_line_counter.py b/test/detection/test_line_counter.py index b7e4d33dd..ac635d310 100644 --- a/test/detection/test_line_counter.py +++ b/test/detection/test_line_counter.py @@ -303,6 +303,19 @@ def test_line_zone_one_detection_default_anchors( [False, True, False, True], [False, False, True, False], ), + ( # Scrape line, left side, center anchor (along line point), then hold position + Vector(Point(0, 0), Point(10, 0)), + [ + [-2, 4, 2, 6], + [-2, 4 - 10, 2, 6 - 10], + [-2, 4, 2, 6], + [-2, 4 - 10, 2, 6 - 10], + [-2, 4 - 10, 2, 6 - 10], + ], + [Position.CENTER], + [False, True, False, True, False], + [False, False, True, False, False], + ), ( # Scrape line, right side, corner anchors Vector(Point(0, 0), Point(10, 0)), [ @@ -477,3 +490,234 @@ def test_line_zone_multiple_detections( assert crossed_in_list == expected_crossed_in assert crossed_out_list == expected_crossed_out + + +@pytest.mark.parametrize( + "vector, xyxy_sequence, triggering_anchors, max_linger, expected_crossed_in, " + "expected_crossed_out", + [ + ( # Detection lingers around line, all crosses counted + Vector(Point(0, 0), Point(10, 0)), + [ + [2, 4, 3, 6], + [2, 4 - 10, 3, 6], + [2, 4, 3, 6], + [2, 4 - 10, 3, 6], + [2, 4 - 10, 3, 6], + ], + [ + Position.TOP_LEFT, + ], + 1, + [False, True, False, True, False], + [False, False, True, False, False], + ), + ( # Detection lingers around line, only final cross counted + Vector(Point(0, 0), Point(10, 0)), + [ + [2, 4, 3, 6], + [2, 4 - 10, 3, 6], + [2, 4, 3, 6], + [2, 4 - 10, 3, 6], + [2, 4 - 10, 3, 6], + ], + [ + Position.TOP_LEFT, + ], + 2, + [False, False, False, 
False, True], + [False, False, False, False, False], + ), + ( # Detection lingers around line for a long time + Vector(Point(0, 0), Point(10, 0)), + [ + [2, 4, 3, 6], + [2, 4 - 10, 3, 6], + [2, 4, 3, 6], + [2, 4 - 10, 3, 6], + [2, 4, 3, 6], + [2, 4 - 10, 3, 6], + [2, 4, 3, 6], + [2, 4 - 10, 3, 6], + [2, 4, 3, 6], + [2, 4 - 10, 3, 6], + [2, 4, 3, 6], + [2, 4 - 10, 3, 6], + [2, 4 - 10, 3, 6], + ], + [ + Position.TOP_LEFT, + ], + 2, + [False] * 12 + [True], + [False] * 13, + ), + ( # Detection lingers around line, longer cycle + Vector(Point(0, 0), Point(10, 0)), + [ + [2, 4, 3, 6], + [2, 4 - 10, 3, 6], + [2, 4, 3, 6], + [2, 4, 3, 6], + [2, 4, 3, 6], + [2, 4 - 10, 3, 6], + [2, 4 - 10, 3, 6], + [2, 4 - 10, 3, 6], + [2, 4 - 10, 3, 6], + ], + [ + Position.TOP_LEFT, + ], + 4, + [False] * 8 + [True], + [False] * 9, + ), + ], +) +def test_line_zone_one_detection_long_horizon( + vector: Vector, + xyxy_sequence: List[List[float]], + triggering_anchors: List[Position], + max_linger: int, + expected_crossed_in: List[bool], + expected_crossed_out: List[bool], +) -> None: + line_zone = LineZone( + start=vector.start, end=vector.end, triggering_anchors=triggering_anchors, max_linger=max_linger, + ) + + crossed_in_list = [] + crossed_out_list = [] + for i, bbox in enumerate(xyxy_sequence): + detections = mock_detections( + xyxy=[bbox], + tracker_id=[0], + ) + crossed_in, crossed_out = line_zone.trigger(detections) + crossed_in_list.append(crossed_in[0]) + crossed_out_list.append(crossed_out[0]) + + assert ( + crossed_in_list == expected_crossed_in + ), f"expected {expected_crossed_in}, got {crossed_in_list}" + assert ( + crossed_out_list == expected_crossed_out + ), f"expected {expected_crossed_out}, got {crossed_out_list}" + + +@pytest.mark.parametrize( + "vector, xyxy_sequence, anchors, max_linger, expected_crossed_in, " + "expected_crossed_out, expected_count_in, expected_count_out, exception", + [ + ( # One stays, one crosses, one disappears before crossing, max_linger==1 + 
Vector(Point(0, 0), Point(10, 0)), + [ + [[4, 4, 6, 6], [4, 4, 6, 6], [4, 4, 6, 6]], + [[4, 4, 6, 6], [4, 4 - 10, 6, 6 - 10], [4, 4, 6, 6]], + [[4, 4, 6, 6], [4, 4, 6, 6]], + [[4, 4, 6, 6], [4, 4 - 10, 6, 6 - 10]], + [[4, 4, 6, 6], [4, 4 - 10, 6, 6 - 10]], + ], + [ + Position.TOP_LEFT, + ], + 1, + [[False, False, False], [False, True, False], [False, False], [False, True], [False, False]], + [[False, False, False], [False, False, False], [False, True], [False, False], [False, False]], + [0, 1, 1, 2, 2], + [0, 0, 1, 1, 1], + DoesNotRaise(), + ), + ( # One stays, one crosses, one disappears immediately after crossing, max_linger==1 + Vector(Point(0, 0), Point(10, 0)), + [ + [[4, 4, 6, 6], [4, 4, 6, 6], [4, 4, 6, 6]], + [[4, 4, 6, 6], [4, 4 - 10, 6, 6 - 10], [4, 4, 6, 6]], + [[4, 4, 6, 6], [4, 4, 6, 6], [4, 4 - 10, 6, 6 - 10]], + [[4, 4, 6, 6], [4, 4 - 10, 6, 6 - 10]], + [[4, 4, 6, 6], [4, 4 - 10, 6, 6 - 10]], + ], + [ + Position.TOP_LEFT, + ], + 1, + [[False, False, False], [False, True, False], [False, False, True,], [False, True], [False, False]], + [[False, False, False], [False, False, False], [False, True, False], [False, False], [False, False]], + [0, 1, 2, 3, 3], + [0, 0, 1, 1, 1], + DoesNotRaise(), + ), + ( # One stays, one crosses, one disappears before crossing, max_linger==2 + Vector(Point(0, 0), Point(10, 0)), + [ + [[4, 4, 6, 6], [4, 4, 6, 6], [4, 4, 6, 6]], + [[4, 4, 6, 6], [4, 4 - 10, 6, 6 - 10], [4, 4, 6, 6]], + [[4, 4, 6, 6], [4, 4, 6, 6]], + [[4, 4, 6, 6], [4, 4 - 10, 6, 6 - 10]], + [[4, 4, 6, 6], [4, 4 - 10, 6, 6 - 10]], + ], + [ + Position.TOP_LEFT, + ], + 2, + [[False, False, False], [False, False, False], [False, False], [False, False], [False, True]], + [[False, False, False], [False, False, False], [False, False], [False, False], [False, False]], + [0, 0, 0, 0, 1], + [0, 0, 0, 0, 0], + DoesNotRaise(), + ), + ( # One stays, one crosses, one disappears immediately after crossing, max_linger==2 + Vector(Point(0, 0), Point(10, 0)), + [ + [[4, 4, 
6, 6], [4, 4, 6, 6], [4, 4, 6, 6]], + [[4, 4, 6, 6], [4, 4 - 10, 6, 6 - 10], [4, 4, 6, 6]], + [[4, 4, 6, 6], [4, 4, 6, 6], [4, 4 - 10, 6, 6 - 10]], + [[4, 4, 6, 6], [4, 4 - 10, 6, 6 - 10]], + [[4, 4, 6, 6], [4, 4 - 10, 6, 6 - 10]], + ], + [ + Position.TOP_LEFT, + ], + 2, + [[False, False, False], [False, False, False], [False, False, False,], [False, False], [False, True]], + [[False, False, False], [False, False, False], [False, False, False], [False, False], [False, False]], + [0, 0, 0, 1, 2], + [0, 0, 0, 0, 0], + DoesNotRaise(), + ), + ], +) +def test_line_zone_long_horizon_disappearing_detections( + vector: Vector, + xyxy_sequence: List[List[Optional[List[float]]]], + anchors: List[Position], + max_linger: int, + expected_crossed_in: List[List[bool]], + expected_crossed_out: List[List[bool]], + expected_count_in: List[int], + expected_count_out: List[int], + exception: Exception, +) -> None: + with exception: + line_zone = LineZone( + start=vector.start, end=vector.end, triggering_anchors=anchors, max_linger=max_linger + ) + crossed_in_list = [] + crossed_out_list = [] + count_in_list = [] + count_out_list = [] + for bboxes in xyxy_sequence: + detections = mock_detections( + xyxy=bboxes, + tracker_id=[i for i in range(0, len(bboxes))], + ) + crossed_in, crossed_out = line_zone.trigger(detections) + crossed_in_list.append(list(crossed_in)) + crossed_out_list.append(list(crossed_out)) + count_in_list.append(line_zone.in_count) + count_out_list.append(line_zone.out_count) + + assert crossed_in_list == expected_crossed_in + assert crossed_out_list == expected_crossed_out + assert count_in_list == expected_count_in + assert count_out_list == expected_count_out From 0e26f1b22e05aec929ca0acda528c9a8ab918827 Mon Sep 17 00:00:00 2001 From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Mon, 23 Sep 2024 15:44:35 +0000 Subject: [PATCH 009/161] =?UTF-8?q?fix(pre=5Fcommit):=20=F0=9F=8E=A8=20aut?= =?UTF-8?q?o=20format=20pre-commit=20hooks?= 
MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- supervision/detection/line_zone.py | 16 ++++-- test/detection/test_line_counter.py | 82 +++++++++++++++++++++++++---- 2 files changed, 83 insertions(+), 15 deletions(-) diff --git a/supervision/detection/line_zone.py b/supervision/detection/line_zone.py index bc11a7703..74405baee 100644 --- a/supervision/detection/line_zone.py +++ b/supervision/detection/line_zone.py @@ -1,6 +1,6 @@ -from collections import deque import warnings -from typing import Deque, Dict, Iterable, List, Optional, Tuple +from collections import deque +from typing import Deque, Dict, Iterable, Optional, Tuple import cv2 import numpy as np @@ -187,7 +187,9 @@ def trigger(self, detections: Detections) -> Tuple[np.ndarray, np.ndarray]: tracker_state = has_any_left_trigger[i] if tracker_id not in self.crossing_state: - self.crossing_state[tracker_id] = deque([tracker_state], maxlen=self.max_linger) + self.crossing_state[tracker_id] = deque( + [tracker_state], maxlen=self.max_linger + ) continue crossing_state = self.crossing_state[tracker_id] @@ -195,7 +197,9 @@ def trigger(self, detections: Detections) -> Tuple[np.ndarray, np.ndarray]: if self.max_linger == 1 and prev_frame_tracker_state == tracker_state: continue - crossing_in_progress = crossing_state.count(True) != 0 and crossing_state.count(False) != 0 + crossing_in_progress = ( + crossing_state.count(True) != 0 and crossing_state.count(False) != 0 + ) crossing_state.appendleft(tracker_state) all_on_same_side = crossing_state.count(not tracker_state) == 0 if not all_on_same_side: @@ -219,7 +223,9 @@ def trigger(self, detections: Detections) -> Tuple[np.ndarray, np.ndarray]: if tracker_id in this_frame_trackers: continue crossing_state = self.crossing_state[tracker_id] - crossing_in_progress = crossing_state.count(True) != 0 and crossing_state.count(False) != 0 + crossing_in_progress = ( + crossing_state.count(True) != 0 and 
crossing_state.count(False) != 0 + ) if not crossing_in_progress: continue tracker_state = crossing_state[0] diff --git a/test/detection/test_line_counter.py b/test/detection/test_line_counter.py index ac635d310..9daa3e8e6 100644 --- a/test/detection/test_line_counter.py +++ b/test/detection/test_line_counter.py @@ -583,7 +583,10 @@ def test_line_zone_one_detection_long_horizon( expected_crossed_out: List[bool], ) -> None: line_zone = LineZone( - start=vector.start, end=vector.end, triggering_anchors=triggering_anchors, max_linger=max_linger, + start=vector.start, + end=vector.end, + triggering_anchors=triggering_anchors, + max_linger=max_linger, ) crossed_in_list = [] @@ -622,8 +625,20 @@ def test_line_zone_one_detection_long_horizon( Position.TOP_LEFT, ], 1, - [[False, False, False], [False, True, False], [False, False], [False, True], [False, False]], - [[False, False, False], [False, False, False], [False, True], [False, False], [False, False]], + [ + [False, False, False], + [False, True, False], + [False, False], + [False, True], + [False, False], + ], + [ + [False, False, False], + [False, False, False], + [False, True], + [False, False], + [False, False], + ], [0, 1, 1, 2, 2], [0, 0, 1, 1, 1], DoesNotRaise(), @@ -641,8 +656,24 @@ def test_line_zone_one_detection_long_horizon( Position.TOP_LEFT, ], 1, - [[False, False, False], [False, True, False], [False, False, True,], [False, True], [False, False]], - [[False, False, False], [False, False, False], [False, True, False], [False, False], [False, False]], + [ + [False, False, False], + [False, True, False], + [ + False, + False, + True, + ], + [False, True], + [False, False], + ], + [ + [False, False, False], + [False, False, False], + [False, True, False], + [False, False], + [False, False], + ], [0, 1, 2, 3, 3], [0, 0, 1, 1, 1], DoesNotRaise(), @@ -660,8 +691,20 @@ def test_line_zone_one_detection_long_horizon( Position.TOP_LEFT, ], 2, - [[False, False, False], [False, False, False], [False, False], [False, 
False], [False, True]], - [[False, False, False], [False, False, False], [False, False], [False, False], [False, False]], + [ + [False, False, False], + [False, False, False], + [False, False], + [False, False], + [False, True], + ], + [ + [False, False, False], + [False, False, False], + [False, False], + [False, False], + [False, False], + ], [0, 0, 0, 0, 1], [0, 0, 0, 0, 0], DoesNotRaise(), @@ -679,8 +722,24 @@ def test_line_zone_one_detection_long_horizon( Position.TOP_LEFT, ], 2, - [[False, False, False], [False, False, False], [False, False, False,], [False, False], [False, True]], - [[False, False, False], [False, False, False], [False, False, False], [False, False], [False, False]], + [ + [False, False, False], + [False, False, False], + [ + False, + False, + False, + ], + [False, False], + [False, True], + ], + [ + [False, False, False], + [False, False, False], + [False, False, False], + [False, False], + [False, False], + ], [0, 0, 0, 1, 2], [0, 0, 0, 0, 0], DoesNotRaise(), @@ -700,7 +759,10 @@ def test_line_zone_long_horizon_disappearing_detections( ) -> None: with exception: line_zone = LineZone( - start=vector.start, end=vector.end, triggering_anchors=anchors, max_linger=max_linger + start=vector.start, + end=vector.end, + triggering_anchors=anchors, + max_linger=max_linger, ) crossed_in_list = [] crossed_out_list = [] From 95f6ef0b2222de8bc0420e925454f33f99242396 Mon Sep 17 00:00:00 2001 From: Grzegorz Klimaszewski <166530809+grzegorz-roboflow@users.noreply.github.com> Date: Mon, 23 Sep 2024 17:49:05 +0200 Subject: [PATCH 010/161] Address CI linter --- test/detection/test_line_counter.py | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/test/detection/test_line_counter.py b/test/detection/test_line_counter.py index 9daa3e8e6..dee8d1faa 100644 --- a/test/detection/test_line_counter.py +++ b/test/detection/test_line_counter.py @@ -303,7 +303,7 @@ def test_line_zone_one_detection_default_anchors( [False, True, False, True], 
[False, False, True, False], ), - ( # Scrape line, left side, center anchor (along line point), then hold position + ( # Scrape line, left side, center anchor (along line point) Vector(Point(0, 0), Point(10, 0)), [ [-2, 4, 2, 6], @@ -612,7 +612,7 @@ def test_line_zone_one_detection_long_horizon( "vector, xyxy_sequence, anchors, max_linger, expected_crossed_in, " "expected_crossed_out, expected_count_in, expected_count_out, exception", [ - ( # One stays, one crosses, one disappears before crossing, max_linger==1 + ( # One stays, one crosses, one disappears before crossing Vector(Point(0, 0), Point(10, 0)), [ [[4, 4, 6, 6], [4, 4, 6, 6], [4, 4, 6, 6]], @@ -643,7 +643,7 @@ def test_line_zone_one_detection_long_horizon( [0, 0, 1, 1, 1], DoesNotRaise(), ), - ( # One stays, one crosses, one disappears immediately after crossing, max_linger==1 + ( # One stays, one crosses, one disappears immediately after crossing Vector(Point(0, 0), Point(10, 0)), [ [[4, 4, 6, 6], [4, 4, 6, 6], [4, 4, 6, 6]], @@ -678,7 +678,7 @@ def test_line_zone_one_detection_long_horizon( [0, 0, 1, 1, 1], DoesNotRaise(), ), - ( # One stays, one crosses, one disappears before crossing, max_linger==2 + ( # One stays, one crosses, one disappears before crossing Vector(Point(0, 0), Point(10, 0)), [ [[4, 4, 6, 6], [4, 4, 6, 6], [4, 4, 6, 6]], @@ -709,7 +709,7 @@ def test_line_zone_one_detection_long_horizon( [0, 0, 0, 0, 0], DoesNotRaise(), ), - ( # One stays, one crosses, one disappears immediately after crossing, max_linger==2 + ( # One stays, one crosses, one disappears immediately after crossing Vector(Point(0, 0), Point(10, 0)), [ [[4, 4, 6, 6], [4, 4, 6, 6], [4, 4, 6, 6]], From 3863a1f2e96d6f3d01be41a3cd15362acd62b924 Mon Sep 17 00:00:00 2001 From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Wed, 2 Oct 2024 08:10:00 +0000 Subject: [PATCH 011/161] =?UTF-8?q?fix(pre=5Fcommit):=20=F0=9F=8E=A8=20aut?= =?UTF-8?q?o=20format=20pre-commit=20hooks?= MIME-Version: 1.0 
Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- supervision/detection/line_zone.py | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/supervision/detection/line_zone.py b/supervision/detection/line_zone.py index 10b173142..f5d0d321a 100644 --- a/supervision/detection/line_zone.py +++ b/supervision/detection/line_zone.py @@ -244,7 +244,7 @@ def trigger(self, detections: Detections) -> Tuple[np.ndarray, np.ndarray]: if tracker_id not in self.crossing_state: self.crossing_state[tracker_id] = ( class_ids, - deque([tracker_state], maxlen=self.max_linger) + deque([tracker_state], maxlen=self.max_linger), ) continue @@ -259,7 +259,9 @@ def trigger(self, detections: Detections) -> Tuple[np.ndarray, np.ndarray]: crossing_state.appendleft(tracker_state) all_on_same_side = crossing_state.count(not tracker_state) == 0 if class_ids: - if len(class_ids) != len(crossing_state_class_ids) or not all(class_ids == crossing_state_class_ids): + if len(class_ids) != len(crossing_state_class_ids) or not all( + class_ids == crossing_state_class_ids + ): self.crossing_state[tracker_id] = (class_ids, tracker_state) if not all_on_same_side: continue From edf2277ea0fefcef3451200067d7d5ddb3358aa4 Mon Sep 17 00:00:00 2001 From: Grzegorz Klimaszewski <166530809+grzegorz-roboflow@users.noreply.github.com> Date: Wed, 2 Oct 2024 10:19:45 +0200 Subject: [PATCH 012/161] fix typo in docstring --- supervision/detection/line_zone.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/supervision/detection/line_zone.py b/supervision/detection/line_zone.py index f5d0d321a..235ecfd4f 100644 --- a/supervision/detection/line_zone.py +++ b/supervision/detection/line_zone.py @@ -85,7 +85,7 @@ def __init__( to consider when deciding on whether the detection has passed the line counter or not. 
By default, this contains the four corners of the detection's bounding box - max_linger: An integer indicating the number of consequtive frames + max_linger: An integer indicating the number of consecutive frames detections should stay away from the line after crossing it to consider crossing completed. This configuration option is useful when dealing with unstable bounding boxes or when From 124c7d9b907790bde68475af77ff2e2a1fd7815a Mon Sep 17 00:00:00 2001 From: Huzail Date: Sat, 5 Oct 2024 21:36:21 +0530 Subject: [PATCH 013/161] Added class-agnostic --- supervision/metrics/mean_average_precision.py | 14 +++++++++++++- 1 file changed, 13 insertions(+), 1 deletion(-) diff --git a/supervision/metrics/mean_average_precision.py b/supervision/metrics/mean_average_precision.py index 04a5fe9dd..56fd3967b 100644 --- a/supervision/metrics/mean_average_precision.py +++ b/supervision/metrics/mean_average_precision.py @@ -75,6 +75,13 @@ def update( f"The number of predictions ({len(predictions)}) and" f" targets ({len(targets)}) during the update must be the same." 
) + # class-agnostic + if self._class_agnostic: + # Set all class_ids to 0 to ignore class distinction + for prediction in predictions: + prediction.class_id[:] = 0 + for target in targets: + target.class_id[:] = 0 self._predictions_list.extend(predictions) self._targets_list.extend(targets) @@ -239,13 +246,18 @@ def _match_detection_batch( target_classes: np.ndarray, iou: np.ndarray, iou_thresholds: np.ndarray, + class_agnostic: bool, ) -> np.ndarray: num_predictions, num_iou_levels = ( predictions_classes.shape[0], iou_thresholds.shape[0], ) correct = np.zeros((num_predictions, num_iou_levels), dtype=bool) - correct_class = target_classes[:, None] == predictions_classes + + if class_agnostic: + correct_class = np.ones_like(iou, dtype=bool) # Treat all as the same class + else: + correct_class = target_classes[:, None] == predictions_classes for i, iou_level in enumerate(iou_thresholds): matched_indices = np.where((iou >= iou_level) & correct_class) From 6d30eef4a96f9a3dd3513097d9dd3800141774ba Mon Sep 17 00:00:00 2001 From: Huzail Date: Sun, 6 Oct 2024 01:37:48 +0530 Subject: [PATCH 014/161] Added _match_detection_batch_class_agnostic --- supervision/metrics/mean_average_precision.py | 43 +++++++++++++++---- 1 file changed, 35 insertions(+), 8 deletions(-) diff --git a/supervision/metrics/mean_average_precision.py b/supervision/metrics/mean_average_precision.py index 56fd3967b..11fbef5ac 100644 --- a/supervision/metrics/mean_average_precision.py +++ b/supervision/metrics/mean_average_precision.py @@ -184,9 +184,14 @@ def _compute( "Unsupported metric target for IoU calculation" ) - matches = self._match_detection_batch( - predictions.class_id, targets.class_id, iou, iou_thresholds - ) + # Match detections: if class_agnostic is set, skip class matching + if self._class_agnostic: + matches = self._match_detection_batch_class_agnostic(iou, iou_thresholds) + else: + matches = self._match_detection_batch( + predictions.class_id, targets.class_id, iou, iou_thresholds + 
) + stats.append( ( matches, @@ -246,7 +251,6 @@ def _match_detection_batch( target_classes: np.ndarray, iou: np.ndarray, iou_thresholds: np.ndarray, - class_agnostic: bool, ) -> np.ndarray: num_predictions, num_iou_levels = ( predictions_classes.shape[0], @@ -254,10 +258,8 @@ def _match_detection_batch( ) correct = np.zeros((num_predictions, num_iou_levels), dtype=bool) - if class_agnostic: - correct_class = np.ones_like(iou, dtype=bool) # Treat all as the same class - else: - correct_class = target_classes[:, None] == predictions_classes + + correct_class = target_classes[:, None] == predictions_classes for i, iou_level in enumerate(iou_thresholds): matched_indices = np.where((iou >= iou_level) & correct_class) @@ -275,7 +277,32 @@ def _match_detection_batch( correct[matches[:, 1].astype(int), i] = True return correct + @staticmethod + def _match_detection_batch_class_agnostic( + iou: np.ndarray, + iou_thresholds: np.ndarray, + ) -> np.ndarray: + """ + Match detections for class-agnostic case, ignoring the class labels. 
+ """ + num_predictions, num_iou_levels = iou.shape[0], iou_thresholds.shape[0] + correct = np.zeros((num_predictions, num_iou_levels), dtype=bool) + + for i, iou_level in enumerate(iou_thresholds): + matched_indices = np.where(iou >= iou_level) + if matched_indices[0].shape[0]: + combined_indices = np.stack(matched_indices, axis=1) + iou_values = iou[matched_indices][:, None] + matches = np.hstack([combined_indices, iou_values]) + if matched_indices[0].shape[0] > 1: + matches = matches[matches[:, 2].argsort()[::-1]] + matches = matches[np.unique(matches[:, 1], return_index=True)[1]] + matches = matches[np.unique(matches[:, 0], return_index=True)[1]] + + correct[matches[:, 1].astype(int), i] = True + + return correct @staticmethod def _average_precisions_per_class( matches: np.ndarray, From e0db07a80fa6cfbbade4d572334766880ce42f8b Mon Sep 17 00:00:00 2001 From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Sat, 5 Oct 2024 21:14:39 +0000 Subject: [PATCH 015/161] =?UTF-8?q?fix(pre=5Fcommit):=20=F0=9F=8E=A8=20aut?= =?UTF-8?q?o=20format=20pre-commit=20hooks?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- supervision/metrics/mean_average_precision.py | 7 +++++-- 1 file changed, 5 insertions(+), 2 deletions(-) diff --git a/supervision/metrics/mean_average_precision.py b/supervision/metrics/mean_average_precision.py index 11fbef5ac..ff2d9e8e4 100644 --- a/supervision/metrics/mean_average_precision.py +++ b/supervision/metrics/mean_average_precision.py @@ -186,7 +186,9 @@ def _compute( # Match detections: if class_agnostic is set, skip class matching if self._class_agnostic: - matches = self._match_detection_batch_class_agnostic(iou, iou_thresholds) + matches = self._match_detection_batch_class_agnostic( + iou, iou_thresholds + ) else: matches = self._match_detection_batch( predictions.class_id, targets.class_id, iou, iou_thresholds @@ -258,7 +260,6 @@ def _match_detection_batch( ) correct 
= np.zeros((num_predictions, num_iou_levels), dtype=bool) - correct_class = target_classes[:, None] == predictions_classes for i, iou_level in enumerate(iou_thresholds): @@ -277,6 +278,7 @@ def _match_detection_batch( correct[matches[:, 1].astype(int), i] = True return correct + @staticmethod def _match_detection_batch_class_agnostic( iou: np.ndarray, @@ -303,6 +305,7 @@ def _match_detection_batch_class_agnostic( correct[matches[:, 1].astype(int), i] = True return correct + @staticmethod def _average_precisions_per_class( matches: np.ndarray, From c70988123df0811dd07a0261c600a9c02014e239 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 7 Oct 2024 01:02:26 +0000 Subject: [PATCH 016/161] :arrow_up: Bump ruff from 0.6.8 to 0.6.9 Bumps [ruff](https://github.com/astral-sh/ruff) from 0.6.8 to 0.6.9. - [Release notes](https://github.com/astral-sh/ruff/releases) - [Changelog](https://github.com/astral-sh/ruff/blob/main/CHANGELOG.md) - [Commits](https://github.com/astral-sh/ruff/compare/0.6.8...0.6.9) --- updated-dependencies: - dependency-name: ruff dependency-type: direct:development update-type: version-update:semver-patch ... Signed-off-by: dependabot[bot] --- poetry.lock | 38 +++++++++++++++++++------------------- 1 file changed, 19 insertions(+), 19 deletions(-) diff --git a/poetry.lock b/poetry.lock index 519b7a2f8..6c9d12a76 100644 --- a/poetry.lock +++ b/poetry.lock @@ -3850,29 +3850,29 @@ files = [ [[package]] name = "ruff" -version = "0.6.8" +version = "0.6.9" description = "An extremely fast Python linter and code formatter, written in Rust." 
optional = false python-versions = ">=3.7" files = [ - {file = "ruff-0.6.8-py3-none-linux_armv6l.whl", hash = "sha256:77944bca110ff0a43b768f05a529fecd0706aac7bcce36d7f1eeb4cbfca5f0f2"}, - {file = "ruff-0.6.8-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:27b87e1801e786cd6ede4ada3faa5e254ce774de835e6723fd94551464c56b8c"}, - {file = "ruff-0.6.8-py3-none-macosx_11_0_arm64.whl", hash = "sha256:cd48f945da2a6334f1793d7f701725a76ba93bf3d73c36f6b21fb04d5338dcf5"}, - {file = "ruff-0.6.8-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:677e03c00f37c66cea033274295a983c7c546edea5043d0c798833adf4cf4c6f"}, - {file = "ruff-0.6.8-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:9f1476236b3eacfacfc0f66aa9e6cd39f2a624cb73ea99189556015f27c0bdeb"}, - {file = "ruff-0.6.8-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6f5a2f17c7d32991169195d52a04c95b256378bbf0de8cb98478351eb70d526f"}, - {file = "ruff-0.6.8-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:5fd0d4b7b1457c49e435ee1e437900ced9b35cb8dc5178921dfb7d98d65a08d0"}, - {file = "ruff-0.6.8-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f8034b19b993e9601f2ddf2c517451e17a6ab5cdb1c13fdff50c1442a7171d87"}, - {file = "ruff-0.6.8-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:6cfb227b932ba8ef6e56c9f875d987973cd5e35bc5d05f5abf045af78ad8e098"}, - {file = "ruff-0.6.8-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6ef0411eccfc3909269fed47c61ffebdcb84a04504bafa6b6df9b85c27e813b0"}, - {file = "ruff-0.6.8-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:007dee844738c3d2e6c24ab5bc7d43c99ba3e1943bd2d95d598582e9c1b27750"}, - {file = "ruff-0.6.8-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:ce60058d3cdd8490e5e5471ef086b3f1e90ab872b548814e35930e21d848c9ce"}, - {file = "ruff-0.6.8-py3-none-musllinux_1_2_i686.whl", hash = 
"sha256:1085c455d1b3fdb8021ad534379c60353b81ba079712bce7a900e834859182fa"}, - {file = "ruff-0.6.8-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:70edf6a93b19481affd287d696d9e311388d808671bc209fb8907b46a8c3af44"}, - {file = "ruff-0.6.8-py3-none-win32.whl", hash = "sha256:792213f7be25316f9b46b854df80a77e0da87ec66691e8f012f887b4a671ab5a"}, - {file = "ruff-0.6.8-py3-none-win_amd64.whl", hash = "sha256:ec0517dc0f37cad14a5319ba7bba6e7e339d03fbf967a6d69b0907d61be7a263"}, - {file = "ruff-0.6.8-py3-none-win_arm64.whl", hash = "sha256:8d3bb2e3fbb9875172119021a13eed38849e762499e3cfde9588e4b4d70968dc"}, - {file = "ruff-0.6.8.tar.gz", hash = "sha256:a5bf44b1aa0adaf6d9d20f86162b34f7c593bfedabc51239953e446aefc8ce18"}, + {file = "ruff-0.6.9-py3-none-linux_armv6l.whl", hash = "sha256:064df58d84ccc0ac0fcd63bc3090b251d90e2a372558c0f057c3f75ed73e1ccd"}, + {file = "ruff-0.6.9-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:140d4b5c9f5fc7a7b074908a78ab8d384dd7f6510402267bc76c37195c02a7ec"}, + {file = "ruff-0.6.9-py3-none-macosx_11_0_arm64.whl", hash = "sha256:53fd8ca5e82bdee8da7f506d7b03a261f24cd43d090ea9db9a1dc59d9313914c"}, + {file = "ruff-0.6.9-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:645d7d8761f915e48a00d4ecc3686969761df69fb561dd914a773c1a8266e14e"}, + {file = "ruff-0.6.9-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:eae02b700763e3847595b9d2891488989cac00214da7f845f4bcf2989007d577"}, + {file = "ruff-0.6.9-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7d5ccc9e58112441de8ad4b29dcb7a86dc25c5f770e3c06a9d57e0e5eba48829"}, + {file = "ruff-0.6.9-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:417b81aa1c9b60b2f8edc463c58363075412866ae4e2b9ab0f690dc1e87ac1b5"}, + {file = "ruff-0.6.9-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3c866b631f5fbce896a74a6e4383407ba7507b815ccc52bcedabb6810fdb3ef7"}, + {file = 
"ruff-0.6.9-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7b118afbb3202f5911486ad52da86d1d52305b59e7ef2031cea3425142b97d6f"}, + {file = "ruff-0.6.9-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a67267654edc23c97335586774790cde402fb6bbdb3c2314f1fc087dee320bfa"}, + {file = "ruff-0.6.9-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:3ef0cc774b00fec123f635ce5c547dac263f6ee9fb9cc83437c5904183b55ceb"}, + {file = "ruff-0.6.9-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:12edd2af0c60fa61ff31cefb90aef4288ac4d372b4962c2864aeea3a1a2460c0"}, + {file = "ruff-0.6.9-py3-none-musllinux_1_2_i686.whl", hash = "sha256:55bb01caeaf3a60b2b2bba07308a02fca6ab56233302406ed5245180a05c5625"}, + {file = "ruff-0.6.9-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:925d26471fa24b0ce5a6cdfab1bb526fb4159952385f386bdcc643813d472039"}, + {file = "ruff-0.6.9-py3-none-win32.whl", hash = "sha256:eb61ec9bdb2506cffd492e05ac40e5bc6284873aceb605503d8494180d6fc84d"}, + {file = "ruff-0.6.9-py3-none-win_amd64.whl", hash = "sha256:785d31851c1ae91f45b3d8fe23b8ae4b5170089021fbb42402d811135f0b7117"}, + {file = "ruff-0.6.9-py3-none-win_arm64.whl", hash = "sha256:a9641e31476d601f83cd602608739a0840e348bda93fec9f1ee816f8b6798b93"}, + {file = "ruff-0.6.9.tar.gz", hash = "sha256:b076ef717a8e5bc819514ee1d602bbdca5b4420ae13a9cf61a0c0a4f53a2baa2"}, ] [[package]] From 1523ee21af44a478f3fccd32897de5c560481dbe Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 7 Oct 2024 01:04:39 +0000 Subject: [PATCH 017/161] :arrow_up: Bump build from 1.2.2 to 1.2.2.post1 Bumps [build](https://github.com/pypa/build) from 1.2.2 to 1.2.2.post1. 
- [Release notes](https://github.com/pypa/build/releases) - [Changelog](https://github.com/pypa/build/blob/main/CHANGELOG.rst) - [Commits](https://github.com/pypa/build/compare/1.2.2...1.2.2.post1) --- updated-dependencies: - dependency-name: build dependency-type: direct:development update-type: version-update:semver-patch ... Signed-off-by: dependabot[bot] --- poetry.lock | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/poetry.lock b/poetry.lock index 519b7a2f8..03aa85c39 100644 --- a/poetry.lock +++ b/poetry.lock @@ -259,13 +259,13 @@ css = ["tinycss2 (>=1.1.0,<1.3)"] [[package]] name = "build" -version = "1.2.2" +version = "1.2.2.post1" description = "A simple, correct Python build frontend" optional = false python-versions = ">=3.8" files = [ - {file = "build-1.2.2-py3-none-any.whl", hash = "sha256:277ccc71619d98afdd841a0e96ac9fe1593b823af481d3b0cea748e8894e0613"}, - {file = "build-1.2.2.tar.gz", hash = "sha256:119b2fb462adef986483438377a13b2f42064a2a3a4161f24a0cca698a07ac8c"}, + {file = "build-1.2.2.post1-py3-none-any.whl", hash = "sha256:1d61c0887fa860c01971625baae8bdd338e517b836a2f70dd1f7aa3a6b2fc5b5"}, + {file = "build-1.2.2.post1.tar.gz", hash = "sha256:b36993e92ca9375a219c99e606a122ff365a760a2d4bba0caa09bd5278b608b7"}, ] [package.dependencies] From 7862f9cc70f6732cdde46cf1703f6a92eceb9f89 Mon Sep 17 00:00:00 2001 From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Mon, 7 Oct 2024 18:27:48 +0000 Subject: [PATCH 018/161] =?UTF-8?q?chore(pre=5Fcommit):=20=E2=AC=86=20pre?= =?UTF-8?q?=5Fcommit=20autoupdate?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit updates: - [github.com/pre-commit/pre-commit-hooks: v4.6.0 → v5.0.0](https://github.com/pre-commit/pre-commit-hooks/compare/v4.6.0...v5.0.0) - [github.com/astral-sh/ruff-pre-commit: v0.6.8 → v0.6.9](https://github.com/astral-sh/ruff-pre-commit/compare/v0.6.8...v0.6.9) --- .pre-commit-config.yaml | 4 ++-- 1 
file changed, 2 insertions(+), 2 deletions(-) diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index d8e34a975..2ca4357cb 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -7,7 +7,7 @@ ci: repos: - repo: https://github.com/pre-commit/pre-commit-hooks - rev: v4.6.0 + rev: v5.0.0 hooks: - id: trailing-whitespace exclude: test/.*\.py @@ -32,7 +32,7 @@ repos: additional_dependencies: ["bandit[toml]"] - repo: https://github.com/astral-sh/ruff-pre-commit - rev: v0.6.8 + rev: v0.6.9 hooks: - id: ruff args: [--fix, --exit-non-zero-on-fix] From 3231f7f498005e2c88ee12f3d61721e10bcb226a Mon Sep 17 00:00:00 2001 From: LinasKo Date: Wed, 9 Oct 2024 11:50:08 +0300 Subject: [PATCH 019/161] Minor fix: mAP can be 1.01 Fix division to avoid cases where 1.01 mAP is possible. Discovered in: https://colab.research.google.com/drive/10EaQ4lJNXzcmub7doO2vGn2t1LgDdkcC?usp=sharing --- supervision/metrics/mean_average_precision.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/supervision/metrics/mean_average_precision.py b/supervision/metrics/mean_average_precision.py index 04a5fe9dd..cc15f5653 100644 --- a/supervision/metrics/mean_average_precision.py +++ b/supervision/metrics/mean_average_precision.py @@ -230,7 +230,7 @@ def _compute_average_precision(recall: np.ndarray, precision: np.ndarray) -> flo for r, p in zip(recall[::-1], precision[::-1]): precision_levels[recall_levels <= r] = p - average_precision = (1 / 100 * precision_levels).sum() + average_precision = (1 / 101 * precision_levels).sum() return average_precision @staticmethod From 921e8f32d4ae2f43f55028cdc198f7cdd65c7672 Mon Sep 17 00:00:00 2001 From: Onuralp SEZER Date: Wed, 9 Oct 2024 16:46:02 +0300 Subject: [PATCH 020/161] =?UTF-8?q?ci:=20=F0=9F=91=B7=20python3.13=20ci=20?= =?UTF-8?q?tests=20and=20tox=20ver=20added?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Signed-off-by: Onuralp SEZER --- .github/workflows/test.yml | 2 +- 
tox.ini | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/.github/workflows/test.yml b/.github/workflows/test.yml index 1ce4a267f..b33f8e537 100644 --- a/.github/workflows/test.yml +++ b/.github/workflows/test.yml @@ -9,7 +9,7 @@ jobs: runs-on: ubuntu-latest strategy: matrix: - python-version: ["3.8", "3.9", "3.10", "3.11", "3.12"] + python-version: ["3.8", "3.9", "3.10", "3.11", "3.12", "3.13"] steps: - name: 🛎️ Checkout uses: actions/checkout@v4 diff --git a/tox.ini b/tox.ini index 46886c13c..3f44d215e 100644 --- a/tox.ini +++ b/tox.ini @@ -1,5 +1,5 @@ [tox] -envlist = py38,py39,py310,py311,py312 +envlist = py38,py39,py310,py311,py312,py313 [testenv] changedir = test From a4f79b59f1a6303330662f800b40b0498f73a582 Mon Sep 17 00:00:00 2001 From: DemyCode Date: Wed, 9 Oct 2024 16:48:54 +0200 Subject: [PATCH 021/161] feat: adding type hinting to package --- pyproject.toml | 2 +- supervision/py.typed | 0 2 files changed, 1 insertion(+), 1 deletion(-) create mode 100644 supervision/py.typed diff --git a/pyproject.toml b/pyproject.toml index c9aa384e7..f63011062 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -9,7 +9,7 @@ maintainers = [ ] readme = "README.md" license = "MIT" -packages = [{ include = "supervision" }] +packages = [{ include = "supervision" }, { include = "supervision/py.typed" }] homepage = "https://github.com/roboflow/supervision" repository = "https://github.com/roboflow/supervision" documentation = "https://supervision.roboflow.com/latest/" diff --git a/supervision/py.typed b/supervision/py.typed new file mode 100644 index 000000000..e69de29bb From baffd983144ccd74a60c16ce96166d83ab21d4e2 Mon Sep 17 00:00:00 2001 From: Ahmad Huzail Khan <104365728+AHuzail@users.noreply.github.com> Date: Wed, 9 Oct 2024 18:54:31 +0000 Subject: [PATCH 022/161] removed comments --- supervision/metrics/mean_average_precision.py | 5 ++--- 1 file changed, 2 insertions(+), 3 deletions(-) diff --git a/supervision/metrics/mean_average_precision.py 
b/supervision/metrics/mean_average_precision.py index d672344bf..7b7dd93c2 100644 --- a/supervision/metrics/mean_average_precision.py +++ b/supervision/metrics/mean_average_precision.py @@ -75,9 +75,8 @@ def update( f"The number of predictions ({len(predictions)}) and" f" targets ({len(targets)}) during the update must be the same." ) - # class-agnostic + if self._class_agnostic: - # Set all class_ids to 0 to ignore class distinction for prediction in predictions: prediction.class_id[:] = 0 for target in targets: @@ -184,7 +183,7 @@ def _compute( "Unsupported metric target for IoU calculation" ) - # Match detections: if class_agnostic is set, skip class matching + if self._class_agnostic: matches = self._match_detection_batch_class_agnostic( iou, iou_thresholds From 12c6b873e3ef641cb67c67ff6cbd2691ac6a1cb2 Mon Sep 17 00:00:00 2001 From: Ahmad Huzail Khan <104365728+AHuzail@users.noreply.github.com> Date: Wed, 9 Oct 2024 19:01:40 +0000 Subject: [PATCH 023/161] modified: supervision/metrics/mean_average_precision.py --- supervision/metrics/mean_average_precision.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/supervision/metrics/mean_average_precision.py b/supervision/metrics/mean_average_precision.py index 7b7dd93c2..9aa069354 100644 --- a/supervision/metrics/mean_average_precision.py +++ b/supervision/metrics/mean_average_precision.py @@ -78,9 +78,9 @@ def update( if self._class_agnostic: for prediction in predictions: - prediction.class_id[:] = 0 + prediction.class_id[:] = -1 for target in targets: - target.class_id[:] = 0 + target.class_id[:] = -1 self._predictions_list.extend(predictions) self._targets_list.extend(targets) From e433527942c624b1556c0a0f46190eea9fae22b5 Mon Sep 17 00:00:00 2001 From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Wed, 9 Oct 2024 19:04:45 +0000 Subject: [PATCH 024/161] =?UTF-8?q?fix(pre=5Fcommit):=20=F0=9F=8E=A8=20aut?= =?UTF-8?q?o=20format=20pre-commit=20hooks?= 
MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- supervision/metrics/mean_average_precision.py | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/supervision/metrics/mean_average_precision.py b/supervision/metrics/mean_average_precision.py index 9aa069354..ef8f4c74a 100644 --- a/supervision/metrics/mean_average_precision.py +++ b/supervision/metrics/mean_average_precision.py @@ -75,7 +75,7 @@ def update( f"The number of predictions ({len(predictions)}) and" f" targets ({len(targets)}) during the update must be the same." ) - + if self._class_agnostic: for prediction in predictions: prediction.class_id[:] = -1 @@ -183,7 +183,6 @@ def _compute( "Unsupported metric target for IoU calculation" ) - if self._class_agnostic: matches = self._match_detection_batch_class_agnostic( iou, iou_thresholds From 16a4491b596786c603556fa055244b366b243486 Mon Sep 17 00:00:00 2001 From: Ahmad Huzail Khan <104365728+AHuzail@users.noreply.github.com> Date: Wed, 9 Oct 2024 19:29:41 +0000 Subject: [PATCH 025/161] modified class MeanAveragePrecisionResult for class_agnostic --- supervision/metrics/mean_average_precision.py | 3 +++ 1 file changed, 3 insertions(+) diff --git a/supervision/metrics/mean_average_precision.py b/supervision/metrics/mean_average_precision.py index 9aa069354..b02a2742f 100644 --- a/supervision/metrics/mean_average_precision.py +++ b/supervision/metrics/mean_average_precision.py @@ -424,6 +424,7 @@ class MeanAveragePrecisionResult: Attributes: metric_target (MetricTarget): the type of data used for the metric - boxes, masks or oriented bounding boxes. + class_agnostic: When computing class-agnostic results, class ID is set to `-1`. mAP_map50_95 (float): the mAP score at IoU thresholds from `0.5` to `0.95`. mAP_map50 (float): the mAP score at IoU threshold of `0.5`. mAP_map75 (float): the mAP score at IoU threshold of `0.75`. @@ -443,6 +444,7 @@ class and IoU threshold. 
Shape: `(num_target_classes, num_iou_thresholds)` """ metric_target: MetricTarget + is_class_agnostic: bool @property def map50_95(self) -> float: @@ -477,6 +479,7 @@ def __str__(self) -> str: out_str = ( f"{self.__class__.__name__}:\n" f"Metric target: {self.metric_target}\n" + f"Class-agnostic: {self.is_class_agnostic}\n" f"mAP @ 50:95: {self.map50_95:.4f}\n" f"mAP @ 50: {self.map50:.4f}\n" f"mAP @ 75: {self.map75:.4f}\n" From 62a3cfe480f749c27affbaace1f6463f10ace180 Mon Sep 17 00:00:00 2001 From: Ahmad Huzail Khan <104365728+AHuzail@users.noreply.github.com> Date: Wed, 9 Oct 2024 19:33:56 +0000 Subject: [PATCH 026/161] modified: supervision/metrics/mean_average_precision.py --- supervision/metrics/mean_average_precision.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/supervision/metrics/mean_average_precision.py b/supervision/metrics/mean_average_precision.py index ec9d0fd4a..b496ffaa3 100644 --- a/supervision/metrics/mean_average_precision.py +++ b/supervision/metrics/mean_average_precision.py @@ -478,7 +478,7 @@ def __str__(self) -> str: out_str = ( f"{self.__class__.__name__}:\n" f"Metric target: {self.metric_target}\n" - f"Class-agnostic: {self.is_class_agnostic}\n" + f"Class agnostic: {self.is_class_agnostic}\n" f"mAP @ 50:95: {self.map50_95:.4f}\n" f"mAP @ 50: {self.map50:.4f}\n" f"mAP @ 75: {self.map75:.4f}\n" From 0f42a87ee8d55370c52d0709da6d484d24b47107 Mon Sep 17 00:00:00 2001 From: Ahmad Huzail Khan <104365728+AHuzail@users.noreply.github.com> Date: Wed, 9 Oct 2024 19:44:50 +0000 Subject: [PATCH 027/161] modified: supervision/metrics/mean_average_precision.py --- supervision/metrics/mean_average_precision.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/supervision/metrics/mean_average_precision.py b/supervision/metrics/mean_average_precision.py index b496ffaa3..3d3f10f4e 100644 --- a/supervision/metrics/mean_average_precision.py +++ b/supervision/metrics/mean_average_precision.py @@ -215,6 +215,7 @@ def 
_compute( return MeanAveragePrecisionResult( metric_target=self._metric_target, + is_class_agnostic= self._class_agnostic, mAP_scores=mAP_scores, iou_thresholds=iou_thresholds, matched_classes=unique_classes, @@ -443,7 +444,7 @@ class and IoU threshold. Shape: `(num_target_classes, num_iou_thresholds)` """ metric_target: MetricTarget - is_class_agnostic: bool + is_class_agnostic: @property def map50_95(self) -> float: From 201a186539528b712b78e572deda7ed0ec7e3588 Mon Sep 17 00:00:00 2001 From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Wed, 9 Oct 2024 19:45:22 +0000 Subject: [PATCH 028/161] =?UTF-8?q?fix(pre=5Fcommit):=20=F0=9F=8E=A8=20aut?= =?UTF-8?q?o=20format=20pre-commit=20hooks?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- supervision/metrics/mean_average_precision.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/supervision/metrics/mean_average_precision.py b/supervision/metrics/mean_average_precision.py index 3d3f10f4e..3c7ae7ae3 100644 --- a/supervision/metrics/mean_average_precision.py +++ b/supervision/metrics/mean_average_precision.py @@ -444,7 +444,7 @@ class and IoU threshold. 
Shape: `(num_target_classes, num_iou_thresholds)` """ metric_target: MetricTarget - is_class_agnostic: + is_class_agnostic: @property def map50_95(self) -> float: From 53b26d0c05edadb6db80c1e7f97fde131bc1544c Mon Sep 17 00:00:00 2001 From: Ahmad Huzail Khan <104365728+AHuzail@users.noreply.github.com> Date: Wed, 9 Oct 2024 19:48:05 +0000 Subject: [PATCH 029/161] modified: supervision/metrics/mean_average_precision.py --- supervision/metrics/mean_average_precision.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/supervision/metrics/mean_average_precision.py b/supervision/metrics/mean_average_precision.py index 3d3f10f4e..05885784c 100644 --- a/supervision/metrics/mean_average_precision.py +++ b/supervision/metrics/mean_average_precision.py @@ -444,7 +444,7 @@ class and IoU threshold. Shape: `(num_target_classes, num_iou_thresholds)` """ metric_target: MetricTarget - is_class_agnostic: + is_class_agnostic: bool @property def map50_95(self) -> float: From 75b6384a9aacaf0a90a942bc9db6cf76d02eebfd Mon Sep 17 00:00:00 2001 From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Wed, 9 Oct 2024 19:52:56 +0000 Subject: [PATCH 030/161] =?UTF-8?q?fix(pre=5Fcommit):=20=F0=9F=8E=A8=20aut?= =?UTF-8?q?o=20format=20pre-commit=20hooks?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- supervision/metrics/mean_average_precision.py | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/supervision/metrics/mean_average_precision.py b/supervision/metrics/mean_average_precision.py index aeff48b38..c5338a6dc 100644 --- a/supervision/metrics/mean_average_precision.py +++ b/supervision/metrics/mean_average_precision.py @@ -215,7 +215,7 @@ def _compute( return MeanAveragePrecisionResult( metric_target=self._metric_target, - is_class_agnostic= self._class_agnostic, + is_class_agnostic=self._class_agnostic, mAP_scores=mAP_scores, iou_thresholds=iou_thresholds, 
matched_classes=unique_classes, @@ -443,7 +443,6 @@ class and IoU threshold. Shape: `(num_target_classes, num_iou_thresholds)` for large objects. """ - metric_target: MetricTarget is_class_agnostic: bool From f1e32e7c079d17b9444863983b24f8bdbe01afdf Mon Sep 17 00:00:00 2001 From: Ahmad Huzail Khan <104365728+AHuzail@users.noreply.github.com> Date: Wed, 9 Oct 2024 20:16:25 +0000 Subject: [PATCH 031/161] Added deepcopy --- supervision/metrics/mean_average_precision.py | 3 +++ 1 file changed, 3 insertions(+) diff --git a/supervision/metrics/mean_average_precision.py b/supervision/metrics/mean_average_precision.py index c5338a6dc..b05d60093 100644 --- a/supervision/metrics/mean_average_precision.py +++ b/supervision/metrics/mean_average_precision.py @@ -77,6 +77,9 @@ def update( ) if self._class_agnostic: + predictions = deepcopy(predictions) + targets = deepcopy(targets) + for prediction in predictions: prediction.class_id[:] = -1 for target in targets: From 5fc7c3aa53727dd4310330e7b80fcb9a12348d54 Mon Sep 17 00:00:00 2001 From: Ahmad Huzail Khan <104365728+AHuzail@users.noreply.github.com> Date: Wed, 9 Oct 2024 20:19:26 +0000 Subject: [PATCH 032/161] only using _match_detection_batch --- supervision/metrics/mean_average_precision.py | 13 +++++-------- 1 file changed, 5 insertions(+), 8 deletions(-) diff --git a/supervision/metrics/mean_average_precision.py b/supervision/metrics/mean_average_precision.py index b05d60093..4e3aa26ca 100644 --- a/supervision/metrics/mean_average_precision.py +++ b/supervision/metrics/mean_average_precision.py @@ -186,14 +186,11 @@ def _compute( "Unsupported metric target for IoU calculation" ) - if self._class_agnostic: - matches = self._match_detection_batch_class_agnostic( - iou, iou_thresholds - ) - else: - matches = self._match_detection_batch( - predictions.class_id, targets.class_id, iou, iou_thresholds - ) + + + matches = self._match_detection_batch( + predictions.class_id, targets.class_id, iou, iou_thresholds + ) stats.append( ( 
From 9659f7b2ebd161c308cd33957f84c3a21cb3dfe2 Mon Sep 17 00:00:00 2001 From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Wed, 9 Oct 2024 20:21:00 +0000 Subject: [PATCH 033/161] =?UTF-8?q?fix(pre=5Fcommit):=20=F0=9F=8E=A8=20aut?= =?UTF-8?q?o=20format=20pre-commit=20hooks?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- supervision/metrics/mean_average_precision.py | 2 -- 1 file changed, 2 deletions(-) diff --git a/supervision/metrics/mean_average_precision.py b/supervision/metrics/mean_average_precision.py index 4e3aa26ca..ff6062982 100644 --- a/supervision/metrics/mean_average_precision.py +++ b/supervision/metrics/mean_average_precision.py @@ -186,8 +186,6 @@ def _compute( "Unsupported metric target for IoU calculation" ) - - matches = self._match_detection_batch( predictions.class_id, targets.class_id, iou, iou_thresholds ) From 05b17c483af5eab63ee46d73c4c3554f9e13247f Mon Sep 17 00:00:00 2001 From: Ahmad Huzail Khan <104365728+AHuzail@users.noreply.github.com> Date: Wed, 9 Oct 2024 20:25:10 +0000 Subject: [PATCH 034/161] Deleted _match_detection_batch_class_agnostic --- supervision/metrics/mean_average_precision.py | 27 ------------------- 1 file changed, 27 deletions(-) diff --git a/supervision/metrics/mean_average_precision.py b/supervision/metrics/mean_average_precision.py index ff6062982..ed5567192 100644 --- a/supervision/metrics/mean_average_precision.py +++ b/supervision/metrics/mean_average_precision.py @@ -276,33 +276,6 @@ def _match_detection_batch( return correct - @staticmethod - def _match_detection_batch_class_agnostic( - iou: np.ndarray, - iou_thresholds: np.ndarray, - ) -> np.ndarray: - """ - Match detections for class-agnostic case, ignoring the class labels. 
- """ - num_predictions, num_iou_levels = iou.shape[0], iou_thresholds.shape[0] - correct = np.zeros((num_predictions, num_iou_levels), dtype=bool) - - for i, iou_level in enumerate(iou_thresholds): - matched_indices = np.where(iou >= iou_level) - if matched_indices[0].shape[0]: - combined_indices = np.stack(matched_indices, axis=1) - iou_values = iou[matched_indices][:, None] - matches = np.hstack([combined_indices, iou_values]) - - if matched_indices[0].shape[0] > 1: - matches = matches[matches[:, 2].argsort()[::-1]] - matches = matches[np.unique(matches[:, 1], return_index=True)[1]] - matches = matches[np.unique(matches[:, 0], return_index=True)[1]] - - correct[matches[:, 1].astype(int), i] = True - - return correct - @staticmethod def _average_precisions_per_class( matches: np.ndarray, From 4c928eda2e7592b00bc6d202ccd79d62a887adad Mon Sep 17 00:00:00 2001 From: Soham Date: Thu, 10 Oct 2024 15:22:55 +0530 Subject: [PATCH 035/161] feat: add detection metadata and update related methods --- supervision/detection/core.py | 20 +++++++++++++++++++- supervision/detection/utils.py | 25 ++++++++++++++++++++++++- 2 files changed, 43 insertions(+), 2 deletions(-) diff --git a/supervision/detection/core.py b/supervision/detection/core.py index 113948fc9..a970cc2d2 100644 --- a/supervision/detection/core.py +++ b/supervision/detection/core.py @@ -34,6 +34,7 @@ is_data_equal, mask_to_xyxy, merge_data, + merge_metadata, process_roboflow_result, xywh_to_xyxy, ) @@ -125,6 +126,9 @@ class simplifies data manipulation and filtering, providing a uniform API for data (Dict[str, Union[np.ndarray, List]]): A dictionary containing additional data where each key is a string representing the data type, and the value is either a NumPy array or a list of corresponding data. + metadata (Dict[str, Any]): A dictionary containing collection-level metadata + that applies to the entire set of detections. 
This may include information such + as the video name, camera parameters, timestamp, or other global metadata. """ # noqa: E501 // docs xyxy: np.ndarray @@ -133,6 +137,7 @@ class simplifies data manipulation and filtering, providing a uniform API for class_id: Optional[np.ndarray] = None tracker_id: Optional[np.ndarray] = None data: Dict[str, Union[np.ndarray, List]] = field(default_factory=dict) + metadata: Dict[str, Any] = field(default_factory=dict) def __post_init__(self): validate_detections_fields( @@ -185,6 +190,7 @@ def __eq__(self, other: Detections): np.array_equal(self.confidence, other.confidence), np.array_equal(self.tracker_id, other.tracker_id), is_data_equal(self.data, other.data), + self.metadata == other.metadata ] ) @@ -985,6 +991,7 @@ def is_empty(self) -> bool: """ empty_detections = Detections.empty() empty_detections.data = self.data + empty_detections.metadata = self.metadata return self == empty_detections @classmethod @@ -1041,12 +1048,18 @@ def merge(cls, detections_list: List[Detections]) -> Detections: array([0.1, 0.2, 0.3]) ``` """ + metadata_list = [ + detections.metadata for detections in detections_list + ] + detections_list = [ detections for detections in detections_list if not detections.is_empty() ] + + metadata = merge_metadata(metadata_list) if len(detections_list) == 0: - return Detections.empty() + return Detections.empty(metadata=metadata) for detections in detections_list: validate_detections_fields( @@ -1085,6 +1098,7 @@ def stack_or_none(name: str): class_id=class_id, tracker_id=tracker_id, data=data, + metadata=metadata ) def get_anchors_coordinates(self, anchor: Position) -> np.ndarray: @@ -1198,6 +1212,7 @@ def __getitem__( class_id=self.class_id[index] if self.class_id is not None else None, tracker_id=self.tracker_id[index] if self.tracker_id is not None else None, data=get_data_item(self.data, index), + metadata=self.metadata ) def __setitem__(self, key: str, value: Union[np.ndarray, List]): @@ -1458,6 +1473,8 @@ 
def merge_inner_detection_object_pair( winning_detection = detections_1 else: winning_detection = detections_2 + + metadata = merge_metadata([detections_1.metadata, detections_2.metadata]) return Detections( xyxy=merged_xyxy, @@ -1466,6 +1483,7 @@ def merge_inner_detection_object_pair( class_id=winning_detection.class_id, tracker_id=winning_detection.tracker_id, data=winning_detection.data, + metadata=metadata ) diff --git a/supervision/detection/utils.py b/supervision/detection/utils.py index 43fcec5a0..efc980992 100644 --- a/supervision/detection/utils.py +++ b/supervision/detection/utils.py @@ -1,5 +1,5 @@ from itertools import chain -from typing import Dict, List, Optional, Tuple, Union +from typing import Dict, List, Optional, Tuple, Union, Any import cv2 import numpy as np @@ -865,6 +865,29 @@ def merge_data( return merged_data +def merge_metadata(metadata_list: List[Dict[str, Any]]) -> Dict[str, Any]: + """ + Merge metadata from a list of metadata dictionaries. + + This function combines the metadata dictionaries. If a key appears in more than one + dictionary, the values must be identical for the merge to succeed. + + Args: + metadata_list (List[Dict[str, Any]]): A list of metadata dictionaries to merge. + + Returns: + Dict[str, Any]: A single merged metadata dictionary. 
+ """ + merged_metadata = {} + for metadata in metadata_list: + for key, value in metadata.items(): + if key in merged_metadata: + if merged_metadata[key] != value: + raise ValueError(f"Conflicting metadata for key: {key}") + else: + merged_metadata[key] = value + return merged_metadata + def get_data_item( data: Dict[str, Union[np.ndarray, List]], From 2530c41ffa9de5b62e39ffabe66410191ce02dd2 Mon Sep 17 00:00:00 2001 From: LinasKo Date: Thu, 10 Oct 2024 13:00:07 +0300 Subject: [PATCH 036/161] docs: fix datatype of class_agnostic in MeanAveragePrecisionResult --- supervision/metrics/mean_average_precision.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/supervision/metrics/mean_average_precision.py b/supervision/metrics/mean_average_precision.py index ed5567192..dbd60b2e7 100644 --- a/supervision/metrics/mean_average_precision.py +++ b/supervision/metrics/mean_average_precision.py @@ -395,7 +395,8 @@ class MeanAveragePrecisionResult: Attributes: metric_target (MetricTarget): the type of data used for the metric - boxes, masks or oriented bounding boxes. - class_agnostic: When computing class-agnostic results, class ID is set to `-1`. + class_agnostic (bool): When computing class-agnostic results, class ID + is set to `-1`. mAP_map50_95 (float): the mAP score at IoU thresholds from `0.5` to `0.95`. mAP_map50 (float): the mAP score at IoU threshold of `0.5`. mAP_map75 (float): the mAP score at IoU threshold of `0.75`. 
From c5dfb47d5c94714e16bd688226cafaab64d7c8f0 Mon Sep 17 00:00:00 2001 From: LinasKo Date: Thu, 10 Oct 2024 13:02:07 +0300 Subject: [PATCH 037/161] Remove old unused code: CLASS_ID_NONE --- supervision/metrics/__init__.py | 1 - supervision/metrics/core.py | 3 --- 2 files changed, 4 deletions(-) diff --git a/supervision/metrics/__init__.py b/supervision/metrics/__init__.py index 17a6cd485..8ae33e639 100644 --- a/supervision/metrics/__init__.py +++ b/supervision/metrics/__init__.py @@ -1,5 +1,4 @@ from supervision.metrics.core import ( - CLASS_ID_NONE, AveragingMethod, Metric, MetricTarget, diff --git a/supervision/metrics/core.py b/supervision/metrics/core.py index 1440fd435..d1818441e 100644 --- a/supervision/metrics/core.py +++ b/supervision/metrics/core.py @@ -4,9 +4,6 @@ from enum import Enum from typing import Any -CLASS_ID_NONE = -1 -"""Used by metrics module as class ID, when none is present""" - class Metric(ABC): """ From b59f867bcb70776aac60dbfcd908b3d906523861 Mon Sep 17 00:00:00 2001 From: Soham Date: Thu, 10 Oct 2024 16:02:20 +0530 Subject: [PATCH 038/161] format with precommit --- supervision/detection/core.py | 18 ++++++++---------- supervision/detection/utils.py | 3 ++- 2 files changed, 10 insertions(+), 11 deletions(-) diff --git a/supervision/detection/core.py b/supervision/detection/core.py index a970cc2d2..8216d054b 100644 --- a/supervision/detection/core.py +++ b/supervision/detection/core.py @@ -190,7 +190,7 @@ def __eq__(self, other: Detections): np.array_equal(self.confidence, other.confidence), np.array_equal(self.tracker_id, other.tracker_id), is_data_equal(self.data, other.data), - self.metadata == other.metadata + self.metadata == other.metadata, ] ) @@ -1048,14 +1048,12 @@ def merge(cls, detections_list: List[Detections]) -> Detections: array([0.1, 0.2, 0.3]) ``` """ - metadata_list = [ - detections.metadata for detections in detections_list - ] - + metadata_list = [detections.metadata for detections in detections_list] + detections_list 
= [ detections for detections in detections_list if not detections.is_empty() ] - + metadata = merge_metadata(metadata_list) if len(detections_list) == 0: @@ -1098,7 +1096,7 @@ def stack_or_none(name: str): class_id=class_id, tracker_id=tracker_id, data=data, - metadata=metadata + metadata=metadata, ) def get_anchors_coordinates(self, anchor: Position) -> np.ndarray: @@ -1212,7 +1210,7 @@ def __getitem__( class_id=self.class_id[index] if self.class_id is not None else None, tracker_id=self.tracker_id[index] if self.tracker_id is not None else None, data=get_data_item(self.data, index), - metadata=self.metadata + metadata=self.metadata, ) def __setitem__(self, key: str, value: Union[np.ndarray, List]): @@ -1473,7 +1471,7 @@ def merge_inner_detection_object_pair( winning_detection = detections_1 else: winning_detection = detections_2 - + metadata = merge_metadata([detections_1.metadata, detections_2.metadata]) return Detections( @@ -1483,7 +1481,7 @@ def merge_inner_detection_object_pair( class_id=winning_detection.class_id, tracker_id=winning_detection.tracker_id, data=winning_detection.data, - metadata=metadata + metadata=metadata, ) diff --git a/supervision/detection/utils.py b/supervision/detection/utils.py index efc980992..e9060edbe 100644 --- a/supervision/detection/utils.py +++ b/supervision/detection/utils.py @@ -1,5 +1,5 @@ from itertools import chain -from typing import Dict, List, Optional, Tuple, Union, Any +from typing import Any, Dict, List, Optional, Tuple, Union import cv2 import numpy as np @@ -865,6 +865,7 @@ def merge_data( return merged_data + def merge_metadata(metadata_list: List[Dict[str, Any]]) -> Dict[str, Any]: """ Merge metadata from a list of metadata dictionaries. 
From a988b7282256e3867498d18d302eca7b3014c375 Mon Sep 17 00:00:00 2001 From: Soham Date: Thu, 10 Oct 2024 17:44:52 +0530 Subject: [PATCH 039/161] fix: merge_metadata function --- supervision/detection/utils.py | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/supervision/detection/utils.py b/supervision/detection/utils.py index e9060edbe..074f409cd 100644 --- a/supervision/detection/utils.py +++ b/supervision/detection/utils.py @@ -884,9 +884,13 @@ def merge_metadata(metadata_list: List[Dict[str, Any]]) -> Dict[str, Any]: for key, value in metadata.items(): if key in merged_metadata: if merged_metadata[key] != value: - raise ValueError(f"Conflicting metadata for key: {key}") + if not isinstance(merged_metadata[key], list): + merged_metadata[key] = [merged_metadata[key]] + if value not in merged_metadata[key]: + merged_metadata[key].append(value) else: merged_metadata[key] = value + return merged_metadata From 6b3735001b95fe56a216fdacdca5926e06f6704b Mon Sep 17 00:00:00 2001 From: Soham Date: Thu, 10 Oct 2024 18:59:50 +0530 Subject: [PATCH 040/161] fix: merge_metadata for conflicting keys --- supervision/detection/utils.py | 9 +++++---- 1 file changed, 5 insertions(+), 4 deletions(-) diff --git a/supervision/detection/utils.py b/supervision/detection/utils.py index 074f409cd..71e67c49c 100644 --- a/supervision/detection/utils.py +++ b/supervision/detection/utils.py @@ -878,16 +878,17 @@ def merge_metadata(metadata_list: List[Dict[str, Any]]) -> Dict[str, Any]: Returns: Dict[str, Any]: A single merged metadata dictionary. + + Raises: + ValueError: If there are conflicting values for the same key. 
""" merged_metadata = {} + for metadata in metadata_list: for key, value in metadata.items(): if key in merged_metadata: if merged_metadata[key] != value: - if not isinstance(merged_metadata[key], list): - merged_metadata[key] = [merged_metadata[key]] - if value not in merged_metadata[key]: - merged_metadata[key].append(value) + raise ValueError(f"Conflicting metadata for key: {key}.") else: merged_metadata[key] = value From dcdeea85d1b475efc93d180df3e1b355157d10c1 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Jo=C3=A3o?= Date: Thu, 10 Oct 2024 14:58:57 -0300 Subject: [PATCH 041/161] replaces github hosted to roboflow cdn images links --- .../small-object-detection-with-sahi.ipynb | 36 +++++++++---------- 1 file changed, 18 insertions(+), 18 deletions(-) diff --git a/docs/notebooks/small-object-detection-with-sahi.ipynb b/docs/notebooks/small-object-detection-with-sahi.ipynb index 1654ff3c0..986849de3 100644 --- a/docs/notebooks/small-object-detection-with-sahi.ipynb +++ b/docs/notebooks/small-object-detection-with-sahi.ipynb @@ -15,7 +15,7 @@ "\n", "This cookbook shows how to use [Slicing Aided Hyper Inference (SAHI) ](https://arxiv.org/abs/2202.06934) for small object detection with `supervision`.\n", "\n", - "![\"Small Object Detection\"](https://raw.githubusercontent.com/ediardo/notebooks/main/sahi/animation.gif \"Small Object Detection\")\n", + "![\"Small Object Detection\"](https://media.roboflow.com/supervision/cookbooks/sahi/animation.gif \"Small Object Detection\")\n", "\n", "Click the Open in Colab button to run the cookbook on Google Colab.\n", "\n", @@ -64,13 +64,13 @@ "source": [ "## Crowd counting with Computer Vision\n", "\n", - "How would you go about solving the problem of counting people in crowds? After some tests, I found that the best approach is to detect people\u2019s heads. 
Other body parts are likely occluded by other people, but heads are usually exposed, especially in aerial or high-level shots.\n", + "How would you go about solving the problem of counting people in crowds? After some tests, I found that the best approach is to detect people’s heads. Other body parts are likely occluded by other people, but heads are usually exposed, especially in aerial or high-level shots.\n", "\n", "### Using an Open-Source Public Model for People Detection\n", "\n", - "Detecting people (or their heads) is a common problem that has been addressed by many researchers in the past. In this project, we\u2019ll use an open-source public dataset and a fine-tuned model to perform inference on images.\n", + "Detecting people (or their heads) is a common problem that has been addressed by many researchers in the past. In this project, we’ll use an open-source public dataset and a fine-tuned model to perform inference on images.\n", "\n", - "![Roboflow Universe](https://raw.githubusercontent.com/ediardo/notebooks/main/sahi/roboflow_universe.png \"Open source model for counting people's heads\")\n", + "![Roboflow Universe](https://media.roboflow.com/supervision/cookbooks/sahi/roboflow_universe.png \"Open source model for counting people's heads\")\n", "\n", "Some details about the project [\"people_counterv0 Computer Vision Project\"](https://universe.roboflow.com/sit-cx0ng/people_counterv0):\n", "\n", @@ -179,9 +179,9 @@ "\n", "## Let's try our model's performance\n", "\n", - "Before we dive into the SAHI technique for small object detection, it\u2019s useful to see how a fine-tuned model performs with the image as is\u2014without any pre-processing or slicing. 
The goal is to understand when the model starts to fail so that we can progressively move towards an efficient slicing strategy.\n", + "Before we dive into the SAHI technique for small object detection, it’s useful to see how a fine-tuned model performs with the image as is—without any pre-processing or slicing. The goal is to understand when the model starts to fail so that we can progressively move towards an efficient slicing strategy.\n", "\n", - "Let\u2019s run the model!" + "Let’s run the model!" ] }, { @@ -324,7 +324,7 @@ "id": "AutFkxbuxkPa" }, "source": [ - "The model shows strong performance in detecting people in the lower half of the image, but it struggles to accurately predict boxes in the upper half. This suggests two key insights: first, the model is proficient at identifying people\u2019s heads from various angles, and second, using SAHI could effectively address the detection challenges in the upper portion of the image. Now, it\u2019s time to try SAHI!" + "The model shows strong performance in detecting people in the lower half of the image, but it struggles to accurately predict boxes in the upper half. This suggests two key insights: first, the model is proficient at identifying people’s heads from various angles, and second, using SAHI could effectively address the detection challenges in the upper portion of the image. Now, it’s time to try SAHI!" ] }, { @@ -376,7 +376,7 @@ "\n", "## Slicing our image with `supervision`\n", "\n", - "Let\u2019s begin by visualizing how these tiles would appear on our image. Let's start with a small set of 2x2 tiles, with a zero overlap both vertically (height) and horizontally (width) between the tiles. The final values of these parameters will ultimately depend on your use case, so trial and error is encouraged!\n", + "Let’s begin by visualizing how these tiles would appear on our image. 
Let's start with a small set of 2x2 tiles, with a zero overlap both vertically (height) and horizontally (width) between the tiles. The final values of these parameters will ultimately depend on your use case, so trial and error is encouraged!\n", "\n", "Some of the methods below are for visualizing the tiles and overlapping. You'll only need the `calculate_tile_size` method in your application to calculate the size of the tiles.\n", "\n", @@ -694,13 +694,13 @@ "id": "W6TvNnXpewwc" }, "source": [ - "Great! We\u2019ve detected 726 people, up from the 185 we initially detected without image slicing. The model is still detecting people from different angles, but it continues to struggle with detecting people located in the farther parts of the plaza. It\u2019s time to increase the number of tiles\u2014in other words, zoom in so the model can capture more details of the small heads of people.\n", + "Great! We’ve detected 726 people, up from the 185 we initially detected without image slicing. The model is still detecting people from different angles, but it continues to struggle with detecting people located in the farther parts of the plaza. It’s time to increase the number of tiles—in other words, zoom in so the model can capture more details of the small heads of people.\n", "\n", "![Missing detections](https://raw.githubusercontent.com/ediardo/notebooks/main/sahi/detections.png)\n", "\n", "### Increasing Tile Density: Moving to a 5x5 Grid\n", "\n", - "Now that we\u2019ve seen improvements with a 2x2 grid, it\u2019s time to push the model further. By increasing the number of tiles to a 5x5 grid, we effectively zoom in on the image, allowing the model to capture finer details, such as smaller and more distant features that might have been missed before. This approach will help us understand how well the model performs with even more zoomed-in images. Let\u2019s explore how this change affects our detection accuracy and overall performance." 
+ "Now that we’ve seen improvements with a 2x2 grid, it’s time to push the model further. By increasing the number of tiles to a 5x5 grid, we effectively zoom in on the image, allowing the model to capture finer details, such as smaller and more distant features that might have been missed before. This approach will help us understand how well the model performs with even more zoomed-in images. Let’s explore how this change affects our detection accuracy and overall performance." ] }, { @@ -778,19 +778,19 @@ "id": "eFQasUU3xkPb" }, "source": [ - "We\u2019ve just detected 1,494 people using a 25-tile grid (5 rows x 5 columns), a significant increase from the 726 people detected with the 4-tile (2x2) grid. However, as we increase the number of tiles, a new challenge arises: duplicate detections or missed detections along the edges of the tiles. This issue becomes evident in these examples, where overlapping or gaps between tiles lead to inaccuracies in our model\u2019s detection.\n", + "We’ve just detected 1,494 people using a 25-tile grid (5 rows x 5 columns), a significant increase from the 726 people detected with the 4-tile (2x2) grid. However, as we increase the number of tiles, a new challenge arises: duplicate detections or missed detections along the edges of the tiles. 
This issue becomes evident in these examples, where overlapping or gaps between tiles lead to inaccuracies in our model’s detection.\n", "\n", "| Example| Observations |\n", "|----|----|\n", - "| ![Overlapping](https://github.com/ediardo/notebooks/blob/main/sahi/overlapping_1.png?raw=true \"Overlapping\") | False Negative, Incomplete bbox |\n", - "| ![Overlapping](https://raw.githubusercontent.com/ediardo/notebooks/main/sahi/overlapping_2.png \"Overlapping\")| Double detection, Incomplete bbox|\n", - "| ![Overlapping](https://raw.githubusercontent.com/ediardo/notebooks/main/sahi/overlapping_3.png \"Overlapping\")| Incomplete bounding box|\n", + "| ![Overlapping](https://media.roboflow.com/supervision/cookbooks/sahi/overlapping_1.png \"Overlapping\") | False Negative, Incomplete bbox |\n", + "| ![Overlapping](https://media.roboflow.com/supervision/cookbooks/sahi/overlapping_2.png \"Overlapping\")| Double detection, Incomplete bbox|\n", + "| ![Overlapping](https://media.roboflow.com/supervision/cookbooks/sahi/overlapping_3.png \"Overlapping\")| Incomplete bounding box|\n", "\n", "## Improving Object Detection Near Boundaries with Overlapping\n", "\n", "When objects, like people, appear at the edges of tiles, they might be detected twice or missed entirely if they span across two tiles. This can lead to inaccurate detection results. To solve this, we use overlapping tiles, allowing the model to see parts of adjacent tiles simultaneously. This overlap helps ensure that objects near the boundaries are fully captured, reducing duplicates and improving accuracy.\n", "\n", - "We\u2019ll set the overlap ratio to `(0.2, 0.2)` on the tile\u2019s width and height. This overlap helps ensure that objects near the boundaries are fully captured, reducing duplicates and improving accuracy." + "We’ll set the overlap ratio to `(0.2, 0.2)` on the tile’s width and height. 
This overlap helps ensure that objects near the boundaries are fully captured, reducing duplicates and improving accuracy." ] }, { @@ -869,14 +869,14 @@ "source": [ "## Non-Max Supression vs Non-Max Merge\n", "\n", - "When dealing with overlapping detections, it\u2019s essential to determine which detections represent the same object and which are unique. Non-Maximum Suppression (NMS) and Non-Maximum Merging (NMM) are two techniques commonly used to address this challenge. NMS works by eliminating redundant detections based on confidence scores, while NMM combines overlapping detections to enhance the representation of objects spanning multiple tiles. Understanding the difference between these methods helps optimize object detection, particularly near tile boundaries.\n", + "When dealing with overlapping detections, it’s essential to determine which detections represent the same object and which are unique. Non-Maximum Suppression (NMS) and Non-Maximum Merging (NMM) are two techniques commonly used to address this challenge. NMS works by eliminating redundant detections based on confidence scores, while NMM combines overlapping detections to enhance the representation of objects spanning multiple tiles. Understanding the difference between these methods helps optimize object detection, particularly near tile boundaries.\n", "\n", "In `supervision`, the `overlap_filter` parameter allows us to specify the strategy for handling overlapping detections in slices. This parameter can take on two values:\n", "\n", "- `sv.OverlapFilter.NON_MAX_SUPRESSION` (default): Eliminates redundant detections by keeping the one with the highest confidence score.\n", "- `sv.OverlapFilter.NON_MAX_MERGE`: Combines overlapping detections to create a more comprehensive representation of objects spanning multiple tiles.\n", "\n", - "It\u2019s important to note that this method is not perfect and may require further testing and fine-tuning to achieve optimal results in various use cases. 
You should validate the outputs and adjust parameters as needed to handle specific scenarios effectively." + "It’s important to note that this method is not perfect and may require further testing and fine-tuning to achieve optimal results in various use cases. You should validate the outputs and adjust parameters as needed to handle specific scenarios effectively." ] }, { @@ -1023,7 +1023,7 @@ "source": [ "## Conclusion\n", "\n", - "In this cookbook, we\u2019ve explored the advantages of using the SAHI technique for enhancing small object detection and the importance of experimenting with various tiling strategies to effectively zoom into images. By combining these approaches, we can improve the accuracy and reliability of object detection models, particularly in challenging scenarios where objects are small or located near the boundaries of tiles. These methods offer practical solutions to common challenges in computer vision, empowering developers to build more robust and precise detection systems.\n", + "In this cookbook, we’ve explored the advantages of using the SAHI technique for enhancing small object detection and the importance of experimenting with various tiling strategies to effectively zoom into images. By combining these approaches, we can improve the accuracy and reliability of object detection models, particularly in challenging scenarios where objects are small or located near the boundaries of tiles. 
These methods offer practical solutions to common challenges in computer vision, empowering developers to build more robust and precise detection systems.\n", "\n", "![\"Crowd Detection\"](https://raw.githubusercontent.com/ediardo/notebooks/main/sahi/5x5_nms.png \"Crowd Detection\")\n" ] From bfe4f0f82902cf60af70ca406282fb1ad6872d85 Mon Sep 17 00:00:00 2001 From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Thu, 10 Oct 2024 19:06:36 +0000 Subject: [PATCH 042/161] =?UTF-8?q?fix(pre=5Fcommit):=20=F0=9F=8E=A8=20aut?= =?UTF-8?q?o=20format=20pre-commit=20hooks?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- .../small-object-detection-with-sahi.ipynb | 26 +++++++++---------- 1 file changed, 13 insertions(+), 13 deletions(-) diff --git a/docs/notebooks/small-object-detection-with-sahi.ipynb b/docs/notebooks/small-object-detection-with-sahi.ipynb index 986849de3..db69b0852 100644 --- a/docs/notebooks/small-object-detection-with-sahi.ipynb +++ b/docs/notebooks/small-object-detection-with-sahi.ipynb @@ -64,11 +64,11 @@ "source": [ "## Crowd counting with Computer Vision\n", "\n", - "How would you go about solving the problem of counting people in crowds? After some tests, I found that the best approach is to detect people’s heads. Other body parts are likely occluded by other people, but heads are usually exposed, especially in aerial or high-level shots.\n", + "How would you go about solving the problem of counting people in crowds? After some tests, I found that the best approach is to detect people\u2019s heads. Other body parts are likely occluded by other people, but heads are usually exposed, especially in aerial or high-level shots.\n", "\n", "### Using an Open-Source Public Model for People Detection\n", "\n", - "Detecting people (or their heads) is a common problem that has been addressed by many researchers in the past. 
In this project, we’ll use an open-source public dataset and a fine-tuned model to perform inference on images.\n", + "Detecting people (or their heads) is a common problem that has been addressed by many researchers in the past. In this project, we\u2019ll use an open-source public dataset and a fine-tuned model to perform inference on images.\n", "\n", "![Roboflow Universe](https://media.roboflow.com/supervision/cookbooks/sahi/roboflow_universe.png \"Open source model for counting people's heads\")\n", "\n", @@ -179,9 +179,9 @@ "\n", "## Let's try our model's performance\n", "\n", - "Before we dive into the SAHI technique for small object detection, it’s useful to see how a fine-tuned model performs with the image as is—without any pre-processing or slicing. The goal is to understand when the model starts to fail so that we can progressively move towards an efficient slicing strategy.\n", + "Before we dive into the SAHI technique for small object detection, it\u2019s useful to see how a fine-tuned model performs with the image as is\u2014without any pre-processing or slicing. The goal is to understand when the model starts to fail so that we can progressively move towards an efficient slicing strategy.\n", "\n", - "Let’s run the model!" + "Let\u2019s run the model!" ] }, { @@ -324,7 +324,7 @@ "id": "AutFkxbuxkPa" }, "source": [ - "The model shows strong performance in detecting people in the lower half of the image, but it struggles to accurately predict boxes in the upper half. This suggests two key insights: first, the model is proficient at identifying people’s heads from various angles, and second, using SAHI could effectively address the detection challenges in the upper portion of the image. Now, it’s time to try SAHI!" + "The model shows strong performance in detecting people in the lower half of the image, but it struggles to accurately predict boxes in the upper half. 
This suggests two key insights: first, the model is proficient at identifying people\u2019s heads from various angles, and second, using SAHI could effectively address the detection challenges in the upper portion of the image. Now, it\u2019s time to try SAHI!" ] }, { @@ -376,7 +376,7 @@ "\n", "## Slicing our image with `supervision`\n", "\n", - "Let’s begin by visualizing how these tiles would appear on our image. Let's start with a small set of 2x2 tiles, with a zero overlap both vertically (height) and horizontally (width) between the tiles. The final values of these parameters will ultimately depend on your use case, so trial and error is encouraged!\n", + "Let\u2019s begin by visualizing how these tiles would appear on our image. Let's start with a small set of 2x2 tiles, with a zero overlap both vertically (height) and horizontally (width) between the tiles. The final values of these parameters will ultimately depend on your use case, so trial and error is encouraged!\n", "\n", "Some of the methods below are for visualizing the tiles and overlapping. You'll only need the `calculate_tile_size` method in your application to calculate the size of the tiles.\n", "\n", @@ -694,13 +694,13 @@ "id": "W6TvNnXpewwc" }, "source": [ - "Great! We’ve detected 726 people, up from the 185 we initially detected without image slicing. The model is still detecting people from different angles, but it continues to struggle with detecting people located in the farther parts of the plaza. It’s time to increase the number of tiles—in other words, zoom in so the model can capture more details of the small heads of people.\n", + "Great! We\u2019ve detected 726 people, up from the 185 we initially detected without image slicing. The model is still detecting people from different angles, but it continues to struggle with detecting people located in the farther parts of the plaza. 
It\u2019s time to increase the number of tiles\u2014in other words, zoom in so the model can capture more details of the small heads of people.\n", "\n", "![Missing detections](https://raw.githubusercontent.com/ediardo/notebooks/main/sahi/detections.png)\n", "\n", "### Increasing Tile Density: Moving to a 5x5 Grid\n", "\n", - "Now that we’ve seen improvements with a 2x2 grid, it’s time to push the model further. By increasing the number of tiles to a 5x5 grid, we effectively zoom in on the image, allowing the model to capture finer details, such as smaller and more distant features that might have been missed before. This approach will help us understand how well the model performs with even more zoomed-in images. Let’s explore how this change affects our detection accuracy and overall performance." + "Now that we\u2019ve seen improvements with a 2x2 grid, it\u2019s time to push the model further. By increasing the number of tiles to a 5x5 grid, we effectively zoom in on the image, allowing the model to capture finer details, such as smaller and more distant features that might have been missed before. This approach will help us understand how well the model performs with even more zoomed-in images. Let\u2019s explore how this change affects our detection accuracy and overall performance." ] }, { @@ -778,7 +778,7 @@ "id": "eFQasUU3xkPb" }, "source": [ - "We’ve just detected 1,494 people using a 25-tile grid (5 rows x 5 columns), a significant increase from the 726 people detected with the 4-tile (2x2) grid. However, as we increase the number of tiles, a new challenge arises: duplicate detections or missed detections along the edges of the tiles. This issue becomes evident in these examples, where overlapping or gaps between tiles lead to inaccuracies in our model’s detection.\n", + "We\u2019ve just detected 1,494 people using a 25-tile grid (5 rows x 5 columns), a significant increase from the 726 people detected with the 4-tile (2x2) grid. 
However, as we increase the number of tiles, a new challenge arises: duplicate detections or missed detections along the edges of the tiles. This issue becomes evident in these examples, where overlapping or gaps between tiles lead to inaccuracies in our model\u2019s detection.\n", "\n", "| Example| Observations |\n", "|----|----|\n", @@ -790,7 +790,7 @@ "\n", "When objects, like people, appear at the edges of tiles, they might be detected twice or missed entirely if they span across two tiles. This can lead to inaccurate detection results. To solve this, we use overlapping tiles, allowing the model to see parts of adjacent tiles simultaneously. This overlap helps ensure that objects near the boundaries are fully captured, reducing duplicates and improving accuracy.\n", "\n", - "We’ll set the overlap ratio to `(0.2, 0.2)` on the tile’s width and height. This overlap helps ensure that objects near the boundaries are fully captured, reducing duplicates and improving accuracy." + "We\u2019ll set the overlap ratio to `(0.2, 0.2)` on the tile\u2019s width and height. This overlap helps ensure that objects near the boundaries are fully captured, reducing duplicates and improving accuracy." ] }, { @@ -869,14 +869,14 @@ "source": [ "## Non-Max Supression vs Non-Max Merge\n", "\n", - "When dealing with overlapping detections, it’s essential to determine which detections represent the same object and which are unique. Non-Maximum Suppression (NMS) and Non-Maximum Merging (NMM) are two techniques commonly used to address this challenge. NMS works by eliminating redundant detections based on confidence scores, while NMM combines overlapping detections to enhance the representation of objects spanning multiple tiles. Understanding the difference between these methods helps optimize object detection, particularly near tile boundaries.\n", + "When dealing with overlapping detections, it\u2019s essential to determine which detections represent the same object and which are unique. 
Non-Maximum Suppression (NMS) and Non-Maximum Merging (NMM) are two techniques commonly used to address this challenge. NMS works by eliminating redundant detections based on confidence scores, while NMM combines overlapping detections to enhance the representation of objects spanning multiple tiles. Understanding the difference between these methods helps optimize object detection, particularly near tile boundaries.\n", "\n", "In `supervision`, the `overlap_filter` parameter allows us to specify the strategy for handling overlapping detections in slices. This parameter can take on two values:\n", "\n", "- `sv.OverlapFilter.NON_MAX_SUPRESSION` (default): Eliminates redundant detections by keeping the one with the highest confidence score.\n", "- `sv.OverlapFilter.NON_MAX_MERGE`: Combines overlapping detections to create a more comprehensive representation of objects spanning multiple tiles.\n", "\n", - "It’s important to note that this method is not perfect and may require further testing and fine-tuning to achieve optimal results in various use cases. You should validate the outputs and adjust parameters as needed to handle specific scenarios effectively." + "It\u2019s important to note that this method is not perfect and may require further testing and fine-tuning to achieve optimal results in various use cases. You should validate the outputs and adjust parameters as needed to handle specific scenarios effectively." ] }, { @@ -1023,7 +1023,7 @@ "source": [ "## Conclusion\n", "\n", - "In this cookbook, we’ve explored the advantages of using the SAHI technique for enhancing small object detection and the importance of experimenting with various tiling strategies to effectively zoom into images. By combining these approaches, we can improve the accuracy and reliability of object detection models, particularly in challenging scenarios where objects are small or located near the boundaries of tiles. 
These methods offer practical solutions to common challenges in computer vision, empowering developers to build more robust and precise detection systems.\n", + "In this cookbook, we\u2019ve explored the advantages of using the SAHI technique for enhancing small object detection and the importance of experimenting with various tiling strategies to effectively zoom into images. By combining these approaches, we can improve the accuracy and reliability of object detection models, particularly in challenging scenarios where objects are small or located near the boundaries of tiles. These methods offer practical solutions to common challenges in computer vision, empowering developers to build more robust and precise detection systems.\n", "\n", "![\"Crowd Detection\"](https://raw.githubusercontent.com/ediardo/notebooks/main/sahi/5x5_nms.png \"Crowd Detection\")\n" ] From 68a1052cfc01307d524ab9fc61bf9942c28a00dc Mon Sep 17 00:00:00 2001 From: Soham Date: Fri, 11 Oct 2024 01:14:00 +0530 Subject: [PATCH 043/161] fix: is_empty no longer compares metadata directly --- supervision/detection/core.py | 1 - 1 file changed, 1 deletion(-) diff --git a/supervision/detection/core.py b/supervision/detection/core.py index 8216d054b..8b470d601 100644 --- a/supervision/detection/core.py +++ b/supervision/detection/core.py @@ -991,7 +991,6 @@ def is_empty(self) -> bool: """ empty_detections = Detections.empty() empty_detections.data = self.data - empty_detections.metadata = self.metadata return self == empty_detections @classmethod From c7611e384bd8e1d12faa641bf5227bfe449c0405 Mon Sep 17 00:00:00 2001 From: Soham Date: Fri, 11 Oct 2024 01:44:47 +0530 Subject: [PATCH 044/161] fix: update empty and is_empty methods --- supervision/detection/core.py | 14 ++++++++++---- 1 file changed, 10 insertions(+), 4 deletions(-) diff --git a/supervision/detection/core.py b/supervision/detection/core.py index 8b470d601..6ef52a03f 100644 --- a/supervision/detection/core.py +++ 
b/supervision/detection/core.py @@ -964,7 +964,7 @@ def from_ncnn(cls, ncnn_results) -> Detections: ) @classmethod - def empty(cls) -> Detections: + def empty(cls, metadata: Optional[Dict[str, Any]] = None) -> Detections: """ Create an empty Detections object with no bounding boxes, confidences, or class IDs. @@ -983,15 +983,21 @@ def empty(cls) -> Detections: xyxy=np.empty((0, 4), dtype=np.float32), confidence=np.array([], dtype=np.float32), class_id=np.array([], dtype=int), + metadata=metadata if metadata is not None else {}, ) def is_empty(self) -> bool: """ Returns `True` if the `Detections` object is considered empty. """ - empty_detections = Detections.empty() - empty_detections.data = self.data - return self == empty_detections + return ( + len(self.xyxy) == 0 + and (self.mask is None or len(self.mask) == 0) + and (self.class_id is None or len(self.class_id) == 0) + and (self.confidence is None or len(self.confidence) == 0) + and (self.tracker_id is None or len(self.tracker_id) == 0) + and not self.data + ) @classmethod def merge(cls, detections_list: List[Detections]) -> Detections: From 892185e4c60ca14913e53bf15adc1f913dd9b7fa Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Fri, 11 Oct 2024 00:35:27 +0000 Subject: [PATCH 045/161] :arrow_up: Bump mkdocs-material from 9.5.39 to 9.5.40 Bumps [mkdocs-material](https://github.com/squidfunk/mkdocs-material) from 9.5.39 to 9.5.40. - [Release notes](https://github.com/squidfunk/mkdocs-material/releases) - [Changelog](https://github.com/squidfunk/mkdocs-material/blob/master/CHANGELOG) - [Commits](https://github.com/squidfunk/mkdocs-material/compare/9.5.39...9.5.40) --- updated-dependencies: - dependency-name: mkdocs-material dependency-type: direct:development update-type: version-update:semver-patch ... 
Signed-off-by: dependabot[bot] --- poetry.lock | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/poetry.lock b/poetry.lock index 5d47a083a..aebc04c7d 100644 --- a/poetry.lock +++ b/poetry.lock @@ -2252,13 +2252,13 @@ pygments = ">2.12.0" [[package]] name = "mkdocs-material" -version = "9.5.39" +version = "9.5.40" description = "Documentation that simply works" optional = false python-versions = ">=3.8" files = [ - {file = "mkdocs_material-9.5.39-py3-none-any.whl", hash = "sha256:0f2f68c8db89523cb4a59705cd01b4acd62b2f71218ccb67e1e004e560410d2b"}, - {file = "mkdocs_material-9.5.39.tar.gz", hash = "sha256:25faa06142afa38549d2b781d475a86fb61de93189f532b88e69bf11e5e5c3be"}, + {file = "mkdocs_material-9.5.40-py3-none-any.whl", hash = "sha256:8e7a16ada34e79a7b6459ff2602584222f522c738b6a023d1bea853d5049da6f"}, + {file = "mkdocs_material-9.5.40.tar.gz", hash = "sha256:b69d70e667ec51fc41f65e006a3184dd00d95b2439d982cb1586e4c018943156"}, ] [package.dependencies] From b86b660634d6037542472307c17e6594e6f6aec5 Mon Sep 17 00:00:00 2001 From: DivyaVijay1234 Date: Fri, 11 Oct 2024 16:56:22 +0530 Subject: [PATCH 046/161] feat-Added Default color --- supervision/draw/utils.py | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/supervision/draw/utils.py b/supervision/draw/utils.py index 19ce4a258..50c911d4c 100644 --- a/supervision/draw/utils.py +++ b/supervision/draw/utils.py @@ -9,7 +9,7 @@ def draw_line( - scene: np.ndarray, start: Point, end: Point, color: Color, thickness: int = 2 + scene: np.ndarray, start: Point, end: Point, color: Color=Color(163, 81, 251), thickness: int = 2 ) -> np.ndarray: """ Draws a line on a given scene. @@ -35,7 +35,7 @@ def draw_line( def draw_rectangle( - scene: np.ndarray, rect: Rect, color: Color, thickness: int = 2 + scene: np.ndarray, rect: Rect, color: Color=Color(163, 81, 251), thickness: int = 2 ) -> np.ndarray: """ Draws a rectangle on an image. 
@@ -60,7 +60,7 @@ def draw_rectangle( def draw_filled_rectangle( - scene: np.ndarray, rect: Rect, color: Color, opacity: float = 1 + scene: np.ndarray, rect: Rect, color: Color=Color(163, 81, 251), opacity: float = 1 ) -> np.ndarray: """ Draws a filled rectangle on an image. @@ -151,7 +151,7 @@ def draw_rounded_rectangle( def draw_polygon( - scene: np.ndarray, polygon: np.ndarray, color: Color, thickness: int = 2 + scene: np.ndarray, polygon: np.ndarray, color: Color=Color(163, 81, 251), thickness: int = 2 ) -> np.ndarray: """Draw a polygon on a scene. @@ -171,7 +171,7 @@ def draw_polygon( def draw_filled_polygon( - scene: np.ndarray, polygon: np.ndarray, color: Color, opacity: float = 1 + scene: np.ndarray, polygon: np.ndarray, color: Color=Color(163, 81, 251), opacity: float = 1 ) -> np.ndarray: """Draw a filled polygon on a scene. From 5242614b01969b261f86982ee84db7a797dacac3 Mon Sep 17 00:00:00 2001 From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Fri, 11 Oct 2024 11:38:20 +0000 Subject: [PATCH 047/161] =?UTF-8?q?fix(pre=5Fcommit):=20=F0=9F=8E=A8=20aut?= =?UTF-8?q?o=20format=20pre-commit=20hooks?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- supervision/draw/utils.py | 26 +++++++++++++++++++++----- 1 file changed, 21 insertions(+), 5 deletions(-) diff --git a/supervision/draw/utils.py b/supervision/draw/utils.py index 50c911d4c..beec3f31c 100644 --- a/supervision/draw/utils.py +++ b/supervision/draw/utils.py @@ -9,7 +9,11 @@ def draw_line( - scene: np.ndarray, start: Point, end: Point, color: Color=Color(163, 81, 251), thickness: int = 2 + scene: np.ndarray, + start: Point, + end: Point, + color: Color = Color(163, 81, 251), + thickness: int = 2, ) -> np.ndarray: """ Draws a line on a given scene. 
@@ -35,7 +39,10 @@ def draw_line( def draw_rectangle( - scene: np.ndarray, rect: Rect, color: Color=Color(163, 81, 251), thickness: int = 2 + scene: np.ndarray, + rect: Rect, + color: Color = Color(163, 81, 251), + thickness: int = 2, ) -> np.ndarray: """ Draws a rectangle on an image. @@ -60,7 +67,10 @@ def draw_rectangle( def draw_filled_rectangle( - scene: np.ndarray, rect: Rect, color: Color=Color(163, 81, 251), opacity: float = 1 + scene: np.ndarray, + rect: Rect, + color: Color = Color(163, 81, 251), + opacity: float = 1, ) -> np.ndarray: """ Draws a filled rectangle on an image. @@ -151,7 +161,10 @@ def draw_rounded_rectangle( def draw_polygon( - scene: np.ndarray, polygon: np.ndarray, color: Color=Color(163, 81, 251), thickness: int = 2 + scene: np.ndarray, + polygon: np.ndarray, + color: Color = Color(163, 81, 251), + thickness: int = 2, ) -> np.ndarray: """Draw a polygon on a scene. @@ -171,7 +184,10 @@ def draw_polygon( def draw_filled_polygon( - scene: np.ndarray, polygon: np.ndarray, color: Color=Color(163, 81, 251), opacity: float = 1 + scene: np.ndarray, + polygon: np.ndarray, + color: Color = Color(163, 81, 251), + opacity: float = 1, ) -> np.ndarray: """Draw a filled polygon on a scene. From a6897d4117447c28c2a2629d732177098cfc4fc0 Mon Sep 17 00:00:00 2001 From: DivyaVijay1234 Date: Fri, 11 Oct 2024 20:01:42 +0530 Subject: [PATCH 048/161] feat: add default color --- supervision/draw/utils.py | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/supervision/draw/utils.py b/supervision/draw/utils.py index 50c911d4c..1c207b5df 100644 --- a/supervision/draw/utils.py +++ b/supervision/draw/utils.py @@ -9,7 +9,7 @@ def draw_line( - scene: np.ndarray, start: Point, end: Point, color: Color=Color(163, 81, 251), thickness: int = 2 + scene: np.ndarray, start: Point, end: Point, color: Color=Color.ROBOFLOW, thickness: int = 2 ) -> np.ndarray: """ Draws a line on a given scene. 
@@ -35,7 +35,7 @@ def draw_line( def draw_rectangle( - scene: np.ndarray, rect: Rect, color: Color=Color(163, 81, 251), thickness: int = 2 + scene: np.ndarray, rect: Rect, color: Color=Color.ROBOFLOW, thickness: int = 2 ) -> np.ndarray: """ Draws a rectangle on an image. @@ -60,7 +60,7 @@ def draw_rectangle( def draw_filled_rectangle( - scene: np.ndarray, rect: Rect, color: Color=Color(163, 81, 251), opacity: float = 1 + scene: np.ndarray, rect: Rect, color: Color=Color.ROBOFLOW, opacity: float = 1 ) -> np.ndarray: """ Draws a filled rectangle on an image. @@ -151,7 +151,7 @@ def draw_rounded_rectangle( def draw_polygon( - scene: np.ndarray, polygon: np.ndarray, color: Color=Color(163, 81, 251), thickness: int = 2 + scene: np.ndarray, polygon: np.ndarray, color: Color=Color.ROBOFLOW, thickness: int = 2 ) -> np.ndarray: """Draw a polygon on a scene. @@ -171,7 +171,7 @@ def draw_polygon( def draw_filled_polygon( - scene: np.ndarray, polygon: np.ndarray, color: Color=Color(163, 81, 251), opacity: float = 1 + scene: np.ndarray, polygon: np.ndarray, color: Color=Color.ROBOFLOW, opacity: float = 1 ) -> np.ndarray: """Draw a filled polygon on a scene. 
From 7e7eb8bd5ae5e264ee5e57a0bc7f9220c447d8f7 Mon Sep 17 00:00:00 2001 From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Fri, 11 Oct 2024 14:35:04 +0000 Subject: [PATCH 049/161] =?UTF-8?q?fix(pre=5Fcommit):=20=F0=9F=8E=A8=20aut?= =?UTF-8?q?o=20format=20pre-commit=20hooks?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- supervision/draw/utils.py | 20 +++++++++++++++----- 1 file changed, 15 insertions(+), 5 deletions(-) diff --git a/supervision/draw/utils.py b/supervision/draw/utils.py index 1c207b5df..643970280 100644 --- a/supervision/draw/utils.py +++ b/supervision/draw/utils.py @@ -9,7 +9,11 @@ def draw_line( - scene: np.ndarray, start: Point, end: Point, color: Color=Color.ROBOFLOW, thickness: int = 2 + scene: np.ndarray, + start: Point, + end: Point, + color: Color = Color.ROBOFLOW, + thickness: int = 2, ) -> np.ndarray: """ Draws a line on a given scene. @@ -35,7 +39,7 @@ def draw_line( def draw_rectangle( - scene: np.ndarray, rect: Rect, color: Color=Color.ROBOFLOW, thickness: int = 2 + scene: np.ndarray, rect: Rect, color: Color = Color.ROBOFLOW, thickness: int = 2 ) -> np.ndarray: """ Draws a rectangle on an image. @@ -60,7 +64,7 @@ def draw_rectangle( def draw_filled_rectangle( - scene: np.ndarray, rect: Rect, color: Color=Color.ROBOFLOW, opacity: float = 1 + scene: np.ndarray, rect: Rect, color: Color = Color.ROBOFLOW, opacity: float = 1 ) -> np.ndarray: """ Draws a filled rectangle on an image. @@ -151,7 +155,10 @@ def draw_rounded_rectangle( def draw_polygon( - scene: np.ndarray, polygon: np.ndarray, color: Color=Color.ROBOFLOW, thickness: int = 2 + scene: np.ndarray, + polygon: np.ndarray, + color: Color = Color.ROBOFLOW, + thickness: int = 2, ) -> np.ndarray: """Draw a polygon on a scene. 
@@ -171,7 +178,10 @@ def draw_polygon( def draw_filled_polygon( - scene: np.ndarray, polygon: np.ndarray, color: Color=Color.ROBOFLOW, opacity: float = 1 + scene: np.ndarray, + polygon: np.ndarray, + color: Color = Color.ROBOFLOW, + opacity: float = 1, ) -> np.ndarray: """Draw a filled polygon on a scene. From c339315b318106e2a7bd760a4d0f20a1a39e1187 Mon Sep 17 00:00:00 2001 From: Onuralp SEZER Date: Sat, 12 Oct 2024 18:58:45 +0300 Subject: [PATCH 050/161] =?UTF-8?q?feat:=20=E2=9C=A8=20set=20default=20col?= =?UTF-8?q?ors=20for=20polygon=20drawing=20functions=20and=20add=20documen?= =?UTF-8?q?tations?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- supervision/detection/tools/polygon_zone.py | 4 ++-- supervision/draw/utils.py | 6 +++--- 2 files changed, 5 insertions(+), 5 deletions(-) diff --git a/supervision/detection/tools/polygon_zone.py b/supervision/detection/tools/polygon_zone.py index ac7203ff8..b8b728fc5 100644 --- a/supervision/detection/tools/polygon_zone.py +++ b/supervision/detection/tools/polygon_zone.py @@ -88,7 +88,7 @@ class PolygonZoneAnnotator: Attributes: zone (PolygonZone): The polygon zone to be annotated - color (Color): The color to draw the polygon lines + color (Color): The color to draw the polygon lines, default is white thickness (int): The thickness of the polygon lines, default is 2 text_color (Color): The color of the text on the polygon, default is black text_scale (float): The scale of the text on the polygon, default is 0.5 @@ -104,7 +104,7 @@ class PolygonZoneAnnotator: def __init__( self, zone: PolygonZone, - color: Color, + color: Color = Color.WHITE, thickness: int = 2, text_color: Color = Color.BLACK, text_scale: float = 0.5, diff --git a/supervision/draw/utils.py b/supervision/draw/utils.py index 643970280..0c3767ff7 100644 --- a/supervision/draw/utils.py +++ b/supervision/draw/utils.py @@ -22,7 +22,7 @@ def draw_line( scene (np.ndarray): The scene on which the line will be drawn 
start (Point): The starting point of the line end (Point): The end point of the line - color (Color): The color of the line + color (Color): The color of the line, defaults to Color.ROBOFLOW thickness (int): The thickness of the line Returns: @@ -165,7 +165,7 @@ def draw_polygon( Parameters: scene (np.ndarray): The scene to draw the polygon on. polygon (np.ndarray): The polygon to be drawn, given as a list of vertices. - color (Color): The color of the polygon. + color (Color): The color of the polygon. Defaults to Color.ROBOFLOW. thickness (int): The thickness of the polygon lines, by default 2. Returns: @@ -188,7 +188,7 @@ def draw_filled_polygon( Parameters: scene (np.ndarray): The scene to draw the polygon on. polygon (np.ndarray): The polygon to be drawn, given as a list of vertices. - color (Color): The color of the polygon. + color (Color): The color of the polygon. Defaults to Color.ROBOFLOW. opacity (float): The opacity of polygon when drawn on the scene. Returns: From 1d3f61af85ba037ba9dc35f1f7cc2581a55e8e41 Mon Sep 17 00:00:00 2001 From: prakharjain Date: Sun, 13 Oct 2024 13:10:24 +0530 Subject: [PATCH 051/161] Added support by using oriented_box_iou_batch --- supervision/metrics/mean_average_precision.py | 17 +++++------------ 1 file changed, 5 insertions(+), 12 deletions(-) diff --git a/supervision/metrics/mean_average_precision.py b/supervision/metrics/mean_average_precision.py index dbd60b2e7..2429c1898 100644 --- a/supervision/metrics/mean_average_precision.py +++ b/supervision/metrics/mean_average_precision.py @@ -9,7 +9,7 @@ from supervision.config import ORIENTED_BOX_COORDINATES from supervision.detection.core import Detections -from supervision.detection.utils import box_iou_batch, mask_iou_batch +from supervision.detection.utils import box_iou_batch, mask_iou_batch, oriented_box_iou_batch from supervision.draw.color import LEGACY_COLOR_PALETTE from supervision.metrics.core import Metric, MetricTarget from supervision.metrics.utils.object_size 
import ( @@ -36,11 +36,6 @@ def __init__( class_agnostic (bool): Whether to treat all data as a single class. """ self._metric_target = metric_target - if self._metric_target == MetricTarget.ORIENTED_BOUNDING_BOXES: - raise NotImplementedError( - "Mean Average Precision is not implemented for oriented bounding boxes." - ) - self._class_agnostic = class_agnostic self._predictions_list: List[Detections] = [] @@ -181,15 +176,14 @@ def _compute( iou = box_iou_batch(target_contents, prediction_contents) elif self._metric_target == MetricTarget.MASKS: iou = mask_iou_batch(target_contents, prediction_contents) + elif self._metric_target == MetricTarget.ORIENTED_BOUNDING_BOXES: + iou = oriented_box_iou_batch(target_contents, prediction_contents) else: - raise NotImplementedError( - "Unsupported metric target for IoU calculation" - ) + raise ValueError(f"Unsupported metric target: {self._metric_target}") matches = self._match_detection_batch( predictions.class_id, targets.class_id, iou, iou_thresholds ) - stats.append( ( matches, @@ -213,7 +207,6 @@ def _compute( return MeanAveragePrecisionResult( metric_target=self._metric_target, - is_class_agnostic=self._class_agnostic, mAP_scores=mAP_scores, iou_thresholds=iou_thresholds, matched_classes=unique_classes, @@ -345,7 +338,7 @@ def _detections_content(self, detections: Detections) -> np.ndarray: ) if self._metric_target == MetricTarget.ORIENTED_BOUNDING_BOXES: if obb := detections.data.get(ORIENTED_BOX_COORDINATES): - return np.ndarray(obb, dtype=np.float32) + return np.array(obb, dtype=np.float32) return self._make_empty_content() raise ValueError(f"Invalid metric target: {self._metric_target}") From 9822406cde35e34e9f3006787ef09da38efcc28e Mon Sep 17 00:00:00 2001 From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Sun, 13 Oct 2024 07:45:02 +0000 Subject: [PATCH 052/161] =?UTF-8?q?fix(pre=5Fcommit):=20=F0=9F=8E=A8=20aut?= =?UTF-8?q?o=20format=20pre-commit=20hooks?= MIME-Version: 1.0 
Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- supervision/metrics/mean_average_precision.py | 14 +++++++++++--- 1 file changed, 11 insertions(+), 3 deletions(-) diff --git a/supervision/metrics/mean_average_precision.py b/supervision/metrics/mean_average_precision.py index 2429c1898..58d4764c1 100644 --- a/supervision/metrics/mean_average_precision.py +++ b/supervision/metrics/mean_average_precision.py @@ -9,7 +9,11 @@ from supervision.config import ORIENTED_BOX_COORDINATES from supervision.detection.core import Detections -from supervision.detection.utils import box_iou_batch, mask_iou_batch, oriented_box_iou_batch +from supervision.detection.utils import ( + box_iou_batch, + mask_iou_batch, + oriented_box_iou_batch, +) from supervision.draw.color import LEGACY_COLOR_PALETTE from supervision.metrics.core import Metric, MetricTarget from supervision.metrics.utils.object_size import ( @@ -177,9 +181,13 @@ def _compute( elif self._metric_target == MetricTarget.MASKS: iou = mask_iou_batch(target_contents, prediction_contents) elif self._metric_target == MetricTarget.ORIENTED_BOUNDING_BOXES: - iou = oriented_box_iou_batch(target_contents, prediction_contents) + iou = oriented_box_iou_batch( + target_contents, prediction_contents + ) else: - raise ValueError(f"Unsupported metric target: {self._metric_target}") + raise ValueError( + f"Unsupported metric target: {self._metric_target}" + ) matches = self._match_detection_batch( predictions.class_id, targets.class_id, iou, iou_thresholds From 302041d8aadb823b895f273bc4731df5dd00c163 Mon Sep 17 00:00:00 2001 From: LinasKo Date: Mon, 14 Oct 2024 13:56:52 +0300 Subject: [PATCH 053/161] Removed welcome workflow * It doesn't work, and trip new contributors up --- .github/workflows/welcome.yml | 18 ------------------ 1 file changed, 18 deletions(-) delete mode 100644 .github/workflows/welcome.yml diff --git a/.github/workflows/welcome.yml b/.github/workflows/welcome.yml deleted file mode 100644 index 
4cb70b93f..000000000 --- a/.github/workflows/welcome.yml +++ /dev/null @@ -1,18 +0,0 @@ -name: Welcome WorkFlow - -on: - issues: - types: [opened] - pull_request_target: - types: [opened] - -jobs: - build: - name: 👋 Welcome - runs-on: ubuntu-latest - steps: - - uses: actions/first-interaction@v1.3.0 - with: - repo-token: ${{ secrets.GITHUB_TOKEN }} - issue-message: "Hello there, thank you for opening an Issue ! 🙏🏻 The team was notified and they will get back to you asap." - pr-message: "Hello there, thank you for opening an PR ! 🙏🏻 The team was notified and they will get back to you asap." From 62a4d45c6c98acfb12be48c93f4f3880941edcd6 Mon Sep 17 00:00:00 2001 From: Onuralp SEZER Date: Mon, 14 Oct 2024 13:47:49 +0300 Subject: [PATCH 054/161] =?UTF-8?q?feat:=20=F0=9F=9A=80=20python3.13=20sup?= =?UTF-8?q?port=20for=20poetry=20and=20library?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Signed-off-by: Onuralp SEZER --- .github/workflows/poetry-test.yml | 37 ++ poetry.lock | 661 ++++++++++++++++++++++-------- pyproject.toml | 15 + 3 files changed, 541 insertions(+), 172 deletions(-) create mode 100644 .github/workflows/poetry-test.yml diff --git a/.github/workflows/poetry-test.yml b/.github/workflows/poetry-test.yml new file mode 100644 index 000000000..95e2eb7fe --- /dev/null +++ b/.github/workflows/poetry-test.yml @@ -0,0 +1,37 @@ +name: 🔧 Poetry Check and Installation Test Workflow +on: + push: + paths: + - 'poetry.lock' + - 'pyproject.toml' + pull_request: + paths: + - 'poetry.lock' + - 'pyproject.toml' + workflow_dispatch: + +jobs: + poetry-tests: + strategy: + fail-fast: false + matrix: + os: [ubuntu-latest, windows-latest, macos-latest] + python-version: ["3.8", "3.9", "3.10", "3.11", "3.12", "3.13"] + runs-on: ${{ matrix.os }} + steps: + - name: 📥 Checkout the repository + uses: actions/checkout@v4 + + - name: 🐍 Set up Python ${{ matrix.python-version }} + uses: actions/setup-python@v5 + with: + python-version: ${{ 
matrix.python-version }} + + - name: 📦 Install the base dependencies + run: python -m pip install --upgrade poetry + + - name: 🔍 Check the correctness of the project config + run: poetry check + + - name: 🚀 Do Install the package Test + run: poetry install diff --git a/poetry.lock b/poetry.lock index aebc04c7d..1994447dd 100644 --- a/poetry.lock +++ b/poetry.lock @@ -2,13 +2,13 @@ [[package]] name = "anyio" -version = "4.5.0" +version = "4.6.2" description = "High level compatibility layer for multiple asynchronous event loop implementations" optional = false python-versions = ">=3.8" files = [ - {file = "anyio-4.5.0-py3-none-any.whl", hash = "sha256:fdeb095b7cc5a5563175eedd926ec4ae55413bb4be5770c424af0ba46ccb4a78"}, - {file = "anyio-4.5.0.tar.gz", hash = "sha256:c5a275fe5ca0afd788001f58fca1e69e29ce706d746e317d660e21f70c530ef9"}, + {file = "anyio-4.6.2-py3-none-any.whl", hash = "sha256:6caec6b1391f6f6d7b2ef2258d2902d36753149f67478f7df4be8e54d03a8f54"}, + {file = "anyio-4.6.2.tar.gz", hash = "sha256:f72a7bb3dd0752b3bd8b17a844a019d7fbf6ae218c588f4f9ba1b2f600b12347"}, ] [package.dependencies] @@ -19,7 +19,7 @@ typing-extensions = {version = ">=4.1", markers = "python_version < \"3.11\""} [package.extras] doc = ["Sphinx (>=7.4,<8.0)", "packaging", "sphinx-autodoc-typehints (>=1.2.0)", "sphinx-rtd-theme"] -test = ["anyio[trio]", "coverage[toml] (>=7)", "exceptiongroup (>=1.2.0)", "hypothesis (>=4.0)", "psutil (>=5.9)", "pytest (>=7.0)", "pytest-mock (>=3.6.1)", "trustme", "uvloop (>=0.21.0b1)"] +test = ["anyio[trio]", "coverage[toml] (>=7)", "exceptiongroup (>=1.2.0)", "hypothesis (>=4.0)", "psutil (>=5.9)", "pytest (>=7.0)", "pytest-mock (>=3.6.1)", "trustme", "truststore (>=0.9.1)", "uvloop (>=0.21.0b1)"] trio = ["trio (>=0.26.1)"] [[package]] @@ -448,101 +448,116 @@ files = [ [[package]] name = "charset-normalizer" -version = "3.3.2" +version = "3.4.0" description = "The Real First Universal Charset Detector. 
Open, modern and actively maintained alternative to Chardet." optional = false python-versions = ">=3.7.0" files = [ - {file = "charset-normalizer-3.3.2.tar.gz", hash = "sha256:f30c3cb33b24454a82faecaf01b19c18562b1e89558fb6c56de4d9118a032fd5"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:25baf083bf6f6b341f4121c2f3c548875ee6f5339300e08be3f2b2ba1721cdd3"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:06435b539f889b1f6f4ac1758871aae42dc3a8c0e24ac9e60c2384973ad73027"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:9063e24fdb1e498ab71cb7419e24622516c4a04476b17a2dab57e8baa30d6e03"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6897af51655e3691ff853668779c7bad41579facacf5fd7253b0133308cf000d"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1d3193f4a680c64b4b6a9115943538edb896edc190f0b222e73761716519268e"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:cd70574b12bb8a4d2aaa0094515df2463cb429d8536cfb6c7ce983246983e5a6"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8465322196c8b4d7ab6d1e049e4c5cb460d0394da4a27d23cc242fbf0034b6b5"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a9a8e9031d613fd2009c182b69c7b2c1ef8239a0efb1df3f7c8da66d5dd3d537"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:beb58fe5cdb101e3a055192ac291b7a21e3b7ef4f67fa1d74e331a7f2124341c"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:e06ed3eb3218bc64786f7db41917d4e686cc4856944f53d5bdf83a6884432e12"}, - {file = 
"charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:2e81c7b9c8979ce92ed306c249d46894776a909505d8f5a4ba55b14206e3222f"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:572c3763a264ba47b3cf708a44ce965d98555f618ca42c926a9c1616d8f34269"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:fd1abc0d89e30cc4e02e4064dc67fcc51bd941eb395c502aac3ec19fab46b519"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-win32.whl", hash = "sha256:3d47fa203a7bd9c5b6cee4736ee84ca03b8ef23193c0d1ca99b5089f72645c73"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-win_amd64.whl", hash = "sha256:10955842570876604d404661fbccbc9c7e684caf432c09c715ec38fbae45ae09"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:802fe99cca7457642125a8a88a084cef28ff0cf9407060f7b93dca5aa25480db"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:573f6eac48f4769d667c4442081b1794f52919e7edada77495aaed9236d13a96"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:549a3a73da901d5bc3ce8d24e0600d1fa85524c10287f6004fbab87672bf3e1e"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f27273b60488abe721a075bcca6d7f3964f9f6f067c8c4c605743023d7d3944f"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1ceae2f17a9c33cb48e3263960dc5fc8005351ee19db217e9b1bb15d28c02574"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:65f6f63034100ead094b8744b3b97965785388f308a64cf8d7c34f2f2e5be0c4"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:753f10e867343b4511128c6ed8c82f7bec3bd026875576dfd88483c5c73b2fd8"}, - {file = 
"charset_normalizer-3.3.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4a78b2b446bd7c934f5dcedc588903fb2f5eec172f3d29e52a9096a43722adfc"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:e537484df0d8f426ce2afb2d0f8e1c3d0b114b83f8850e5f2fbea0e797bd82ae"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:eb6904c354526e758fda7167b33005998fb68c46fbc10e013ca97f21ca5c8887"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:deb6be0ac38ece9ba87dea880e438f25ca3eddfac8b002a2ec3d9183a454e8ae"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:4ab2fe47fae9e0f9dee8c04187ce5d09f48eabe611be8259444906793ab7cbce"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:80402cd6ee291dcb72644d6eac93785fe2c8b9cb30893c1af5b8fdd753b9d40f"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-win32.whl", hash = "sha256:7cd13a2e3ddeed6913a65e66e94b51d80a041145a026c27e6bb76c31a853c6ab"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-win_amd64.whl", hash = "sha256:663946639d296df6a2bb2aa51b60a2454ca1cb29835324c640dafb5ff2131a77"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:0b2b64d2bb6d3fb9112bafa732def486049e63de9618b5843bcdd081d8144cd8"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:ddbb2551d7e0102e7252db79ba445cdab71b26640817ab1e3e3648dad515003b"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:55086ee1064215781fff39a1af09518bc9255b50d6333f2e4c74ca09fac6a8f6"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8f4a014bc36d3c57402e2977dada34f9c12300af536839dc38c0beab8878f38a"}, - {file = 
"charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a10af20b82360ab00827f916a6058451b723b4e65030c5a18577c8b2de5b3389"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8d756e44e94489e49571086ef83b2bb8ce311e730092d2c34ca8f7d925cb20aa"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:90d558489962fd4918143277a773316e56c72da56ec7aa3dc3dbbe20fdfed15b"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6ac7ffc7ad6d040517be39eb591cac5ff87416c2537df6ba3cba3bae290c0fed"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:7ed9e526742851e8d5cc9e6cf41427dfc6068d4f5a3bb03659444b4cabf6bc26"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:8bdb58ff7ba23002a4c5808d608e4e6c687175724f54a5dade5fa8c67b604e4d"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:6b3251890fff30ee142c44144871185dbe13b11bab478a88887a639655be1068"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:b4a23f61ce87adf89be746c8a8974fe1c823c891d8f86eb218bb957c924bb143"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:efcb3f6676480691518c177e3b465bcddf57cea040302f9f4e6e191af91174d4"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-win32.whl", hash = "sha256:d965bba47ddeec8cd560687584e88cf699fd28f192ceb452d1d7ee807c5597b7"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-win_amd64.whl", hash = "sha256:96b02a3dc4381e5494fad39be677abcb5e6634bf7b4fa83a6dd3112607547001"}, - {file = "charset_normalizer-3.3.2-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:95f2a5796329323b8f0512e09dbb7a1860c46a39da62ecb2324f116fa8fdc85c"}, - {file = 
"charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c002b4ffc0be611f0d9da932eb0f704fe2602a9a949d1f738e4c34c75b0863d5"}, - {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a981a536974bbc7a512cf44ed14938cf01030a99e9b3a06dd59578882f06f985"}, - {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3287761bc4ee9e33561a7e058c72ac0938c4f57fe49a09eae428fd88aafe7bb6"}, - {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:42cb296636fcc8b0644486d15c12376cb9fa75443e00fb25de0b8602e64c1714"}, - {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0a55554a2fa0d408816b3b5cedf0045f4b8e1a6065aec45849de2d6f3f8e9786"}, - {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:c083af607d2515612056a31f0a8d9e0fcb5876b7bfc0abad3ecd275bc4ebc2d5"}, - {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:87d1351268731db79e0f8e745d92493ee2841c974128ef629dc518b937d9194c"}, - {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:bd8f7df7d12c2db9fab40bdd87a7c09b1530128315d047a086fa3ae3435cb3a8"}, - {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:c180f51afb394e165eafe4ac2936a14bee3eb10debc9d9e4db8958fe36afe711"}, - {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:8c622a5fe39a48f78944a87d4fb8a53ee07344641b0562c540d840748571b811"}, - {file = "charset_normalizer-3.3.2-cp37-cp37m-win32.whl", hash = "sha256:db364eca23f876da6f9e16c9da0df51aa4f104a972735574842618b8c6d999d4"}, - {file = "charset_normalizer-3.3.2-cp37-cp37m-win_amd64.whl", hash = "sha256:86216b5cee4b06df986d214f664305142d9c76df9b6512be2738aa72a2048f99"}, - {file = 
"charset_normalizer-3.3.2-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:6463effa3186ea09411d50efc7d85360b38d5f09b870c48e4600f63af490e56a"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:6c4caeef8fa63d06bd437cd4bdcf3ffefe6738fb1b25951440d80dc7df8c03ac"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:37e55c8e51c236f95b033f6fb391d7d7970ba5fe7ff453dad675e88cf303377a"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fb69256e180cb6c8a894fee62b3afebae785babc1ee98b81cdf68bbca1987f33"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ae5f4161f18c61806f411a13b0310bea87f987c7d2ecdbdaad0e94eb2e404238"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b2b0a0c0517616b6869869f8c581d4eb2dd83a4d79e0ebcb7d373ef9956aeb0a"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:45485e01ff4d3630ec0d9617310448a8702f70e9c01906b0d0118bdf9d124cf2"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:eb00ed941194665c332bf8e078baf037d6c35d7c4f3102ea2d4f16ca94a26dc8"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:2127566c664442652f024c837091890cb1942c30937add288223dc895793f898"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:a50aebfa173e157099939b17f18600f72f84eed3049e743b68ad15bd69b6bf99"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:4d0d1650369165a14e14e1e47b372cfcb31d6ab44e6e33cb2d4e57265290044d"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_s390x.whl", hash = 
"sha256:923c0c831b7cfcb071580d3f46c4baf50f174be571576556269530f4bbd79d04"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:06a81e93cd441c56a9b65d8e1d043daeb97a3d0856d177d5c90ba85acb3db087"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-win32.whl", hash = "sha256:6ef1d82a3af9d3eecdba2321dc1b3c238245d890843e040e41e470ffa64c3e25"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-win_amd64.whl", hash = "sha256:eb8821e09e916165e160797a6c17edda0679379a4be5c716c260e836e122f54b"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:c235ebd9baae02f1b77bcea61bce332cb4331dc3617d254df3323aa01ab47bd4"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:5b4c145409bef602a690e7cfad0a15a55c13320ff7a3ad7ca59c13bb8ba4d45d"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:68d1f8a9e9e37c1223b656399be5d6b448dea850bed7d0f87a8311f1ff3dabb0"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:22afcb9f253dac0696b5a4be4a1c0f8762f8239e21b99680099abd9b2b1b2269"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e27ad930a842b4c5eb8ac0016b0a54f5aebbe679340c26101df33424142c143c"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1f79682fbe303db92bc2b1136016a38a42e835d932bab5b3b1bfcfbf0640e519"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b261ccdec7821281dade748d088bb6e9b69e6d15b30652b74cbbac25e280b796"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:122c7fa62b130ed55f8f285bfd56d5f4b4a5b503609d181f9ad85e55c89f4185"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = 
"sha256:d0eccceffcb53201b5bfebb52600a5fb483a20b61da9dbc885f8b103cbe7598c"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:9f96df6923e21816da7e0ad3fd47dd8f94b2a5ce594e00677c0013018b813458"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:7f04c839ed0b6b98b1a7501a002144b76c18fb1c1850c8b98d458ac269e26ed2"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:34d1c8da1e78d2e001f363791c98a272bb734000fcef47a491c1e3b0505657a8"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:ff8fa367d09b717b2a17a052544193ad76cd49979c805768879cb63d9ca50561"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-win32.whl", hash = "sha256:aed38f6e4fb3f5d6bf81bfa990a07806be9d83cf7bacef998ab1a9bd660a581f"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-win_amd64.whl", hash = "sha256:b01b88d45a6fcb69667cd6d2f7a9aeb4bf53760d7fc536bf679ec94fe9f3ff3d"}, - {file = "charset_normalizer-3.3.2-py3-none-any.whl", hash = "sha256:3e4d1f6587322d2788836a99c69062fbb091331ec940e02d12d179c1d53e25fc"}, + {file = "charset_normalizer-3.4.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:4f9fc98dad6c2eaa32fc3af1417d95b5e3d08aff968df0cd320066def971f9a6"}, + {file = "charset_normalizer-3.4.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:0de7b687289d3c1b3e8660d0741874abe7888100efe14bd0f9fd7141bcbda92b"}, + {file = "charset_normalizer-3.4.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:5ed2e36c3e9b4f21dd9422f6893dec0abf2cca553af509b10cd630f878d3eb99"}, + {file = "charset_normalizer-3.4.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:40d3ff7fc90b98c637bda91c89d51264a3dcf210cade3a2c6f838c7268d7a4ca"}, + {file = "charset_normalizer-3.4.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1110e22af8ca26b90bd6364fe4c763329b0ebf1ee213ba32b68c73de5752323d"}, + {file = 
"charset_normalizer-3.4.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:86f4e8cca779080f66ff4f191a685ced73d2f72d50216f7112185dc02b90b9b7"}, + {file = "charset_normalizer-3.4.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7f683ddc7eedd742e2889d2bfb96d69573fde1d92fcb811979cdb7165bb9c7d3"}, + {file = "charset_normalizer-3.4.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:27623ba66c183eca01bf9ff833875b459cad267aeeb044477fedac35e19ba907"}, + {file = "charset_normalizer-3.4.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:f606a1881d2663630ea5b8ce2efe2111740df4b687bd78b34a8131baa007f79b"}, + {file = "charset_normalizer-3.4.0-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:0b309d1747110feb25d7ed6b01afdec269c647d382c857ef4663bbe6ad95a912"}, + {file = "charset_normalizer-3.4.0-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:136815f06a3ae311fae551c3df1f998a1ebd01ddd424aa5603a4336997629e95"}, + {file = "charset_normalizer-3.4.0-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:14215b71a762336254351b00ec720a8e85cada43b987da5a042e4ce3e82bd68e"}, + {file = "charset_normalizer-3.4.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:79983512b108e4a164b9c8d34de3992f76d48cadc9554c9e60b43f308988aabe"}, + {file = "charset_normalizer-3.4.0-cp310-cp310-win32.whl", hash = "sha256:c94057af19bc953643a33581844649a7fdab902624d2eb739738a30e2b3e60fc"}, + {file = "charset_normalizer-3.4.0-cp310-cp310-win_amd64.whl", hash = "sha256:55f56e2ebd4e3bc50442fbc0888c9d8c94e4e06a933804e2af3e89e2f9c1c749"}, + {file = "charset_normalizer-3.4.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:0d99dd8ff461990f12d6e42c7347fd9ab2532fb70e9621ba520f9e8637161d7c"}, + {file = "charset_normalizer-3.4.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:c57516e58fd17d03ebe67e181a4e4e2ccab1168f8c2976c6a334d4f819fe5944"}, + {file = 
"charset_normalizer-3.4.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:6dba5d19c4dfab08e58d5b36304b3f92f3bd5d42c1a3fa37b5ba5cdf6dfcbcee"}, + {file = "charset_normalizer-3.4.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bf4475b82be41b07cc5e5ff94810e6a01f276e37c2d55571e3fe175e467a1a1c"}, + {file = "charset_normalizer-3.4.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ce031db0408e487fd2775d745ce30a7cd2923667cf3b69d48d219f1d8f5ddeb6"}, + {file = "charset_normalizer-3.4.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8ff4e7cdfdb1ab5698e675ca622e72d58a6fa2a8aa58195de0c0061288e6e3ea"}, + {file = "charset_normalizer-3.4.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3710a9751938947e6327ea9f3ea6332a09bf0ba0c09cae9cb1f250bd1f1549bc"}, + {file = "charset_normalizer-3.4.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:82357d85de703176b5587dbe6ade8ff67f9f69a41c0733cf2425378b49954de5"}, + {file = "charset_normalizer-3.4.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:47334db71978b23ebcf3c0f9f5ee98b8d65992b65c9c4f2d34c2eaf5bcaf0594"}, + {file = "charset_normalizer-3.4.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:8ce7fd6767a1cc5a92a639b391891bf1c268b03ec7e021c7d6d902285259685c"}, + {file = "charset_normalizer-3.4.0-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:f1a2f519ae173b5b6a2c9d5fa3116ce16e48b3462c8b96dfdded11055e3d6365"}, + {file = "charset_normalizer-3.4.0-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:63bc5c4ae26e4bc6be6469943b8253c0fd4e4186c43ad46e713ea61a0ba49129"}, + {file = "charset_normalizer-3.4.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:bcb4f8ea87d03bc51ad04add8ceaf9b0f085ac045ab4d74e73bbc2dc033f0236"}, + {file = "charset_normalizer-3.4.0-cp311-cp311-win32.whl", hash = 
"sha256:9ae4ef0b3f6b41bad6366fb0ea4fc1d7ed051528e113a60fa2a65a9abb5b1d99"}, + {file = "charset_normalizer-3.4.0-cp311-cp311-win_amd64.whl", hash = "sha256:cee4373f4d3ad28f1ab6290684d8e2ebdb9e7a1b74fdc39e4c211995f77bec27"}, + {file = "charset_normalizer-3.4.0-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:0713f3adb9d03d49d365b70b84775d0a0d18e4ab08d12bc46baa6132ba78aaf6"}, + {file = "charset_normalizer-3.4.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:de7376c29d95d6719048c194a9cf1a1b0393fbe8488a22008610b0361d834ecf"}, + {file = "charset_normalizer-3.4.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:4a51b48f42d9358460b78725283f04bddaf44a9358197b889657deba38f329db"}, + {file = "charset_normalizer-3.4.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b295729485b06c1a0683af02a9e42d2caa9db04a373dc38a6a58cdd1e8abddf1"}, + {file = "charset_normalizer-3.4.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ee803480535c44e7f5ad00788526da7d85525cfefaf8acf8ab9a310000be4b03"}, + {file = "charset_normalizer-3.4.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3d59d125ffbd6d552765510e3f31ed75ebac2c7470c7274195b9161a32350284"}, + {file = "charset_normalizer-3.4.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8cda06946eac330cbe6598f77bb54e690b4ca93f593dee1568ad22b04f347c15"}, + {file = "charset_normalizer-3.4.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:07afec21bbbbf8a5cc3651aa96b980afe2526e7f048fdfb7f1014d84acc8b6d8"}, + {file = "charset_normalizer-3.4.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:6b40e8d38afe634559e398cc32b1472f376a4099c75fe6299ae607e404c033b2"}, + {file = "charset_normalizer-3.4.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:b8dcd239c743aa2f9c22ce674a145e0a25cb1566c495928440a181ca1ccf6719"}, + {file = 
"charset_normalizer-3.4.0-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:84450ba661fb96e9fd67629b93d2941c871ca86fc38d835d19d4225ff946a631"}, + {file = "charset_normalizer-3.4.0-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:44aeb140295a2f0659e113b31cfe92c9061622cadbc9e2a2f7b8ef6b1e29ef4b"}, + {file = "charset_normalizer-3.4.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:1db4e7fefefd0f548d73e2e2e041f9df5c59e178b4c72fbac4cc6f535cfb1565"}, + {file = "charset_normalizer-3.4.0-cp312-cp312-win32.whl", hash = "sha256:5726cf76c982532c1863fb64d8c6dd0e4c90b6ece9feb06c9f202417a31f7dd7"}, + {file = "charset_normalizer-3.4.0-cp312-cp312-win_amd64.whl", hash = "sha256:b197e7094f232959f8f20541ead1d9862ac5ebea1d58e9849c1bf979255dfac9"}, + {file = "charset_normalizer-3.4.0-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:dd4eda173a9fcccb5f2e2bd2a9f423d180194b1bf17cf59e3269899235b2a114"}, + {file = "charset_normalizer-3.4.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:e9e3c4c9e1ed40ea53acf11e2a386383c3304212c965773704e4603d589343ed"}, + {file = "charset_normalizer-3.4.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:92a7e36b000bf022ef3dbb9c46bfe2d52c047d5e3f3343f43204263c5addc250"}, + {file = "charset_normalizer-3.4.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:54b6a92d009cbe2fb11054ba694bc9e284dad30a26757b1e372a1fdddaf21920"}, + {file = "charset_normalizer-3.4.0-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1ffd9493de4c922f2a38c2bf62b831dcec90ac673ed1ca182fe11b4d8e9f2a64"}, + {file = "charset_normalizer-3.4.0-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:35c404d74c2926d0287fbd63ed5d27eb911eb9e4a3bb2c6d294f3cfd4a9e0c23"}, + {file = "charset_normalizer-3.4.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4796efc4faf6b53a18e3d46343535caed491776a22af773f366534056c4e1fbc"}, + {file = 
"charset_normalizer-3.4.0-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e7fdd52961feb4c96507aa649550ec2a0d527c086d284749b2f582f2d40a2e0d"}, + {file = "charset_normalizer-3.4.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:92db3c28b5b2a273346bebb24857fda45601aef6ae1c011c0a997106581e8a88"}, + {file = "charset_normalizer-3.4.0-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:ab973df98fc99ab39080bfb0eb3a925181454d7c3ac8a1e695fddfae696d9e90"}, + {file = "charset_normalizer-3.4.0-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:4b67fdab07fdd3c10bb21edab3cbfe8cf5696f453afce75d815d9d7223fbe88b"}, + {file = "charset_normalizer-3.4.0-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:aa41e526a5d4a9dfcfbab0716c7e8a1b215abd3f3df5a45cf18a12721d31cb5d"}, + {file = "charset_normalizer-3.4.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:ffc519621dce0c767e96b9c53f09c5d215578e10b02c285809f76509a3931482"}, + {file = "charset_normalizer-3.4.0-cp313-cp313-win32.whl", hash = "sha256:f19c1585933c82098c2a520f8ec1227f20e339e33aca8fa6f956f6691b784e67"}, + {file = "charset_normalizer-3.4.0-cp313-cp313-win_amd64.whl", hash = "sha256:707b82d19e65c9bd28b81dde95249b07bf9f5b90ebe1ef17d9b57473f8a64b7b"}, + {file = "charset_normalizer-3.4.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:dbe03226baf438ac4fda9e2d0715022fd579cb641c4cf639fa40d53b2fe6f3e2"}, + {file = "charset_normalizer-3.4.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dd9a8bd8900e65504a305bf8ae6fa9fbc66de94178c420791d0293702fce2df7"}, + {file = "charset_normalizer-3.4.0-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b8831399554b92b72af5932cdbbd4ddc55c55f631bb13ff8fe4e6536a06c5c51"}, + {file = "charset_normalizer-3.4.0-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a14969b8691f7998e74663b77b4c36c0337cb1df552da83d5c9004a93afdb574"}, + {file = 
"charset_normalizer-3.4.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dcaf7c1524c0542ee2fc82cc8ec337f7a9f7edee2532421ab200d2b920fc97cf"}, + {file = "charset_normalizer-3.4.0-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:425c5f215d0eecee9a56cdb703203dda90423247421bf0d67125add85d0c4455"}, + {file = "charset_normalizer-3.4.0-cp37-cp37m-musllinux_1_2_aarch64.whl", hash = "sha256:d5b054862739d276e09928de37c79ddeec42a6e1bfc55863be96a36ba22926f6"}, + {file = "charset_normalizer-3.4.0-cp37-cp37m-musllinux_1_2_i686.whl", hash = "sha256:f3e73a4255342d4eb26ef6df01e3962e73aa29baa3124a8e824c5d3364a65748"}, + {file = "charset_normalizer-3.4.0-cp37-cp37m-musllinux_1_2_ppc64le.whl", hash = "sha256:2f6c34da58ea9c1a9515621f4d9ac379871a8f21168ba1b5e09d74250de5ad62"}, + {file = "charset_normalizer-3.4.0-cp37-cp37m-musllinux_1_2_s390x.whl", hash = "sha256:f09cb5a7bbe1ecae6e87901a2eb23e0256bb524a79ccc53eb0b7629fbe7677c4"}, + {file = "charset_normalizer-3.4.0-cp37-cp37m-musllinux_1_2_x86_64.whl", hash = "sha256:0099d79bdfcf5c1f0c2c72f91516702ebf8b0b8ddd8905f97a8aecf49712c621"}, + {file = "charset_normalizer-3.4.0-cp37-cp37m-win32.whl", hash = "sha256:9c98230f5042f4945f957d006edccc2af1e03ed5e37ce7c373f00a5a4daa6149"}, + {file = "charset_normalizer-3.4.0-cp37-cp37m-win_amd64.whl", hash = "sha256:62f60aebecfc7f4b82e3f639a7d1433a20ec32824db2199a11ad4f5e146ef5ee"}, + {file = "charset_normalizer-3.4.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:af73657b7a68211996527dbfeffbb0864e043d270580c5aef06dc4b659a4b578"}, + {file = "charset_normalizer-3.4.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:cab5d0b79d987c67f3b9e9c53f54a61360422a5a0bc075f43cab5621d530c3b6"}, + {file = "charset_normalizer-3.4.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:9289fd5dddcf57bab41d044f1756550f9e7cf0c8e373b8cdf0ce8773dc4bd417"}, + {file = 
"charset_normalizer-3.4.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6b493a043635eb376e50eedf7818f2f322eabbaa974e948bd8bdd29eb7ef2a51"}, + {file = "charset_normalizer-3.4.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9fa2566ca27d67c86569e8c85297aaf413ffab85a8960500f12ea34ff98e4c41"}, + {file = "charset_normalizer-3.4.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a8e538f46104c815be19c975572d74afb53f29650ea2025bbfaef359d2de2f7f"}, + {file = "charset_normalizer-3.4.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6fd30dc99682dc2c603c2b315bded2799019cea829f8bf57dc6b61efde6611c8"}, + {file = "charset_normalizer-3.4.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2006769bd1640bdf4d5641c69a3d63b71b81445473cac5ded39740a226fa88ab"}, + {file = "charset_normalizer-3.4.0-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:dc15e99b2d8a656f8e666854404f1ba54765871104e50c8e9813af8a7db07f12"}, + {file = "charset_normalizer-3.4.0-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:ab2e5bef076f5a235c3774b4f4028a680432cded7cad37bba0fd90d64b187d19"}, + {file = "charset_normalizer-3.4.0-cp38-cp38-musllinux_1_2_ppc64le.whl", hash = "sha256:4ec9dd88a5b71abfc74e9df5ebe7921c35cbb3b641181a531ca65cdb5e8e4dea"}, + {file = "charset_normalizer-3.4.0-cp38-cp38-musllinux_1_2_s390x.whl", hash = "sha256:43193c5cda5d612f247172016c4bb71251c784d7a4d9314677186a838ad34858"}, + {file = "charset_normalizer-3.4.0-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:aa693779a8b50cd97570e5a0f343538a8dbd3e496fa5dcb87e29406ad0299654"}, + {file = "charset_normalizer-3.4.0-cp38-cp38-win32.whl", hash = "sha256:7706f5850360ac01d80c89bcef1640683cc12ed87f42579dab6c5d3ed6888613"}, + {file = "charset_normalizer-3.4.0-cp38-cp38-win_amd64.whl", hash = "sha256:c3e446d253bd88f6377260d07c895816ebf33ffffd56c1c792b13bff9c3e1ade"}, + {file = 
"charset_normalizer-3.4.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:980b4f289d1d90ca5efcf07958d3eb38ed9c0b7676bf2831a54d4f66f9c27dfa"}, + {file = "charset_normalizer-3.4.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:f28f891ccd15c514a0981f3b9db9aa23d62fe1a99997512b0491d2ed323d229a"}, + {file = "charset_normalizer-3.4.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:a8aacce6e2e1edcb6ac625fb0f8c3a9570ccc7bfba1f63419b3769ccf6a00ed0"}, + {file = "charset_normalizer-3.4.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bd7af3717683bea4c87acd8c0d3d5b44d56120b26fd3f8a692bdd2d5260c620a"}, + {file = "charset_normalizer-3.4.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5ff2ed8194587faf56555927b3aa10e6fb69d931e33953943bc4f837dfee2242"}, + {file = "charset_normalizer-3.4.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e91f541a85298cf35433bf66f3fab2a4a2cff05c127eeca4af174f6d497f0d4b"}, + {file = "charset_normalizer-3.4.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:309a7de0a0ff3040acaebb35ec45d18db4b28232f21998851cfa709eeff49d62"}, + {file = "charset_normalizer-3.4.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:285e96d9d53422efc0d7a17c60e59f37fbf3dfa942073f666db4ac71e8d726d0"}, + {file = "charset_normalizer-3.4.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:5d447056e2ca60382d460a604b6302d8db69476fd2015c81e7c35417cfabe4cd"}, + {file = "charset_normalizer-3.4.0-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:20587d20f557fe189b7947d8e7ec5afa110ccf72a3128d61a2a387c3313f46be"}, + {file = "charset_normalizer-3.4.0-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:130272c698667a982a5d0e626851ceff662565379baf0ff2cc58067b81d4f11d"}, + {file = "charset_normalizer-3.4.0-cp39-cp39-musllinux_1_2_s390x.whl", hash = 
"sha256:ab22fbd9765e6954bc0bcff24c25ff71dcbfdb185fcdaca49e81bac68fe724d3"}, + {file = "charset_normalizer-3.4.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:7782afc9b6b42200f7362858f9e73b1f8316afb276d316336c0ec3bd73312742"}, + {file = "charset_normalizer-3.4.0-cp39-cp39-win32.whl", hash = "sha256:2de62e8801ddfff069cd5c504ce3bc9672b23266597d4e4f50eda28846c322f2"}, + {file = "charset_normalizer-3.4.0-cp39-cp39-win_amd64.whl", hash = "sha256:95c3c157765b031331dd4db3c775e58deaee050a3042fcad72cbc4189d7c8dca"}, + {file = "charset_normalizer-3.4.0-py3-none-any.whl", hash = "sha256:fe9f97feb71aa9896b81973a7bbada8c49501dc73e58a10fcef6663af95e5079"}, + {file = "charset_normalizer-3.4.0.tar.gz", hash = "sha256:223217c3d4f82c3ac5e29032b3f1c2eb0fb591b72161f86d93f5719079dae93e"}, ] [[package]] @@ -661,6 +676,90 @@ mypy = ["contourpy[bokeh,docs]", "docutils-stubs", "mypy (==1.4.1)", "types-Pill test = ["Pillow", "contourpy[test-no-images]", "matplotlib"] test-no-images = ["pytest", "pytest-cov", "wurlitzer"] +[[package]] +name = "contourpy" +version = "1.3.0" +description = "Python library for calculating contours of 2D quadrilateral grids" +optional = false +python-versions = ">=3.9" +files = [ + {file = "contourpy-1.3.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:880ea32e5c774634f9fcd46504bf9f080a41ad855f4fef54f5380f5133d343c7"}, + {file = "contourpy-1.3.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:76c905ef940a4474a6289c71d53122a4f77766eef23c03cd57016ce19d0f7b42"}, + {file = "contourpy-1.3.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:92f8557cbb07415a4d6fa191f20fd9d2d9eb9c0b61d1b2f52a8926e43c6e9af7"}, + {file = "contourpy-1.3.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:36f965570cff02b874773c49bfe85562b47030805d7d8360748f3eca570f4cab"}, + {file = "contourpy-1.3.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = 
"sha256:cacd81e2d4b6f89c9f8a5b69b86490152ff39afc58a95af002a398273e5ce589"}, + {file = "contourpy-1.3.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:69375194457ad0fad3a839b9e29aa0b0ed53bb54db1bfb6c3ae43d111c31ce41"}, + {file = "contourpy-1.3.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:7a52040312b1a858b5e31ef28c2e865376a386c60c0e248370bbea2d3f3b760d"}, + {file = "contourpy-1.3.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:3faeb2998e4fcb256542e8a926d08da08977f7f5e62cf733f3c211c2a5586223"}, + {file = "contourpy-1.3.0-cp310-cp310-win32.whl", hash = "sha256:36e0cff201bcb17a0a8ecc7f454fe078437fa6bda730e695a92f2d9932bd507f"}, + {file = "contourpy-1.3.0-cp310-cp310-win_amd64.whl", hash = "sha256:87ddffef1dbe5e669b5c2440b643d3fdd8622a348fe1983fad7a0f0ccb1cd67b"}, + {file = "contourpy-1.3.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:0fa4c02abe6c446ba70d96ece336e621efa4aecae43eaa9b030ae5fb92b309ad"}, + {file = "contourpy-1.3.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:834e0cfe17ba12f79963861e0f908556b2cedd52e1f75e6578801febcc6a9f49"}, + {file = "contourpy-1.3.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dbc4c3217eee163fa3984fd1567632b48d6dfd29216da3ded3d7b844a8014a66"}, + {file = "contourpy-1.3.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4865cd1d419e0c7a7bf6de1777b185eebdc51470800a9f42b9e9decf17762081"}, + {file = "contourpy-1.3.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:303c252947ab4b14c08afeb52375b26781ccd6a5ccd81abcdfc1fafd14cf93c1"}, + {file = "contourpy-1.3.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:637f674226be46f6ba372fd29d9523dd977a291f66ab2a74fbeb5530bb3f445d"}, + {file = "contourpy-1.3.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:76a896b2f195b57db25d6b44e7e03f221d32fe318d03ede41f8b4d9ba1bff53c"}, + {file = 
"contourpy-1.3.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:e1fd23e9d01591bab45546c089ae89d926917a66dceb3abcf01f6105d927e2cb"}, + {file = "contourpy-1.3.0-cp311-cp311-win32.whl", hash = "sha256:d402880b84df3bec6eab53cd0cf802cae6a2ef9537e70cf75e91618a3801c20c"}, + {file = "contourpy-1.3.0-cp311-cp311-win_amd64.whl", hash = "sha256:6cb6cc968059db9c62cb35fbf70248f40994dfcd7aa10444bbf8b3faeb7c2d67"}, + {file = "contourpy-1.3.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:570ef7cf892f0afbe5b2ee410c507ce12e15a5fa91017a0009f79f7d93a1268f"}, + {file = "contourpy-1.3.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:da84c537cb8b97d153e9fb208c221c45605f73147bd4cadd23bdae915042aad6"}, + {file = "contourpy-1.3.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0be4d8425bfa755e0fd76ee1e019636ccc7c29f77a7c86b4328a9eb6a26d0639"}, + {file = "contourpy-1.3.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9c0da700bf58f6e0b65312d0a5e695179a71d0163957fa381bb3c1f72972537c"}, + {file = "contourpy-1.3.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:eb8b141bb00fa977d9122636b16aa67d37fd40a3d8b52dd837e536d64b9a4d06"}, + {file = "contourpy-1.3.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3634b5385c6716c258d0419c46d05c8aa7dc8cb70326c9a4fb66b69ad2b52e09"}, + {file = "contourpy-1.3.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:0dce35502151b6bd35027ac39ba6e5a44be13a68f55735c3612c568cac3805fd"}, + {file = "contourpy-1.3.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:aea348f053c645100612b333adc5983d87be69acdc6d77d3169c090d3b01dc35"}, + {file = "contourpy-1.3.0-cp312-cp312-win32.whl", hash = "sha256:90f73a5116ad1ba7174341ef3ea5c3150ddf20b024b98fb0c3b29034752c8aeb"}, + {file = "contourpy-1.3.0-cp312-cp312-win_amd64.whl", hash = "sha256:b11b39aea6be6764f84360fce6c82211a9db32a7c7de8fa6dd5397cf1d079c3b"}, + {file = 
"contourpy-1.3.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:3e1c7fa44aaae40a2247e2e8e0627f4bea3dd257014764aa644f319a5f8600e3"}, + {file = "contourpy-1.3.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:364174c2a76057feef647c802652f00953b575723062560498dc7930fc9b1cb7"}, + {file = "contourpy-1.3.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:32b238b3b3b649e09ce9aaf51f0c261d38644bdfa35cbaf7b263457850957a84"}, + {file = "contourpy-1.3.0-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d51fca85f9f7ad0b65b4b9fe800406d0d77017d7270d31ec3fb1cc07358fdea0"}, + {file = "contourpy-1.3.0-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:732896af21716b29ab3e988d4ce14bc5133733b85956316fb0c56355f398099b"}, + {file = "contourpy-1.3.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d73f659398a0904e125280836ae6f88ba9b178b2fed6884f3b1f95b989d2c8da"}, + {file = "contourpy-1.3.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:c6c7c2408b7048082932cf4e641fa3b8ca848259212f51c8c59c45aa7ac18f14"}, + {file = "contourpy-1.3.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:f317576606de89da6b7e0861cf6061f6146ead3528acabff9236458a6ba467f8"}, + {file = "contourpy-1.3.0-cp313-cp313-win32.whl", hash = "sha256:31cd3a85dbdf1fc002280c65caa7e2b5f65e4a973fcdf70dd2fdcb9868069294"}, + {file = "contourpy-1.3.0-cp313-cp313-win_amd64.whl", hash = "sha256:4553c421929ec95fb07b3aaca0fae668b2eb5a5203d1217ca7c34c063c53d087"}, + {file = "contourpy-1.3.0-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:345af746d7766821d05d72cb8f3845dfd08dd137101a2cb9b24de277d716def8"}, + {file = "contourpy-1.3.0-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:3bb3808858a9dc68f6f03d319acd5f1b8a337e6cdda197f02f4b8ff67ad2057b"}, + {file = "contourpy-1.3.0-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:420d39daa61aab1221567b42eecb01112908b2cab7f1b4106a52caaec8d36973"}, + {file = "contourpy-1.3.0-cp313-cp313t-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4d63ee447261e963af02642ffcb864e5a2ee4cbfd78080657a9880b8b1868e18"}, + {file = "contourpy-1.3.0-cp313-cp313t-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:167d6c890815e1dac9536dca00828b445d5d0df4d6a8c6adb4a7ec3166812fa8"}, + {file = "contourpy-1.3.0-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:710a26b3dc80c0e4febf04555de66f5fd17e9cf7170a7b08000601a10570bda6"}, + {file = "contourpy-1.3.0-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:75ee7cb1a14c617f34a51d11fa7524173e56551646828353c4af859c56b766e2"}, + {file = "contourpy-1.3.0-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:33c92cdae89ec5135d036e7218e69b0bb2851206077251f04a6c4e0e21f03927"}, + {file = "contourpy-1.3.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:a11077e395f67ffc2c44ec2418cfebed032cd6da3022a94fc227b6faf8e2acb8"}, + {file = "contourpy-1.3.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:e8134301d7e204c88ed7ab50028ba06c683000040ede1d617298611f9dc6240c"}, + {file = "contourpy-1.3.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e12968fdfd5bb45ffdf6192a590bd8ddd3ba9e58360b29683c6bb71a7b41edca"}, + {file = "contourpy-1.3.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:fd2a0fc506eccaaa7595b7e1418951f213cf8255be2600f1ea1b61e46a60c55f"}, + {file = "contourpy-1.3.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:4cfb5c62ce023dfc410d6059c936dcf96442ba40814aefbfa575425a3a7f19dc"}, + {file = "contourpy-1.3.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:68a32389b06b82c2fdd68276148d7b9275b5f5cf13e5417e4252f6d1a34f72a2"}, + {file = "contourpy-1.3.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = 
"sha256:94e848a6b83da10898cbf1311a815f770acc9b6a3f2d646f330d57eb4e87592e"}, + {file = "contourpy-1.3.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:d78ab28a03c854a873787a0a42254a0ccb3cb133c672f645c9f9c8f3ae9d0800"}, + {file = "contourpy-1.3.0-cp39-cp39-win32.whl", hash = "sha256:81cb5ed4952aae6014bc9d0421dec7c5835c9c8c31cdf51910b708f548cf58e5"}, + {file = "contourpy-1.3.0-cp39-cp39-win_amd64.whl", hash = "sha256:14e262f67bd7e6eb6880bc564dcda30b15e351a594657e55b7eec94b6ef72843"}, + {file = "contourpy-1.3.0-pp310-pypy310_pp73-macosx_10_15_x86_64.whl", hash = "sha256:fe41b41505a5a33aeaed2a613dccaeaa74e0e3ead6dd6fd3a118fb471644fd6c"}, + {file = "contourpy-1.3.0-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:eca7e17a65f72a5133bdbec9ecf22401c62bcf4821361ef7811faee695799779"}, + {file = "contourpy-1.3.0-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:1ec4dc6bf570f5b22ed0d7efba0dfa9c5b9e0431aeea7581aa217542d9e809a4"}, + {file = "contourpy-1.3.0-pp39-pypy39_pp73-macosx_10_15_x86_64.whl", hash = "sha256:00ccd0dbaad6d804ab259820fa7cb0b8036bda0686ef844d24125d8287178ce0"}, + {file = "contourpy-1.3.0-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8ca947601224119117f7c19c9cdf6b3ab54c5726ef1d906aa4a69dfb6dd58102"}, + {file = "contourpy-1.3.0-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:c6ec93afeb848a0845a18989da3beca3eec2c0f852322efe21af1931147d12cb"}, + {file = "contourpy-1.3.0.tar.gz", hash = "sha256:7ffa0db17717a8ffb127efd0c95a4362d996b892c2904db72428d5b52e1938a4"}, +] + +[package.dependencies] +numpy = ">=1.23" + +[package.extras] +bokeh = ["bokeh", "selenium"] +docs = ["furo", "sphinx (>=7.2)", "sphinx-copybutton"] +mypy = ["contourpy[bokeh,docs]", "docutils-stubs", "mypy (==1.11.1)", "types-Pillow"] +test = ["Pillow", "contourpy[test-no-images]", "matplotlib"] +test-no-images = ["pytest", "pytest-cov", "pytest-rerunfailures", "pytest-xdist", "wurlitzer"] + [[package]] name = "cryptography" 
version = "43.0.1" @@ -746,33 +845,37 @@ tests = ["pytest", "pytest-cov", "pytest-xdist"] [[package]] name = "debugpy" -version = "1.8.6" +version = "1.8.7" description = "An implementation of the Debug Adapter Protocol for Python" optional = false python-versions = ">=3.8" files = [ - {file = "debugpy-1.8.6-cp310-cp310-macosx_14_0_x86_64.whl", hash = "sha256:30f467c5345d9dfdcc0afdb10e018e47f092e383447500f125b4e013236bf14b"}, - {file = "debugpy-1.8.6-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5d73d8c52614432f4215d0fe79a7e595d0dd162b5c15233762565be2f014803b"}, - {file = "debugpy-1.8.6-cp310-cp310-win32.whl", hash = "sha256:e3e182cd98eac20ee23a00653503315085b29ab44ed66269482349d307b08df9"}, - {file = "debugpy-1.8.6-cp310-cp310-win_amd64.whl", hash = "sha256:e3a82da039cfe717b6fb1886cbbe5c4a3f15d7df4765af857f4307585121c2dd"}, - {file = "debugpy-1.8.6-cp311-cp311-macosx_14_0_universal2.whl", hash = "sha256:67479a94cf5fd2c2d88f9615e087fcb4fec169ec780464a3f2ba4a9a2bb79955"}, - {file = "debugpy-1.8.6-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9fb8653f6cbf1dd0a305ac1aa66ec246002145074ea57933978346ea5afdf70b"}, - {file = "debugpy-1.8.6-cp311-cp311-win32.whl", hash = "sha256:cdaf0b9691879da2d13fa39b61c01887c34558d1ff6e5c30e2eb698f5384cd43"}, - {file = "debugpy-1.8.6-cp311-cp311-win_amd64.whl", hash = "sha256:43996632bee7435583952155c06881074b9a742a86cee74e701d87ca532fe833"}, - {file = "debugpy-1.8.6-cp312-cp312-macosx_14_0_universal2.whl", hash = "sha256:db891b141fc6ee4b5fc6d1cc8035ec329cabc64bdd2ae672b4550c87d4ecb128"}, - {file = "debugpy-1.8.6-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:567419081ff67da766c898ccf21e79f1adad0e321381b0dfc7a9c8f7a9347972"}, - {file = "debugpy-1.8.6-cp312-cp312-win32.whl", hash = 
"sha256:c9834dfd701a1f6bf0f7f0b8b1573970ae99ebbeee68314116e0ccc5c78eea3c"}, - {file = "debugpy-1.8.6-cp312-cp312-win_amd64.whl", hash = "sha256:e4ce0570aa4aca87137890d23b86faeadf184924ad892d20c54237bcaab75d8f"}, - {file = "debugpy-1.8.6-cp38-cp38-macosx_14_0_x86_64.whl", hash = "sha256:df5dc9eb4ca050273b8e374a4cd967c43be1327eeb42bfe2f58b3cdfe7c68dcb"}, - {file = "debugpy-1.8.6-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0a85707c6a84b0c5b3db92a2df685b5230dd8fb8c108298ba4f11dba157a615a"}, - {file = "debugpy-1.8.6-cp38-cp38-win32.whl", hash = "sha256:538c6cdcdcdad310bbefd96d7850be1cd46e703079cc9e67d42a9ca776cdc8a8"}, - {file = "debugpy-1.8.6-cp38-cp38-win_amd64.whl", hash = "sha256:22140bc02c66cda6053b6eb56dfe01bbe22a4447846581ba1dd6df2c9f97982d"}, - {file = "debugpy-1.8.6-cp39-cp39-macosx_14_0_x86_64.whl", hash = "sha256:c1cef65cffbc96e7b392d9178dbfd524ab0750da6c0023c027ddcac968fd1caa"}, - {file = "debugpy-1.8.6-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f1e60bd06bb3cc5c0e957df748d1fab501e01416c43a7bdc756d2a992ea1b881"}, - {file = "debugpy-1.8.6-cp39-cp39-win32.whl", hash = "sha256:f7158252803d0752ed5398d291dee4c553bb12d14547c0e1843ab74ee9c31123"}, - {file = "debugpy-1.8.6-cp39-cp39-win_amd64.whl", hash = "sha256:3358aa619a073b620cd0d51d8a6176590af24abcc3fe2e479929a154bf591b51"}, - {file = "debugpy-1.8.6-py2.py3-none-any.whl", hash = "sha256:b48892df4d810eff21d3ef37274f4c60d32cdcafc462ad5647239036b0f0649f"}, - {file = "debugpy-1.8.6.zip", hash = "sha256:c931a9371a86784cee25dec8d65bc2dc7a21f3f1552e3833d9ef8f919d22280a"}, + {file = "debugpy-1.8.7-cp310-cp310-macosx_14_0_x86_64.whl", hash = "sha256:95fe04a573b8b22896c404365e03f4eda0ce0ba135b7667a1e57bd079793b96b"}, + {file = "debugpy-1.8.7-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:628a11f4b295ffb4141d8242a9bb52b77ad4a63a2ad19217a93be0f77f2c28c9"}, + {file = "debugpy-1.8.7-cp310-cp310-win32.whl", hash = "sha256:85ce9c1d0eebf622f86cc68618ad64bf66c4fc3197d88f74bb695a416837dd55"}, + {file = "debugpy-1.8.7-cp310-cp310-win_amd64.whl", hash = "sha256:29e1571c276d643757ea126d014abda081eb5ea4c851628b33de0c2b6245b037"}, + {file = "debugpy-1.8.7-cp311-cp311-macosx_14_0_universal2.whl", hash = "sha256:caf528ff9e7308b74a1749c183d6808ffbedbb9fb6af78b033c28974d9b8831f"}, + {file = "debugpy-1.8.7-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cba1d078cf2e1e0b8402e6bda528bf8fda7ccd158c3dba6c012b7897747c41a0"}, + {file = "debugpy-1.8.7-cp311-cp311-win32.whl", hash = "sha256:171899588bcd412151e593bd40d9907133a7622cd6ecdbdb75f89d1551df13c2"}, + {file = "debugpy-1.8.7-cp311-cp311-win_amd64.whl", hash = "sha256:6e1c4ffb0c79f66e89dfd97944f335880f0d50ad29525dc792785384923e2211"}, + {file = "debugpy-1.8.7-cp312-cp312-macosx_14_0_universal2.whl", hash = "sha256:4d27d842311353ede0ad572600c62e4bcd74f458ee01ab0dd3a1a4457e7e3706"}, + {file = "debugpy-1.8.7-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:703c1fd62ae0356e194f3e7b7a92acd931f71fe81c4b3be2c17a7b8a4b546ec2"}, + {file = "debugpy-1.8.7-cp312-cp312-win32.whl", hash = "sha256:2f729228430ef191c1e4df72a75ac94e9bf77413ce5f3f900018712c9da0aaca"}, + {file = "debugpy-1.8.7-cp312-cp312-win_amd64.whl", hash = "sha256:45c30aaefb3e1975e8a0258f5bbd26cd40cde9bfe71e9e5a7ac82e79bad64e39"}, + {file = "debugpy-1.8.7-cp313-cp313-macosx_14_0_universal2.whl", hash = "sha256:d050a1ec7e925f514f0f6594a1e522580317da31fbda1af71d1530d6ea1f2b40"}, + {file = "debugpy-1.8.7-cp313-cp313-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f2f4349a28e3228a42958f8ddaa6333d6f8282d5edaea456070e48609c5983b7"}, + {file = "debugpy-1.8.7-cp313-cp313-win32.whl", 
hash = "sha256:11ad72eb9ddb436afb8337891a986302e14944f0f755fd94e90d0d71e9100bba"}, + {file = "debugpy-1.8.7-cp313-cp313-win_amd64.whl", hash = "sha256:2efb84d6789352d7950b03d7f866e6d180284bc02c7e12cb37b489b7083d81aa"}, + {file = "debugpy-1.8.7-cp38-cp38-macosx_14_0_x86_64.whl", hash = "sha256:4b908291a1d051ef3331484de8e959ef3e66f12b5e610c203b5b75d2725613a7"}, + {file = "debugpy-1.8.7-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:da8df5b89a41f1fd31503b179d0a84a5fdb752dddd5b5388dbd1ae23cda31ce9"}, + {file = "debugpy-1.8.7-cp38-cp38-win32.whl", hash = "sha256:b12515e04720e9e5c2216cc7086d0edadf25d7ab7e3564ec8b4521cf111b4f8c"}, + {file = "debugpy-1.8.7-cp38-cp38-win_amd64.whl", hash = "sha256:93176e7672551cb5281577cdb62c63aadc87ec036f0c6a486f0ded337c504596"}, + {file = "debugpy-1.8.7-cp39-cp39-macosx_14_0_x86_64.whl", hash = "sha256:90d93e4f2db442f8222dec5ec55ccfc8005821028982f1968ebf551d32b28907"}, + {file = "debugpy-1.8.7-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b6db2a370e2700557a976eaadb16243ec9c91bd46f1b3bb15376d7aaa7632c81"}, + {file = "debugpy-1.8.7-cp39-cp39-win32.whl", hash = "sha256:a6cf2510740e0c0b4a40330640e4b454f928c7b99b0c9dbf48b11efba08a8cda"}, + {file = "debugpy-1.8.7-cp39-cp39-win_amd64.whl", hash = "sha256:6a9d9d6d31846d8e34f52987ee0f1a904c7baa4912bf4843ab39dadf9b8f3e0d"}, + {file = "debugpy-1.8.7-py2.py3-none-any.whl", hash = "sha256:57b00de1c8d2c84a61b90880f7e5b6deaf4c312ecbde3a0e8912f2a56c4ac9ae"}, + {file = "debugpy-1.8.7.zip", hash = "sha256:18b8f731ed3e2e1df8e9cdaa23fb1fc9c24e570cd0081625308ec51c82efe42e"}, ] [[package]] @@ -799,13 +902,13 @@ files = [ [[package]] name = "distlib" -version = "0.3.8" +version = "0.3.9" description = "Distribution utilities" optional = false python-versions = "*" files = [ - {file = "distlib-0.3.8-py2.py3-none-any.whl", hash = 
"sha256:034db59a0b96f8ca18035f36290806a9a6e6bd9d1ff91e45a7f172eb17e51784"}, - {file = "distlib-0.3.8.tar.gz", hash = "sha256:1530ea13e350031b6312d8580ddb6b27a104275a31106523b8f123787f494f64"}, + {file = "distlib-0.3.9-py2.py3-none-any.whl", hash = "sha256:47f8c22fd27c27e25a65601af709b38e4f0a45ea4fc2e710f65755fa8caaaf87"}, + {file = "distlib-0.3.9.tar.gz", hash = "sha256:a60f20dea646b8a33f3e7772f74dc0b2d0772d2837ee1342a00645c81edf9403"}, ] [[package]] @@ -1021,13 +1124,13 @@ test = ["coverage[toml]", "ddt (>=1.1.1,!=1.4.3)", "mock", "mypy", "pre-commit", [[package]] name = "griffe" -version = "1.3.1" +version = "1.4.0" description = "Signatures for entire Python programs. Extract the structure, the frame, the skeleton of your project, to generate API documentation or find breaking changes in your API." optional = false python-versions = ">=3.8" files = [ - {file = "griffe-1.3.1-py3-none-any.whl", hash = "sha256:940aeb630bc3054b4369567f150b6365be6f11eef46b0ed8623aea96e6d17b19"}, - {file = "griffe-1.3.1.tar.gz", hash = "sha256:3f86a716b631a4c0f96a43cb75d05d3c85975003c20540426c0eba3b0581c56a"}, + {file = "griffe-1.4.0-py3-none-any.whl", hash = "sha256:e589de8b8c137e99a46ec45f9598fc0ac5b6868ce824b24db09c02d117b89bc5"}, + {file = "griffe-1.4.0.tar.gz", hash = "sha256:8fccc585896d13f1221035d32c50dec65830c87d23f9adb9b1e6f3d63574f7f5"}, ] [package.dependencies] @@ -1047,13 +1150,13 @@ files = [ [[package]] name = "httpcore" -version = "1.0.5" +version = "1.0.6" description = "A minimal low-level HTTP client." 
optional = false python-versions = ">=3.8" files = [ - {file = "httpcore-1.0.5-py3-none-any.whl", hash = "sha256:421f18bac248b25d310f3cacd198d55b8e6125c107797b609ff9b7a6ba7991b5"}, - {file = "httpcore-1.0.5.tar.gz", hash = "sha256:34a38e2f9291467ee3b44e89dd52615370e152954ba21721378a87b2960f7a61"}, + {file = "httpcore-1.0.6-py3-none-any.whl", hash = "sha256:27b59625743b85577a8c0e10e55b50b5368a4f2cfe8cc7bcfa9cf00829c2682f"}, + {file = "httpcore-1.0.6.tar.gz", hash = "sha256:73f6dbd6eb8c21bbf7ef8efad555481853f5f6acdeaff1edb0694289269ee17f"}, ] [package.dependencies] @@ -1064,7 +1167,7 @@ h11 = ">=0.13,<0.15" asyncio = ["anyio (>=4.0,<5.0)"] http2 = ["h2 (>=3,<5)"] socks = ["socksio (==1.*)"] -trio = ["trio (>=0.22.0,<0.26.0)"] +trio = ["trio (>=0.22.0,<1.0)"] [[package]] name = "httpx" @@ -1320,21 +1423,25 @@ test = ["portend", "pytest (>=6,!=8.1.*)", "pytest-checkdocs (>=2.4)", "pytest-c [[package]] name = "jaraco-functools" -version = "4.0.2" +version = "4.1.0" description = "Functools like those found in stdlib" optional = false python-versions = ">=3.8" files = [ - {file = "jaraco.functools-4.0.2-py3-none-any.whl", hash = "sha256:c9d16a3ed4ccb5a889ad8e0b7a343401ee5b2a71cee6ed192d3f68bc351e94e3"}, - {file = "jaraco_functools-4.0.2.tar.gz", hash = "sha256:3460c74cd0d32bf82b9576bbb3527c4364d5b27a21f5158a62aed6c4b42e23f5"}, + {file = "jaraco.functools-4.1.0-py3-none-any.whl", hash = "sha256:ad159f13428bc4acbf5541ad6dec511f91573b90fba04df61dafa2a1231cf649"}, + {file = "jaraco_functools-4.1.0.tar.gz", hash = "sha256:70f7e0e2ae076498e212562325e805204fc092d7b4c17e0e86c959e249701a9d"}, ] [package.dependencies] more-itertools = "*" [package.extras] +check = ["pytest-checkdocs (>=2.4)", "pytest-ruff (>=0.2.1)"] +cover = ["pytest-cov"] doc = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"] -test = ["jaraco.classes", "pytest (>=6,!=8.1.*)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", 
"pytest-mypy", "pytest-ruff (>=0.2.1)"] +enabler = ["pytest-enabler (>=2.2)"] +test = ["jaraco.classes", "pytest (>=6,!=8.1.*)"] +type = ["pytest-mypy"] [[package]] name = "jedi" @@ -1970,6 +2077,68 @@ files = [ {file = "MarkupSafe-2.1.5.tar.gz", hash = "sha256:d283d37a890ba4c1ae73ffadf8046435c76e7bc2247bbb63c00bd1a709c6544b"}, ] +[[package]] +name = "matplotlib" +version = "3.7.2" +description = "Python plotting package" +optional = false +python-versions = ">=3.8" +files = [ + {file = "matplotlib-3.7.2-cp310-cp310-macosx_10_12_universal2.whl", hash = "sha256:2699f7e73a76d4c110f4f25be9d2496d6ab4f17345307738557d345f099e07de"}, + {file = "matplotlib-3.7.2-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:a8035ba590658bae7562786c9cc6ea1a84aa49d3afab157e414c9e2ea74f496d"}, + {file = "matplotlib-3.7.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:2f8e4a49493add46ad4a8c92f63e19d548b2b6ebbed75c6b4c7f46f57d36cdd1"}, + {file = "matplotlib-3.7.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:71667eb2ccca4c3537d9414b1bc00554cb7f91527c17ee4ec38027201f8f1603"}, + {file = "matplotlib-3.7.2-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:152ee0b569a37630d8628534c628456b28686e085d51394da6b71ef84c4da201"}, + {file = "matplotlib-3.7.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:070f8dddd1f5939e60aacb8fa08f19551f4b0140fab16a3669d5cd6e9cb28fc8"}, + {file = "matplotlib-3.7.2-cp310-cp310-win32.whl", hash = "sha256:fdbb46fad4fb47443b5b8ac76904b2e7a66556844f33370861b4788db0f8816a"}, + {file = "matplotlib-3.7.2-cp310-cp310-win_amd64.whl", hash = "sha256:23fb1750934e5f0128f9423db27c474aa32534cec21f7b2153262b066a581fd1"}, + {file = "matplotlib-3.7.2-cp311-cp311-macosx_10_12_universal2.whl", hash = "sha256:30e1409b857aa8a747c5d4f85f63a79e479835f8dffc52992ac1f3f25837b544"}, + {file = "matplotlib-3.7.2-cp311-cp311-macosx_10_12_x86_64.whl", hash = 
"sha256:50e0a55ec74bf2d7a0ebf50ac580a209582c2dd0f7ab51bc270f1b4a0027454e"}, + {file = "matplotlib-3.7.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:ac60daa1dc83e8821eed155796b0f7888b6b916cf61d620a4ddd8200ac70cd64"}, + {file = "matplotlib-3.7.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:305e3da477dc8607336ba10bac96986d6308d614706cae2efe7d3ffa60465b24"}, + {file = "matplotlib-3.7.2-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1c308b255efb9b06b23874236ec0f10f026673ad6515f602027cc8ac7805352d"}, + {file = "matplotlib-3.7.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:60c521e21031632aa0d87ca5ba0c1c05f3daacadb34c093585a0be6780f698e4"}, + {file = "matplotlib-3.7.2-cp311-cp311-win32.whl", hash = "sha256:26bede320d77e469fdf1bde212de0ec889169b04f7f1179b8930d66f82b30cbc"}, + {file = "matplotlib-3.7.2-cp311-cp311-win_amd64.whl", hash = "sha256:af4860132c8c05261a5f5f8467f1b269bf1c7c23902d75f2be57c4a7f2394b3e"}, + {file = "matplotlib-3.7.2-cp38-cp38-macosx_10_12_universal2.whl", hash = "sha256:a1733b8e84e7e40a9853e505fe68cc54339f97273bdfe6f3ed980095f769ddc7"}, + {file = "matplotlib-3.7.2-cp38-cp38-macosx_10_12_x86_64.whl", hash = "sha256:d9881356dc48e58910c53af82b57183879129fa30492be69058c5b0d9fddf391"}, + {file = "matplotlib-3.7.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:f081c03f413f59390a80b3e351cc2b2ea0205839714dbc364519bcf51f4b56ca"}, + {file = "matplotlib-3.7.2-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:1cd120fca3407a225168238b790bd5c528f0fafde6172b140a2f3ab7a4ea63e9"}, + {file = "matplotlib-3.7.2-cp38-cp38-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:a2c1590b90aa7bd741b54c62b78de05d4186271e34e2377e0289d943b3522273"}, + {file = "matplotlib-3.7.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6d2ff3c984b8a569bc1383cd468fc06b70d7b59d5c2854ca39f1436ae8394117"}, + {file = 
"matplotlib-3.7.2-cp38-cp38-win32.whl", hash = "sha256:5dea00b62d28654b71ca92463656d80646675628d0828e08a5f3b57e12869e13"}, + {file = "matplotlib-3.7.2-cp38-cp38-win_amd64.whl", hash = "sha256:0f506a1776ee94f9e131af1ac6efa6e5bc7cb606a3e389b0ccb6e657f60bb676"}, + {file = "matplotlib-3.7.2-cp39-cp39-macosx_10_12_universal2.whl", hash = "sha256:6515e878f91894c2e4340d81f0911857998ccaf04dbc1bba781e3d89cbf70608"}, + {file = "matplotlib-3.7.2-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:71f7a8c6b124e904db550f5b9fe483d28b896d4135e45c4ea381ad3b8a0e3256"}, + {file = "matplotlib-3.7.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:12f01b92ecd518e0697da4d97d163b2b3aa55eb3eb4e2c98235b3396d7dad55f"}, + {file = "matplotlib-3.7.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a7e28d6396563955f7af437894a36bf2b279462239a41028323e04b85179058b"}, + {file = "matplotlib-3.7.2-cp39-cp39-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:dbcf59334ff645e6a67cd5f78b4b2cdb76384cdf587fa0d2dc85f634a72e1a3e"}, + {file = "matplotlib-3.7.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:318c89edde72ff95d8df67d82aca03861240512994a597a435a1011ba18dbc7f"}, + {file = "matplotlib-3.7.2-cp39-cp39-win32.whl", hash = "sha256:ce55289d5659b5b12b3db4dc9b7075b70cef5631e56530f14b2945e8836f2d20"}, + {file = "matplotlib-3.7.2-cp39-cp39-win_amd64.whl", hash = "sha256:2ecb5be2b2815431c81dc115667e33da0f5a1bcf6143980d180d09a717c4a12e"}, + {file = "matplotlib-3.7.2-pp38-pypy38_pp73-macosx_10_12_x86_64.whl", hash = "sha256:fdcd28360dbb6203fb5219b1a5658df226ac9bebc2542a9e8f457de959d713d0"}, + {file = "matplotlib-3.7.2-pp38-pypy38_pp73-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0c3cca3e842b11b55b52c6fb8bd6a4088693829acbfcdb3e815fa9b7d5c92c1b"}, + {file = "matplotlib-3.7.2-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ebf577c7a6744e9e1bd3fee45fc74a02710b214f94e2bde344912d85e0c9af7c"}, + {file = 
"matplotlib-3.7.2-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:936bba394682049919dda062d33435b3be211dc3dcaa011e09634f060ec878b2"}, + {file = "matplotlib-3.7.2-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:bc221ffbc2150458b1cd71cdd9ddd5bb37962b036e41b8be258280b5b01da1dd"}, + {file = "matplotlib-3.7.2-pp39-pypy39_pp73-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:35d74ebdb3f71f112b36c2629cf32323adfbf42679e2751252acd468f5001c07"}, + {file = "matplotlib-3.7.2-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:717157e61b3a71d3d26ad4e1770dc85156c9af435659a25ee6407dc866cb258d"}, + {file = "matplotlib-3.7.2-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:20f844d6be031948148ba49605c8b96dfe7d3711d1b63592830d650622458c11"}, + {file = "matplotlib-3.7.2.tar.gz", hash = "sha256:a8cdb91dddb04436bd2f098b8fdf4b81352e68cf4d2c6756fcc414791076569b"}, +] + +[package.dependencies] +contourpy = ">=1.0.1" +cycler = ">=0.10" +fonttools = ">=4.22.0" +importlib-resources = {version = ">=3.2.0", markers = "python_version < \"3.10\""} +kiwisolver = ">=1.0.1" +numpy = ">=1.20" +packaging = ">=20.0" +pillow = ">=6.2.0" +pyparsing = ">=2.3.1,<3.1" +python-dateutil = ">=2.7" + [[package]] name = "matplotlib" version = "3.7.5" @@ -2655,6 +2824,68 @@ files = [ {file = "numpy-1.26.4.tar.gz", hash = "sha256:2a02aba9ed12e4ac4eb3ea9421c420301a0c6460d9830d74a9df87efa4912010"}, ] +[[package]] +name = "numpy" +version = "2.1.2" +description = "Fundamental package for array computing in Python" +optional = false +python-versions = ">=3.10" +files = [ + {file = "numpy-2.1.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:30d53720b726ec36a7f88dc873f0eec8447fbc93d93a8f079dfac2629598d6ee"}, + {file = "numpy-2.1.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:e8d3ca0a72dd8846eb6f7dfe8f19088060fcb76931ed592d29128e0219652884"}, + {file = "numpy-2.1.2-cp310-cp310-macosx_14_0_arm64.whl", hash = 
"sha256:fc44e3c68ff00fd991b59092a54350e6e4911152682b4782f68070985aa9e648"}, + {file = "numpy-2.1.2-cp310-cp310-macosx_14_0_x86_64.whl", hash = "sha256:7c1c60328bd964b53f8b835df69ae8198659e2b9302ff9ebb7de4e5a5994db3d"}, + {file = "numpy-2.1.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6cdb606a7478f9ad91c6283e238544451e3a95f30fb5467fbf715964341a8a86"}, + {file = "numpy-2.1.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d666cb72687559689e9906197e3bec7b736764df6a2e58ee265e360663e9baf7"}, + {file = "numpy-2.1.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:c6eef7a2dbd0abfb0d9eaf78b73017dbfd0b54051102ff4e6a7b2980d5ac1a03"}, + {file = "numpy-2.1.2-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:12edb90831ff481f7ef5f6bc6431a9d74dc0e5ff401559a71e5e4611d4f2d466"}, + {file = "numpy-2.1.2-cp310-cp310-win32.whl", hash = "sha256:a65acfdb9c6ebb8368490dbafe83c03c7e277b37e6857f0caeadbbc56e12f4fb"}, + {file = "numpy-2.1.2-cp310-cp310-win_amd64.whl", hash = "sha256:860ec6e63e2c5c2ee5e9121808145c7bf86c96cca9ad396c0bd3e0f2798ccbe2"}, + {file = "numpy-2.1.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:b42a1a511c81cc78cbc4539675713bbcf9d9c3913386243ceff0e9429ca892fe"}, + {file = "numpy-2.1.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:faa88bc527d0f097abdc2c663cddf37c05a1c2f113716601555249805cf573f1"}, + {file = "numpy-2.1.2-cp311-cp311-macosx_14_0_arm64.whl", hash = "sha256:c82af4b2ddd2ee72d1fc0c6695048d457e00b3582ccde72d8a1c991b808bb20f"}, + {file = "numpy-2.1.2-cp311-cp311-macosx_14_0_x86_64.whl", hash = "sha256:13602b3174432a35b16c4cfb5de9a12d229727c3dd47a6ce35111f2ebdf66ff4"}, + {file = "numpy-2.1.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1ebec5fd716c5a5b3d8dfcc439be82a8407b7b24b230d0ad28a81b61c2f4659a"}, + {file = "numpy-2.1.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:e2b49c3c0804e8ecb05d59af8386ec2f74877f7ca8fd9c1e00be2672e4d399b1"}, + {file = "numpy-2.1.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:2cbba4b30bf31ddbe97f1c7205ef976909a93a66bb1583e983adbd155ba72ac2"}, + {file = "numpy-2.1.2-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:8e00ea6fc82e8a804433d3e9cedaa1051a1422cb6e443011590c14d2dea59146"}, + {file = "numpy-2.1.2-cp311-cp311-win32.whl", hash = "sha256:5006b13a06e0b38d561fab5ccc37581f23c9511879be7693bd33c7cd15ca227c"}, + {file = "numpy-2.1.2-cp311-cp311-win_amd64.whl", hash = "sha256:f1eb068ead09f4994dec71c24b2844f1e4e4e013b9629f812f292f04bd1510d9"}, + {file = "numpy-2.1.2-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:d7bf0a4f9f15b32b5ba53147369e94296f5fffb783db5aacc1be15b4bf72f43b"}, + {file = "numpy-2.1.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:b1d0fcae4f0949f215d4632be684a539859b295e2d0cb14f78ec231915d644db"}, + {file = "numpy-2.1.2-cp312-cp312-macosx_14_0_arm64.whl", hash = "sha256:f751ed0a2f250541e19dfca9f1eafa31a392c71c832b6bb9e113b10d050cb0f1"}, + {file = "numpy-2.1.2-cp312-cp312-macosx_14_0_x86_64.whl", hash = "sha256:bd33f82e95ba7ad632bc57837ee99dba3d7e006536200c4e9124089e1bf42426"}, + {file = "numpy-2.1.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1b8cde4f11f0a975d1fd59373b32e2f5a562ade7cde4f85b7137f3de8fbb29a0"}, + {file = "numpy-2.1.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6d95f286b8244b3649b477ac066c6906fbb2905f8ac19b170e2175d3d799f4df"}, + {file = "numpy-2.1.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:ab4754d432e3ac42d33a269c8567413bdb541689b02d93788af4131018cbf366"}, + {file = "numpy-2.1.2-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:e585c8ae871fd38ac50598f4763d73ec5497b0de9a0ab4ef5b69f01c6a046142"}, + {file = "numpy-2.1.2-cp312-cp312-win32.whl", hash = "sha256:9c6c754df29ce6a89ed23afb25550d1c2d5fdb9901d9c67a16e0b16eaf7e2550"}, + {file = 
"numpy-2.1.2-cp312-cp312-win_amd64.whl", hash = "sha256:456e3b11cb79ac9946c822a56346ec80275eaf2950314b249b512896c0d2505e"}, + {file = "numpy-2.1.2-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:a84498e0d0a1174f2b3ed769b67b656aa5460c92c9554039e11f20a05650f00d"}, + {file = "numpy-2.1.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:4d6ec0d4222e8ffdab1744da2560f07856421b367928026fb540e1945f2eeeaf"}, + {file = "numpy-2.1.2-cp313-cp313-macosx_14_0_arm64.whl", hash = "sha256:259ec80d54999cc34cd1eb8ded513cb053c3bf4829152a2e00de2371bd406f5e"}, + {file = "numpy-2.1.2-cp313-cp313-macosx_14_0_x86_64.whl", hash = "sha256:675c741d4739af2dc20cd6c6a5c4b7355c728167845e3c6b0e824e4e5d36a6c3"}, + {file = "numpy-2.1.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:05b2d4e667895cc55e3ff2b56077e4c8a5604361fc21a042845ea3ad67465aa8"}, + {file = "numpy-2.1.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:43cca367bf94a14aca50b89e9bc2061683116cfe864e56740e083392f533ce7a"}, + {file = "numpy-2.1.2-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:76322dcdb16fccf2ac56f99048af32259dcc488d9b7e25b51e5eca5147a3fb98"}, + {file = "numpy-2.1.2-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:32e16a03138cabe0cb28e1007ee82264296ac0983714094380b408097a418cfe"}, + {file = "numpy-2.1.2-cp313-cp313-win32.whl", hash = "sha256:242b39d00e4944431a3cd2db2f5377e15b5785920421993770cddb89992c3f3a"}, + {file = "numpy-2.1.2-cp313-cp313-win_amd64.whl", hash = "sha256:f2ded8d9b6f68cc26f8425eda5d3877b47343e68ca23d0d0846f4d312ecaa445"}, + {file = "numpy-2.1.2-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:2ffef621c14ebb0188a8633348504a35c13680d6da93ab5cb86f4e54b7e922b5"}, + {file = "numpy-2.1.2-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:ad369ed238b1959dfbade9018a740fb9392c5ac4f9b5173f420bd4f37ba1f7a0"}, + {file = "numpy-2.1.2-cp313-cp313t-macosx_14_0_arm64.whl", hash = 
"sha256:d82075752f40c0ddf57e6e02673a17f6cb0f8eb3f587f63ca1eaab5594da5b17"}, + {file = "numpy-2.1.2-cp313-cp313t-macosx_14_0_x86_64.whl", hash = "sha256:1600068c262af1ca9580a527d43dc9d959b0b1d8e56f8a05d830eea39b7c8af6"}, + {file = "numpy-2.1.2-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a26ae94658d3ba3781d5e103ac07a876b3e9b29db53f68ed7df432fd033358a8"}, + {file = "numpy-2.1.2-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:13311c2db4c5f7609b462bc0f43d3c465424d25c626d95040f073e30f7570e35"}, + {file = "numpy-2.1.2-cp313-cp313t-musllinux_1_1_x86_64.whl", hash = "sha256:2abbf905a0b568706391ec6fa15161fad0fb5d8b68d73c461b3c1bab6064dd62"}, + {file = "numpy-2.1.2-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:ef444c57d664d35cac4e18c298c47d7b504c66b17c2ea91312e979fcfbdfb08a"}, + {file = "numpy-2.1.2-pp310-pypy310_pp73-macosx_10_15_x86_64.whl", hash = "sha256:bdd407c40483463898b84490770199d5714dcc9dd9b792f6c6caccc523c00952"}, + {file = "numpy-2.1.2-pp310-pypy310_pp73-macosx_14_0_x86_64.whl", hash = "sha256:da65fb46d4cbb75cb417cddf6ba5e7582eb7bb0b47db4b99c9fe5787ce5d91f5"}, + {file = "numpy-2.1.2-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1c193d0b0238638e6fc5f10f1b074a6993cb13b0b431f64079a509d63d3aa8b7"}, + {file = "numpy-2.1.2-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:a7d80b2e904faa63068ead63107189164ca443b42dd1930299e0d1cb041cec2e"}, + {file = "numpy-2.1.2.tar.gz", hash = "sha256:13532a088217fa624c99b843eeb54640de23b3414b14aa66d023805eb731066c"}, +] + [[package]] name = "opencv-python" version = "4.10.0.84" @@ -2800,6 +3031,24 @@ files = [ [package.dependencies] types-pytz = ">=2022.1.1" +[[package]] +name = "pandas-stubs" +version = "2.0.3.230814" +description = "Type annotations for pandas" +optional = true +python-versions = ">=3.8" +files = [ + {file = "pandas_stubs-2.0.3.230814-py3-none-any.whl", hash = 
"sha256:4b3dfc027d49779176b7daa031a3405f7b839bcb6e312f4b9f29fea5feec5b4f"}, + {file = "pandas_stubs-2.0.3.230814.tar.gz", hash = "sha256:1d5cc09e36e3d9f9a1ed9dceae4e03eeb26d1b898dd769996925f784365c8769"}, +] + +[package.dependencies] +numpy = [ + {version = "<=1.24.3", markers = "python_full_version <= \"3.8.0\""}, + {version = ">=1.25.0", markers = "python_version >= \"3.9\""}, +] +types-pytz = ">=2022.1.1" + [[package]] name = "pandocfilters" version = "1.5.1" @@ -3142,13 +3391,13 @@ windows-terminal = ["colorama (>=0.4.6)"] [[package]] name = "pymdown-extensions" -version = "10.10.2" +version = "10.11.2" description = "Extension pack for Python Markdown." optional = false python-versions = ">=3.8" files = [ - {file = "pymdown_extensions-10.10.2-py3-none-any.whl", hash = "sha256:513a9e9432b197cf0539356c8f1fc376e0d10b70ad150cadeb649a5628aacd45"}, - {file = "pymdown_extensions-10.10.2.tar.gz", hash = "sha256:65d82324ef2497931bc858c8320540c6264ab0d9a292707edb61f4fe0cd56633"}, + {file = "pymdown_extensions-10.11.2-py3-none-any.whl", hash = "sha256:41cdde0a77290e480cf53892f5c5e50921a7ee3e5cd60ba91bf19837b33badcf"}, + {file = "pymdown_extensions-10.11.2.tar.gz", hash = "sha256:bc8847ecc9e784a098efd35e20cba772bc5a1b529dfcef9dc1972db9021a1049"}, ] [package.dependencies] @@ -3158,6 +3407,20 @@ pyyaml = "*" [package.extras] extra = ["pygments (>=2.12)"] +[[package]] +name = "pyparsing" +version = "3.0.9" +description = "pyparsing module - Classes and methods to define and execute parsing grammars" +optional = false +python-versions = ">=3.6.8" +files = [ + {file = "pyparsing-3.0.9-py3-none-any.whl", hash = "sha256:5026bae9a10eeaefb61dab2f09052b9f4307d44aee4eda64b309723d8d206bbc"}, + {file = "pyparsing-3.0.9.tar.gz", hash = "sha256:2b020ecf7d21b687f219b71ecad3631f644a47f01403fa1d1036b0c6416d70fb"}, +] + +[package.extras] +diagrams = ["jinja2", "railroad-diagrams"] + [[package]] name = "pyparsing" version = "3.1.4" @@ -3193,13 +3456,13 @@ testing = ["covdefaults (>=2.3)", 
"pytest (>=8.3.3)", "pytest-cov (>=5)", "pytes [[package]] name = "pyproject-hooks" -version = "1.1.0" +version = "1.2.0" description = "Wrappers to call pyproject.toml-based build backend hooks." optional = false python-versions = ">=3.7" files = [ - {file = "pyproject_hooks-1.1.0-py3-none-any.whl", hash = "sha256:7ceeefe9aec63a1064c18d939bdc3adf2d8aa1988a510afec15151578b232aa2"}, - {file = "pyproject_hooks-1.1.0.tar.gz", hash = "sha256:4b37730834edbd6bd37f26ece6b44802fb1c1ee2ece0e54ddff8bfc06db86965"}, + {file = "pyproject_hooks-1.2.0-py3-none-any.whl", hash = "sha256:9e5c6bfa8dcc30091c74b0cf803c81fdd29d94f01992a7707bc97babb1141913"}, + {file = "pyproject_hooks-1.2.0.tar.gz", hash = "sha256:1e859bd5c40fae9448642dd871adf459e5e2084186e8d2c2a79a824c970da1f8"}, ] [[package]] @@ -3262,25 +3525,29 @@ files = [ [[package]] name = "pywin32" -version = "306" +version = "308" description = "Python for Window Extensions" optional = false python-versions = "*" files = [ - {file = "pywin32-306-cp310-cp310-win32.whl", hash = "sha256:06d3420a5155ba65f0b72f2699b5bacf3109f36acbe8923765c22938a69dfc8d"}, - {file = "pywin32-306-cp310-cp310-win_amd64.whl", hash = "sha256:84f4471dbca1887ea3803d8848a1616429ac94a4a8d05f4bc9c5dcfd42ca99c8"}, - {file = "pywin32-306-cp311-cp311-win32.whl", hash = "sha256:e65028133d15b64d2ed8f06dd9fbc268352478d4f9289e69c190ecd6818b6407"}, - {file = "pywin32-306-cp311-cp311-win_amd64.whl", hash = "sha256:a7639f51c184c0272e93f244eb24dafca9b1855707d94c192d4a0b4c01e1100e"}, - {file = "pywin32-306-cp311-cp311-win_arm64.whl", hash = "sha256:70dba0c913d19f942a2db25217d9a1b726c278f483a919f1abfed79c9cf64d3a"}, - {file = "pywin32-306-cp312-cp312-win32.whl", hash = "sha256:383229d515657f4e3ed1343da8be101000562bf514591ff383ae940cad65458b"}, - {file = "pywin32-306-cp312-cp312-win_amd64.whl", hash = "sha256:37257794c1ad39ee9be652da0462dc2e394c8159dfd913a8a4e8eb6fd346da0e"}, - {file = "pywin32-306-cp312-cp312-win_arm64.whl", hash = 
"sha256:5821ec52f6d321aa59e2db7e0a35b997de60c201943557d108af9d4ae1ec7040"}, - {file = "pywin32-306-cp37-cp37m-win32.whl", hash = "sha256:1c73ea9a0d2283d889001998059f5eaaba3b6238f767c9cf2833b13e6a685f65"}, - {file = "pywin32-306-cp37-cp37m-win_amd64.whl", hash = "sha256:72c5f621542d7bdd4fdb716227be0dd3f8565c11b280be6315b06ace35487d36"}, - {file = "pywin32-306-cp38-cp38-win32.whl", hash = "sha256:e4c092e2589b5cf0d365849e73e02c391c1349958c5ac3e9d5ccb9a28e017b3a"}, - {file = "pywin32-306-cp38-cp38-win_amd64.whl", hash = "sha256:e8ac1ae3601bee6ca9f7cb4b5363bf1c0badb935ef243c4733ff9a393b1690c0"}, - {file = "pywin32-306-cp39-cp39-win32.whl", hash = "sha256:e25fd5b485b55ac9c057f67d94bc203f3f6595078d1fb3b458c9c28b7153a802"}, - {file = "pywin32-306-cp39-cp39-win_amd64.whl", hash = "sha256:39b61c15272833b5c329a2989999dcae836b1eed650252ab1b7bfbe1d59f30f4"}, + {file = "pywin32-308-cp310-cp310-win32.whl", hash = "sha256:796ff4426437896550d2981b9c2ac0ffd75238ad9ea2d3bfa67a1abd546d262e"}, + {file = "pywin32-308-cp310-cp310-win_amd64.whl", hash = "sha256:4fc888c59b3c0bef905ce7eb7e2106a07712015ea1c8234b703a088d46110e8e"}, + {file = "pywin32-308-cp310-cp310-win_arm64.whl", hash = "sha256:a5ab5381813b40f264fa3495b98af850098f814a25a63589a8e9eb12560f450c"}, + {file = "pywin32-308-cp311-cp311-win32.whl", hash = "sha256:5d8c8015b24a7d6855b1550d8e660d8daa09983c80e5daf89a273e5c6fb5095a"}, + {file = "pywin32-308-cp311-cp311-win_amd64.whl", hash = "sha256:575621b90f0dc2695fec346b2d6302faebd4f0f45c05ea29404cefe35d89442b"}, + {file = "pywin32-308-cp311-cp311-win_arm64.whl", hash = "sha256:100a5442b7332070983c4cd03f2e906a5648a5104b8a7f50175f7906efd16bb6"}, + {file = "pywin32-308-cp312-cp312-win32.whl", hash = "sha256:587f3e19696f4bf96fde9d8a57cec74a57021ad5f204c9e627e15c33ff568897"}, + {file = "pywin32-308-cp312-cp312-win_amd64.whl", hash = "sha256:00b3e11ef09ede56c6a43c71f2d31857cf7c54b0ab6e78ac659497abd2834f47"}, + {file = "pywin32-308-cp312-cp312-win_arm64.whl", hash = 
"sha256:9b4de86c8d909aed15b7011182c8cab38c8850de36e6afb1f0db22b8959e3091"}, + {file = "pywin32-308-cp313-cp313-win32.whl", hash = "sha256:1c44539a37a5b7b21d02ab34e6a4d314e0788f1690d65b48e9b0b89f31abbbed"}, + {file = "pywin32-308-cp313-cp313-win_amd64.whl", hash = "sha256:fd380990e792eaf6827fcb7e187b2b4b1cede0585e3d0c9e84201ec27b9905e4"}, + {file = "pywin32-308-cp313-cp313-win_arm64.whl", hash = "sha256:ef313c46d4c18dfb82a2431e3051ac8f112ccee1a34f29c263c583c568db63cd"}, + {file = "pywin32-308-cp37-cp37m-win32.whl", hash = "sha256:1f696ab352a2ddd63bd07430080dd598e6369152ea13a25ebcdd2f503a38f1ff"}, + {file = "pywin32-308-cp37-cp37m-win_amd64.whl", hash = "sha256:13dcb914ed4347019fbec6697a01a0aec61019c1046c2b905410d197856326a6"}, + {file = "pywin32-308-cp38-cp38-win32.whl", hash = "sha256:5794e764ebcabf4ff08c555b31bd348c9025929371763b2183172ff4708152f0"}, + {file = "pywin32-308-cp38-cp38-win_amd64.whl", hash = "sha256:3b92622e29d651c6b783e368ba7d6722b1634b8e70bd376fd7610fe1992e19de"}, + {file = "pywin32-308-cp39-cp39-win32.whl", hash = "sha256:7873ca4dc60ab3287919881a7d4f88baee4a6e639aa6962de25a98ba6b193341"}, + {file = "pywin32-308-cp39-cp39-win_amd64.whl", hash = "sha256:71b3322d949b4cc20776436a9c9ba0eeedcbc9c650daa536df63f0ff111bb920"}, ] [[package]] @@ -3719,19 +3986,19 @@ files = [ [[package]] name = "rich" -version = "13.8.1" +version = "13.9.2" description = "Render rich text, tables, progress bars, syntax highlighting, markdown and more to the terminal" optional = false -python-versions = ">=3.7.0" +python-versions = ">=3.8.0" files = [ - {file = "rich-13.8.1-py3-none-any.whl", hash = "sha256:1760a3c0848469b97b558fc61c85233e3dafb69c7a071b4d60c38099d3cd4c06"}, - {file = "rich-13.8.1.tar.gz", hash = "sha256:8260cda28e3db6bf04d2d1ef4dbc03ba80a824c88b0e7668a0f23126a424844a"}, + {file = "rich-13.9.2-py3-none-any.whl", hash = "sha256:8c82a3d3f8dcfe9e734771313e606b39d8247bb6b826e196f4914b333b743cf1"}, + {file = "rich-13.9.2.tar.gz", hash = 
"sha256:51a2c62057461aaf7152b4d611168f93a9fc73068f8ded2790f29fe2b5366d0c"}, ] [package.dependencies] markdown-it-py = ">=2.2.0" pygments = ">=2.13.0,<3.0.0" -typing-extensions = {version = ">=4.0.0,<5.0", markers = "python_version < \"3.9\""} +typing-extensions = {version = ">=4.0.0,<5.0", markers = "python_version < \"3.11\""} [package.extras] jupyter = ["ipywidgets (>=7.5.1,<9)"] @@ -3955,6 +4222,56 @@ dev = ["cython-lint (>=0.12.2)", "doit (>=0.36.0)", "mypy", "pycodestyle", "pyde doc = ["jupyterlite-pyodide-kernel", "jupyterlite-sphinx (>=0.12.0)", "jupytext", "matplotlib (>=3.5)", "myst-nb", "numpydoc", "pooch", "pydata-sphinx-theme (>=0.15.2)", "sphinx (>=5.0.0)", "sphinx-design (>=0.4.0)"] test = ["array-api-strict", "asv", "gmpy2", "hypothesis (>=6.30)", "mpmath", "pooch", "pytest", "pytest-cov", "pytest-timeout", "pytest-xdist", "scikit-umfpack", "threadpoolctl"] +[[package]] +name = "scipy" +version = "1.14.1" +description = "Fundamental algorithms for scientific computing in Python" +optional = false +python-versions = ">=3.10" +files = [ + {file = "scipy-1.14.1-cp310-cp310-macosx_10_13_x86_64.whl", hash = "sha256:b28d2ca4add7ac16ae8bb6632a3c86e4b9e4d52d3e34267f6e1b0c1f8d87e389"}, + {file = "scipy-1.14.1-cp310-cp310-macosx_12_0_arm64.whl", hash = "sha256:d0d2821003174de06b69e58cef2316a6622b60ee613121199cb2852a873f8cf3"}, + {file = "scipy-1.14.1-cp310-cp310-macosx_14_0_arm64.whl", hash = "sha256:8bddf15838ba768bb5f5083c1ea012d64c9a444e16192762bd858f1e126196d0"}, + {file = "scipy-1.14.1-cp310-cp310-macosx_14_0_x86_64.whl", hash = "sha256:97c5dddd5932bd2a1a31c927ba5e1463a53b87ca96b5c9bdf5dfd6096e27efc3"}, + {file = "scipy-1.14.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2ff0a7e01e422c15739ecd64432743cf7aae2b03f3084288f399affcefe5222d"}, + {file = "scipy-1.14.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8e32dced201274bf96899e6491d9ba3e9a5f6b336708656466ad0522d8528f69"}, + {file = 
"scipy-1.14.1-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:8426251ad1e4ad903a4514712d2fa8fdd5382c978010d1c6f5f37ef286a713ad"}, + {file = "scipy-1.14.1-cp310-cp310-win_amd64.whl", hash = "sha256:a49f6ed96f83966f576b33a44257d869756df6cf1ef4934f59dd58b25e0327e5"}, + {file = "scipy-1.14.1-cp311-cp311-macosx_10_13_x86_64.whl", hash = "sha256:2da0469a4ef0ecd3693761acbdc20f2fdeafb69e6819cc081308cc978153c675"}, + {file = "scipy-1.14.1-cp311-cp311-macosx_12_0_arm64.whl", hash = "sha256:c0ee987efa6737242745f347835da2cc5bb9f1b42996a4d97d5c7ff7928cb6f2"}, + {file = "scipy-1.14.1-cp311-cp311-macosx_14_0_arm64.whl", hash = "sha256:3a1b111fac6baec1c1d92f27e76511c9e7218f1695d61b59e05e0fe04dc59617"}, + {file = "scipy-1.14.1-cp311-cp311-macosx_14_0_x86_64.whl", hash = "sha256:8475230e55549ab3f207bff11ebfc91c805dc3463ef62eda3ccf593254524ce8"}, + {file = "scipy-1.14.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:278266012eb69f4a720827bdd2dc54b2271c97d84255b2faaa8f161a158c3b37"}, + {file = "scipy-1.14.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fef8c87f8abfb884dac04e97824b61299880c43f4ce675dd2cbeadd3c9b466d2"}, + {file = "scipy-1.14.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:b05d43735bb2f07d689f56f7b474788a13ed8adc484a85aa65c0fd931cf9ccd2"}, + {file = "scipy-1.14.1-cp311-cp311-win_amd64.whl", hash = "sha256:716e389b694c4bb564b4fc0c51bc84d381735e0d39d3f26ec1af2556ec6aad94"}, + {file = "scipy-1.14.1-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:631f07b3734d34aced009aaf6fedfd0eb3498a97e581c3b1e5f14a04164a456d"}, + {file = "scipy-1.14.1-cp312-cp312-macosx_12_0_arm64.whl", hash = "sha256:af29a935803cc707ab2ed7791c44288a682f9c8107bc00f0eccc4f92c08d6e07"}, + {file = "scipy-1.14.1-cp312-cp312-macosx_14_0_arm64.whl", hash = "sha256:2843f2d527d9eebec9a43e6b406fb7266f3af25a751aa91d62ff416f54170bc5"}, + {file = "scipy-1.14.1-cp312-cp312-macosx_14_0_x86_64.whl", hash = 
"sha256:eb58ca0abd96911932f688528977858681a59d61a7ce908ffd355957f7025cfc"}, + {file = "scipy-1.14.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:30ac8812c1d2aab7131a79ba62933a2a76f582d5dbbc695192453dae67ad6310"}, + {file = "scipy-1.14.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8f9ea80f2e65bdaa0b7627fb00cbeb2daf163caa015e59b7516395fe3bd1e066"}, + {file = "scipy-1.14.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:edaf02b82cd7639db00dbff629995ef185c8df4c3ffa71a5562a595765a06ce1"}, + {file = "scipy-1.14.1-cp312-cp312-win_amd64.whl", hash = "sha256:2ff38e22128e6c03ff73b6bb0f85f897d2362f8c052e3b8ad00532198fbdae3f"}, + {file = "scipy-1.14.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:1729560c906963fc8389f6aac023739ff3983e727b1a4d87696b7bf108316a79"}, + {file = "scipy-1.14.1-cp313-cp313-macosx_12_0_arm64.whl", hash = "sha256:4079b90df244709e675cdc8b93bfd8a395d59af40b72e339c2287c91860deb8e"}, + {file = "scipy-1.14.1-cp313-cp313-macosx_14_0_arm64.whl", hash = "sha256:e0cf28db0f24a38b2a0ca33a85a54852586e43cf6fd876365c86e0657cfe7d73"}, + {file = "scipy-1.14.1-cp313-cp313-macosx_14_0_x86_64.whl", hash = "sha256:0c2f95de3b04e26f5f3ad5bb05e74ba7f68b837133a4492414b3afd79dfe540e"}, + {file = "scipy-1.14.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b99722ea48b7ea25e8e015e8341ae74624f72e5f21fc2abd45f3a93266de4c5d"}, + {file = "scipy-1.14.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5149e3fd2d686e42144a093b206aef01932a0059c2a33ddfa67f5f035bdfe13e"}, + {file = "scipy-1.14.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:e4f5a7c49323533f9103d4dacf4e4f07078f360743dec7f7596949149efeec06"}, + {file = "scipy-1.14.1-cp313-cp313-win_amd64.whl", hash = "sha256:baff393942b550823bfce952bb62270ee17504d02a1801d7fd0719534dfb9c84"}, + {file = "scipy-1.14.1.tar.gz", hash = 
"sha256:5a275584e726026a5699459aa72f828a610821006228e841b94275c4a7c08417"}, +] + +[package.dependencies] +numpy = ">=1.23.5,<2.3" + +[package.extras] +dev = ["cython-lint (>=0.12.2)", "doit (>=0.36.0)", "mypy (==1.10.0)", "pycodestyle", "pydevtool", "rich-click", "ruff (>=0.0.292)", "types-psutil", "typing_extensions"] +doc = ["jupyterlite-pyodide-kernel", "jupyterlite-sphinx (>=0.13.1)", "jupytext", "matplotlib (>=3.5)", "myst-nb", "numpydoc", "pooch", "pydata-sphinx-theme (>=0.15.2)", "sphinx (>=5.0.0,<=7.3.7)", "sphinx-design (>=0.4.0)"] +test = ["Cython", "array-api-strict (>=2.0)", "asv", "gmpy2", "hypothesis (>=6.30)", "meson", "mpmath", "ninja", "pooch", "pytest", "pytest-cov", "pytest-timeout", "pytest-xdist", "scikit-umfpack", "threadpoolctl"] + [[package]] name = "secretstorage" version = "3.3.3" @@ -4110,13 +4427,13 @@ test = ["pytest", "ruff"] [[package]] name = "tomli" -version = "2.0.1" +version = "2.0.2" description = "A lil' TOML parser" optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "tomli-2.0.1-py3-none-any.whl", hash = "sha256:939de3e7a6161af0c887ef91b7d41a53e7c5a1ca976325f429cb46ea9bc30ecc"}, - {file = "tomli-2.0.1.tar.gz", hash = "sha256:de526c12914f0c550d15924c62d72abc48d6fe7364aa87328337a31007fe8a4f"}, + {file = "tomli-2.0.2-py3-none-any.whl", hash = "sha256:2ebe24485c53d303f690b0ec092806a085f07af5a5aa1464f3931eec36caaa38"}, + {file = "tomli-2.0.2.tar.gz", hash = "sha256:d46d457a85337051c36524bc5349dd91b1877838e2979ac5ced3e710ed8a60ed"}, ] [[package]] @@ -4240,24 +4557,24 @@ types-setuptools = "*" [[package]] name = "types-python-dateutil" -version = "2.9.0.20240906" +version = "2.9.0.20241003" description = "Typing stubs for python-dateutil" optional = false python-versions = ">=3.8" files = [ - {file = "types-python-dateutil-2.9.0.20240906.tar.gz", hash = "sha256:9706c3b68284c25adffc47319ecc7947e5bb86b3773f843c73906fd598bc176e"}, - {file = "types_python_dateutil-2.9.0.20240906-py3-none-any.whl", 
hash = "sha256:27c8cc2d058ccb14946eebcaaa503088f4f6dbc4fb6093d3d456a49aef2753f6"}, + {file = "types-python-dateutil-2.9.0.20241003.tar.gz", hash = "sha256:58cb85449b2a56d6684e41aeefb4c4280631246a0da1a719bdbe6f3fb0317446"}, + {file = "types_python_dateutil-2.9.0.20241003-py3-none-any.whl", hash = "sha256:250e1d8e80e7bbc3a6c99b907762711d1a1cdd00e978ad39cb5940f6f0a87f3d"}, ] [[package]] name = "types-pytz" -version = "2024.2.0.20240913" +version = "2024.2.0.20241003" description = "Typing stubs for pytz" optional = true python-versions = ">=3.8" files = [ - {file = "types-pytz-2024.2.0.20240913.tar.gz", hash = "sha256:4433b5df4a6fc587bbed41716d86a5ba5d832b4378e506f40d34bc9c81df2c24"}, - {file = "types_pytz-2024.2.0.20240913-py3-none-any.whl", hash = "sha256:a1eebf57ebc6e127a99d2fa2ba0a88d2b173784ef9b3defcc2004ab6855a44df"}, + {file = "types-pytz-2024.2.0.20241003.tar.gz", hash = "sha256:575dc38f385a922a212bac00a7d6d2e16e141132a3c955078f4a4fd13ed6cb44"}, + {file = "types_pytz-2024.2.0.20241003-py3-none-any.whl", hash = "sha256:3e22df1336c0c6ad1d29163c8fda82736909eb977281cb823c57f8bae07118b7"}, ] [[package]] @@ -4287,13 +4604,13 @@ urllib3 = ">=2" [[package]] name = "types-setuptools" -version = "75.1.0.20240917" +version = "75.1.0.20241014" description = "Typing stubs for setuptools" optional = false python-versions = ">=3.8" files = [ - {file = "types-setuptools-75.1.0.20240917.tar.gz", hash = "sha256:12f12a165e7ed383f31def705e5c0fa1c26215dd466b0af34bd042f7d5331f55"}, - {file = "types_setuptools-75.1.0.20240917-py3-none-any.whl", hash = "sha256:06f78307e68d1bbde6938072c57b81cf8a99bc84bd6dc7e4c5014730b097dc0c"}, + {file = "types-setuptools-75.1.0.20241014.tar.gz", hash = "sha256:29b0560a8d4b4a91174be085847002c69abfcb048e20b33fc663005aedf56804"}, + {file = "types_setuptools-75.1.0.20241014-py3-none-any.whl", hash = "sha256:caab58366741fb99673d0138b6e2d760717f154cfb981b74fea5e8de40f0b703"}, ] [[package]] @@ -4545,4 +4862,4 @@ metrics = ["pandas", "pandas-stubs"] 
[metadata] lock-version = "2.0" python-versions = "^3.8" -content-hash = "ab2e2c455fa1a7d74271da71f8c1b6f096bbebd92a79b4ec646523ef7d8530b0" +content-hash = "fe9efd8caf098dc0301e1d7007ac3b00647b48cff6b060cc0eff57656d082099" diff --git a/pyproject.toml b/pyproject.toml index f63011062..6464346b7 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -46,11 +46,26 @@ python = "^3.8" numpy = [ { version = ">=1.21.2,<1.23.3", python = "<=3.10" }, { version = ">=1.23.3", python = ">3.10" }, + { version = ">=2.1.0", python = ">=3.13" }, ] scipy = [ { version = "1.10.0", python = "<3.9" }, { version = "^1.10.0", python = ">=3.9" }, + { version = ">=1.14.1", python = ">=3.13" }, + +] + +# Matplotlib sub-dependency +# The 'contourpy' package is required by Matplotlib for contour plotting. +# We need to ensure compatibility with both Python 3.8 and Python 3.13. +# +# For Python 3.8 and above, we use version 1.0.7 or higher, as it is the lowest major version that supports Python 3.8. +# For Python 3.13 and above, we use version 1.3.0 or higher, as it is the first version that explicitly supports Python 3.13. +contourpy = [ + { version = ">=1.0.7", python = ">=3.8" }, + { version = ">=1.3.0", python = ">=3.13" }, ] + matplotlib = ">=3.6.0" pyyaml = ">=5.3" defusedxml = "^0.7.1" From a879c1d39ac716ab38ecbf28ca297ee4b375316c Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue, 15 Oct 2024 00:26:36 +0000 Subject: [PATCH 055/161] :arrow_up: Bump mypy from 1.11.2 to 1.12.0 Bumps [mypy](https://github.com/python/mypy) from 1.11.2 to 1.12.0. - [Changelog](https://github.com/python/mypy/blob/master/CHANGELOG.md) - [Commits](https://github.com/python/mypy/compare/v1.11.2...v1.12.0) --- updated-dependencies: - dependency-name: mypy dependency-type: direct:development update-type: version-update:semver-minor ... 
Signed-off-by: dependabot[bot] --- poetry.lock | 277 +++++++++++++++++++--------------------------------- 1 file changed, 98 insertions(+), 179 deletions(-) diff --git a/poetry.lock b/poetry.lock index 1994447dd..45db2b43f 100644 --- a/poetry.lock +++ b/poetry.lock @@ -602,6 +602,68 @@ traitlets = ">=4" [package.extras] test = ["pytest"] +[[package]] +name = "contourpy" +version = "1.1.0" +description = "Python library for calculating contours of 2D quadrilateral grids" +optional = false +python-versions = ">=3.8" +files = [ + {file = "contourpy-1.1.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:89f06eff3ce2f4b3eb24c1055a26981bffe4e7264acd86f15b97e40530b794bc"}, + {file = "contourpy-1.1.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:dffcc2ddec1782dd2f2ce1ef16f070861af4fb78c69862ce0aab801495dda6a3"}, + {file = "contourpy-1.1.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:25ae46595e22f93592d39a7eac3d638cda552c3e1160255258b695f7b58e5655"}, + {file = "contourpy-1.1.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:17cfaf5ec9862bc93af1ec1f302457371c34e688fbd381f4035a06cd47324f48"}, + {file = "contourpy-1.1.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:18a64814ae7bce73925131381603fff0116e2df25230dfc80d6d690aa6e20b37"}, + {file = "contourpy-1.1.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:90c81f22b4f572f8a2110b0b741bb64e5a6427e0a198b2cdc1fbaf85f352a3aa"}, + {file = "contourpy-1.1.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:53cc3a40635abedbec7f1bde60f8c189c49e84ac180c665f2cd7c162cc454baa"}, + {file = "contourpy-1.1.0-cp310-cp310-win32.whl", hash = "sha256:9b2dd2ca3ac561aceef4c7c13ba654aaa404cf885b187427760d7f7d4c57cff8"}, + {file = "contourpy-1.1.0-cp310-cp310-win_amd64.whl", hash = "sha256:1f795597073b09d631782e7245016a4323cf1cf0b4e06eef7ea6627e06a37ff2"}, + {file = "contourpy-1.1.0-cp311-cp311-macosx_10_9_x86_64.whl", hash 
= "sha256:0b7b04ed0961647691cfe5d82115dd072af7ce8846d31a5fac6c142dcce8b882"}, + {file = "contourpy-1.1.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:27bc79200c742f9746d7dd51a734ee326a292d77e7d94c8af6e08d1e6c15d545"}, + {file = "contourpy-1.1.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:052cc634bf903c604ef1a00a5aa093c54f81a2612faedaa43295809ffdde885e"}, + {file = "contourpy-1.1.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9382a1c0bc46230fb881c36229bfa23d8c303b889b788b939365578d762b5c18"}, + {file = "contourpy-1.1.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e5cec36c5090e75a9ac9dbd0ff4a8cf7cecd60f1b6dc23a374c7d980a1cd710e"}, + {file = "contourpy-1.1.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1f0cbd657e9bde94cd0e33aa7df94fb73c1ab7799378d3b3f902eb8eb2e04a3a"}, + {file = "contourpy-1.1.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:181cbace49874f4358e2929aaf7ba84006acb76694102e88dd15af861996c16e"}, + {file = "contourpy-1.1.0-cp311-cp311-win32.whl", hash = "sha256:edb989d31065b1acef3828a3688f88b2abb799a7db891c9e282df5ec7e46221b"}, + {file = "contourpy-1.1.0-cp311-cp311-win_amd64.whl", hash = "sha256:fb3b7d9e6243bfa1efb93ccfe64ec610d85cfe5aec2c25f97fbbd2e58b531256"}, + {file = "contourpy-1.1.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:bcb41692aa09aeb19c7c213411854402f29f6613845ad2453d30bf421fe68fed"}, + {file = "contourpy-1.1.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:5d123a5bc63cd34c27ff9c7ac1cd978909e9c71da12e05be0231c608048bb2ae"}, + {file = "contourpy-1.1.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:62013a2cf68abc80dadfd2307299bfa8f5aa0dcaec5b2954caeb5fa094171103"}, + {file = "contourpy-1.1.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:0b6616375d7de55797d7a66ee7d087efe27f03d336c27cf1f32c02b8c1a5ac70"}, + {file = 
"contourpy-1.1.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:317267d915490d1e84577924bd61ba71bf8681a30e0d6c545f577363157e5e94"}, + {file = "contourpy-1.1.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d551f3a442655f3dcc1285723f9acd646ca5858834efeab4598d706206b09c9f"}, + {file = "contourpy-1.1.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:e7a117ce7df5a938fe035cad481b0189049e8d92433b4b33aa7fc609344aafa1"}, + {file = "contourpy-1.1.0-cp38-cp38-win32.whl", hash = "sha256:108dfb5b3e731046a96c60bdc46a1a0ebee0760418951abecbe0fc07b5b93b27"}, + {file = "contourpy-1.1.0-cp38-cp38-win_amd64.whl", hash = "sha256:d4f26b25b4f86087e7d75e63212756c38546e70f2a92d2be44f80114826e1cd4"}, + {file = "contourpy-1.1.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:bc00bb4225d57bff7ebb634646c0ee2a1298402ec10a5fe7af79df9a51c1bfd9"}, + {file = "contourpy-1.1.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:189ceb1525eb0655ab8487a9a9c41f42a73ba52d6789754788d1883fb06b2d8a"}, + {file = "contourpy-1.1.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9f2931ed4741f98f74b410b16e5213f71dcccee67518970c42f64153ea9313b9"}, + {file = "contourpy-1.1.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:30f511c05fab7f12e0b1b7730ebdc2ec8deedcfb505bc27eb570ff47c51a8f15"}, + {file = "contourpy-1.1.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:143dde50520a9f90e4a2703f367cf8ec96a73042b72e68fcd184e1279962eb6f"}, + {file = "contourpy-1.1.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e94bef2580e25b5fdb183bf98a2faa2adc5b638736b2c0a4da98691da641316a"}, + {file = "contourpy-1.1.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:ed614aea8462735e7d70141374bd7650afd1c3f3cb0c2dbbcbe44e14331bf002"}, + {file = "contourpy-1.1.0-cp39-cp39-win32.whl", hash = "sha256:71551f9520f008b2950bef5f16b0e3587506ef4f23c734b71ffb7b89f8721999"}, + {file = 
"contourpy-1.1.0-cp39-cp39-win_amd64.whl", hash = "sha256:438ba416d02f82b692e371858143970ed2eb6337d9cdbbede0d8ad9f3d7dd17d"}, + {file = "contourpy-1.1.0-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:a698c6a7a432789e587168573a864a7ea374c6be8d4f31f9d87c001d5a843493"}, + {file = "contourpy-1.1.0-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:397b0ac8a12880412da3551a8cb5a187d3298a72802b45a3bd1805e204ad8439"}, + {file = "contourpy-1.1.0-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:a67259c2b493b00e5a4d0f7bfae51fb4b3371395e47d079a4446e9b0f4d70e76"}, + {file = "contourpy-1.1.0-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:2b836d22bd2c7bb2700348e4521b25e077255ebb6ab68e351ab5aa91ca27e027"}, + {file = "contourpy-1.1.0-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:084eaa568400cfaf7179b847ac871582199b1b44d5699198e9602ecbbb5f6104"}, + {file = "contourpy-1.1.0-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:911ff4fd53e26b019f898f32db0d4956c9d227d51338fb3b03ec72ff0084ee5f"}, + {file = "contourpy-1.1.0.tar.gz", hash = "sha256:e53046c3863828d21d531cc3b53786e6580eb1ba02477e8681009b6aa0870b21"}, +] + +[package.dependencies] +numpy = ">=1.16" + +[package.extras] +bokeh = ["bokeh", "selenium"] +docs = ["furo", "sphinx-copybutton"] +mypy = ["contourpy[bokeh,docs]", "docutils-stubs", "mypy (==1.2.0)", "types-Pillow"] +test = ["Pillow", "contourpy[test-no-images]", "matplotlib"] +test-no-images = ["pytest", "pytest-cov", "wurlitzer"] + [[package]] name = "contourpy" version = "1.1.1" @@ -664,10 +726,7 @@ files = [ ] [package.dependencies] -numpy = [ - {version = ">=1.16,<2.0", markers = "python_version <= \"3.11\""}, - {version = ">=1.26.0rc1,<2.0", markers = "python_version >= \"3.12\""}, -] +numpy = {version = ">=1.16,<2.0", markers = "python_version <= \"3.11\""} [package.extras] bokeh = ["bokeh", "selenium"] @@ -2139,74 +2198,6 @@ pillow = ">=6.2.0" pyparsing = ">=2.3.1,<3.1" python-dateutil = 
">=2.7" -[[package]] -name = "matplotlib" -version = "3.7.5" -description = "Python plotting package" -optional = false -python-versions = ">=3.8" -files = [ - {file = "matplotlib-3.7.5-cp310-cp310-macosx_10_12_universal2.whl", hash = "sha256:4a87b69cb1cb20943010f63feb0b2901c17a3b435f75349fd9865713bfa63925"}, - {file = "matplotlib-3.7.5-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:d3ce45010fefb028359accebb852ca0c21bd77ec0f281952831d235228f15810"}, - {file = "matplotlib-3.7.5-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:fbea1e762b28400393d71be1a02144aa16692a3c4c676ba0178ce83fc2928fdd"}, - {file = "matplotlib-3.7.5-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ec0e1adc0ad70ba8227e957551e25a9d2995e319c29f94a97575bb90fa1d4469"}, - {file = "matplotlib-3.7.5-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6738c89a635ced486c8a20e20111d33f6398a9cbebce1ced59c211e12cd61455"}, - {file = "matplotlib-3.7.5-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1210b7919b4ed94b5573870f316bca26de3e3b07ffdb563e79327dc0e6bba515"}, - {file = "matplotlib-3.7.5-cp310-cp310-win32.whl", hash = "sha256:068ebcc59c072781d9dcdb82f0d3f1458271c2de7ca9c78f5bd672141091e9e1"}, - {file = "matplotlib-3.7.5-cp310-cp310-win_amd64.whl", hash = "sha256:f098ffbaab9df1e3ef04e5a5586a1e6b1791380698e84938d8640961c79b1fc0"}, - {file = "matplotlib-3.7.5-cp311-cp311-macosx_10_12_universal2.whl", hash = "sha256:f65342c147572673f02a4abec2d5a23ad9c3898167df9b47c149f32ce61ca078"}, - {file = "matplotlib-3.7.5-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:4ddf7fc0e0dc553891a117aa083039088d8a07686d4c93fb8a810adca68810af"}, - {file = "matplotlib-3.7.5-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:0ccb830fc29442360d91be48527809f23a5dcaee8da5f4d9b2d5b867c1b087b8"}, - {file = "matplotlib-3.7.5-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:efc6bb28178e844d1f408dd4d6341ee8a2e906fc9e0fa3dae497da4e0cab775d"}, - {file = "matplotlib-3.7.5-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3b15c4c2d374f249f324f46e883340d494c01768dd5287f8bc00b65b625ab56c"}, - {file = "matplotlib-3.7.5-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3d028555421912307845e59e3de328260b26d055c5dac9b182cc9783854e98fb"}, - {file = "matplotlib-3.7.5-cp311-cp311-win32.whl", hash = "sha256:fe184b4625b4052fa88ef350b815559dd90cc6cc8e97b62f966e1ca84074aafa"}, - {file = "matplotlib-3.7.5-cp311-cp311-win_amd64.whl", hash = "sha256:084f1f0f2f1010868c6f1f50b4e1c6f2fb201c58475494f1e5b66fed66093647"}, - {file = "matplotlib-3.7.5-cp312-cp312-macosx_10_12_universal2.whl", hash = "sha256:34bceb9d8ddb142055ff27cd7135f539f2f01be2ce0bafbace4117abe58f8fe4"}, - {file = "matplotlib-3.7.5-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:c5a2134162273eb8cdfd320ae907bf84d171de948e62180fa372a3ca7cf0f433"}, - {file = "matplotlib-3.7.5-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:039ad54683a814002ff37bf7981aa1faa40b91f4ff84149beb53d1eb64617980"}, - {file = "matplotlib-3.7.5-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4d742ccd1b09e863b4ca58291728db645b51dab343eebb08d5d4b31b308296ce"}, - {file = "matplotlib-3.7.5-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:743b1c488ca6a2bc7f56079d282e44d236bf375968bfd1b7ba701fd4d0fa32d6"}, - {file = "matplotlib-3.7.5-cp312-cp312-win_amd64.whl", hash = "sha256:fbf730fca3e1f23713bc1fae0a57db386e39dc81ea57dc305c67f628c1d7a342"}, - {file = "matplotlib-3.7.5-cp38-cp38-macosx_10_12_universal2.whl", hash = "sha256:cfff9b838531698ee40e40ea1a8a9dc2c01edb400b27d38de6ba44c1f9a8e3d2"}, - {file = "matplotlib-3.7.5-cp38-cp38-macosx_10_12_x86_64.whl", hash = "sha256:1dbcca4508bca7847fe2d64a05b237a3dcaec1f959aedb756d5b1c67b770c5ee"}, - {file = "matplotlib-3.7.5-cp38-cp38-macosx_11_0_arm64.whl", hash = 
"sha256:4cdf4ef46c2a1609a50411b66940b31778db1e4b73d4ecc2eaa40bd588979b13"}, - {file = "matplotlib-3.7.5-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:167200ccfefd1674b60e957186dfd9baf58b324562ad1a28e5d0a6b3bea77905"}, - {file = "matplotlib-3.7.5-cp38-cp38-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:53e64522934df6e1818b25fd48cf3b645b11740d78e6ef765fbb5fa5ce080d02"}, - {file = "matplotlib-3.7.5-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d3e3bc79b2d7d615067bd010caff9243ead1fc95cf735c16e4b2583173f717eb"}, - {file = "matplotlib-3.7.5-cp38-cp38-win32.whl", hash = "sha256:6b641b48c6819726ed47c55835cdd330e53747d4efff574109fd79b2d8a13748"}, - {file = "matplotlib-3.7.5-cp38-cp38-win_amd64.whl", hash = "sha256:f0b60993ed3488b4532ec6b697059897891927cbfc2b8d458a891b60ec03d9d7"}, - {file = "matplotlib-3.7.5-cp39-cp39-macosx_10_12_universal2.whl", hash = "sha256:090964d0afaff9c90e4d8de7836757e72ecfb252fb02884016d809239f715651"}, - {file = "matplotlib-3.7.5-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:9fc6fcfbc55cd719bc0bfa60bde248eb68cf43876d4c22864603bdd23962ba25"}, - {file = "matplotlib-3.7.5-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:5e7cc3078b019bb863752b8b60e8b269423000f1603cb2299608231996bd9d54"}, - {file = "matplotlib-3.7.5-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1e4e9a868e8163abaaa8259842d85f949a919e1ead17644fb77a60427c90473c"}, - {file = "matplotlib-3.7.5-cp39-cp39-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:fa7ebc995a7d747dacf0a717d0eb3aa0f0c6a0e9ea88b0194d3a3cd241a1500f"}, - {file = "matplotlib-3.7.5-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3785bfd83b05fc0e0c2ae4c4a90034fe693ef96c679634756c50fe6efcc09856"}, - {file = "matplotlib-3.7.5-cp39-cp39-win32.whl", hash = "sha256:29b058738c104d0ca8806395f1c9089dfe4d4f0f78ea765c6c704469f3fffc81"}, - {file = "matplotlib-3.7.5-cp39-cp39-win_amd64.whl", hash = 
"sha256:fd4028d570fa4b31b7b165d4a685942ae9cdc669f33741e388c01857d9723eab"}, - {file = "matplotlib-3.7.5-pp38-pypy38_pp73-macosx_10_12_x86_64.whl", hash = "sha256:2a9a3f4d6a7f88a62a6a18c7e6a84aedcaf4faf0708b4ca46d87b19f1b526f88"}, - {file = "matplotlib-3.7.5-pp38-pypy38_pp73-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b9b3fd853d4a7f008a938df909b96db0b454225f935d3917520305b90680579c"}, - {file = "matplotlib-3.7.5-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f0ad550da9f160737d7890217c5eeed4337d07e83ca1b2ca6535078f354e7675"}, - {file = "matplotlib-3.7.5-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:20da7924a08306a861b3f2d1da0d1aa9a6678e480cf8eacffe18b565af2813e7"}, - {file = "matplotlib-3.7.5-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:b45c9798ea6bb920cb77eb7306409756a7fab9db9b463e462618e0559aecb30e"}, - {file = "matplotlib-3.7.5-pp39-pypy39_pp73-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a99866267da1e561c7776fe12bf4442174b79aac1a47bd7e627c7e4d077ebd83"}, - {file = "matplotlib-3.7.5-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2b6aa62adb6c268fc87d80f963aca39c64615c31830b02697743c95590ce3fbb"}, - {file = "matplotlib-3.7.5-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:e530ab6a0afd082d2e9c17eb1eb064a63c5b09bb607b2b74fa41adbe3e162286"}, - {file = "matplotlib-3.7.5.tar.gz", hash = "sha256:1e5c971558ebc811aa07f54c7b7c677d78aa518ef4c390e14673a09e0860184a"}, -] - -[package.dependencies] -contourpy = ">=1.0.1" -cycler = ">=0.10" -fonttools = ">=4.22.0" -importlib-resources = {version = ">=3.2.0", markers = "python_version < \"3.10\""} -kiwisolver = ">=1.0.1" -numpy = ">=1.20,<2" -packaging = ">=20.0" -pillow = ">=6.2.0" -pyparsing = ">=2.3.1" -python-dateutil = ">=2.7" - [[package]] name = "matplotlib-inline" version = "0.1.7" @@ -2518,38 +2509,43 @@ files = [ [[package]] name = "mypy" -version = "1.11.2" +version = "1.12.0" description = "Optional static 
typing for Python" optional = false python-versions = ">=3.8" files = [ - {file = "mypy-1.11.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:d42a6dd818ffce7be66cce644f1dff482f1d97c53ca70908dff0b9ddc120b77a"}, - {file = "mypy-1.11.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:801780c56d1cdb896eacd5619a83e427ce436d86a3bdf9112527f24a66618fef"}, - {file = "mypy-1.11.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:41ea707d036a5307ac674ea172875f40c9d55c5394f888b168033177fce47383"}, - {file = "mypy-1.11.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:6e658bd2d20565ea86da7d91331b0eed6d2eee22dc031579e6297f3e12c758c8"}, - {file = "mypy-1.11.2-cp310-cp310-win_amd64.whl", hash = "sha256:478db5f5036817fe45adb7332d927daa62417159d49783041338921dcf646fc7"}, - {file = "mypy-1.11.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:75746e06d5fa1e91bfd5432448d00d34593b52e7e91a187d981d08d1f33d4385"}, - {file = "mypy-1.11.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:a976775ab2256aadc6add633d44f100a2517d2388906ec4f13231fafbb0eccca"}, - {file = "mypy-1.11.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:cd953f221ac1379050a8a646585a29574488974f79d8082cedef62744f0a0104"}, - {file = "mypy-1.11.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:57555a7715c0a34421013144a33d280e73c08df70f3a18a552938587ce9274f4"}, - {file = "mypy-1.11.2-cp311-cp311-win_amd64.whl", hash = "sha256:36383a4fcbad95f2657642a07ba22ff797de26277158f1cc7bd234821468b1b6"}, - {file = "mypy-1.11.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:e8960dbbbf36906c5c0b7f4fbf2f0c7ffb20f4898e6a879fcf56a41a08b0d318"}, - {file = "mypy-1.11.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:06d26c277962f3fb50e13044674aa10553981ae514288cb7d0a738f495550b36"}, - {file = "mypy-1.11.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = 
"sha256:6e7184632d89d677973a14d00ae4d03214c8bc301ceefcdaf5c474866814c987"}, - {file = "mypy-1.11.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:3a66169b92452f72117e2da3a576087025449018afc2d8e9bfe5ffab865709ca"}, - {file = "mypy-1.11.2-cp312-cp312-win_amd64.whl", hash = "sha256:969ea3ef09617aff826885a22ece0ddef69d95852cdad2f60c8bb06bf1f71f70"}, - {file = "mypy-1.11.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:37c7fa6121c1cdfcaac97ce3d3b5588e847aa79b580c1e922bb5d5d2902df19b"}, - {file = "mypy-1.11.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:4a8a53bc3ffbd161b5b2a4fff2f0f1e23a33b0168f1c0778ec70e1a3d66deb86"}, - {file = "mypy-1.11.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:2ff93107f01968ed834f4256bc1fc4475e2fecf6c661260066a985b52741ddce"}, - {file = "mypy-1.11.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:edb91dded4df17eae4537668b23f0ff6baf3707683734b6a818d5b9d0c0c31a1"}, - {file = "mypy-1.11.2-cp38-cp38-win_amd64.whl", hash = "sha256:ee23de8530d99b6db0573c4ef4bd8f39a2a6f9b60655bf7a1357e585a3486f2b"}, - {file = "mypy-1.11.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:801ca29f43d5acce85f8e999b1e431fb479cb02d0e11deb7d2abb56bdaf24fd6"}, - {file = "mypy-1.11.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:af8d155170fcf87a2afb55b35dc1a0ac21df4431e7d96717621962e4b9192e70"}, - {file = "mypy-1.11.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:f7821776e5c4286b6a13138cc935e2e9b6fde05e081bdebf5cdb2bb97c9df81d"}, - {file = "mypy-1.11.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:539c570477a96a4e6fb718b8d5c3e0c0eba1f485df13f86d2970c91f0673148d"}, - {file = "mypy-1.11.2-cp39-cp39-win_amd64.whl", hash = "sha256:3f14cd3d386ac4d05c5a39a51b84387403dadbd936e17cb35882134d4f8f0d24"}, - {file = "mypy-1.11.2-py3-none-any.whl", hash = "sha256:b499bc07dbdcd3de92b0a8b29fdf592c111276f6a12fe29c30f6c417dd546d12"}, - {file = "mypy-1.11.2.tar.gz", 
hash = "sha256:7f9993ad3e0ffdc95c2a14b66dee63729f021968bff8ad911867579c65d13a79"}, + {file = "mypy-1.12.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:4397081e620dc4dc18e2f124d5e1d2c288194c2c08df6bdb1db31c38cd1fe1ed"}, + {file = "mypy-1.12.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:684a9c508a283f324804fea3f0effeb7858eb03f85c4402a967d187f64562469"}, + {file = "mypy-1.12.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:6cabe4cda2fa5eca7ac94854c6c37039324baaa428ecbf4de4567279e9810f9e"}, + {file = "mypy-1.12.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:060a07b10e999ac9e7fa249ce2bdcfa9183ca2b70756f3bce9df7a92f78a3c0a"}, + {file = "mypy-1.12.0-cp310-cp310-win_amd64.whl", hash = "sha256:0eff042d7257f39ba4ca06641d110ca7d2ad98c9c1fb52200fe6b1c865d360ff"}, + {file = "mypy-1.12.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:4b86de37a0da945f6d48cf110d5206c5ed514b1ca2614d7ad652d4bf099c7de7"}, + {file = "mypy-1.12.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:20c7c5ce0c1be0b0aea628374e6cf68b420bcc772d85c3c974f675b88e3e6e57"}, + {file = "mypy-1.12.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:a64ee25f05fc2d3d8474985c58042b6759100a475f8237da1f4faf7fcd7e6309"}, + {file = "mypy-1.12.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:faca7ab947c9f457a08dcb8d9a8664fd438080e002b0fa3e41b0535335edcf7f"}, + {file = "mypy-1.12.0-cp311-cp311-win_amd64.whl", hash = "sha256:5bc81701d52cc8767005fdd2a08c19980de9ec61a25dbd2a937dfb1338a826f9"}, + {file = "mypy-1.12.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:8462655b6694feb1c99e433ea905d46c478041a8b8f0c33f1dab00ae881b2164"}, + {file = "mypy-1.12.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:923ea66d282d8af9e0f9c21ffc6653643abb95b658c3a8a32dca1eff09c06475"}, + {file = "mypy-1.12.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = 
"sha256:1ebf9e796521f99d61864ed89d1fb2926d9ab6a5fab421e457cd9c7e4dd65aa9"}, + {file = "mypy-1.12.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:e478601cc3e3fa9d6734d255a59c7a2e5c2934da4378f3dd1e3411ea8a248642"}, + {file = "mypy-1.12.0-cp312-cp312-win_amd64.whl", hash = "sha256:c72861b7139a4f738344faa0e150834467521a3fba42dc98264e5aa9507dd601"}, + {file = "mypy-1.12.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:52b9e1492e47e1790360a43755fa04101a7ac72287b1a53ce817f35899ba0521"}, + {file = "mypy-1.12.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:48d3e37dd7d9403e38fa86c46191de72705166d40b8c9f91a3de77350daa0893"}, + {file = "mypy-1.12.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:2f106db5ccb60681b622ac768455743ee0e6a857724d648c9629a9bd2ac3f721"}, + {file = "mypy-1.12.0-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:233e11b3f73ee1f10efada2e6da0f555b2f3a5316e9d8a4a1224acc10e7181d3"}, + {file = "mypy-1.12.0-cp313-cp313-win_amd64.whl", hash = "sha256:4ae8959c21abcf9d73aa6c74a313c45c0b5a188752bf37dace564e29f06e9c1b"}, + {file = "mypy-1.12.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:eafc1b7319b40ddabdc3db8d7d48e76cfc65bbeeafaa525a4e0fa6b76175467f"}, + {file = "mypy-1.12.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:9b9ce1ad8daeb049c0b55fdb753d7414260bad8952645367e70ac91aec90e07e"}, + {file = "mypy-1.12.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:bfe012b50e1491d439172c43ccb50db66d23fab714d500b57ed52526a1020bb7"}, + {file = "mypy-1.12.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:2c40658d4fa1ab27cb53d9e2f1066345596af2f8fe4827defc398a09c7c9519b"}, + {file = "mypy-1.12.0-cp38-cp38-win_amd64.whl", hash = "sha256:dee78a8b9746c30c1e617ccb1307b351ded57f0de0d287ca6276378d770006c0"}, + {file = "mypy-1.12.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:6b5df6c8a8224f6b86746bda716bbe4dbe0ce89fd67b1fa4661e11bfe38e8ec8"}, + 
{file = "mypy-1.12.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:5feee5c74eb9749e91b77f60b30771563327329e29218d95bedbe1257e2fe4b0"}, + {file = "mypy-1.12.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:77278e8c6ffe2abfba6db4125de55f1024de9a323be13d20e4f73b8ed3402bd1"}, + {file = "mypy-1.12.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:dcfb754dea911039ac12434d1950d69a2f05acd4d56f7935ed402be09fad145e"}, + {file = "mypy-1.12.0-cp39-cp39-win_amd64.whl", hash = "sha256:06de0498798527451ffb60f68db0d368bd2bae2bbfb5237eae616d4330cc87aa"}, + {file = "mypy-1.12.0-py3-none-any.whl", hash = "sha256:fd313226af375d52e1e36c383f39bf3836e1f192801116b31b090dfcd3ec5266"}, + {file = "mypy-1.12.0.tar.gz", hash = "sha256:65a22d87e757ccd95cbbf6f7e181e6caa87128255eb2b6be901bb71b26d8a99d"}, ] [package.dependencies] @@ -2779,51 +2775,6 @@ files = [ {file = "numpy-1.23.2.tar.gz", hash = "sha256:b78d00e48261fbbd04aa0d7427cf78d18401ee0abd89c7559bbf422e5b1c7d01"}, ] -[[package]] -name = "numpy" -version = "1.26.4" -description = "Fundamental package for array computing in Python" -optional = false -python-versions = ">=3.9" -files = [ - {file = "numpy-1.26.4-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:9ff0f4f29c51e2803569d7a51c2304de5554655a60c5d776e35b4a41413830d0"}, - {file = "numpy-1.26.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:2e4ee3380d6de9c9ec04745830fd9e2eccb3e6cf790d39d7b98ffd19b0dd754a"}, - {file = "numpy-1.26.4-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d209d8969599b27ad20994c8e41936ee0964e6da07478d6c35016bc386b66ad4"}, - {file = "numpy-1.26.4-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ffa75af20b44f8dba823498024771d5ac50620e6915abac414251bd971b4529f"}, - {file = "numpy-1.26.4-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:62b8e4b1e28009ef2846b4c7852046736bab361f7aeadeb6a5b89ebec3c7055a"}, - {file = 
"numpy-1.26.4-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:a4abb4f9001ad2858e7ac189089c42178fcce737e4169dc61321660f1a96c7d2"}, - {file = "numpy-1.26.4-cp310-cp310-win32.whl", hash = "sha256:bfe25acf8b437eb2a8b2d49d443800a5f18508cd811fea3181723922a8a82b07"}, - {file = "numpy-1.26.4-cp310-cp310-win_amd64.whl", hash = "sha256:b97fe8060236edf3662adfc2c633f56a08ae30560c56310562cb4f95500022d5"}, - {file = "numpy-1.26.4-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:4c66707fabe114439db9068ee468c26bbdf909cac0fb58686a42a24de1760c71"}, - {file = "numpy-1.26.4-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:edd8b5fe47dab091176d21bb6de568acdd906d1887a4584a15a9a96a1dca06ef"}, - {file = "numpy-1.26.4-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7ab55401287bfec946ced39700c053796e7cc0e3acbef09993a9ad2adba6ca6e"}, - {file = "numpy-1.26.4-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:666dbfb6ec68962c033a450943ded891bed2d54e6755e35e5835d63f4f6931d5"}, - {file = "numpy-1.26.4-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:96ff0b2ad353d8f990b63294c8986f1ec3cb19d749234014f4e7eb0112ceba5a"}, - {file = "numpy-1.26.4-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:60dedbb91afcbfdc9bc0b1f3f402804070deed7392c23eb7a7f07fa857868e8a"}, - {file = "numpy-1.26.4-cp311-cp311-win32.whl", hash = "sha256:1af303d6b2210eb850fcf03064d364652b7120803a0b872f5211f5234b399f20"}, - {file = "numpy-1.26.4-cp311-cp311-win_amd64.whl", hash = "sha256:cd25bcecc4974d09257ffcd1f098ee778f7834c3ad767fe5db785be9a4aa9cb2"}, - {file = "numpy-1.26.4-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:b3ce300f3644fb06443ee2222c2201dd3a89ea6040541412b8fa189341847218"}, - {file = "numpy-1.26.4-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:03a8c78d01d9781b28a6989f6fa1bb2c4f2d51201cf99d3dd875df6fbd96b23b"}, - {file = "numpy-1.26.4-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:9fad7dcb1aac3c7f0584a5a8133e3a43eeb2fe127f47e3632d43d677c66c102b"}, - {file = "numpy-1.26.4-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:675d61ffbfa78604709862923189bad94014bef562cc35cf61d3a07bba02a7ed"}, - {file = "numpy-1.26.4-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:ab47dbe5cc8210f55aa58e4805fe224dac469cde56b9f731a4c098b91917159a"}, - {file = "numpy-1.26.4-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:1dda2e7b4ec9dd512f84935c5f126c8bd8b9f2fc001e9f54af255e8c5f16b0e0"}, - {file = "numpy-1.26.4-cp312-cp312-win32.whl", hash = "sha256:50193e430acfc1346175fcbdaa28ffec49947a06918b7b92130744e81e640110"}, - {file = "numpy-1.26.4-cp312-cp312-win_amd64.whl", hash = "sha256:08beddf13648eb95f8d867350f6a018a4be2e5ad54c8d8caed89ebca558b2818"}, - {file = "numpy-1.26.4-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:7349ab0fa0c429c82442a27a9673fc802ffdb7c7775fad780226cb234965e53c"}, - {file = "numpy-1.26.4-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:52b8b60467cd7dd1e9ed082188b4e6bb35aa5cdd01777621a1658910745b90be"}, - {file = "numpy-1.26.4-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d5241e0a80d808d70546c697135da2c613f30e28251ff8307eb72ba696945764"}, - {file = "numpy-1.26.4-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f870204a840a60da0b12273ef34f7051e98c3b5961b61b0c2c1be6dfd64fbcd3"}, - {file = "numpy-1.26.4-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:679b0076f67ecc0138fd2ede3a8fd196dddc2ad3254069bcb9faf9a79b1cebcd"}, - {file = "numpy-1.26.4-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:47711010ad8555514b434df65f7d7b076bb8261df1ca9bb78f53d3b2db02e95c"}, - {file = "numpy-1.26.4-cp39-cp39-win32.whl", hash = "sha256:a354325ee03388678242a4d7ebcd08b5c727033fcff3b2f536aea978e15ee9e6"}, - {file = "numpy-1.26.4-cp39-cp39-win_amd64.whl", hash = "sha256:3373d5d70a5fe74a2c1bb6d2cfd9609ecf686d47a2d7b1d37a8f3b6bf6003aea"}, - {file = 
"numpy-1.26.4-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:afedb719a9dcfc7eaf2287b839d8198e06dcd4cb5d276a3df279231138e83d30"}, - {file = "numpy-1.26.4-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:95a7476c59002f2f6c590b9b7b998306fba6a5aa646b1e22ddfeaf8f78c3a29c"}, - {file = "numpy-1.26.4-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:7e50d0a0cc3189f9cb0aeb3a6a6af18c16f59f004b866cd2be1c14b36134a4a0"}, - {file = "numpy-1.26.4.tar.gz", hash = "sha256:2a02aba9ed12e4ac4eb3ea9421c420301a0c6460d9830d74a9df87efa4912010"}, -] - [[package]] name = "numpy" version = "2.1.2" @@ -2904,12 +2855,12 @@ files = [ [package.dependencies] numpy = [ + {version = ">=1.26.0", markers = "python_version >= \"3.12\""}, {version = ">=1.21.0", markers = "python_version <= \"3.9\" and platform_system == \"Darwin\" and platform_machine == \"arm64\" and python_version >= \"3.8\""}, {version = ">=1.19.3", markers = "platform_system == \"Linux\" and platform_machine == \"aarch64\" and python_version >= \"3.8\" and python_version < \"3.10\" or python_version > \"3.9\" and python_version < \"3.10\" or python_version >= \"3.9\" and platform_system != \"Darwin\" and python_version < \"3.10\" or python_version >= \"3.9\" and platform_machine != \"arm64\" and python_version < \"3.10\""}, {version = ">=1.17.3", markers = "(platform_system != \"Darwin\" and platform_system != \"Linux\") and python_version >= \"3.8\" and python_version < \"3.9\" or platform_system != \"Darwin\" and python_version >= \"3.8\" and python_version < \"3.9\" and platform_machine != \"aarch64\" or platform_machine != \"arm64\" and python_version >= \"3.8\" and python_version < \"3.9\" and platform_system != \"Linux\" or (platform_machine != \"arm64\" and platform_machine != \"aarch64\") and python_version >= \"3.8\" and python_version < \"3.9\""}, {version = ">=1.21.4", markers = "python_version >= \"3.10\" and platform_system == \"Darwin\" and python_version < \"3.11\""}, {version 
= ">=1.21.2", markers = "platform_system != \"Darwin\" and python_version >= \"3.10\" and python_version < \"3.11\""}, - {version = ">=1.26.0", markers = "python_version >= \"3.12\""}, {version = ">=1.23.5", markers = "python_version >= \"3.11\" and python_version < \"3.12\""}, ] @@ -2986,9 +2937,9 @@ files = [ [package.dependencies] numpy = [ + {version = ">=1.23.2", markers = "python_version >= \"3.11\""}, {version = ">=1.20.3", markers = "python_version < \"3.10\""}, {version = ">=1.21.0", markers = "python_version >= \"3.10\" and python_version < \"3.11\""}, - {version = ">=1.23.2", markers = "python_version >= \"3.11\""}, ] python-dateutil = ">=2.8.2" pytz = ">=2020.1" @@ -3031,24 +2982,6 @@ files = [ [package.dependencies] types-pytz = ">=2022.1.1" -[[package]] -name = "pandas-stubs" -version = "2.0.3.230814" -description = "Type annotations for pandas" -optional = true -python-versions = ">=3.8" -files = [ - {file = "pandas_stubs-2.0.3.230814-py3-none-any.whl", hash = "sha256:4b3dfc027d49779176b7daa031a3405f7b839bcb6e312f4b9f29fea5feec5b4f"}, - {file = "pandas_stubs-2.0.3.230814.tar.gz", hash = "sha256:1d5cc09e36e3d9f9a1ed9dceae4e03eeb26d1b898dd769996925f784365c8769"}, -] - -[package.dependencies] -numpy = [ - {version = "<=1.24.3", markers = "python_full_version <= \"3.8.0\""}, - {version = ">=1.25.0", markers = "python_version >= \"3.9\""}, -] -types-pytz = ">=2022.1.1" - [[package]] name = "pandocfilters" version = "1.5.1" @@ -3421,20 +3354,6 @@ files = [ [package.extras] diagrams = ["jinja2", "railroad-diagrams"] -[[package]] -name = "pyparsing" -version = "3.1.4" -description = "pyparsing module - Classes and methods to define and execute parsing grammars" -optional = false -python-versions = ">=3.6.8" -files = [ - {file = "pyparsing-3.1.4-py3-none-any.whl", hash = "sha256:a6a7ee4235a3f944aa1fa2249307708f893fe5717dc603503c6c7969c070fb7c"}, - {file = "pyparsing-3.1.4.tar.gz", hash = 
"sha256:f86ec8d1a83f11977c9a6ea7598e8c27fc5cddfa5b07ea2241edbbde1d7bc032"}, -] - -[package.extras] -diagrams = ["jinja2", "railroad-diagrams"] - [[package]] name = "pyproject-api" version = "1.8.0" From e9d5f861a28eab7b6bc819d3e620e01576fedefc Mon Sep 17 00:00:00 2001 From: prakharjain Date: Tue, 15 Oct 2024 22:44:01 +0530 Subject: [PATCH 056/161] Added support by using oriented_box_iou_batch Final --- supervision/detection/utils.py | 8 ++-- supervision/metrics/mean_average_precision.py | 38 +++++-------------- 2 files changed, 14 insertions(+), 32 deletions(-) diff --git a/supervision/detection/utils.py b/supervision/detection/utils.py index 43fcec5a0..a0a4d3e21 100644 --- a/supervision/detection/utils.py +++ b/supervision/detection/utils.py @@ -23,8 +23,8 @@ def polygon_to_mask(polygon: np.ndarray, resolution_wh: Tuple[int, int]) -> np.n np.ndarray: The generated 2D mask, where the polygon is marked with `1`'s and the rest is filled with `0`'s. """ - width, height = resolution_wh - mask = np.zeros((height, width)) + width, height = map(int, resolution_wh) + mask = np.zeros((height, width), dtype=np.uint8) cv2.fillPoly(mask, [polygon], color=1) return mask @@ -163,9 +163,9 @@ def oriented_box_iou_batch( boxes_true = boxes_true.reshape(-1, 4, 2) boxes_detection = boxes_detection.reshape(-1, 4, 2) - max_height = max(boxes_true[:, :, 0].max(), boxes_detection[:, :, 0].max()) + 1 + max_height = int(max(boxes_true[:, :, 0].max(), boxes_detection[:, :, 0].max()) + 1) # adding 1 because we are 0-indexed - max_width = max(boxes_true[:, :, 1].max(), boxes_detection[:, :, 1].max()) + 1 + max_width = int(max(boxes_true[:, :, 1].max(), boxes_detection[:, :, 1].max()) + 1) mask_true = np.zeros((boxes_true.shape[0], max_height, max_width)) for i, box_true in enumerate(boxes_true): diff --git a/supervision/metrics/mean_average_precision.py b/supervision/metrics/mean_average_precision.py index 58d4764c1..f0613f2b9 100644 --- a/supervision/metrics/mean_average_precision.py +++ 
b/supervision/metrics/mean_average_precision.py @@ -9,11 +9,7 @@ from supervision.config import ORIENTED_BOX_COORDINATES from supervision.detection.core import Detections -from supervision.detection.utils import ( - box_iou_batch, - mask_iou_batch, - oriented_box_iou_batch, -) +from supervision.detection.utils import box_iou_batch, mask_iou_batch, oriented_box_iou_batch from supervision.draw.color import LEGACY_COLOR_PALETTE from supervision.metrics.core import Metric, MetricTarget from supervision.metrics.utils.object_size import ( @@ -40,6 +36,9 @@ def __init__( class_agnostic (bool): Whether to treat all data as a single class. """ self._metric_target = metric_target + if self._metric_target == MetricTarget.ORIENTED_BOUNDING_BOXES: + pass + self._class_agnostic = class_agnostic self._predictions_list: List[Detections] = [] @@ -75,15 +74,6 @@ def update( f" targets ({len(targets)}) during the update must be the same." ) - if self._class_agnostic: - predictions = deepcopy(predictions) - targets = deepcopy(targets) - - for prediction in predictions: - prediction.class_id[:] = -1 - for target in targets: - target.class_id[:] = -1 - self._predictions_list.extend(predictions) self._targets_list.extend(targets) @@ -181,13 +171,9 @@ def _compute( elif self._metric_target == MetricTarget.MASKS: iou = mask_iou_batch(target_contents, prediction_contents) elif self._metric_target == MetricTarget.ORIENTED_BOUNDING_BOXES: - iou = oriented_box_iou_batch( - target_contents, prediction_contents - ) + iou = oriented_box_iou_batch(target_contents, prediction_contents) else: - raise ValueError( - f"Unsupported metric target: {self._metric_target}" - ) + raise NotImplementedError("Unsupported metric target for IoU calculation") matches = self._match_detection_batch( predictions.class_id, targets.class_id, iou, iou_thresholds @@ -242,7 +228,7 @@ def _compute_average_precision(recall: np.ndarray, precision: np.ndarray) -> flo for r, p in zip(recall[::-1], precision[::-1]): 
precision_levels[recall_levels <= r] = p - average_precision = (1 / 101 * precision_levels).sum() + average_precision = (1 / 100 * precision_levels).sum() return average_precision @staticmethod @@ -257,7 +243,6 @@ def _match_detection_batch( iou_thresholds.shape[0], ) correct = np.zeros((num_predictions, num_iou_levels), dtype=bool) - correct_class = target_classes[:, None] == predictions_classes for i, iou_level in enumerate(iou_thresholds): @@ -345,7 +330,8 @@ def _detections_content(self, detections: Detections) -> np.ndarray: else self._make_empty_content() ) if self._metric_target == MetricTarget.ORIENTED_BOUNDING_BOXES: - if obb := detections.data.get(ORIENTED_BOX_COORDINATES): + obb = detections.data.get(ORIENTED_BOX_COORDINATES) + if obb is not None and len(obb) > 0: return np.array(obb, dtype=np.float32) return self._make_empty_content() raise ValueError(f"Invalid metric target: {self._metric_target}") @@ -396,8 +382,6 @@ class MeanAveragePrecisionResult: Attributes: metric_target (MetricTarget): the type of data used for the metric - boxes, masks or oriented bounding boxes. - class_agnostic (bool): When computing class-agnostic results, class ID - is set to `-1`. mAP_map50_95 (float): the mAP score at IoU thresholds from `0.5` to `0.95`. mAP_map50 (float): the mAP score at IoU threshold of `0.5`. mAP_map75 (float): the mAP score at IoU threshold of `0.75`. @@ -417,7 +401,6 @@ class and IoU threshold. 
Shape: `(num_target_classes, num_iou_thresholds)` """ metric_target: MetricTarget - is_class_agnostic: bool @property def map50_95(self) -> float: @@ -452,7 +435,6 @@ def __str__(self) -> str: out_str = ( f"{self.__class__.__name__}:\n" f"Metric target: {self.metric_target}\n" - f"Class agnostic: {self.is_class_agnostic}\n" f"mAP @ 50:95: {self.map50_95:.4f}\n" f"mAP @ 50: {self.map50:.4f}\n" f"mAP @ 75: {self.map75:.4f}\n" @@ -576,4 +558,4 @@ def plot(self): plt.rcParams["font.family"] = "sans-serif" plt.tight_layout() - plt.show() + plt.show() \ No newline at end of file From 865c4e42317f75e822dbe3b3abfd67eea3164b73 Mon Sep 17 00:00:00 2001 From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Tue, 15 Oct 2024 17:15:13 +0000 Subject: [PATCH 057/161] =?UTF-8?q?fix(pre=5Fcommit):=20=F0=9F=8E=A8=20aut?= =?UTF-8?q?o=20format=20pre-commit=20hooks?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- supervision/metrics/mean_average_precision.py | 16 ++++++++++++---- 1 file changed, 12 insertions(+), 4 deletions(-) diff --git a/supervision/metrics/mean_average_precision.py b/supervision/metrics/mean_average_precision.py index f0613f2b9..0ebffd51b 100644 --- a/supervision/metrics/mean_average_precision.py +++ b/supervision/metrics/mean_average_precision.py @@ -9,7 +9,11 @@ from supervision.config import ORIENTED_BOX_COORDINATES from supervision.detection.core import Detections -from supervision.detection.utils import box_iou_batch, mask_iou_batch, oriented_box_iou_batch +from supervision.detection.utils import ( + box_iou_batch, + mask_iou_batch, + oriented_box_iou_batch, +) from supervision.draw.color import LEGACY_COLOR_PALETTE from supervision.metrics.core import Metric, MetricTarget from supervision.metrics.utils.object_size import ( @@ -171,9 +175,13 @@ def _compute( elif self._metric_target == MetricTarget.MASKS: iou = mask_iou_batch(target_contents, prediction_contents) elif 
self._metric_target == MetricTarget.ORIENTED_BOUNDING_BOXES: - iou = oriented_box_iou_batch(target_contents, prediction_contents) + iou = oriented_box_iou_batch( + target_contents, prediction_contents + ) else: - raise NotImplementedError("Unsupported metric target for IoU calculation") + raise NotImplementedError( + "Unsupported metric target for IoU calculation" + ) matches = self._match_detection_batch( predictions.class_id, targets.class_id, iou, iou_thresholds @@ -558,4 +566,4 @@ def plot(self): plt.rcParams["font.family"] = "sans-serif" plt.tight_layout() - plt.show() \ No newline at end of file + plt.show() From 85f400c1625867bea5ac12af9296a5ef65ec47d0 Mon Sep 17 00:00:00 2001 From: prakharjain Date: Tue, 15 Oct 2024 22:55:41 +0530 Subject: [PATCH 058/161] Added support by using oriented_box_iou_batch --- supervision/metrics/mean_average_precision.py | 9 +++++++++ 1 file changed, 9 insertions(+) diff --git a/supervision/metrics/mean_average_precision.py b/supervision/metrics/mean_average_precision.py index 0ebffd51b..2a4f121d4 100644 --- a/supervision/metrics/mean_average_precision.py +++ b/supervision/metrics/mean_average_precision.py @@ -78,6 +78,15 @@ def update( f" targets ({len(targets)}) during the update must be the same." ) + if self._class_agnostic: + predictions = deepcopy(predictions) + targets = deepcopy(targets) + + for prediction in predictions: + prediction.class_id[:] = -1 + for target in targets: + target.class_id[:] = -1 + self._predictions_list.extend(predictions) self._targets_list.extend(targets) From 0012052cdd33fb3727269f72024e9f3592ee1ae1 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Wed, 16 Oct 2024 00:19:59 +0000 Subject: [PATCH 059/161] :arrow_up: Bump tox from 4.21.2 to 4.22.0 Bumps [tox](https://github.com/tox-dev/tox) from 4.21.2 to 4.22.0. 
- [Release notes](https://github.com/tox-dev/tox/releases) - [Changelog](https://github.com/tox-dev/tox/blob/main/docs/changelog.rst) - [Commits](https://github.com/tox-dev/tox/compare/4.21.2...4.22.0) --- updated-dependencies: - dependency-name: tox dependency-type: direct:development update-type: version-update:semver-minor ... Signed-off-by: dependabot[bot] --- poetry.lock | 226 ++++++++++++++++------------------------------------ 1 file changed, 68 insertions(+), 158 deletions(-) diff --git a/poetry.lock b/poetry.lock index 1994447dd..22d8fa7b4 100644 --- a/poetry.lock +++ b/poetry.lock @@ -602,6 +602,68 @@ traitlets = ">=4" [package.extras] test = ["pytest"] +[[package]] +name = "contourpy" +version = "1.1.0" +description = "Python library for calculating contours of 2D quadrilateral grids" +optional = false +python-versions = ">=3.8" +files = [ + {file = "contourpy-1.1.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:89f06eff3ce2f4b3eb24c1055a26981bffe4e7264acd86f15b97e40530b794bc"}, + {file = "contourpy-1.1.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:dffcc2ddec1782dd2f2ce1ef16f070861af4fb78c69862ce0aab801495dda6a3"}, + {file = "contourpy-1.1.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:25ae46595e22f93592d39a7eac3d638cda552c3e1160255258b695f7b58e5655"}, + {file = "contourpy-1.1.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:17cfaf5ec9862bc93af1ec1f302457371c34e688fbd381f4035a06cd47324f48"}, + {file = "contourpy-1.1.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:18a64814ae7bce73925131381603fff0116e2df25230dfc80d6d690aa6e20b37"}, + {file = "contourpy-1.1.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:90c81f22b4f572f8a2110b0b741bb64e5a6427e0a198b2cdc1fbaf85f352a3aa"}, + {file = "contourpy-1.1.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:53cc3a40635abedbec7f1bde60f8c189c49e84ac180c665f2cd7c162cc454baa"}, + {file 
= "contourpy-1.1.0-cp310-cp310-win32.whl", hash = "sha256:9b2dd2ca3ac561aceef4c7c13ba654aaa404cf885b187427760d7f7d4c57cff8"}, + {file = "contourpy-1.1.0-cp310-cp310-win_amd64.whl", hash = "sha256:1f795597073b09d631782e7245016a4323cf1cf0b4e06eef7ea6627e06a37ff2"}, + {file = "contourpy-1.1.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:0b7b04ed0961647691cfe5d82115dd072af7ce8846d31a5fac6c142dcce8b882"}, + {file = "contourpy-1.1.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:27bc79200c742f9746d7dd51a734ee326a292d77e7d94c8af6e08d1e6c15d545"}, + {file = "contourpy-1.1.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:052cc634bf903c604ef1a00a5aa093c54f81a2612faedaa43295809ffdde885e"}, + {file = "contourpy-1.1.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9382a1c0bc46230fb881c36229bfa23d8c303b889b788b939365578d762b5c18"}, + {file = "contourpy-1.1.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e5cec36c5090e75a9ac9dbd0ff4a8cf7cecd60f1b6dc23a374c7d980a1cd710e"}, + {file = "contourpy-1.1.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1f0cbd657e9bde94cd0e33aa7df94fb73c1ab7799378d3b3f902eb8eb2e04a3a"}, + {file = "contourpy-1.1.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:181cbace49874f4358e2929aaf7ba84006acb76694102e88dd15af861996c16e"}, + {file = "contourpy-1.1.0-cp311-cp311-win32.whl", hash = "sha256:edb989d31065b1acef3828a3688f88b2abb799a7db891c9e282df5ec7e46221b"}, + {file = "contourpy-1.1.0-cp311-cp311-win_amd64.whl", hash = "sha256:fb3b7d9e6243bfa1efb93ccfe64ec610d85cfe5aec2c25f97fbbd2e58b531256"}, + {file = "contourpy-1.1.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:bcb41692aa09aeb19c7c213411854402f29f6613845ad2453d30bf421fe68fed"}, + {file = "contourpy-1.1.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:5d123a5bc63cd34c27ff9c7ac1cd978909e9c71da12e05be0231c608048bb2ae"}, + {file = 
"contourpy-1.1.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:62013a2cf68abc80dadfd2307299bfa8f5aa0dcaec5b2954caeb5fa094171103"}, + {file = "contourpy-1.1.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:0b6616375d7de55797d7a66ee7d087efe27f03d336c27cf1f32c02b8c1a5ac70"}, + {file = "contourpy-1.1.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:317267d915490d1e84577924bd61ba71bf8681a30e0d6c545f577363157e5e94"}, + {file = "contourpy-1.1.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d551f3a442655f3dcc1285723f9acd646ca5858834efeab4598d706206b09c9f"}, + {file = "contourpy-1.1.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:e7a117ce7df5a938fe035cad481b0189049e8d92433b4b33aa7fc609344aafa1"}, + {file = "contourpy-1.1.0-cp38-cp38-win32.whl", hash = "sha256:108dfb5b3e731046a96c60bdc46a1a0ebee0760418951abecbe0fc07b5b93b27"}, + {file = "contourpy-1.1.0-cp38-cp38-win_amd64.whl", hash = "sha256:d4f26b25b4f86087e7d75e63212756c38546e70f2a92d2be44f80114826e1cd4"}, + {file = "contourpy-1.1.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:bc00bb4225d57bff7ebb634646c0ee2a1298402ec10a5fe7af79df9a51c1bfd9"}, + {file = "contourpy-1.1.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:189ceb1525eb0655ab8487a9a9c41f42a73ba52d6789754788d1883fb06b2d8a"}, + {file = "contourpy-1.1.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9f2931ed4741f98f74b410b16e5213f71dcccee67518970c42f64153ea9313b9"}, + {file = "contourpy-1.1.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:30f511c05fab7f12e0b1b7730ebdc2ec8deedcfb505bc27eb570ff47c51a8f15"}, + {file = "contourpy-1.1.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:143dde50520a9f90e4a2703f367cf8ec96a73042b72e68fcd184e1279962eb6f"}, + {file = "contourpy-1.1.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:e94bef2580e25b5fdb183bf98a2faa2adc5b638736b2c0a4da98691da641316a"}, + {file = "contourpy-1.1.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:ed614aea8462735e7d70141374bd7650afd1c3f3cb0c2dbbcbe44e14331bf002"}, + {file = "contourpy-1.1.0-cp39-cp39-win32.whl", hash = "sha256:71551f9520f008b2950bef5f16b0e3587506ef4f23c734b71ffb7b89f8721999"}, + {file = "contourpy-1.1.0-cp39-cp39-win_amd64.whl", hash = "sha256:438ba416d02f82b692e371858143970ed2eb6337d9cdbbede0d8ad9f3d7dd17d"}, + {file = "contourpy-1.1.0-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:a698c6a7a432789e587168573a864a7ea374c6be8d4f31f9d87c001d5a843493"}, + {file = "contourpy-1.1.0-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:397b0ac8a12880412da3551a8cb5a187d3298a72802b45a3bd1805e204ad8439"}, + {file = "contourpy-1.1.0-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:a67259c2b493b00e5a4d0f7bfae51fb4b3371395e47d079a4446e9b0f4d70e76"}, + {file = "contourpy-1.1.0-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:2b836d22bd2c7bb2700348e4521b25e077255ebb6ab68e351ab5aa91ca27e027"}, + {file = "contourpy-1.1.0-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:084eaa568400cfaf7179b847ac871582199b1b44d5699198e9602ecbbb5f6104"}, + {file = "contourpy-1.1.0-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:911ff4fd53e26b019f898f32db0d4956c9d227d51338fb3b03ec72ff0084ee5f"}, + {file = "contourpy-1.1.0.tar.gz", hash = "sha256:e53046c3863828d21d531cc3b53786e6580eb1ba02477e8681009b6aa0870b21"}, +] + +[package.dependencies] +numpy = ">=1.16" + +[package.extras] +bokeh = ["bokeh", "selenium"] +docs = ["furo", "sphinx-copybutton"] +mypy = ["contourpy[bokeh,docs]", "docutils-stubs", "mypy (==1.2.0)", "types-Pillow"] +test = ["Pillow", "contourpy[test-no-images]", "matplotlib"] +test-no-images = ["pytest", "pytest-cov", "wurlitzer"] + [[package]] name = "contourpy" version = "1.1.1" @@ -664,10 +726,7 @@ files = [ ] [package.dependencies] 
-numpy = [ - {version = ">=1.16,<2.0", markers = "python_version <= \"3.11\""}, - {version = ">=1.26.0rc1,<2.0", markers = "python_version >= \"3.12\""}, -] +numpy = {version = ">=1.16,<2.0", markers = "python_version <= \"3.11\""} [package.extras] bokeh = ["bokeh", "selenium"] @@ -2139,74 +2198,6 @@ pillow = ">=6.2.0" pyparsing = ">=2.3.1,<3.1" python-dateutil = ">=2.7" -[[package]] -name = "matplotlib" -version = "3.7.5" -description = "Python plotting package" -optional = false -python-versions = ">=3.8" -files = [ - {file = "matplotlib-3.7.5-cp310-cp310-macosx_10_12_universal2.whl", hash = "sha256:4a87b69cb1cb20943010f63feb0b2901c17a3b435f75349fd9865713bfa63925"}, - {file = "matplotlib-3.7.5-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:d3ce45010fefb028359accebb852ca0c21bd77ec0f281952831d235228f15810"}, - {file = "matplotlib-3.7.5-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:fbea1e762b28400393d71be1a02144aa16692a3c4c676ba0178ce83fc2928fdd"}, - {file = "matplotlib-3.7.5-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ec0e1adc0ad70ba8227e957551e25a9d2995e319c29f94a97575bb90fa1d4469"}, - {file = "matplotlib-3.7.5-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6738c89a635ced486c8a20e20111d33f6398a9cbebce1ced59c211e12cd61455"}, - {file = "matplotlib-3.7.5-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1210b7919b4ed94b5573870f316bca26de3e3b07ffdb563e79327dc0e6bba515"}, - {file = "matplotlib-3.7.5-cp310-cp310-win32.whl", hash = "sha256:068ebcc59c072781d9dcdb82f0d3f1458271c2de7ca9c78f5bd672141091e9e1"}, - {file = "matplotlib-3.7.5-cp310-cp310-win_amd64.whl", hash = "sha256:f098ffbaab9df1e3ef04e5a5586a1e6b1791380698e84938d8640961c79b1fc0"}, - {file = "matplotlib-3.7.5-cp311-cp311-macosx_10_12_universal2.whl", hash = "sha256:f65342c147572673f02a4abec2d5a23ad9c3898167df9b47c149f32ce61ca078"}, - {file = "matplotlib-3.7.5-cp311-cp311-macosx_10_12_x86_64.whl", hash = 
"sha256:4ddf7fc0e0dc553891a117aa083039088d8a07686d4c93fb8a810adca68810af"}, - {file = "matplotlib-3.7.5-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:0ccb830fc29442360d91be48527809f23a5dcaee8da5f4d9b2d5b867c1b087b8"}, - {file = "matplotlib-3.7.5-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:efc6bb28178e844d1f408dd4d6341ee8a2e906fc9e0fa3dae497da4e0cab775d"}, - {file = "matplotlib-3.7.5-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3b15c4c2d374f249f324f46e883340d494c01768dd5287f8bc00b65b625ab56c"}, - {file = "matplotlib-3.7.5-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3d028555421912307845e59e3de328260b26d055c5dac9b182cc9783854e98fb"}, - {file = "matplotlib-3.7.5-cp311-cp311-win32.whl", hash = "sha256:fe184b4625b4052fa88ef350b815559dd90cc6cc8e97b62f966e1ca84074aafa"}, - {file = "matplotlib-3.7.5-cp311-cp311-win_amd64.whl", hash = "sha256:084f1f0f2f1010868c6f1f50b4e1c6f2fb201c58475494f1e5b66fed66093647"}, - {file = "matplotlib-3.7.5-cp312-cp312-macosx_10_12_universal2.whl", hash = "sha256:34bceb9d8ddb142055ff27cd7135f539f2f01be2ce0bafbace4117abe58f8fe4"}, - {file = "matplotlib-3.7.5-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:c5a2134162273eb8cdfd320ae907bf84d171de948e62180fa372a3ca7cf0f433"}, - {file = "matplotlib-3.7.5-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:039ad54683a814002ff37bf7981aa1faa40b91f4ff84149beb53d1eb64617980"}, - {file = "matplotlib-3.7.5-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4d742ccd1b09e863b4ca58291728db645b51dab343eebb08d5d4b31b308296ce"}, - {file = "matplotlib-3.7.5-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:743b1c488ca6a2bc7f56079d282e44d236bf375968bfd1b7ba701fd4d0fa32d6"}, - {file = "matplotlib-3.7.5-cp312-cp312-win_amd64.whl", hash = "sha256:fbf730fca3e1f23713bc1fae0a57db386e39dc81ea57dc305c67f628c1d7a342"}, - {file = 
"matplotlib-3.7.5-cp38-cp38-macosx_10_12_universal2.whl", hash = "sha256:cfff9b838531698ee40e40ea1a8a9dc2c01edb400b27d38de6ba44c1f9a8e3d2"}, - {file = "matplotlib-3.7.5-cp38-cp38-macosx_10_12_x86_64.whl", hash = "sha256:1dbcca4508bca7847fe2d64a05b237a3dcaec1f959aedb756d5b1c67b770c5ee"}, - {file = "matplotlib-3.7.5-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:4cdf4ef46c2a1609a50411b66940b31778db1e4b73d4ecc2eaa40bd588979b13"}, - {file = "matplotlib-3.7.5-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:167200ccfefd1674b60e957186dfd9baf58b324562ad1a28e5d0a6b3bea77905"}, - {file = "matplotlib-3.7.5-cp38-cp38-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:53e64522934df6e1818b25fd48cf3b645b11740d78e6ef765fbb5fa5ce080d02"}, - {file = "matplotlib-3.7.5-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d3e3bc79b2d7d615067bd010caff9243ead1fc95cf735c16e4b2583173f717eb"}, - {file = "matplotlib-3.7.5-cp38-cp38-win32.whl", hash = "sha256:6b641b48c6819726ed47c55835cdd330e53747d4efff574109fd79b2d8a13748"}, - {file = "matplotlib-3.7.5-cp38-cp38-win_amd64.whl", hash = "sha256:f0b60993ed3488b4532ec6b697059897891927cbfc2b8d458a891b60ec03d9d7"}, - {file = "matplotlib-3.7.5-cp39-cp39-macosx_10_12_universal2.whl", hash = "sha256:090964d0afaff9c90e4d8de7836757e72ecfb252fb02884016d809239f715651"}, - {file = "matplotlib-3.7.5-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:9fc6fcfbc55cd719bc0bfa60bde248eb68cf43876d4c22864603bdd23962ba25"}, - {file = "matplotlib-3.7.5-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:5e7cc3078b019bb863752b8b60e8b269423000f1603cb2299608231996bd9d54"}, - {file = "matplotlib-3.7.5-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1e4e9a868e8163abaaa8259842d85f949a919e1ead17644fb77a60427c90473c"}, - {file = "matplotlib-3.7.5-cp39-cp39-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:fa7ebc995a7d747dacf0a717d0eb3aa0f0c6a0e9ea88b0194d3a3cd241a1500f"}, - {file 
= "matplotlib-3.7.5-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3785bfd83b05fc0e0c2ae4c4a90034fe693ef96c679634756c50fe6efcc09856"}, - {file = "matplotlib-3.7.5-cp39-cp39-win32.whl", hash = "sha256:29b058738c104d0ca8806395f1c9089dfe4d4f0f78ea765c6c704469f3fffc81"}, - {file = "matplotlib-3.7.5-cp39-cp39-win_amd64.whl", hash = "sha256:fd4028d570fa4b31b7b165d4a685942ae9cdc669f33741e388c01857d9723eab"}, - {file = "matplotlib-3.7.5-pp38-pypy38_pp73-macosx_10_12_x86_64.whl", hash = "sha256:2a9a3f4d6a7f88a62a6a18c7e6a84aedcaf4faf0708b4ca46d87b19f1b526f88"}, - {file = "matplotlib-3.7.5-pp38-pypy38_pp73-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b9b3fd853d4a7f008a938df909b96db0b454225f935d3917520305b90680579c"}, - {file = "matplotlib-3.7.5-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f0ad550da9f160737d7890217c5eeed4337d07e83ca1b2ca6535078f354e7675"}, - {file = "matplotlib-3.7.5-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:20da7924a08306a861b3f2d1da0d1aa9a6678e480cf8eacffe18b565af2813e7"}, - {file = "matplotlib-3.7.5-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:b45c9798ea6bb920cb77eb7306409756a7fab9db9b463e462618e0559aecb30e"}, - {file = "matplotlib-3.7.5-pp39-pypy39_pp73-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a99866267da1e561c7776fe12bf4442174b79aac1a47bd7e627c7e4d077ebd83"}, - {file = "matplotlib-3.7.5-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2b6aa62adb6c268fc87d80f963aca39c64615c31830b02697743c95590ce3fbb"}, - {file = "matplotlib-3.7.5-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:e530ab6a0afd082d2e9c17eb1eb064a63c5b09bb607b2b74fa41adbe3e162286"}, - {file = "matplotlib-3.7.5.tar.gz", hash = "sha256:1e5c971558ebc811aa07f54c7b7c677d78aa518ef4c390e14673a09e0860184a"}, -] - -[package.dependencies] -contourpy = ">=1.0.1" -cycler = ">=0.10" -fonttools = ">=4.22.0" -importlib-resources = {version = ">=3.2.0", markers = 
"python_version < \"3.10\""} -kiwisolver = ">=1.0.1" -numpy = ">=1.20,<2" -packaging = ">=20.0" -pillow = ">=6.2.0" -pyparsing = ">=2.3.1" -python-dateutil = ">=2.7" - [[package]] name = "matplotlib-inline" version = "0.1.7" @@ -2779,51 +2770,6 @@ files = [ {file = "numpy-1.23.2.tar.gz", hash = "sha256:b78d00e48261fbbd04aa0d7427cf78d18401ee0abd89c7559bbf422e5b1c7d01"}, ] -[[package]] -name = "numpy" -version = "1.26.4" -description = "Fundamental package for array computing in Python" -optional = false -python-versions = ">=3.9" -files = [ - {file = "numpy-1.26.4-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:9ff0f4f29c51e2803569d7a51c2304de5554655a60c5d776e35b4a41413830d0"}, - {file = "numpy-1.26.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:2e4ee3380d6de9c9ec04745830fd9e2eccb3e6cf790d39d7b98ffd19b0dd754a"}, - {file = "numpy-1.26.4-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d209d8969599b27ad20994c8e41936ee0964e6da07478d6c35016bc386b66ad4"}, - {file = "numpy-1.26.4-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ffa75af20b44f8dba823498024771d5ac50620e6915abac414251bd971b4529f"}, - {file = "numpy-1.26.4-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:62b8e4b1e28009ef2846b4c7852046736bab361f7aeadeb6a5b89ebec3c7055a"}, - {file = "numpy-1.26.4-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:a4abb4f9001ad2858e7ac189089c42178fcce737e4169dc61321660f1a96c7d2"}, - {file = "numpy-1.26.4-cp310-cp310-win32.whl", hash = "sha256:bfe25acf8b437eb2a8b2d49d443800a5f18508cd811fea3181723922a8a82b07"}, - {file = "numpy-1.26.4-cp310-cp310-win_amd64.whl", hash = "sha256:b97fe8060236edf3662adfc2c633f56a08ae30560c56310562cb4f95500022d5"}, - {file = "numpy-1.26.4-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:4c66707fabe114439db9068ee468c26bbdf909cac0fb58686a42a24de1760c71"}, - {file = "numpy-1.26.4-cp311-cp311-macosx_11_0_arm64.whl", hash = 
"sha256:edd8b5fe47dab091176d21bb6de568acdd906d1887a4584a15a9a96a1dca06ef"}, - {file = "numpy-1.26.4-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7ab55401287bfec946ced39700c053796e7cc0e3acbef09993a9ad2adba6ca6e"}, - {file = "numpy-1.26.4-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:666dbfb6ec68962c033a450943ded891bed2d54e6755e35e5835d63f4f6931d5"}, - {file = "numpy-1.26.4-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:96ff0b2ad353d8f990b63294c8986f1ec3cb19d749234014f4e7eb0112ceba5a"}, - {file = "numpy-1.26.4-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:60dedbb91afcbfdc9bc0b1f3f402804070deed7392c23eb7a7f07fa857868e8a"}, - {file = "numpy-1.26.4-cp311-cp311-win32.whl", hash = "sha256:1af303d6b2210eb850fcf03064d364652b7120803a0b872f5211f5234b399f20"}, - {file = "numpy-1.26.4-cp311-cp311-win_amd64.whl", hash = "sha256:cd25bcecc4974d09257ffcd1f098ee778f7834c3ad767fe5db785be9a4aa9cb2"}, - {file = "numpy-1.26.4-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:b3ce300f3644fb06443ee2222c2201dd3a89ea6040541412b8fa189341847218"}, - {file = "numpy-1.26.4-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:03a8c78d01d9781b28a6989f6fa1bb2c4f2d51201cf99d3dd875df6fbd96b23b"}, - {file = "numpy-1.26.4-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9fad7dcb1aac3c7f0584a5a8133e3a43eeb2fe127f47e3632d43d677c66c102b"}, - {file = "numpy-1.26.4-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:675d61ffbfa78604709862923189bad94014bef562cc35cf61d3a07bba02a7ed"}, - {file = "numpy-1.26.4-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:ab47dbe5cc8210f55aa58e4805fe224dac469cde56b9f731a4c098b91917159a"}, - {file = "numpy-1.26.4-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:1dda2e7b4ec9dd512f84935c5f126c8bd8b9f2fc001e9f54af255e8c5f16b0e0"}, - {file = "numpy-1.26.4-cp312-cp312-win32.whl", hash = 
"sha256:50193e430acfc1346175fcbdaa28ffec49947a06918b7b92130744e81e640110"}, - {file = "numpy-1.26.4-cp312-cp312-win_amd64.whl", hash = "sha256:08beddf13648eb95f8d867350f6a018a4be2e5ad54c8d8caed89ebca558b2818"}, - {file = "numpy-1.26.4-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:7349ab0fa0c429c82442a27a9673fc802ffdb7c7775fad780226cb234965e53c"}, - {file = "numpy-1.26.4-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:52b8b60467cd7dd1e9ed082188b4e6bb35aa5cdd01777621a1658910745b90be"}, - {file = "numpy-1.26.4-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d5241e0a80d808d70546c697135da2c613f30e28251ff8307eb72ba696945764"}, - {file = "numpy-1.26.4-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f870204a840a60da0b12273ef34f7051e98c3b5961b61b0c2c1be6dfd64fbcd3"}, - {file = "numpy-1.26.4-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:679b0076f67ecc0138fd2ede3a8fd196dddc2ad3254069bcb9faf9a79b1cebcd"}, - {file = "numpy-1.26.4-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:47711010ad8555514b434df65f7d7b076bb8261df1ca9bb78f53d3b2db02e95c"}, - {file = "numpy-1.26.4-cp39-cp39-win32.whl", hash = "sha256:a354325ee03388678242a4d7ebcd08b5c727033fcff3b2f536aea978e15ee9e6"}, - {file = "numpy-1.26.4-cp39-cp39-win_amd64.whl", hash = "sha256:3373d5d70a5fe74a2c1bb6d2cfd9609ecf686d47a2d7b1d37a8f3b6bf6003aea"}, - {file = "numpy-1.26.4-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:afedb719a9dcfc7eaf2287b839d8198e06dcd4cb5d276a3df279231138e83d30"}, - {file = "numpy-1.26.4-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:95a7476c59002f2f6c590b9b7b998306fba6a5aa646b1e22ddfeaf8f78c3a29c"}, - {file = "numpy-1.26.4-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:7e50d0a0cc3189f9cb0aeb3a6a6af18c16f59f004b866cd2be1c14b36134a4a0"}, - {file = "numpy-1.26.4.tar.gz", hash = "sha256:2a02aba9ed12e4ac4eb3ea9421c420301a0c6460d9830d74a9df87efa4912010"}, -] - [[package]] name = "numpy" version = 
"2.1.2" @@ -2904,12 +2850,12 @@ files = [ [package.dependencies] numpy = [ + {version = ">=1.26.0", markers = "python_version >= \"3.12\""}, {version = ">=1.21.0", markers = "python_version <= \"3.9\" and platform_system == \"Darwin\" and platform_machine == \"arm64\" and python_version >= \"3.8\""}, {version = ">=1.19.3", markers = "platform_system == \"Linux\" and platform_machine == \"aarch64\" and python_version >= \"3.8\" and python_version < \"3.10\" or python_version > \"3.9\" and python_version < \"3.10\" or python_version >= \"3.9\" and platform_system != \"Darwin\" and python_version < \"3.10\" or python_version >= \"3.9\" and platform_machine != \"arm64\" and python_version < \"3.10\""}, {version = ">=1.17.3", markers = "(platform_system != \"Darwin\" and platform_system != \"Linux\") and python_version >= \"3.8\" and python_version < \"3.9\" or platform_system != \"Darwin\" and python_version >= \"3.8\" and python_version < \"3.9\" and platform_machine != \"aarch64\" or platform_machine != \"arm64\" and python_version >= \"3.8\" and python_version < \"3.9\" and platform_system != \"Linux\" or (platform_machine != \"arm64\" and platform_machine != \"aarch64\") and python_version >= \"3.8\" and python_version < \"3.9\""}, {version = ">=1.21.4", markers = "python_version >= \"3.10\" and platform_system == \"Darwin\" and python_version < \"3.11\""}, {version = ">=1.21.2", markers = "platform_system != \"Darwin\" and python_version >= \"3.10\" and python_version < \"3.11\""}, - {version = ">=1.26.0", markers = "python_version >= \"3.12\""}, {version = ">=1.23.5", markers = "python_version >= \"3.11\" and python_version < \"3.12\""}, ] @@ -2986,9 +2932,9 @@ files = [ [package.dependencies] numpy = [ + {version = ">=1.23.2", markers = "python_version >= \"3.11\""}, {version = ">=1.20.3", markers = "python_version < \"3.10\""}, {version = ">=1.21.0", markers = "python_version >= \"3.10\" and python_version < \"3.11\""}, - {version = ">=1.23.2", markers = 
"python_version >= \"3.11\""}, ] python-dateutil = ">=2.8.2" pytz = ">=2020.1" @@ -3031,24 +2977,6 @@ files = [ [package.dependencies] types-pytz = ">=2022.1.1" -[[package]] -name = "pandas-stubs" -version = "2.0.3.230814" -description = "Type annotations for pandas" -optional = true -python-versions = ">=3.8" -files = [ - {file = "pandas_stubs-2.0.3.230814-py3-none-any.whl", hash = "sha256:4b3dfc027d49779176b7daa031a3405f7b839bcb6e312f4b9f29fea5feec5b4f"}, - {file = "pandas_stubs-2.0.3.230814.tar.gz", hash = "sha256:1d5cc09e36e3d9f9a1ed9dceae4e03eeb26d1b898dd769996925f784365c8769"}, -] - -[package.dependencies] -numpy = [ - {version = "<=1.24.3", markers = "python_full_version <= \"3.8.0\""}, - {version = ">=1.25.0", markers = "python_version >= \"3.9\""}, -] -types-pytz = ">=2022.1.1" - [[package]] name = "pandocfilters" version = "1.5.1" @@ -3421,20 +3349,6 @@ files = [ [package.extras] diagrams = ["jinja2", "railroad-diagrams"] -[[package]] -name = "pyparsing" -version = "3.1.4" -description = "pyparsing module - Classes and methods to define and execute parsing grammars" -optional = false -python-versions = ">=3.6.8" -files = [ - {file = "pyparsing-3.1.4-py3-none-any.whl", hash = "sha256:a6a7ee4235a3f944aa1fa2249307708f893fe5717dc603503c6c7969c070fb7c"}, - {file = "pyparsing-3.1.4.tar.gz", hash = "sha256:f86ec8d1a83f11977c9a6ea7598e8c27fc5cddfa5b07ea2241edbbde1d7bc032"}, -] - -[package.extras] -diagrams = ["jinja2", "railroad-diagrams"] - [[package]] name = "pyproject-api" version = "1.8.0" @@ -4458,13 +4372,13 @@ files = [ [[package]] name = "tox" -version = "4.21.2" +version = "4.22.0" description = "tox is a generic virtualenv management and test command line tool" optional = false python-versions = ">=3.8" files = [ - {file = "tox-4.21.2-py3-none-any.whl", hash = "sha256:13d996adcd792e7c82994b0e116d85efd84f0c6d185254d83d156f73f86b2038"}, - {file = "tox-4.21.2.tar.gz", hash = "sha256:49381ff102296753e378fa5ff30e42a35e695f149b4dbf8a2c49d15fdb5797b2"}, + 
{file = "tox-4.22.0-py3-none-any.whl", hash = "sha256:03734d9a9ac138cd1a898a372fb1b8079e2728618ae06dc37cbf3686cfb56eea"}, + {file = "tox-4.22.0.tar.gz", hash = "sha256:acc6c627cb3316585238d55d2b633e132fea1bdb01b9d93b56bce7caea6ae73d"}, ] [package.dependencies] @@ -4480,10 +4394,6 @@ tomli = {version = ">=2.0.1", markers = "python_version < \"3.11\""} typing-extensions = {version = ">=4.12.2", markers = "python_version < \"3.11\""} virtualenv = ">=20.26.6" -[package.extras] -docs = ["furo (>=2024.8.6)", "sphinx (>=8.0.2)", "sphinx-argparse-cli (>=1.18.2)", "sphinx-autodoc-typehints (>=2.4.4)", "sphinx-copybutton (>=0.5.2)", "sphinx-inline-tabs (>=2023.4.21)", "sphinxcontrib-towncrier (>=0.2.1a0)", "towncrier (>=24.8)"] -testing = ["build[virtualenv] (>=1.2.2)", "covdefaults (>=2.3)", "detect-test-pollution (>=1.2)", "devpi-process (>=1.0.2)", "diff-cover (>=9.2)", "distlib (>=0.3.8)", "flaky (>=3.8.1)", "hatch-vcs (>=0.4)", "hatchling (>=1.25)", "psutil (>=6)", "pytest (>=8.3.3)", "pytest-cov (>=5)", "pytest-mock (>=3.14)", "pytest-xdist (>=3.6.1)", "re-assert (>=1.1)", "setuptools (>=75.1)", "time-machine (>=2.15)", "wheel (>=0.44)"] - [[package]] name = "tqdm" version = "4.66.5" From 5b09368a0e8c18f9fbb45428a0a7acc7a0c9c708 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Wed, 16 Oct 2024 00:21:15 +0000 Subject: [PATCH 060/161] :arrow_up: Bump mkdocs-material from 9.5.40 to 9.5.41 Bumps [mkdocs-material](https://github.com/squidfunk/mkdocs-material) from 9.5.40 to 9.5.41. - [Release notes](https://github.com/squidfunk/mkdocs-material/releases) - [Changelog](https://github.com/squidfunk/mkdocs-material/blob/master/CHANGELOG) - [Commits](https://github.com/squidfunk/mkdocs-material/compare/9.5.40...9.5.41) --- updated-dependencies: - dependency-name: mkdocs-material dependency-type: direct:development update-type: version-update:semver-patch ... 
Signed-off-by: dependabot[bot] --- poetry.lock | 222 ++++++++++++++++------------------------------------ 1 file changed, 68 insertions(+), 154 deletions(-) diff --git a/poetry.lock b/poetry.lock index 1994447dd..b11f09989 100644 --- a/poetry.lock +++ b/poetry.lock @@ -602,6 +602,68 @@ traitlets = ">=4" [package.extras] test = ["pytest"] +[[package]] +name = "contourpy" +version = "1.1.0" +description = "Python library for calculating contours of 2D quadrilateral grids" +optional = false +python-versions = ">=3.8" +files = [ + {file = "contourpy-1.1.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:89f06eff3ce2f4b3eb24c1055a26981bffe4e7264acd86f15b97e40530b794bc"}, + {file = "contourpy-1.1.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:dffcc2ddec1782dd2f2ce1ef16f070861af4fb78c69862ce0aab801495dda6a3"}, + {file = "contourpy-1.1.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:25ae46595e22f93592d39a7eac3d638cda552c3e1160255258b695f7b58e5655"}, + {file = "contourpy-1.1.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:17cfaf5ec9862bc93af1ec1f302457371c34e688fbd381f4035a06cd47324f48"}, + {file = "contourpy-1.1.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:18a64814ae7bce73925131381603fff0116e2df25230dfc80d6d690aa6e20b37"}, + {file = "contourpy-1.1.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:90c81f22b4f572f8a2110b0b741bb64e5a6427e0a198b2cdc1fbaf85f352a3aa"}, + {file = "contourpy-1.1.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:53cc3a40635abedbec7f1bde60f8c189c49e84ac180c665f2cd7c162cc454baa"}, + {file = "contourpy-1.1.0-cp310-cp310-win32.whl", hash = "sha256:9b2dd2ca3ac561aceef4c7c13ba654aaa404cf885b187427760d7f7d4c57cff8"}, + {file = "contourpy-1.1.0-cp310-cp310-win_amd64.whl", hash = "sha256:1f795597073b09d631782e7245016a4323cf1cf0b4e06eef7ea6627e06a37ff2"}, + {file = "contourpy-1.1.0-cp311-cp311-macosx_10_9_x86_64.whl", hash 
= "sha256:0b7b04ed0961647691cfe5d82115dd072af7ce8846d31a5fac6c142dcce8b882"}, + {file = "contourpy-1.1.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:27bc79200c742f9746d7dd51a734ee326a292d77e7d94c8af6e08d1e6c15d545"}, + {file = "contourpy-1.1.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:052cc634bf903c604ef1a00a5aa093c54f81a2612faedaa43295809ffdde885e"}, + {file = "contourpy-1.1.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9382a1c0bc46230fb881c36229bfa23d8c303b889b788b939365578d762b5c18"}, + {file = "contourpy-1.1.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e5cec36c5090e75a9ac9dbd0ff4a8cf7cecd60f1b6dc23a374c7d980a1cd710e"}, + {file = "contourpy-1.1.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1f0cbd657e9bde94cd0e33aa7df94fb73c1ab7799378d3b3f902eb8eb2e04a3a"}, + {file = "contourpy-1.1.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:181cbace49874f4358e2929aaf7ba84006acb76694102e88dd15af861996c16e"}, + {file = "contourpy-1.1.0-cp311-cp311-win32.whl", hash = "sha256:edb989d31065b1acef3828a3688f88b2abb799a7db891c9e282df5ec7e46221b"}, + {file = "contourpy-1.1.0-cp311-cp311-win_amd64.whl", hash = "sha256:fb3b7d9e6243bfa1efb93ccfe64ec610d85cfe5aec2c25f97fbbd2e58b531256"}, + {file = "contourpy-1.1.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:bcb41692aa09aeb19c7c213411854402f29f6613845ad2453d30bf421fe68fed"}, + {file = "contourpy-1.1.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:5d123a5bc63cd34c27ff9c7ac1cd978909e9c71da12e05be0231c608048bb2ae"}, + {file = "contourpy-1.1.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:62013a2cf68abc80dadfd2307299bfa8f5aa0dcaec5b2954caeb5fa094171103"}, + {file = "contourpy-1.1.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:0b6616375d7de55797d7a66ee7d087efe27f03d336c27cf1f32c02b8c1a5ac70"}, + {file = 
"contourpy-1.1.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:317267d915490d1e84577924bd61ba71bf8681a30e0d6c545f577363157e5e94"}, + {file = "contourpy-1.1.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d551f3a442655f3dcc1285723f9acd646ca5858834efeab4598d706206b09c9f"}, + {file = "contourpy-1.1.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:e7a117ce7df5a938fe035cad481b0189049e8d92433b4b33aa7fc609344aafa1"}, + {file = "contourpy-1.1.0-cp38-cp38-win32.whl", hash = "sha256:108dfb5b3e731046a96c60bdc46a1a0ebee0760418951abecbe0fc07b5b93b27"}, + {file = "contourpy-1.1.0-cp38-cp38-win_amd64.whl", hash = "sha256:d4f26b25b4f86087e7d75e63212756c38546e70f2a92d2be44f80114826e1cd4"}, + {file = "contourpy-1.1.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:bc00bb4225d57bff7ebb634646c0ee2a1298402ec10a5fe7af79df9a51c1bfd9"}, + {file = "contourpy-1.1.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:189ceb1525eb0655ab8487a9a9c41f42a73ba52d6789754788d1883fb06b2d8a"}, + {file = "contourpy-1.1.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9f2931ed4741f98f74b410b16e5213f71dcccee67518970c42f64153ea9313b9"}, + {file = "contourpy-1.1.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:30f511c05fab7f12e0b1b7730ebdc2ec8deedcfb505bc27eb570ff47c51a8f15"}, + {file = "contourpy-1.1.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:143dde50520a9f90e4a2703f367cf8ec96a73042b72e68fcd184e1279962eb6f"}, + {file = "contourpy-1.1.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e94bef2580e25b5fdb183bf98a2faa2adc5b638736b2c0a4da98691da641316a"}, + {file = "contourpy-1.1.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:ed614aea8462735e7d70141374bd7650afd1c3f3cb0c2dbbcbe44e14331bf002"}, + {file = "contourpy-1.1.0-cp39-cp39-win32.whl", hash = "sha256:71551f9520f008b2950bef5f16b0e3587506ef4f23c734b71ffb7b89f8721999"}, + {file = 
"contourpy-1.1.0-cp39-cp39-win_amd64.whl", hash = "sha256:438ba416d02f82b692e371858143970ed2eb6337d9cdbbede0d8ad9f3d7dd17d"}, + {file = "contourpy-1.1.0-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:a698c6a7a432789e587168573a864a7ea374c6be8d4f31f9d87c001d5a843493"}, + {file = "contourpy-1.1.0-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:397b0ac8a12880412da3551a8cb5a187d3298a72802b45a3bd1805e204ad8439"}, + {file = "contourpy-1.1.0-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:a67259c2b493b00e5a4d0f7bfae51fb4b3371395e47d079a4446e9b0f4d70e76"}, + {file = "contourpy-1.1.0-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:2b836d22bd2c7bb2700348e4521b25e077255ebb6ab68e351ab5aa91ca27e027"}, + {file = "contourpy-1.1.0-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:084eaa568400cfaf7179b847ac871582199b1b44d5699198e9602ecbbb5f6104"}, + {file = "contourpy-1.1.0-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:911ff4fd53e26b019f898f32db0d4956c9d227d51338fb3b03ec72ff0084ee5f"}, + {file = "contourpy-1.1.0.tar.gz", hash = "sha256:e53046c3863828d21d531cc3b53786e6580eb1ba02477e8681009b6aa0870b21"}, +] + +[package.dependencies] +numpy = ">=1.16" + +[package.extras] +bokeh = ["bokeh", "selenium"] +docs = ["furo", "sphinx-copybutton"] +mypy = ["contourpy[bokeh,docs]", "docutils-stubs", "mypy (==1.2.0)", "types-Pillow"] +test = ["Pillow", "contourpy[test-no-images]", "matplotlib"] +test-no-images = ["pytest", "pytest-cov", "wurlitzer"] + [[package]] name = "contourpy" version = "1.1.1" @@ -664,10 +726,7 @@ files = [ ] [package.dependencies] -numpy = [ - {version = ">=1.16,<2.0", markers = "python_version <= \"3.11\""}, - {version = ">=1.26.0rc1,<2.0", markers = "python_version >= \"3.12\""}, -] +numpy = {version = ">=1.16,<2.0", markers = "python_version <= \"3.11\""} [package.extras] bokeh = ["bokeh", "selenium"] @@ -2139,74 +2198,6 @@ pillow = ">=6.2.0" pyparsing = ">=2.3.1,<3.1" python-dateutil = 
">=2.7" -[[package]] -name = "matplotlib" -version = "3.7.5" -description = "Python plotting package" -optional = false -python-versions = ">=3.8" -files = [ - {file = "matplotlib-3.7.5-cp310-cp310-macosx_10_12_universal2.whl", hash = "sha256:4a87b69cb1cb20943010f63feb0b2901c17a3b435f75349fd9865713bfa63925"}, - {file = "matplotlib-3.7.5-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:d3ce45010fefb028359accebb852ca0c21bd77ec0f281952831d235228f15810"}, - {file = "matplotlib-3.7.5-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:fbea1e762b28400393d71be1a02144aa16692a3c4c676ba0178ce83fc2928fdd"}, - {file = "matplotlib-3.7.5-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ec0e1adc0ad70ba8227e957551e25a9d2995e319c29f94a97575bb90fa1d4469"}, - {file = "matplotlib-3.7.5-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6738c89a635ced486c8a20e20111d33f6398a9cbebce1ced59c211e12cd61455"}, - {file = "matplotlib-3.7.5-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1210b7919b4ed94b5573870f316bca26de3e3b07ffdb563e79327dc0e6bba515"}, - {file = "matplotlib-3.7.5-cp310-cp310-win32.whl", hash = "sha256:068ebcc59c072781d9dcdb82f0d3f1458271c2de7ca9c78f5bd672141091e9e1"}, - {file = "matplotlib-3.7.5-cp310-cp310-win_amd64.whl", hash = "sha256:f098ffbaab9df1e3ef04e5a5586a1e6b1791380698e84938d8640961c79b1fc0"}, - {file = "matplotlib-3.7.5-cp311-cp311-macosx_10_12_universal2.whl", hash = "sha256:f65342c147572673f02a4abec2d5a23ad9c3898167df9b47c149f32ce61ca078"}, - {file = "matplotlib-3.7.5-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:4ddf7fc0e0dc553891a117aa083039088d8a07686d4c93fb8a810adca68810af"}, - {file = "matplotlib-3.7.5-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:0ccb830fc29442360d91be48527809f23a5dcaee8da5f4d9b2d5b867c1b087b8"}, - {file = "matplotlib-3.7.5-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:efc6bb28178e844d1f408dd4d6341ee8a2e906fc9e0fa3dae497da4e0cab775d"}, - {file = "matplotlib-3.7.5-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3b15c4c2d374f249f324f46e883340d494c01768dd5287f8bc00b65b625ab56c"}, - {file = "matplotlib-3.7.5-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3d028555421912307845e59e3de328260b26d055c5dac9b182cc9783854e98fb"}, - {file = "matplotlib-3.7.5-cp311-cp311-win32.whl", hash = "sha256:fe184b4625b4052fa88ef350b815559dd90cc6cc8e97b62f966e1ca84074aafa"}, - {file = "matplotlib-3.7.5-cp311-cp311-win_amd64.whl", hash = "sha256:084f1f0f2f1010868c6f1f50b4e1c6f2fb201c58475494f1e5b66fed66093647"}, - {file = "matplotlib-3.7.5-cp312-cp312-macosx_10_12_universal2.whl", hash = "sha256:34bceb9d8ddb142055ff27cd7135f539f2f01be2ce0bafbace4117abe58f8fe4"}, - {file = "matplotlib-3.7.5-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:c5a2134162273eb8cdfd320ae907bf84d171de948e62180fa372a3ca7cf0f433"}, - {file = "matplotlib-3.7.5-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:039ad54683a814002ff37bf7981aa1faa40b91f4ff84149beb53d1eb64617980"}, - {file = "matplotlib-3.7.5-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4d742ccd1b09e863b4ca58291728db645b51dab343eebb08d5d4b31b308296ce"}, - {file = "matplotlib-3.7.5-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:743b1c488ca6a2bc7f56079d282e44d236bf375968bfd1b7ba701fd4d0fa32d6"}, - {file = "matplotlib-3.7.5-cp312-cp312-win_amd64.whl", hash = "sha256:fbf730fca3e1f23713bc1fae0a57db386e39dc81ea57dc305c67f628c1d7a342"}, - {file = "matplotlib-3.7.5-cp38-cp38-macosx_10_12_universal2.whl", hash = "sha256:cfff9b838531698ee40e40ea1a8a9dc2c01edb400b27d38de6ba44c1f9a8e3d2"}, - {file = "matplotlib-3.7.5-cp38-cp38-macosx_10_12_x86_64.whl", hash = "sha256:1dbcca4508bca7847fe2d64a05b237a3dcaec1f959aedb756d5b1c67b770c5ee"}, - {file = "matplotlib-3.7.5-cp38-cp38-macosx_11_0_arm64.whl", hash = 
"sha256:4cdf4ef46c2a1609a50411b66940b31778db1e4b73d4ecc2eaa40bd588979b13"}, - {file = "matplotlib-3.7.5-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:167200ccfefd1674b60e957186dfd9baf58b324562ad1a28e5d0a6b3bea77905"}, - {file = "matplotlib-3.7.5-cp38-cp38-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:53e64522934df6e1818b25fd48cf3b645b11740d78e6ef765fbb5fa5ce080d02"}, - {file = "matplotlib-3.7.5-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d3e3bc79b2d7d615067bd010caff9243ead1fc95cf735c16e4b2583173f717eb"}, - {file = "matplotlib-3.7.5-cp38-cp38-win32.whl", hash = "sha256:6b641b48c6819726ed47c55835cdd330e53747d4efff574109fd79b2d8a13748"}, - {file = "matplotlib-3.7.5-cp38-cp38-win_amd64.whl", hash = "sha256:f0b60993ed3488b4532ec6b697059897891927cbfc2b8d458a891b60ec03d9d7"}, - {file = "matplotlib-3.7.5-cp39-cp39-macosx_10_12_universal2.whl", hash = "sha256:090964d0afaff9c90e4d8de7836757e72ecfb252fb02884016d809239f715651"}, - {file = "matplotlib-3.7.5-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:9fc6fcfbc55cd719bc0bfa60bde248eb68cf43876d4c22864603bdd23962ba25"}, - {file = "matplotlib-3.7.5-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:5e7cc3078b019bb863752b8b60e8b269423000f1603cb2299608231996bd9d54"}, - {file = "matplotlib-3.7.5-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1e4e9a868e8163abaaa8259842d85f949a919e1ead17644fb77a60427c90473c"}, - {file = "matplotlib-3.7.5-cp39-cp39-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:fa7ebc995a7d747dacf0a717d0eb3aa0f0c6a0e9ea88b0194d3a3cd241a1500f"}, - {file = "matplotlib-3.7.5-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3785bfd83b05fc0e0c2ae4c4a90034fe693ef96c679634756c50fe6efcc09856"}, - {file = "matplotlib-3.7.5-cp39-cp39-win32.whl", hash = "sha256:29b058738c104d0ca8806395f1c9089dfe4d4f0f78ea765c6c704469f3fffc81"}, - {file = "matplotlib-3.7.5-cp39-cp39-win_amd64.whl", hash = 
"sha256:fd4028d570fa4b31b7b165d4a685942ae9cdc669f33741e388c01857d9723eab"}, - {file = "matplotlib-3.7.5-pp38-pypy38_pp73-macosx_10_12_x86_64.whl", hash = "sha256:2a9a3f4d6a7f88a62a6a18c7e6a84aedcaf4faf0708b4ca46d87b19f1b526f88"}, - {file = "matplotlib-3.7.5-pp38-pypy38_pp73-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b9b3fd853d4a7f008a938df909b96db0b454225f935d3917520305b90680579c"}, - {file = "matplotlib-3.7.5-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f0ad550da9f160737d7890217c5eeed4337d07e83ca1b2ca6535078f354e7675"}, - {file = "matplotlib-3.7.5-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:20da7924a08306a861b3f2d1da0d1aa9a6678e480cf8eacffe18b565af2813e7"}, - {file = "matplotlib-3.7.5-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:b45c9798ea6bb920cb77eb7306409756a7fab9db9b463e462618e0559aecb30e"}, - {file = "matplotlib-3.7.5-pp39-pypy39_pp73-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a99866267da1e561c7776fe12bf4442174b79aac1a47bd7e627c7e4d077ebd83"}, - {file = "matplotlib-3.7.5-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2b6aa62adb6c268fc87d80f963aca39c64615c31830b02697743c95590ce3fbb"}, - {file = "matplotlib-3.7.5-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:e530ab6a0afd082d2e9c17eb1eb064a63c5b09bb607b2b74fa41adbe3e162286"}, - {file = "matplotlib-3.7.5.tar.gz", hash = "sha256:1e5c971558ebc811aa07f54c7b7c677d78aa518ef4c390e14673a09e0860184a"}, -] - -[package.dependencies] -contourpy = ">=1.0.1" -cycler = ">=0.10" -fonttools = ">=4.22.0" -importlib-resources = {version = ">=3.2.0", markers = "python_version < \"3.10\""} -kiwisolver = ">=1.0.1" -numpy = ">=1.20,<2" -packaging = ">=20.0" -pillow = ">=6.2.0" -pyparsing = ">=2.3.1" -python-dateutil = ">=2.7" - [[package]] name = "matplotlib-inline" version = "0.1.7" @@ -2421,13 +2412,13 @@ pygments = ">2.12.0" [[package]] name = "mkdocs-material" -version = "9.5.40" +version = "9.5.41" 
description = "Documentation that simply works" optional = false python-versions = ">=3.8" files = [ - {file = "mkdocs_material-9.5.40-py3-none-any.whl", hash = "sha256:8e7a16ada34e79a7b6459ff2602584222f522c738b6a023d1bea853d5049da6f"}, - {file = "mkdocs_material-9.5.40.tar.gz", hash = "sha256:b69d70e667ec51fc41f65e006a3184dd00d95b2439d982cb1586e4c018943156"}, + {file = "mkdocs_material-9.5.41-py3-none-any.whl", hash = "sha256:990bc138c33342b5b73e7545915ebc0136e501bfbd8e365735144f5120891d83"}, + {file = "mkdocs_material-9.5.41.tar.gz", hash = "sha256:30fa5d459b4b8130848ecd8e1c908878345d9d8268f7ddbc31eebe88d462d97b"}, ] [package.dependencies] @@ -2779,51 +2770,6 @@ files = [ {file = "numpy-1.23.2.tar.gz", hash = "sha256:b78d00e48261fbbd04aa0d7427cf78d18401ee0abd89c7559bbf422e5b1c7d01"}, ] -[[package]] -name = "numpy" -version = "1.26.4" -description = "Fundamental package for array computing in Python" -optional = false -python-versions = ">=3.9" -files = [ - {file = "numpy-1.26.4-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:9ff0f4f29c51e2803569d7a51c2304de5554655a60c5d776e35b4a41413830d0"}, - {file = "numpy-1.26.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:2e4ee3380d6de9c9ec04745830fd9e2eccb3e6cf790d39d7b98ffd19b0dd754a"}, - {file = "numpy-1.26.4-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d209d8969599b27ad20994c8e41936ee0964e6da07478d6c35016bc386b66ad4"}, - {file = "numpy-1.26.4-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ffa75af20b44f8dba823498024771d5ac50620e6915abac414251bd971b4529f"}, - {file = "numpy-1.26.4-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:62b8e4b1e28009ef2846b4c7852046736bab361f7aeadeb6a5b89ebec3c7055a"}, - {file = "numpy-1.26.4-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:a4abb4f9001ad2858e7ac189089c42178fcce737e4169dc61321660f1a96c7d2"}, - {file = "numpy-1.26.4-cp310-cp310-win32.whl", hash = 
"sha256:bfe25acf8b437eb2a8b2d49d443800a5f18508cd811fea3181723922a8a82b07"}, - {file = "numpy-1.26.4-cp310-cp310-win_amd64.whl", hash = "sha256:b97fe8060236edf3662adfc2c633f56a08ae30560c56310562cb4f95500022d5"}, - {file = "numpy-1.26.4-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:4c66707fabe114439db9068ee468c26bbdf909cac0fb58686a42a24de1760c71"}, - {file = "numpy-1.26.4-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:edd8b5fe47dab091176d21bb6de568acdd906d1887a4584a15a9a96a1dca06ef"}, - {file = "numpy-1.26.4-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7ab55401287bfec946ced39700c053796e7cc0e3acbef09993a9ad2adba6ca6e"}, - {file = "numpy-1.26.4-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:666dbfb6ec68962c033a450943ded891bed2d54e6755e35e5835d63f4f6931d5"}, - {file = "numpy-1.26.4-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:96ff0b2ad353d8f990b63294c8986f1ec3cb19d749234014f4e7eb0112ceba5a"}, - {file = "numpy-1.26.4-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:60dedbb91afcbfdc9bc0b1f3f402804070deed7392c23eb7a7f07fa857868e8a"}, - {file = "numpy-1.26.4-cp311-cp311-win32.whl", hash = "sha256:1af303d6b2210eb850fcf03064d364652b7120803a0b872f5211f5234b399f20"}, - {file = "numpy-1.26.4-cp311-cp311-win_amd64.whl", hash = "sha256:cd25bcecc4974d09257ffcd1f098ee778f7834c3ad767fe5db785be9a4aa9cb2"}, - {file = "numpy-1.26.4-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:b3ce300f3644fb06443ee2222c2201dd3a89ea6040541412b8fa189341847218"}, - {file = "numpy-1.26.4-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:03a8c78d01d9781b28a6989f6fa1bb2c4f2d51201cf99d3dd875df6fbd96b23b"}, - {file = "numpy-1.26.4-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9fad7dcb1aac3c7f0584a5a8133e3a43eeb2fe127f47e3632d43d677c66c102b"}, - {file = "numpy-1.26.4-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:675d61ffbfa78604709862923189bad94014bef562cc35cf61d3a07bba02a7ed"}, - {file = "numpy-1.26.4-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:ab47dbe5cc8210f55aa58e4805fe224dac469cde56b9f731a4c098b91917159a"}, - {file = "numpy-1.26.4-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:1dda2e7b4ec9dd512f84935c5f126c8bd8b9f2fc001e9f54af255e8c5f16b0e0"}, - {file = "numpy-1.26.4-cp312-cp312-win32.whl", hash = "sha256:50193e430acfc1346175fcbdaa28ffec49947a06918b7b92130744e81e640110"}, - {file = "numpy-1.26.4-cp312-cp312-win_amd64.whl", hash = "sha256:08beddf13648eb95f8d867350f6a018a4be2e5ad54c8d8caed89ebca558b2818"}, - {file = "numpy-1.26.4-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:7349ab0fa0c429c82442a27a9673fc802ffdb7c7775fad780226cb234965e53c"}, - {file = "numpy-1.26.4-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:52b8b60467cd7dd1e9ed082188b4e6bb35aa5cdd01777621a1658910745b90be"}, - {file = "numpy-1.26.4-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d5241e0a80d808d70546c697135da2c613f30e28251ff8307eb72ba696945764"}, - {file = "numpy-1.26.4-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f870204a840a60da0b12273ef34f7051e98c3b5961b61b0c2c1be6dfd64fbcd3"}, - {file = "numpy-1.26.4-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:679b0076f67ecc0138fd2ede3a8fd196dddc2ad3254069bcb9faf9a79b1cebcd"}, - {file = "numpy-1.26.4-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:47711010ad8555514b434df65f7d7b076bb8261df1ca9bb78f53d3b2db02e95c"}, - {file = "numpy-1.26.4-cp39-cp39-win32.whl", hash = "sha256:a354325ee03388678242a4d7ebcd08b5c727033fcff3b2f536aea978e15ee9e6"}, - {file = "numpy-1.26.4-cp39-cp39-win_amd64.whl", hash = "sha256:3373d5d70a5fe74a2c1bb6d2cfd9609ecf686d47a2d7b1d37a8f3b6bf6003aea"}, - {file = "numpy-1.26.4-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:afedb719a9dcfc7eaf2287b839d8198e06dcd4cb5d276a3df279231138e83d30"}, - {file = 
"numpy-1.26.4-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:95a7476c59002f2f6c590b9b7b998306fba6a5aa646b1e22ddfeaf8f78c3a29c"}, - {file = "numpy-1.26.4-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:7e50d0a0cc3189f9cb0aeb3a6a6af18c16f59f004b866cd2be1c14b36134a4a0"}, - {file = "numpy-1.26.4.tar.gz", hash = "sha256:2a02aba9ed12e4ac4eb3ea9421c420301a0c6460d9830d74a9df87efa4912010"}, -] - [[package]] name = "numpy" version = "2.1.2" @@ -2904,12 +2850,12 @@ files = [ [package.dependencies] numpy = [ + {version = ">=1.26.0", markers = "python_version >= \"3.12\""}, {version = ">=1.21.0", markers = "python_version <= \"3.9\" and platform_system == \"Darwin\" and platform_machine == \"arm64\" and python_version >= \"3.8\""}, {version = ">=1.19.3", markers = "platform_system == \"Linux\" and platform_machine == \"aarch64\" and python_version >= \"3.8\" and python_version < \"3.10\" or python_version > \"3.9\" and python_version < \"3.10\" or python_version >= \"3.9\" and platform_system != \"Darwin\" and python_version < \"3.10\" or python_version >= \"3.9\" and platform_machine != \"arm64\" and python_version < \"3.10\""}, {version = ">=1.17.3", markers = "(platform_system != \"Darwin\" and platform_system != \"Linux\") and python_version >= \"3.8\" and python_version < \"3.9\" or platform_system != \"Darwin\" and python_version >= \"3.8\" and python_version < \"3.9\" and platform_machine != \"aarch64\" or platform_machine != \"arm64\" and python_version >= \"3.8\" and python_version < \"3.9\" and platform_system != \"Linux\" or (platform_machine != \"arm64\" and platform_machine != \"aarch64\") and python_version >= \"3.8\" and python_version < \"3.9\""}, {version = ">=1.21.4", markers = "python_version >= \"3.10\" and platform_system == \"Darwin\" and python_version < \"3.11\""}, {version = ">=1.21.2", markers = "platform_system != \"Darwin\" and python_version >= \"3.10\" and python_version < \"3.11\""}, - {version = ">=1.26.0", 
markers = "python_version >= \"3.12\""}, {version = ">=1.23.5", markers = "python_version >= \"3.11\" and python_version < \"3.12\""}, ] @@ -2986,9 +2932,9 @@ files = [ [package.dependencies] numpy = [ + {version = ">=1.23.2", markers = "python_version >= \"3.11\""}, {version = ">=1.20.3", markers = "python_version < \"3.10\""}, {version = ">=1.21.0", markers = "python_version >= \"3.10\" and python_version < \"3.11\""}, - {version = ">=1.23.2", markers = "python_version >= \"3.11\""}, ] python-dateutil = ">=2.8.2" pytz = ">=2020.1" @@ -3031,24 +2977,6 @@ files = [ [package.dependencies] types-pytz = ">=2022.1.1" -[[package]] -name = "pandas-stubs" -version = "2.0.3.230814" -description = "Type annotations for pandas" -optional = true -python-versions = ">=3.8" -files = [ - {file = "pandas_stubs-2.0.3.230814-py3-none-any.whl", hash = "sha256:4b3dfc027d49779176b7daa031a3405f7b839bcb6e312f4b9f29fea5feec5b4f"}, - {file = "pandas_stubs-2.0.3.230814.tar.gz", hash = "sha256:1d5cc09e36e3d9f9a1ed9dceae4e03eeb26d1b898dd769996925f784365c8769"}, -] - -[package.dependencies] -numpy = [ - {version = "<=1.24.3", markers = "python_full_version <= \"3.8.0\""}, - {version = ">=1.25.0", markers = "python_version >= \"3.9\""}, -] -types-pytz = ">=2022.1.1" - [[package]] name = "pandocfilters" version = "1.5.1" @@ -3421,20 +3349,6 @@ files = [ [package.extras] diagrams = ["jinja2", "railroad-diagrams"] -[[package]] -name = "pyparsing" -version = "3.1.4" -description = "pyparsing module - Classes and methods to define and execute parsing grammars" -optional = false -python-versions = ">=3.6.8" -files = [ - {file = "pyparsing-3.1.4-py3-none-any.whl", hash = "sha256:a6a7ee4235a3f944aa1fa2249307708f893fe5717dc603503c6c7969c070fb7c"}, - {file = "pyparsing-3.1.4.tar.gz", hash = "sha256:f86ec8d1a83f11977c9a6ea7598e8c27fc5cddfa5b07ea2241edbbde1d7bc032"}, -] - -[package.extras] -diagrams = ["jinja2", "railroad-diagrams"] - [[package]] name = "pyproject-api" version = "1.8.0" From 
4ce033c4d7cada60b2493d56eb4dada82bdaf39f Mon Sep 17 00:00:00 2001 From: Soham Date: Wed, 16 Oct 2024 11:51:13 +0530 Subject: [PATCH 061/161] fix: made required changes --- supervision/detection/core.py | 17 ++++++++--------- supervision/detection/utils.py | 28 +++++++++++++++++++++++++--- 2 files changed, 33 insertions(+), 12 deletions(-) diff --git a/supervision/detection/core.py b/supervision/detection/core.py index 6ef52a03f..8e2d06e2d 100644 --- a/supervision/detection/core.py +++ b/supervision/detection/core.py @@ -32,6 +32,7 @@ extract_ultralytics_masks, get_data_item, is_data_equal, + is_metadata_equal, mask_to_xyxy, merge_data, merge_metadata, @@ -190,7 +191,7 @@ def __eq__(self, other: Detections): np.array_equal(self.confidence, other.confidence), np.array_equal(self.tracker_id, other.tracker_id), is_data_equal(self.data, other.data), - self.metadata == other.metadata, + is_metadata_equal(self.metadata, other.metadata), ] ) @@ -979,6 +980,9 @@ def empty(cls, metadata: Optional[Dict[str, Any]] = None) -> Detections: empty_detections = Detections.empty() ``` """ + if metadata is not None and not isinstance(metadata, dict): + raise TypeError("Metadata must be a dictionary.") + return cls( xyxy=np.empty((0, 4), dtype=np.float32), confidence=np.array([], dtype=np.float32), @@ -990,14 +994,9 @@ def is_empty(self) -> bool: """ Returns `True` if the `Detections` object is considered empty. 
""" - return ( - len(self.xyxy) == 0 - and (self.mask is None or len(self.mask) == 0) - and (self.class_id is None or len(self.class_id) == 0) - and (self.confidence is None or len(self.confidence) == 0) - and (self.tracker_id is None or len(self.tracker_id) == 0) - and not self.data - ) + empty_detections = Detections.empty() + empty_detections.data = self.data + return self == empty_detections @classmethod def merge(cls, detections_list: List[Detections]) -> Detections: diff --git a/supervision/detection/utils.py b/supervision/detection/utils.py index 71e67c49c..421c4a6d2 100644 --- a/supervision/detection/utils.py +++ b/supervision/detection/utils.py @@ -808,6 +808,21 @@ def is_data_equal(data_a: Dict[str, np.ndarray], data_b: Dict[str, np.ndarray]) ) +def is_metadata_equal(metadata_a: Dict[str, Any], metadata_b: Dict[str, Any]) -> bool: + """ + Compares the metadata payloads of two Detections instances. + + Args: + metadata_a, metadata_b: The metadata payloads of the instances. + + Returns: + True if the metadata payloads are equal, False otherwise. + """ + return set(metadata_a.keys()) == set(metadata_b.keys()) and all( + np.array_equal(metadata_a[key], metadata_b[key]) for key in metadata_a + ) + + def merge_data( data_list: List[Dict[str, Union[npt.NDArray[np.generic], List]]], ) -> Dict[str, Union[npt.NDArray[np.generic], List]]: @@ -880,15 +895,22 @@ def merge_metadata(metadata_list: List[Dict[str, Any]]) -> Dict[str, Any]: Dict[str, Any]: A single merged metadata dictionary. Raises: - ValueError: If there are conflicting values for the same key. + ValueError: If there are conflicting values for the same key or if + dictionaries have different keys. 
""" - merged_metadata = {} + if not metadata_list: + return {} + all_keys_sets = [set(metadata.keys()) for metadata in metadata_list] + if not all(keys_set == all_keys_sets[0] for keys_set in all_keys_sets): + raise ValueError("All metadata dictionaries must have the same keys to merge.") + + merged_metadata = {} for metadata in metadata_list: for key, value in metadata.items(): if key in merged_metadata: if merged_metadata[key] != value: - raise ValueError(f"Conflicting metadata for key: {key}.") + raise ValueError(f"Conflicting metadata for key: '{key}'.") else: merged_metadata[key] = value From c6e701be1c702b22d57ddfd103de80e9085b1471 Mon Sep 17 00:00:00 2001 From: Soham Date: Wed, 16 Oct 2024 19:30:28 +0530 Subject: [PATCH 062/161] fix: added check if not array --- supervision/detection/utils.py | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/supervision/detection/utils.py b/supervision/detection/utils.py index 421c4a6d2..d80a4dcb0 100644 --- a/supervision/detection/utils.py +++ b/supervision/detection/utils.py @@ -819,7 +819,11 @@ def is_metadata_equal(metadata_a: Dict[str, Any], metadata_b: Dict[str, Any]) -> True if the metadata payloads are equal, False otherwise. """ return set(metadata_a.keys()) == set(metadata_b.keys()) and all( - np.array_equal(metadata_a[key], metadata_b[key]) for key in metadata_a + np.array_equal(metadata_a[key], metadata_b[key]) + if isinstance(metadata_a[key], np.ndarray) + and isinstance(metadata_b[key], np.ndarray) + else metadata_a[key] == metadata_b[key] + for key in metadata_a ) From d50aa67ac003f62b2af9826b41bf98d7e15ff8c4 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Thu, 17 Oct 2024 00:38:46 +0000 Subject: [PATCH 063/161] :arrow_up: Bump types-requests from 2.32.0.20240914 to 2.32.0.20241016 Bumps [types-requests](https://github.com/python/typeshed) from 2.32.0.20240914 to 2.32.0.20241016. 
- [Commits](https://github.com/python/typeshed/commits) --- updated-dependencies: - dependency-name: types-requests dependency-type: direct:development update-type: version-update:semver-patch ... Signed-off-by: dependabot[bot] --- poetry.lock | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/poetry.lock b/poetry.lock index 8e0cf21ca..797b71397 100644 --- a/poetry.lock +++ b/poetry.lock @@ -4505,13 +4505,13 @@ files = [ [[package]] name = "types-requests" -version = "2.32.0.20240914" +version = "2.32.0.20241016" description = "Typing stubs for requests" optional = false python-versions = ">=3.8" files = [ - {file = "types-requests-2.32.0.20240914.tar.gz", hash = "sha256:2850e178db3919d9bf809e434eef65ba49d0e7e33ac92d588f4a5e295fffd405"}, - {file = "types_requests-2.32.0.20240914-py3-none-any.whl", hash = "sha256:59c2f673eb55f32a99b2894faf6020e1a9f4a402ad0f192bfee0b64469054310"}, + {file = "types-requests-2.32.0.20241016.tar.gz", hash = "sha256:0d9cad2f27515d0e3e3da7134a1b6f28fb97129d86b867f24d9c726452634d95"}, + {file = "types_requests-2.32.0.20241016-py3-none-any.whl", hash = "sha256:4195d62d6d3e043a4eaaf08ff8a62184584d2e8684e9d2aa178c7915a7da3747"}, ] [package.dependencies] From 5ac3999aeb3947f24e33cb47fc30fdf3e58527ab Mon Sep 17 00:00:00 2001 From: Andrey Blazejuk Date: Thu, 17 Oct 2024 13:58:37 -0300 Subject: [PATCH 064/161] Validate that polygon has at least 3 vertices in geometry utils.py --- supervision/geometry/utils.py | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/supervision/geometry/utils.py b/supervision/geometry/utils.py index 8a0ca35c5..580dc0c5a 100644 --- a/supervision/geometry/utils.py +++ b/supervision/geometry/utils.py @@ -15,6 +15,9 @@ def get_polygon_center(polygon: np.ndarray) -> Point: Returns: Point: The center of the polygon, represented as a Point object with x and y attributes. + + Raises: + ValueError: If the polygon has less than 3 vertices. 
Examples: ```python @@ -30,6 +33,9 @@ def get_polygon_center(polygon: np.ndarray) -> Point: # This is one of the 3 candidate algorithms considered for centroid calculation. # For a more detailed discussion, see PR #1084 and commit eb33176 + if len(polygon) < 3: + raise ValueError("Polygon must have at least 3 vertices.") + shift_polygon = np.roll(polygon, -1, axis=0) signed_areas = np.cross(polygon, shift_polygon) / 2 if signed_areas.sum() == 0: From f8ff33287255d187a6403833994a1791a95ddaec Mon Sep 17 00:00:00 2001 From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Thu, 17 Oct 2024 17:01:02 +0000 Subject: [PATCH 065/161] =?UTF-8?q?fix(pre=5Fcommit):=20=F0=9F=8E=A8=20aut?= =?UTF-8?q?o=20format=20pre-commit=20hooks?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- supervision/geometry/utils.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/supervision/geometry/utils.py b/supervision/geometry/utils.py index 580dc0c5a..cdceb36dc 100644 --- a/supervision/geometry/utils.py +++ b/supervision/geometry/utils.py @@ -15,7 +15,7 @@ def get_polygon_center(polygon: np.ndarray) -> Point: Returns: Point: The center of the polygon, represented as a Point object with x and y attributes. - + Raises: ValueError: If the polygon has less than 3 vertices. From 1b95d1dea2dc4c5c48582087d6b8dc83f802094a Mon Sep 17 00:00:00 2001 From: LinasKo Date: Thu, 17 Oct 2024 23:11:56 +0300 Subject: [PATCH 066/161] Change get_polygon_center validation to fail on 0 points --- supervision/geometry/utils.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/supervision/geometry/utils.py b/supervision/geometry/utils.py index cdceb36dc..2247adc50 100644 --- a/supervision/geometry/utils.py +++ b/supervision/geometry/utils.py @@ -17,7 +17,7 @@ def get_polygon_center(polygon: np.ndarray) -> Point: Point object with x and y attributes. 
Raises: - ValueError: If the polygon has less than 3 vertices. + ValueError: If the polygon has no vertices. Examples: ```python @@ -33,8 +33,8 @@ def get_polygon_center(polygon: np.ndarray) -> Point: # This is one of the 3 candidate algorithms considered for centroid calculation. # For a more detailed discussion, see PR #1084 and commit eb33176 - if len(polygon) < 3: - raise ValueError("Polygon must have at least 3 vertices.") + if len(polygon) == 0: + raise ValueError("Polygon must have at least one vertex.") shift_polygon = np.roll(polygon, -1, axis=0) signed_areas = np.cross(polygon, shift_polygon) / 2 From e5215b967c74325eee54d9d0d0d1a09df0ee797f Mon Sep 17 00:00:00 2001 From: LinasKo Date: Thu, 17 Oct 2024 23:52:05 +0300 Subject: [PATCH 067/161] typos, minor type updates --- supervision/tracker/byte_tracker/basetrack.py | 2 +- supervision/tracker/byte_tracker/matching.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/supervision/tracker/byte_tracker/basetrack.py b/supervision/tracker/byte_tracker/basetrack.py index b78bc5961..e2bc66d57 100644 --- a/supervision/tracker/byte_tracker/basetrack.py +++ b/supervision/tracker/byte_tracker/basetrack.py @@ -38,7 +38,7 @@ def reset_counter(self): self.frame_id = 0 self.time_since_update = 0 - def activate(self, *args): + def activate(self, *args, **kwargs): raise NotImplementedError def predict(self): diff --git a/supervision/tracker/byte_tracker/matching.py b/supervision/tracker/byte_tracker/matching.py index 24abe224c..f791e518f 100644 --- a/supervision/tracker/byte_tracker/matching.py +++ b/supervision/tracker/byte_tracker/matching.py @@ -20,7 +20,7 @@ def indices_to_matches( def linear_assignment( cost_matrix: np.ndarray, thresh: float -) -> [np.ndarray, Tuple[int], Tuple[int, int]]: +) -> Tuple[np.ndarray, Tuple[int], Tuple[int, int]]: if cost_matrix.size == 0: return ( np.empty((0, 2), dtype=int), From 4a7d9ceaa94d8f8f2f9094dd9639532faffe953e Mon Sep 17 00:00:00 2001 From: LinasKo Date: Fri, 
18 Oct 2024 01:01:43 +0300 Subject: [PATCH 068/161] Tracker fix: add & rework mistakenly removed ID system * External IDs were counted as the numbers grow very large very fast * Reworked ID counting as a class, as state is necessary, and only STracks know if an external ID is required after the update (see conditions everywhere before self.external_id_counter.new_id() is called). --- supervision/tracker/byte_tracker/core.py | 115 ++++++++++++++--------- 1 file changed, 70 insertions(+), 45 deletions(-) diff --git a/supervision/tracker/byte_tracker/core.py b/supervision/tracker/byte_tracker/core.py index 013af2ee0..d1d567fab 100644 --- a/supervision/tracker/byte_tracker/core.py +++ b/supervision/tracker/byte_tracker/core.py @@ -1,4 +1,4 @@ -from typing import List, Optional, Tuple +from typing import List, Tuple import numpy as np @@ -9,10 +9,34 @@ from supervision.tracker.byte_tracker.kalman_filter import KalmanFilter +class IdCounter: + def __init__(self): + self.reset() + + def reset(self) -> None: + self._id = self.NO_ID + + def new_id(self) -> int: + self._id += 1 + return self._id + + @property + def NO_ID(self) -> int: + return 0 + + class STrack(BaseTrack): shared_kalman = KalmanFilter() - def __init__(self, tlwh, score, class_ids, minimum_consecutive_frames): + def __init__( + self, + tlwh, + score, + class_ids, + minimum_consecutive_frames, + internal_id_counter: IdCounter, + external_id_counter: IdCounter, + ): super().__init__() # wait activate self._tlwh = np.asarray(tlwh, dtype=np.float32) @@ -24,10 +48,13 @@ def __init__(self, tlwh, score, class_ids, minimum_consecutive_frames): self.class_ids = class_ids self.tracklet_len = 0 - self.external_track_id = -1 - self.minimum_consecutive_frames = minimum_consecutive_frames + self.internal_id_counter = internal_id_counter + self.external_id_counter = external_id_counter + self.internal_track_id = self.internal_id_counter.NO_ID + self.external_track_id = self.external_id_counter.NO_ID + def predict(self): 
mean_state = self.mean.copy() if self.state != TrackState.Tracked: @@ -54,10 +81,10 @@ def multi_predict(stracks): stracks[i].mean = mean stracks[i].covariance = cov - def activate(self, kalman_filter, frame_id, track_id): + def activate(self, kalman_filter, frame_id): """Start a new tracklet""" self.kalman_filter = kalman_filter - self.internal_track_id = track_id + self.internal_track_id = self.internal_id_counter.new_id() self.mean, self.covariance = self.kalman_filter.initiate( self.tlwh_to_xyah(self._tlwh) ) @@ -68,12 +95,12 @@ def activate(self, kalman_filter, frame_id, track_id): self.is_activated = True if self.minimum_consecutive_frames == 1: - self.external_track_id = track_id + self.external_track_id = self.external_id_counter.new_id() self.frame_id = frame_id self.start_frame = frame_id - def re_activate(self, new_track, frame_id, new_id: Optional[int] = None): + def re_activate(self, new_track, frame_id): self.mean, self.covariance = self.kalman_filter.update( self.mean, self.covariance, self.tlwh_to_xyah(new_track.tlwh) ) @@ -81,11 +108,9 @@ def re_activate(self, new_track, frame_id, new_id: Optional[int] = None): self.state = TrackState.Tracked self.frame_id = frame_id - if new_id: - self.internal_track_id = new_id self.score = new_track.score - def update(self, new_track, frame_id, track_id): + def update(self, new_track, frame_id): """ Update a matched track :type new_track: STrack @@ -103,8 +128,8 @@ def update(self, new_track, frame_id, track_id): self.state = TrackState.Tracked if self.tracklet_len == self.minimum_consecutive_frames: self.is_activated = True - if self.external_track_id == -1: - self.external_track_id = track_id + if self.external_track_id == self.external_id_counter.NO_ID: + self.external_track_id = self.external_id_counter.new_id() self.score = new_track.score @@ -160,24 +185,6 @@ def __repr__(self): ) -def detections2boxes(detections: Detections) -> np.ndarray: - """ - Convert Supervision Detections to numpy tensors for 
further computation. - Args: - detections (Detections): Detections/Targets in the format of sv.Detections. - Returns: - (np.ndarray): Detections as numpy tensors as in - `(x_min, y_min, x_max, y_max, confidence, class_id)` order. - """ - return np.hstack( - ( - detections.xyxy, - detections.confidence[:, np.newaxis], - detections.class_id[:, np.newaxis], - ) - ) - - class ByteTrack: """ Initialize the ByteTrack object. @@ -216,7 +223,6 @@ def __init__( self.track_activation_threshold = track_activation_threshold self.minimum_matching_threshold = minimum_matching_threshold - self._count = 0 self.frame_id = 0 self.det_thresh = self.track_activation_threshold + 0.1 self.max_time_lost = int(frame_rate / 30.0 * lost_track_buffer) @@ -227,9 +233,8 @@ def __init__( self.lost_tracks: List[STrack] = [] self.removed_tracks: List[STrack] = [] - def _next_id(self) -> int: - self._count += 1 - return self._count + self.internal_id_counter = IdCounter() + self.external_id_counter = IdCounter() def update_with_detections(self, detections: Detections) -> Detections: """ @@ -271,7 +276,13 @@ def callback(frame: np.ndarray, index: int) -> np.ndarray: ``` """ - tensors = detections2boxes(detections=detections) + tensors = np.hstack( + ( + detections.xyxy, + detections.confidence[:, np.newaxis], + detections.class_id[:, np.newaxis], + ) + ) tracks = self.update_with_tensors(tensors=tensors) if len(tracks) > 0: @@ -307,7 +318,9 @@ def reset(self): ensuring the tracker starts with a clean state for each new video. 
""" self.frame_id = 0 - self._count = 0 + BaseTrack.reset_counter() + self.internal_id_counter.reset() + self.external_id_counter.reset() self.tracked_tracks: List[STrack] = [] self.lost_tracks: List[STrack] = [] self.removed_tracks: List[STrack] = [] @@ -348,7 +361,14 @@ def update_with_tensors(self, tensors: np.ndarray) -> List[STrack]: if len(dets) > 0: """Detections""" detections = [ - STrack(STrack.tlbr_to_tlwh(tlbr), s, c, self.minimum_consecutive_frames) + STrack( + STrack.tlbr_to_tlwh(tlbr), + s, + c, + self.minimum_consecutive_frames, + self.internal_id_counter, + self.external_id_counter, + ) for (tlbr, s, c) in zip(dets, scores_keep, class_ids_keep) ] else: @@ -379,7 +399,7 @@ def update_with_tensors(self, tensors: np.ndarray) -> List[STrack]: track = strack_pool[itracked] det = detections[idet] if track.state == TrackState.Tracked: - track.update(detections[idet], self.frame_id, self._next_id()) + track.update(detections[idet], self.frame_id) activated_starcks.append(track) else: track.re_activate(det, self.frame_id) @@ -390,7 +410,14 @@ def update_with_tensors(self, tensors: np.ndarray) -> List[STrack]: if len(dets_second) > 0: """Detections""" detections_second = [ - STrack(STrack.tlbr_to_tlwh(tlbr), s, c, self.minimum_consecutive_frames) + STrack( + STrack.tlbr_to_tlwh(tlbr), + s, + c, + self.minimum_consecutive_frames, + self.internal_id_counter, + self.external_id_counter, + ) for (tlbr, s, c) in zip(dets_second, scores_second, class_ids_second) ] else: @@ -408,7 +435,7 @@ def update_with_tensors(self, tensors: np.ndarray) -> List[STrack]: track = r_tracked_stracks[itracked] det = detections_second[idet] if track.state == TrackState.Tracked: - track.update(det, self.frame_id, self._next_id()) + track.update(det, self.frame_id) activated_starcks.append(track) else: track.re_activate(det, self.frame_id) @@ -429,9 +456,7 @@ def update_with_tensors(self, tensors: np.ndarray) -> List[STrack]: dists, thresh=0.7 ) for itracked, idet in matches: - 
unconfirmed[itracked].update( - detections[idet], self.frame_id, self._next_id() - ) + unconfirmed[itracked].update(detections[idet], self.frame_id) activated_starcks.append(unconfirmed[itracked]) for it in u_unconfirmed: track = unconfirmed[it] @@ -443,7 +468,7 @@ def update_with_tensors(self, tensors: np.ndarray) -> List[STrack]: track = detections[inew] if track.score < self.det_thresh: continue - track.activate(self.kalman_filter, self.frame_id, self._next_id()) + track.activate(self.kalman_filter, self.frame_id) activated_starcks.append(track) """ Step 5: Update state""" for track in self.lost_tracks: From b2762dafaff0ab1ee0836da3b2ca8cdef06a3561 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Fri, 18 Oct 2024 00:18:11 +0000 Subject: [PATCH 069/161] :arrow_up: Bump tox from 4.22.0 to 4.23.0 Bumps [tox](https://github.com/tox-dev/tox) from 4.22.0 to 4.23.0. - [Release notes](https://github.com/tox-dev/tox/releases) - [Changelog](https://github.com/tox-dev/tox/blob/main/docs/changelog.rst) - [Commits](https://github.com/tox-dev/tox/compare/4.22.0...4.23.0) --- updated-dependencies: - dependency-name: tox dependency-type: direct:development update-type: version-update:semver-minor ... 
Signed-off-by: dependabot[bot] --- poetry.lock | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/poetry.lock b/poetry.lock index 797b71397..eadeb1380 100644 --- a/poetry.lock +++ b/poetry.lock @@ -4377,13 +4377,13 @@ files = [ [[package]] name = "tox" -version = "4.22.0" +version = "4.23.0" description = "tox is a generic virtualenv management and test command line tool" optional = false python-versions = ">=3.8" files = [ - {file = "tox-4.22.0-py3-none-any.whl", hash = "sha256:03734d9a9ac138cd1a898a372fb1b8079e2728618ae06dc37cbf3686cfb56eea"}, - {file = "tox-4.22.0.tar.gz", hash = "sha256:acc6c627cb3316585238d55d2b633e132fea1bdb01b9d93b56bce7caea6ae73d"}, + {file = "tox-4.23.0-py3-none-any.whl", hash = "sha256:46da40afb660e46238c251280eb910bdaf00b390c7557c8e4bb611f422e9db12"}, + {file = "tox-4.23.0.tar.gz", hash = "sha256:a6bd7d54231d755348d3c3a7b450b5bf6563833716d1299a1619587a1b77a3bf"}, ] [package.dependencies] From 70dd0e1c1d292381702ca74c4a839aa102266d3e Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Fri, 18 Oct 2024 00:21:04 +0000 Subject: [PATCH 070/161] :arrow_up: Bump ruff from 0.6.9 to 0.7.0 Bumps [ruff](https://github.com/astral-sh/ruff) from 0.6.9 to 0.7.0. - [Release notes](https://github.com/astral-sh/ruff/releases) - [Changelog](https://github.com/astral-sh/ruff/blob/main/CHANGELOG.md) - [Commits](https://github.com/astral-sh/ruff/compare/0.6.9...0.7.0) --- updated-dependencies: - dependency-name: ruff dependency-type: direct:development update-type: version-update:semver-minor ... 
Signed-off-by: dependabot[bot] --- poetry.lock | 38 +++++++++++++++++++------------------- 1 file changed, 19 insertions(+), 19 deletions(-) diff --git a/poetry.lock b/poetry.lock index 797b71397..1ee978e64 100644 --- a/poetry.lock +++ b/poetry.lock @@ -4036,29 +4036,29 @@ files = [ [[package]] name = "ruff" -version = "0.6.9" +version = "0.7.0" description = "An extremely fast Python linter and code formatter, written in Rust." optional = false python-versions = ">=3.7" files = [ - {file = "ruff-0.6.9-py3-none-linux_armv6l.whl", hash = "sha256:064df58d84ccc0ac0fcd63bc3090b251d90e2a372558c0f057c3f75ed73e1ccd"}, - {file = "ruff-0.6.9-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:140d4b5c9f5fc7a7b074908a78ab8d384dd7f6510402267bc76c37195c02a7ec"}, - {file = "ruff-0.6.9-py3-none-macosx_11_0_arm64.whl", hash = "sha256:53fd8ca5e82bdee8da7f506d7b03a261f24cd43d090ea9db9a1dc59d9313914c"}, - {file = "ruff-0.6.9-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:645d7d8761f915e48a00d4ecc3686969761df69fb561dd914a773c1a8266e14e"}, - {file = "ruff-0.6.9-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:eae02b700763e3847595b9d2891488989cac00214da7f845f4bcf2989007d577"}, - {file = "ruff-0.6.9-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7d5ccc9e58112441de8ad4b29dcb7a86dc25c5f770e3c06a9d57e0e5eba48829"}, - {file = "ruff-0.6.9-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:417b81aa1c9b60b2f8edc463c58363075412866ae4e2b9ab0f690dc1e87ac1b5"}, - {file = "ruff-0.6.9-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3c866b631f5fbce896a74a6e4383407ba7507b815ccc52bcedabb6810fdb3ef7"}, - {file = "ruff-0.6.9-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7b118afbb3202f5911486ad52da86d1d52305b59e7ef2031cea3425142b97d6f"}, - {file = "ruff-0.6.9-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:a67267654edc23c97335586774790cde402fb6bbdb3c2314f1fc087dee320bfa"}, - {file = "ruff-0.6.9-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:3ef0cc774b00fec123f635ce5c547dac263f6ee9fb9cc83437c5904183b55ceb"}, - {file = "ruff-0.6.9-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:12edd2af0c60fa61ff31cefb90aef4288ac4d372b4962c2864aeea3a1a2460c0"}, - {file = "ruff-0.6.9-py3-none-musllinux_1_2_i686.whl", hash = "sha256:55bb01caeaf3a60b2b2bba07308a02fca6ab56233302406ed5245180a05c5625"}, - {file = "ruff-0.6.9-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:925d26471fa24b0ce5a6cdfab1bb526fb4159952385f386bdcc643813d472039"}, - {file = "ruff-0.6.9-py3-none-win32.whl", hash = "sha256:eb61ec9bdb2506cffd492e05ac40e5bc6284873aceb605503d8494180d6fc84d"}, - {file = "ruff-0.6.9-py3-none-win_amd64.whl", hash = "sha256:785d31851c1ae91f45b3d8fe23b8ae4b5170089021fbb42402d811135f0b7117"}, - {file = "ruff-0.6.9-py3-none-win_arm64.whl", hash = "sha256:a9641e31476d601f83cd602608739a0840e348bda93fec9f1ee816f8b6798b93"}, - {file = "ruff-0.6.9.tar.gz", hash = "sha256:b076ef717a8e5bc819514ee1d602bbdca5b4420ae13a9cf61a0c0a4f53a2baa2"}, + {file = "ruff-0.7.0-py3-none-linux_armv6l.whl", hash = "sha256:0cdf20c2b6ff98e37df47b2b0bd3a34aaa155f59a11182c1303cce79be715628"}, + {file = "ruff-0.7.0-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:496494d350c7fdeb36ca4ef1c9f21d80d182423718782222c29b3e72b3512737"}, + {file = "ruff-0.7.0-py3-none-macosx_11_0_arm64.whl", hash = "sha256:214b88498684e20b6b2b8852c01d50f0651f3cc6118dfa113b4def9f14faaf06"}, + {file = "ruff-0.7.0-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:630fce3fefe9844e91ea5bbf7ceadab4f9981f42b704fae011bb8efcaf5d84be"}, + {file = "ruff-0.7.0-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:211d877674e9373d4bb0f1c80f97a0201c61bcd1e9d045b6e9726adc42c156aa"}, + {file = "ruff-0.7.0-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = 
"sha256:194d6c46c98c73949a106425ed40a576f52291c12bc21399eb8f13a0f7073495"}, + {file = "ruff-0.7.0-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:82c2579b82b9973a110fab281860403b397c08c403de92de19568f32f7178598"}, + {file = "ruff-0.7.0-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9af971fe85dcd5eaed8f585ddbc6bdbe8c217fb8fcf510ea6bca5bdfff56040e"}, + {file = "ruff-0.7.0-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b641c7f16939b7d24b7bfc0be4102c56562a18281f84f635604e8a6989948914"}, + {file = "ruff-0.7.0-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d71672336e46b34e0c90a790afeac8a31954fd42872c1f6adaea1dff76fd44f9"}, + {file = "ruff-0.7.0-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:ab7d98c7eed355166f367597e513a6c82408df4181a937628dbec79abb2a1fe4"}, + {file = "ruff-0.7.0-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:1eb54986f770f49edb14f71d33312d79e00e629a57387382200b1ef12d6a4ef9"}, + {file = "ruff-0.7.0-py3-none-musllinux_1_2_i686.whl", hash = "sha256:dc452ba6f2bb9cf8726a84aa877061a2462afe9ae0ea1d411c53d226661c601d"}, + {file = "ruff-0.7.0-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:4b406c2dce5be9bad59f2de26139a86017a517e6bcd2688da515481c05a2cb11"}, + {file = "ruff-0.7.0-py3-none-win32.whl", hash = "sha256:f6c968509f767776f524a8430426539587d5ec5c662f6addb6aa25bc2e8195ec"}, + {file = "ruff-0.7.0-py3-none-win_amd64.whl", hash = "sha256:ff4aabfbaaba880e85d394603b9e75d32b0693152e16fa659a3064a85df7fce2"}, + {file = "ruff-0.7.0-py3-none-win_arm64.whl", hash = "sha256:10842f69c245e78d6adec7e1db0a7d9ddc2fff0621d730e61657b64fa36f207e"}, + {file = "ruff-0.7.0.tar.gz", hash = "sha256:47a86360cf62d9cd53ebfb0b5eb0e882193fc191c6d717e8bef4462bc3b9ea2b"}, ] [[package]] From eb3e1e62935019841308b17d5eb288f4f35dafca Mon Sep 17 00:00:00 2001 From: LinasKo Date: Fri, 18 Oct 2024 12:45:38 +0300 Subject: [PATCH 071/161] Add usage example in Polygon Zone docs --- 
supervision/detection/tools/polygon_zone.py | 28 +++++++++++++++++++++ 1 file changed, 28 insertions(+) diff --git a/supervision/detection/tools/polygon_zone.py b/supervision/detection/tools/polygon_zone.py index b8b728fc5..f69f3c9fe 100644 --- a/supervision/detection/tools/polygon_zone.py +++ b/supervision/detection/tools/polygon_zone.py @@ -17,6 +17,12 @@ class PolygonZone: """ A class for defining a polygon-shaped zone within a frame for detecting objects. + !!! warning + + PolygonZone uses the `tracker_id`. Read + [here](/latest/trackers/) to learn how to plug + tracking into your inference pipeline. + Attributes: polygon (np.ndarray): A polygon represented by a numpy array of shape `(N, 2)`, containing the `x`, `y` coordinates of the points. @@ -26,6 +32,28 @@ class PolygonZone: (default: (sv.Position.BOTTOM_CENTER,)). current_count (int): The current count of detected objects within the zone mask (np.ndarray): The 2D bool mask for the polygon zone + + Example: + ```python + import supervision as sv + from ultralytics import YOLO + import numpy as np + import cv2 + + image = cv2.imread("<SOURCE_IMAGE_PATH>") + model = YOLO("yolo11s") + tracker = sv.ByteTrack() + + polygon = np.array([[100, 200], [200, 100], [300, 200], [200, 300]]) + polygon_zone = sv.PolygonZone(polygon=polygon) + + result = model(image)[0] + detections = sv.Detections.from_ultralytics(result) + detections = tracker.update_with_detections(detections) + + is_detections_in_zone = polygon_zone.trigger(detections) + print(polygon_zone.current_count) + ``` """ def __init__( From c17de5c11b220b07ea8343ee3ebbedfa52130b22 Mon Sep 17 00:00:00 2001 From: LinasKo Date: Fri, 18 Oct 2024 14:37:36 +0300 Subject: [PATCH 072/161] Add precision and recall metrics --- docs/metrics/precision.md | 18 + docs/metrics/recall.md | 18 + mkdocs.yml | 2 + supervision/metrics/precision.py | 547 +++++++++++++++++++++++++++++++ supervision/metrics/recall.py | 545 ++++++++++++++++++++++++++++++ 5 files changed, 1130 insertions(+) create
mode 100644 docs/metrics/precision.md create mode 100644 docs/metrics/recall.md create mode 100644 supervision/metrics/precision.py create mode 100644 supervision/metrics/recall.py diff --git a/docs/metrics/precision.md b/docs/metrics/precision.md new file mode 100644 index 000000000..ca318f8fb --- /dev/null +++ b/docs/metrics/precision.md @@ -0,0 +1,18 @@ +--- +comments: true +status: new +--- + +# F1 Score + + + +:::supervision.metrics.precision.Precision + + + +:::supervision.metrics.precision.PrecisionResult diff --git a/docs/metrics/recall.md b/docs/metrics/recall.md new file mode 100644 index 000000000..5baa4d3ee --- /dev/null +++ b/docs/metrics/recall.md @@ -0,0 +1,18 @@ +--- +comments: true +status: new +--- + +# F1 Score + +
+

Recall

+
+ +:::supervision.metrics.recall.Recall + + + +:::supervision.metrics.recall.RecallResult diff --git a/mkdocs.yml b/mkdocs.yml index 3cd867590..a3c9c1caa 100644 --- a/mkdocs.yml +++ b/mkdocs.yml @@ -66,6 +66,8 @@ nav: - Utils: datasets/utils.md - Metrics: - mAP: metrics/mean_average_precision.md + - Precision: metrics/precision.md + - Recall: metrics/recall.md - F1 Score: metrics/f1_score.md - Legacy Metrics: detection/metrics.md - Utils: diff --git a/supervision/metrics/precision.py b/supervision/metrics/precision.py new file mode 100644 index 000000000..ba441831a --- /dev/null +++ b/supervision/metrics/precision.py @@ -0,0 +1,547 @@ +from __future__ import annotations + +from copy import deepcopy +from dataclasses import dataclass +from typing import TYPE_CHECKING, List, Optional, Tuple, Union + +import numpy as np +from matplotlib import pyplot as plt + +from supervision.config import ORIENTED_BOX_COORDINATES +from supervision.detection.core import Detections +from supervision.detection.utils import box_iou_batch, mask_iou_batch +from supervision.draw.color import LEGACY_COLOR_PALETTE +from supervision.metrics.core import AveragingMethod, Metric, MetricTarget +from supervision.metrics.utils.object_size import ( + ObjectSizeCategory, + get_detection_size_category, +) +from supervision.metrics.utils.utils import ensure_pandas_installed + +if TYPE_CHECKING: + import pandas as pd + + +class Precision(Metric): + def __init__( + self, + metric_target: MetricTarget = MetricTarget.BOXES, + averaging_method: AveragingMethod = AveragingMethod.WEIGHTED, + ): + self._metric_target = metric_target + if self._metric_target == MetricTarget.ORIENTED_BOUNDING_BOXES: + raise NotImplementedError( + "Precision is not implemented for oriented bounding boxes." 
+ ) + + self._metric_target = metric_target + self.averaging_method = averaging_method + self._predictions_list: List[Detections] = [] + self._targets_list: List[Detections] = [] + + def reset(self) -> None: + self._predictions_list = [] + self._targets_list = [] + + def update( + self, + predictions: Union[Detections, List[Detections]], + targets: Union[Detections, List[Detections]], + ) -> Precision: + if not isinstance(predictions, list): + predictions = [predictions] + if not isinstance(targets, list): + targets = [targets] + + if len(predictions) != len(targets): + raise ValueError( + f"The number of predictions ({len(predictions)}) and" + f" targets ({len(targets)}) during the update must be the same." + ) + + self._predictions_list.extend(predictions) + self._targets_list.extend(targets) + + return self + + def compute(self) -> PrecisionResult: + result = self._compute(self._predictions_list, self._targets_list) + + small_predictions, small_targets = self._filter_predictions_and_targets_by_size( + self._predictions_list, self._targets_list, ObjectSizeCategory.SMALL + ) + result.small_objects = self._compute(small_predictions, small_targets) + + medium_predictions, medium_targets = ( + self._filter_predictions_and_targets_by_size( + self._predictions_list, self._targets_list, ObjectSizeCategory.MEDIUM + ) + ) + result.medium_objects = self._compute(medium_predictions, medium_targets) + + large_predictions, large_targets = self._filter_predictions_and_targets_by_size( + self._predictions_list, self._targets_list, ObjectSizeCategory.LARGE + ) + result.large_objects = self._compute(large_predictions, large_targets) + + return result + + def _compute( + self, predictions_list: List[Detections], targets_list: List[Detections] + ) -> PrecisionResult: + iou_thresholds = np.linspace(0.5, 0.95, 10) + stats = [] + + for predictions, targets in zip(predictions_list, targets_list): + prediction_contents = self._detections_content(predictions) + target_contents = 
self._detections_content(targets) + + if len(targets) > 0: + if len(predictions) == 0: + stats.append( + ( + np.zeros((0, iou_thresholds.size), dtype=bool), + np.zeros((0,), dtype=np.float32), + np.zeros((0,), dtype=int), + targets.class_id, + ) + ) + + else: + if self._metric_target == MetricTarget.BOXES: + iou = box_iou_batch(target_contents, prediction_contents) + elif self._metric_target == MetricTarget.MASKS: + iou = mask_iou_batch(target_contents, prediction_contents) + else: + raise NotImplementedError( + "Unsupported metric target for IoU calculation" + ) + + matches = self._match_detection_batch( + predictions.class_id, targets.class_id, iou, iou_thresholds + ) + stats.append( + ( + matches, + predictions.confidence, + predictions.class_id, + targets.class_id, + ) + ) + + if not stats: + return PrecisionResult( + metric_target=self._metric_target, + averaging_method=self.averaging_method, + precision_scores=np.zeros(iou_thresholds.shape[0]), + precision_per_class=np.zeros((0, iou_thresholds.shape[0])), + iou_thresholds=iou_thresholds, + matched_classes=np.array([], dtype=int), + small_objects=None, + medium_objects=None, + large_objects=None, + ) + + concatenated_stats = [np.concatenate(items, 0) for items in zip(*stats)] + precision_scores, precision_per_class, unique_classes = ( + self._compute_precision_for_classes(*concatenated_stats) + ) + + return PrecisionResult( + metric_target=self._metric_target, + averaging_method=self.averaging_method, + precision_scores=precision_scores, + precision_per_class=precision_per_class, + iou_thresholds=iou_thresholds, + matched_classes=unique_classes, + small_objects=None, + medium_objects=None, + large_objects=None, + ) + + def _compute_precision_for_classes( + self, + matches: np.ndarray, + prediction_confidence: np.ndarray, + prediction_class_ids: np.ndarray, + true_class_ids: np.ndarray, + ) -> Tuple[np.ndarray, np.ndarray, np.ndarray]: + sorted_indices = np.argsort(-prediction_confidence) + matches = 
matches[sorted_indices] + prediction_class_ids = prediction_class_ids[sorted_indices] + unique_classes, class_counts = np.unique(true_class_ids, return_counts=True) + + # Shape: PxTh,P,C,C -> CxThx3 + confusion_matrix = self._compute_confusion_matrix( + matches, prediction_class_ids, unique_classes, class_counts + ) + + # Shape: CxThx3 -> CxTh + precision_per_class = self._compute_precision(confusion_matrix) + + # Shape: CxTh -> Th + if self.averaging_method == AveragingMethod.MACRO: + precision_scores = np.mean(precision_per_class, axis=0) + elif self.averaging_method == AveragingMethod.MICRO: + confusion_matrix_merged = confusion_matrix.sum(0) + precision_scores = self._compute_precision(confusion_matrix_merged) + elif self.averaging_method == AveragingMethod.WEIGHTED: + class_counts = class_counts.astype(np.float32) + precision_scores = np.average( + precision_per_class, axis=0, weights=class_counts + ) + + return precision_scores, precision_per_class, unique_classes + + @staticmethod + def _match_detection_batch( + predictions_classes: np.ndarray, + target_classes: np.ndarray, + iou: np.ndarray, + iou_thresholds: np.ndarray, + ) -> np.ndarray: + num_predictions, num_iou_levels = ( + predictions_classes.shape[0], + iou_thresholds.shape[0], + ) + correct = np.zeros((num_predictions, num_iou_levels), dtype=bool) + correct_class = target_classes[:, None] == predictions_classes + + for i, iou_level in enumerate(iou_thresholds): + matched_indices = np.where((iou >= iou_level) & correct_class) + + if matched_indices[0].shape[0]: + combined_indices = np.stack(matched_indices, axis=1) + iou_values = iou[matched_indices][:, None] + matches = np.hstack([combined_indices, iou_values]) + + if matched_indices[0].shape[0] > 1: + matches = matches[matches[:, 2].argsort()[::-1]] + matches = matches[np.unique(matches[:, 1], return_index=True)[1]] + matches = matches[np.unique(matches[:, 0], return_index=True)[1]] + + correct[matches[:, 1].astype(int), i] = True + + return 
correct + + @staticmethod + def _compute_confusion_matrix( + sorted_matches: np.ndarray, + sorted_prediction_class_ids: np.ndarray, + unique_classes: np.ndarray, + class_counts: np.ndarray, + ) -> np.ndarray: + """ + Compute the confusion matrix for each class and IoU threshold. + + Assumes the matches and prediction_class_ids are sorted by confidence + in descending order. + + Arguments: + sorted_matches: np.ndarray, bool, shape (P, Th), that is True + if the prediction is a true positive at the given IoU threshold. + sorted_prediction_class_ids: np.ndarray, int, shape (P,), containing + the class id for each prediction. + unique_classes: np.ndarray, int, shape (C,), containing the unique + class ids. + class_counts: np.ndarray, int, shape (C,), containing the number + of true instances for each class. + + Returns: + np.ndarray, shape (C, Th, 3), containing the true positives, false + positives, and false negatives for each class and IoU threshold. + """ + + num_thresholds = sorted_matches.shape[1] + num_classes = unique_classes.shape[0] + + confusion_matrix = np.zeros((num_classes, num_thresholds, 3)) + for class_idx, class_id in enumerate(unique_classes): + is_class = sorted_prediction_class_ids == class_id + num_true = class_counts[class_idx] + num_predictions = is_class.sum() + + if num_predictions == 0: + true_positives = np.zeros(num_thresholds) + false_positives = np.zeros(num_thresholds) + false_negatives = np.full(num_thresholds, num_true) + elif num_true == 0: + true_positives = np.zeros(num_thresholds) + false_positives = np.full(num_thresholds, num_predictions) + false_negatives = np.zeros(num_thresholds) + else: + true_positives = sorted_matches[is_class].sum(0) + false_positives = (1 - sorted_matches[is_class]).sum(0) + false_negatives = num_true - true_positives + confusion_matrix[class_idx] = np.stack( + [true_positives, false_positives, false_negatives], axis=1 + ) + + return confusion_matrix + + @staticmethod + def 
_compute_precision(confusion_matrix: np.ndarray) -> np.ndarray: + """ + Broadcastable function, computing the precision from the confusion matrix. + + Arguments: + confusion_matrix: np.ndarray, shape (N, ..., 3), where the last dimension + contains the true positives, false positives, and false negatives. + + Returns: + np.ndarray, shape (N, ...), containing the precision for each element. + """ + if not confusion_matrix.shape[-1] == 3: + raise ValueError( + f"Confusion matrix must have shape (..., 3), got " + f"{confusion_matrix.shape}" + ) + true_positives = confusion_matrix[..., 0] + false_positives = confusion_matrix[..., 1] + + denominator = true_positives + false_positives + precision = np.where(denominator == 0, 0, true_positives / denominator) + + return precision + + def _detections_content(self, detections: Detections) -> np.ndarray: + """Return boxes, masks or oriented bounding boxes from detections.""" + if self._metric_target == MetricTarget.BOXES: + return detections.xyxy + if self._metric_target == MetricTarget.MASKS: + return ( + detections.mask + if detections.mask is not None + else np.empty((0, 0, 0), dtype=bool) + ) + if self._metric_target == MetricTarget.ORIENTED_BOUNDING_BOXES: + if obb := detections.data.get(ORIENTED_BOX_COORDINATES): + return np.array(obb, dtype=np.float32) + return np.empty((0, 8), dtype=np.float32) + raise ValueError(f"Invalid metric target: {self._metric_target}") + + def _filter_detections_by_size( + self, detections: Detections, size_category: ObjectSizeCategory + ) -> Detections: + """Return a copy of detections with contents filtered by object size.""" + new_detections = deepcopy(detections) + if detections.is_empty() or size_category == ObjectSizeCategory.ANY: + return new_detections + + sizes = get_detection_size_category(new_detections, self._metric_target) + size_mask = sizes == size_category.value + + new_detections.xyxy = new_detections.xyxy[size_mask] + if new_detections.mask is not None: + 
new_detections.mask = new_detections.mask[size_mask] + if new_detections.class_id is not None: + new_detections.class_id = new_detections.class_id[size_mask] + if new_detections.confidence is not None: + new_detections.confidence = new_detections.confidence[size_mask] + if new_detections.tracker_id is not None: + new_detections.tracker_id = new_detections.tracker_id[size_mask] + if new_detections.data is not None: + for key, value in new_detections.data.items(): + new_detections.data[key] = np.array(value)[size_mask] + + return new_detections + + def _filter_predictions_and_targets_by_size( + self, + predictions_list: List[Detections], + targets_list: List[Detections], + size_category: ObjectSizeCategory, + ) -> Tuple[List[Detections], List[Detections]]: + """ + Filter predictions and targets by object size category. + """ + new_predictions_list = [] + new_targets_list = [] + for predictions, targets in zip(predictions_list, targets_list): + new_predictions_list.append( + self._filter_detections_by_size(predictions, size_category) + ) + new_targets_list.append( + self._filter_detections_by_size(targets, size_category) + ) + return new_predictions_list, new_targets_list + + +@dataclass +class PrecisionResult: + """ + The results of the precision metric calculation. + + Defaults to `0` if no detections or targets were provided. + Provides a custom `__str__` method for pretty printing. + + Attributes: + metric_target (MetricTarget): the type of data used for the metric - + boxes, masks or oriented bounding boxes. + averaging_method (AveragingMethod): the averaging method used to compute the + precision. Determines how the precision is aggregated across classes. + precision_at_50 (float): the precision at IoU threshold of `0.5`. + precision_at_75 (float): the precision at IoU threshold of `0.75`. + precision_scores (np.ndarray): the precision scores at each IoU threshold. 
+ Shape: `(num_iou_thresholds,)` + precision_per_class (np.ndarray): the precision scores per class and + IoU threshold. Shape: `(num_target_classes, num_iou_thresholds)` + iou_thresholds (np.ndarray): the IoU thresholds used in the calculations. + matched_classes (np.ndarray): the class IDs of all matched classes. + Corresponds to the rows of `precision_per_class`. + small_objects (Optional[PrecisionResult]): the Precision metric results + for small objects. + medium_objects (Optional[PrecisionResult]): the Precision metric results + for medium objects. + large_objects (Optional[PrecisionResult]): the Precision metric results + for large objects. + """ + + metric_target: MetricTarget + averaging_method: AveragingMethod + + @property + def precision_at_50(self) -> float: + return self.precision_scores[0] + + @property + def precision_at_75(self) -> float: + return self.precision_scores[5] + + precision_scores: np.ndarray + precision_per_class: np.ndarray + iou_thresholds: np.ndarray + matched_classes: np.ndarray + + small_objects: Optional[PrecisionResult] + medium_objects: Optional[PrecisionResult] + large_objects: Optional[PrecisionResult] + + def __str__(self) -> str: + """ + Format as a pretty string. 
+ + Example: + ```python + print(precision_result) + ``` + """ + out_str = ( + f"{self.__class__.__name__}:\n" + f"Metric target: {self.metric_target}\n" + f"Averaging method: {self.averaging_method}\n" + f"P @ 50: {self.precision_at_50:.4f}\n" + f"P @ 75: {self.precision_at_75:.4f}\n" + f"P @ thresh: {self.precision_scores}\n" + f"IoU thresh: {self.iou_thresholds}\n" + f"Precision per class:\n" + ) + if self.precision_per_class.size == 0: + out_str += " No results\n" + for class_id, precision_of_class in zip( + self.matched_classes, self.precision_per_class + ): + out_str += f" {class_id}: {precision_of_class}\n" + + indent = " " + if self.small_objects is not None: + indented = indent + str(self.small_objects).replace("\n", f"\n{indent}") + out_str += f"\nSmall objects:\n{indented}" + if self.medium_objects is not None: + indented = indent + str(self.medium_objects).replace("\n", f"\n{indent}") + out_str += f"\nMedium objects:\n{indented}" + if self.large_objects is not None: + indented = indent + str(self.large_objects).replace("\n", f"\n{indent}") + out_str += f"\nLarge objects:\n{indented}" + + return out_str + + def to_pandas(self) -> "pd.DataFrame": + """ + Convert the result to a pandas DataFrame. + + Returns: + (pd.DataFrame): The result as a DataFrame. 
+ """ + ensure_pandas_installed() + import pandas as pd + + pandas_data = { + "P@50": self.precision_at_50, + "P@75": self.precision_at_75, + } + + if self.small_objects is not None: + small_objects_df = self.small_objects.to_pandas() + for key, value in small_objects_df.items(): + pandas_data[f"small_objects_{key}"] = value + if self.medium_objects is not None: + medium_objects_df = self.medium_objects.to_pandas() + for key, value in medium_objects_df.items(): + pandas_data[f"medium_objects_{key}"] = value + if self.large_objects is not None: + large_objects_df = self.large_objects.to_pandas() + for key, value in large_objects_df.items(): + pandas_data[f"large_objects_{key}"] = value + + return pd.DataFrame(pandas_data, index=[0]) + + def plot(self): + """ + Plot the precision results. + """ + + labels = ["Precision@50", "Precision@75"] + values = [self.precision_at_50, self.precision_at_75] + colors = [LEGACY_COLOR_PALETTE[0]] * 2 + + if self.small_objects is not None: + small_objects = self.small_objects + labels += ["Small: P@50", "Small: P@75"] + values += [small_objects.precision_at_50, small_objects.precision_at_75] + colors += [LEGACY_COLOR_PALETTE[3]] * 2 + + if self.medium_objects is not None: + medium_objects = self.medium_objects + labels += ["Medium: P@50", "Medium: P@75"] + values += [medium_objects.precision_at_50, medium_objects.precision_at_75] + colors += [LEGACY_COLOR_PALETTE[2]] * 2 + + if self.large_objects is not None: + large_objects = self.large_objects + labels += ["Large: P@50", "Large: P@75"] + values += [large_objects.precision_at_50, large_objects.precision_at_75] + colors += [LEGACY_COLOR_PALETTE[4]] * 2 + + plt.rcParams["font.family"] = "monospace" + + _, ax = plt.subplots(figsize=(10, 6)) + ax.set_ylim(0, 1) + ax.set_ylabel("Value", fontweight="bold") + title = ( + f"Precision, by Object Size" + f"\n(target: {self.metric_target.value}," + f" averaging: {self.averaging_method.value})" + ) + ax.set_title(title, fontweight="bold") + + 
x_positions = range(len(labels)) + bars = ax.bar(x_positions, values, color=colors, align="center") + + ax.set_xticks(x_positions) + ax.set_xticklabels(labels, rotation=45, ha="right") + + for bar in bars: + y_value = bar.get_height() + ax.text( + bar.get_x() + bar.get_width() / 2, + y_value + 0.02, + f"{y_value:.2f}", + ha="center", + va="bottom", + ) + + plt.rcParams["font.family"] = "sans-serif" + + plt.tight_layout() + plt.show() diff --git a/supervision/metrics/recall.py b/supervision/metrics/recall.py new file mode 100644 index 000000000..7c90859cc --- /dev/null +++ b/supervision/metrics/recall.py @@ -0,0 +1,545 @@ +from __future__ import annotations + +from copy import deepcopy +from dataclasses import dataclass +from typing import TYPE_CHECKING, List, Optional, Tuple, Union + +import numpy as np +from matplotlib import pyplot as plt + +from supervision.config import ORIENTED_BOX_COORDINATES +from supervision.detection.core import Detections +from supervision.detection.utils import box_iou_batch, mask_iou_batch +from supervision.draw.color import LEGACY_COLOR_PALETTE +from supervision.metrics.core import AveragingMethod, Metric, MetricTarget +from supervision.metrics.utils.object_size import ( + ObjectSizeCategory, + get_detection_size_category, +) +from supervision.metrics.utils.utils import ensure_pandas_installed + +if TYPE_CHECKING: + import pandas as pd + + +class Recall(Metric): + def __init__( + self, + metric_target: MetricTarget = MetricTarget.BOXES, + averaging_method: AveragingMethod = AveragingMethod.WEIGHTED, + ): + self._metric_target = metric_target + if self._metric_target == MetricTarget.ORIENTED_BOUNDING_BOXES: + raise NotImplementedError( + "Recall is not implemented for oriented bounding boxes." 
+ ) + + self._metric_target = metric_target + self.averaging_method = averaging_method + self._predictions_list: List[Detections] = [] + self._targets_list: List[Detections] = [] + + def reset(self) -> None: + self._predictions_list = [] + self._targets_list = [] + + def update( + self, + predictions: Union[Detections, List[Detections]], + targets: Union[Detections, List[Detections]], + ) -> Recall: + if not isinstance(predictions, list): + predictions = [predictions] + if not isinstance(targets, list): + targets = [targets] + + if len(predictions) != len(targets): + raise ValueError( + f"The number of predictions ({len(predictions)}) and" + f" targets ({len(targets)}) during the update must be the same." + ) + + self._predictions_list.extend(predictions) + self._targets_list.extend(targets) + + return self + + def compute(self) -> RecallResult: + result = self._compute(self._predictions_list, self._targets_list) + + small_predictions, small_targets = self._filter_predictions_and_targets_by_size( + self._predictions_list, self._targets_list, ObjectSizeCategory.SMALL + ) + result.small_objects = self._compute(small_predictions, small_targets) + + medium_predictions, medium_targets = ( + self._filter_predictions_and_targets_by_size( + self._predictions_list, self._targets_list, ObjectSizeCategory.MEDIUM + ) + ) + result.medium_objects = self._compute(medium_predictions, medium_targets) + + large_predictions, large_targets = self._filter_predictions_and_targets_by_size( + self._predictions_list, self._targets_list, ObjectSizeCategory.LARGE + ) + result.large_objects = self._compute(large_predictions, large_targets) + + return result + + def _compute( + self, predictions_list: List[Detections], targets_list: List[Detections] + ) -> RecallResult: + iou_thresholds = np.linspace(0.5, 0.95, 10) + stats = [] + + for predictions, targets in zip(predictions_list, targets_list): + prediction_contents = self._detections_content(predictions) + target_contents = 
self._detections_content(targets) + + if len(targets) > 0: + if len(predictions) == 0: + stats.append( + ( + np.zeros((0, iou_thresholds.size), dtype=bool), + np.zeros((0,), dtype=np.float32), + np.zeros((0,), dtype=int), + targets.class_id, + ) + ) + + else: + if self._metric_target == MetricTarget.BOXES: + iou = box_iou_batch(target_contents, prediction_contents) + elif self._metric_target == MetricTarget.MASKS: + iou = mask_iou_batch(target_contents, prediction_contents) + else: + raise NotImplementedError( + "Unsupported metric target for IoU calculation" + ) + + matches = self._match_detection_batch( + predictions.class_id, targets.class_id, iou, iou_thresholds + ) + stats.append( + ( + matches, + predictions.confidence, + predictions.class_id, + targets.class_id, + ) + ) + + if not stats: + return RecallResult( + metric_target=self._metric_target, + averaging_method=self.averaging_method, + recall_scores=np.zeros(iou_thresholds.shape[0]), + recall_per_class=np.zeros((0, iou_thresholds.shape[0])), + iou_thresholds=iou_thresholds, + matched_classes=np.array([], dtype=int), + small_objects=None, + medium_objects=None, + large_objects=None, + ) + + concatenated_stats = [np.concatenate(items, 0) for items in zip(*stats)] + recall_scores, recall_per_class, unique_classes = ( + self._compute_recall_for_classes(*concatenated_stats) + ) + + return RecallResult( + metric_target=self._metric_target, + averaging_method=self.averaging_method, + recall_scores=recall_scores, + recall_per_class=recall_per_class, + iou_thresholds=iou_thresholds, + matched_classes=unique_classes, + small_objects=None, + medium_objects=None, + large_objects=None, + ) + + def _compute_recall_for_classes( + self, + matches: np.ndarray, + prediction_confidence: np.ndarray, + prediction_class_ids: np.ndarray, + true_class_ids: np.ndarray, + ) -> Tuple[np.ndarray, np.ndarray, np.ndarray]: + sorted_indices = np.argsort(-prediction_confidence) + matches = matches[sorted_indices] + prediction_class_ids 
= prediction_class_ids[sorted_indices] + unique_classes, class_counts = np.unique(true_class_ids, return_counts=True) + + # Shape: PxTh,P,C,C -> CxThx3 + confusion_matrix = self._compute_confusion_matrix( + matches, prediction_class_ids, unique_classes, class_counts + ) + + # Shape: CxThx3 -> CxTh + recall_per_class = self._compute_recall(confusion_matrix) + + # Shape: CxTh -> Th + if self.averaging_method == AveragingMethod.MACRO: + recall_scores = np.mean(recall_per_class, axis=0) + elif self.averaging_method == AveragingMethod.MICRO: + confusion_matrix_merged = confusion_matrix.sum(0) + recall_scores = self._compute_recall(confusion_matrix_merged) + elif self.averaging_method == AveragingMethod.WEIGHTED: + class_counts = class_counts.astype(np.float32) + recall_scores = np.average(recall_per_class, axis=0, weights=class_counts) + + return recall_scores, recall_per_class, unique_classes + + @staticmethod + def _match_detection_batch( + predictions_classes: np.ndarray, + target_classes: np.ndarray, + iou: np.ndarray, + iou_thresholds: np.ndarray, + ) -> np.ndarray: + num_predictions, num_iou_levels = ( + predictions_classes.shape[0], + iou_thresholds.shape[0], + ) + correct = np.zeros((num_predictions, num_iou_levels), dtype=bool) + correct_class = target_classes[:, None] == predictions_classes + + for i, iou_level in enumerate(iou_thresholds): + matched_indices = np.where((iou >= iou_level) & correct_class) + + if matched_indices[0].shape[0]: + combined_indices = np.stack(matched_indices, axis=1) + iou_values = iou[matched_indices][:, None] + matches = np.hstack([combined_indices, iou_values]) + + if matched_indices[0].shape[0] > 1: + matches = matches[matches[:, 2].argsort()[::-1]] + matches = matches[np.unique(matches[:, 1], return_index=True)[1]] + matches = matches[np.unique(matches[:, 0], return_index=True)[1]] + + correct[matches[:, 1].astype(int), i] = True + + return correct + + @staticmethod + def _compute_confusion_matrix( + sorted_matches: np.ndarray, 
+ sorted_prediction_class_ids: np.ndarray, + unique_classes: np.ndarray, + class_counts: np.ndarray, + ) -> np.ndarray: + """ + Compute the confusion matrix for each class and IoU threshold. + + Assumes the matches and prediction_class_ids are sorted by confidence + in descending order. + + Arguments: + sorted_matches: np.ndarray, bool, shape (P, Th), that is True + if the prediction is a true positive at the given IoU threshold. + sorted_prediction_class_ids: np.ndarray, int, shape (P,), containing + the class id for each prediction. + unique_classes: np.ndarray, int, shape (C,), containing the unique + class ids. + class_counts: np.ndarray, int, shape (C,), containing the number + of true instances for each class. + + Returns: + np.ndarray, shape (C, Th, 3), containing the true positives, false + positives, and false negatives for each class and IoU threshold. + """ + + num_thresholds = sorted_matches.shape[1] + num_classes = unique_classes.shape[0] + + confusion_matrix = np.zeros((num_classes, num_thresholds, 3)) + for class_idx, class_id in enumerate(unique_classes): + is_class = sorted_prediction_class_ids == class_id + num_true = class_counts[class_idx] + num_predictions = is_class.sum() + + if num_predictions == 0: + true_positives = np.zeros(num_thresholds) + false_positives = np.zeros(num_thresholds) + false_negatives = np.full(num_thresholds, num_true) + elif num_true == 0: + true_positives = np.zeros(num_thresholds) + false_positives = np.full(num_thresholds, num_predictions) + false_negatives = np.zeros(num_thresholds) + else: + true_positives = sorted_matches[is_class].sum(0) + false_positives = (1 - sorted_matches[is_class]).sum(0) + false_negatives = num_true - true_positives + confusion_matrix[class_idx] = np.stack( + [true_positives, false_positives, false_negatives], axis=1 + ) + + return confusion_matrix + + @staticmethod + def _compute_recall(confusion_matrix: np.ndarray) -> np.ndarray: + """ + Broadcastable function, computing the recall from 
the confusion matrix. + + Arguments: + confusion_matrix: np.ndarray, shape (N, ..., 3), where the last dimension + contains the true positives, false positives, and false negatives. + + Returns: + np.ndarray, shape (N, ...), containing the recall for each element. + """ + if not confusion_matrix.shape[-1] == 3: + raise ValueError( + f"Confusion matrix must have shape (..., 3), got " + f"{confusion_matrix.shape}" + ) + true_positives = confusion_matrix[..., 0] + false_negatives = confusion_matrix[..., 2] + + denominator = true_positives + false_negatives + recall = np.where(denominator == 0, 0, true_positives / denominator) + + return recall + + def _detections_content(self, detections: Detections) -> np.ndarray: + """Return boxes, masks or oriented bounding boxes from detections.""" + if self._metric_target == MetricTarget.BOXES: + return detections.xyxy + if self._metric_target == MetricTarget.MASKS: + return ( + detections.mask + if detections.mask is not None + else np.empty((0, 0, 0), dtype=bool) + ) + if self._metric_target == MetricTarget.ORIENTED_BOUNDING_BOXES: + if obb := detections.data.get(ORIENTED_BOX_COORDINATES): + return np.ndarray(obb, dtype=np.float32) + return np.empty((0, 8), dtype=np.float32) + raise ValueError(f"Invalid metric target: {self._metric_target}") + + def _filter_detections_by_size( + self, detections: Detections, size_category: ObjectSizeCategory + ) -> Detections: + """Return a copy of detections with contents filtered by object size.""" + new_detections = deepcopy(detections) + if detections.is_empty() or size_category == ObjectSizeCategory.ANY: + return new_detections + + sizes = get_detection_size_category(new_detections, self._metric_target) + size_mask = sizes == size_category.value + + new_detections.xyxy = new_detections.xyxy[size_mask] + if new_detections.mask is not None: + new_detections.mask = new_detections.mask[size_mask] + if new_detections.class_id is not None: + new_detections.class_id = 
new_detections.class_id[size_mask] + if new_detections.confidence is not None: + new_detections.confidence = new_detections.confidence[size_mask] + if new_detections.tracker_id is not None: + new_detections.tracker_id = new_detections.tracker_id[size_mask] + if new_detections.data is not None: + for key, value in new_detections.data.items(): + new_detections.data[key] = np.array(value)[size_mask] + + return new_detections + + def _filter_predictions_and_targets_by_size( + self, + predictions_list: List[Detections], + targets_list: List[Detections], + size_category: ObjectSizeCategory, + ) -> Tuple[List[Detections], List[Detections]]: + """ + Filter predictions and targets by object size category. + """ + new_predictions_list = [] + new_targets_list = [] + for predictions, targets in zip(predictions_list, targets_list): + new_predictions_list.append( + self._filter_detections_by_size(predictions, size_category) + ) + new_targets_list.append( + self._filter_detections_by_size(targets, size_category) + ) + return new_predictions_list, new_targets_list + + +@dataclass +class RecallResult: + """ + The results of the recall metric calculation. + + Defaults to `0` if no detections or targets were provided. + Provides a custom `__str__` method for pretty printing. + + Attributes: + metric_target (MetricTarget): the type of data used for the metric - + boxes, masks or oriented bounding boxes. + averaging_method (AveragingMethod): the averaging method used to compute the + recall. Determines how the recall is aggregated across classes. + recall_at_50 (float): the recall at IoU threshold of `0.5`. + recall_at_75 (float): the recall at IoU threshold of `0.75`. + recall_scores (np.ndarray): the recall scores at each IoU threshold. + Shape: `(num_iou_thresholds,)` + recall_per_class (np.ndarray): the recall scores per class and IoU threshold. + Shape: `(num_target_classes, num_iou_thresholds)` + iou_thresholds (np.ndarray): the IoU thresholds used in the calculations. 
+ matched_classes (np.ndarray): the class IDs of all matched classes. + Corresponds to the rows of `recall_per_class`. + small_objects (Optional[RecallResult]): the Recall metric results + for small objects. + medium_objects (Optional[RecallResult]): the Recall metric results + for medium objects. + large_objects (Optional[RecallResult]): the Recall metric results + for large objects. + """ + + metric_target: MetricTarget + averaging_method: AveragingMethod + + @property + def recall_at_50(self) -> float: + return self.recall_scores[0] + + @property + def recall_at_75(self) -> float: + return self.recall_scores[5] + + recall_scores: np.ndarray + recall_per_class: np.ndarray + iou_thresholds: np.ndarray + matched_classes: np.ndarray + + small_objects: Optional[RecallResult] + medium_objects: Optional[RecallResult] + large_objects: Optional[RecallResult] + + def __str__(self) -> str: + """ + Format as a pretty string. + + Example: + ```python + print(recall_result) + ``` + """ + out_str = ( + f"{self.__class__.__name__}:\n" + f"Metric target: {self.metric_target}\n" + f"Averaging method: {self.averaging_method}\n" + f"R @ 50: {self.recall_at_50:.4f}\n" + f"R @ 75: {self.recall_at_75:.4f}\n" + f"R @ thresh: {self.recall_scores}\n" + f"IoU thresh: {self.iou_thresholds}\n" + f"Recall per class:\n" + ) + if self.recall_per_class.size == 0: + out_str += " No results\n" + for class_id, recall_of_class in zip( + self.matched_classes, self.recall_per_class + ): + out_str += f" {class_id}: {recall_of_class}\n" + + indent = " " + if self.small_objects is not None: + indented = indent + str(self.small_objects).replace("\n", f"\n{indent}") + out_str += f"\nSmall objects:\n{indented}" + if self.medium_objects is not None: + indented = indent + str(self.medium_objects).replace("\n", f"\n{indent}") + out_str += f"\nMedium objects:\n{indented}" + if self.large_objects is not None: + indented = indent + str(self.large_objects).replace("\n", f"\n{indent}") + out_str += f"\nLarge 
objects:\n{indented}" + + return out_str + + def to_pandas(self) -> "pd.DataFrame": + """ + Convert the result to a pandas DataFrame. + + Returns: + (pd.DataFrame): The result as a DataFrame. + """ + ensure_pandas_installed() + import pandas as pd + + pandas_data = { + "R@50": self.recall_at_50, + "R@75": self.recall_at_75, + } + + if self.small_objects is not None: + small_objects_df = self.small_objects.to_pandas() + for key, value in small_objects_df.items(): + pandas_data[f"small_objects_{key}"] = value + if self.medium_objects is not None: + medium_objects_df = self.medium_objects.to_pandas() + for key, value in medium_objects_df.items(): + pandas_data[f"medium_objects_{key}"] = value + if self.large_objects is not None: + large_objects_df = self.large_objects.to_pandas() + for key, value in large_objects_df.items(): + pandas_data[f"large_objects_{key}"] = value + + return pd.DataFrame(pandas_data, index=[0]) + + def plot(self): + """ + Plot the recall results. + """ + + labels = ["Recall@50", "Recall@75"] + values = [self.recall_at_50, self.recall_at_75] + colors = [LEGACY_COLOR_PALETTE[0]] * 2 + + if self.small_objects is not None: + small_objects = self.small_objects + labels += ["Small: R@50", "Small: R@75"] + values += [small_objects.recall_at_50, small_objects.recall_at_75] + colors += [LEGACY_COLOR_PALETTE[3]] * 2 + + if self.medium_objects is not None: + medium_objects = self.medium_objects + labels += ["Medium: R@50", "Medium: R@75"] + values += [medium_objects.recall_at_50, medium_objects.recall_at_75] + colors += [LEGACY_COLOR_PALETTE[2]] * 2 + + if self.large_objects is not None: + large_objects = self.large_objects + labels += ["Large: R@50", "Large: R@75"] + values += [large_objects.recall_at_50, large_objects.recall_at_75] + colors += [LEGACY_COLOR_PALETTE[4]] * 2 + + plt.rcParams["font.family"] = "monospace" + + _, ax = plt.subplots(figsize=(10, 6)) + ax.set_ylim(0, 1) + ax.set_ylabel("Value", fontweight="bold") + title = ( + f"Recall, by 
Object Size" + f"\n(target: {self.metric_target.value}," + f" averaging: {self.averaging_method.value})" + ) + ax.set_title(title, fontweight="bold") + + x_positions = range(len(labels)) + bars = ax.bar(x_positions, values, color=colors, align="center") + + ax.set_xticks(x_positions) + ax.set_xticklabels(labels, rotation=45, ha="right") + + for bar in bars: + y_value = bar.get_height() + ax.text( + bar.get_x() + bar.get_width() / 2, + y_value + 0.02, + f"{y_value:.2f}", + ha="center", + va="bottom", + ) + + plt.rcParams["font.family"] = "sans-serif" + + plt.tight_layout() + plt.show() From fbd96d10c4c6358ddba4bf2bd4d34f498ebfde2a Mon Sep 17 00:00:00 2001 From: LinasKo Date: Fri, 18 Oct 2024 15:15:40 +0300 Subject: [PATCH 073/161] Add new and missing docstrings, examples to metrics, new Common section --- docs/metrics/common_values.md | 20 +++++++ docs/metrics/precision.md | 2 +- docs/metrics/recall.md | 2 +- mkdocs.yml | 1 + supervision/metrics/core.py | 26 +++++---- supervision/metrics/f1_score.py | 55 +++++++++++++++++- supervision/metrics/mean_average_precision.py | 44 ++++++++------ supervision/metrics/precision.py | 58 ++++++++++++++++++- supervision/metrics/recall.py | 58 ++++++++++++++++++- 9 files changed, 231 insertions(+), 35 deletions(-) create mode 100644 docs/metrics/common_values.md diff --git a/docs/metrics/common_values.md b/docs/metrics/common_values.md new file mode 100644 index 000000000..b7600f3f1 --- /dev/null +++ b/docs/metrics/common_values.md @@ -0,0 +1,20 @@ +--- +comments: true +status: new +--- + +# Common Values + +This page contains supplementary values, types and enums that metrics use. + + + +:::supervision.metrics.core.MetricTarget + + + +:::supervision.metrics.core.AveragingMethod diff --git a/docs/metrics/precision.md b/docs/metrics/precision.md index ca318f8fb..c704452ee 100644 --- a/docs/metrics/precision.md +++ b/docs/metrics/precision.md @@ -3,7 +3,7 @@ comments: true status: new --- -# F1 Score +# Precision

Precision

diff --git a/docs/metrics/recall.md b/docs/metrics/recall.md index 5baa4d3ee..78dde8334 100644 --- a/docs/metrics/recall.md +++ b/docs/metrics/recall.md @@ -3,7 +3,7 @@ comments: true status: new --- -# F1 Score +# Recall

Recall

diff --git a/mkdocs.yml b/mkdocs.yml index a3c9c1caa..b30dbcfcc 100644 --- a/mkdocs.yml +++ b/mkdocs.yml @@ -69,6 +69,7 @@ nav: - Precision: metrics/precision.md - Recall: metrics/recall.md - F1 Score: metrics/f1_score.md + - Common Values: metrics/common_values.md - Legacy Metrics: detection/metrics.md - Utils: - Video: utils/video.md diff --git a/supervision/metrics/core.py b/supervision/metrics/core.py index d1818441e..def5999a0 100644 --- a/supervision/metrics/core.py +++ b/supervision/metrics/core.py @@ -37,9 +37,10 @@ class MetricTarget(Enum): """ Specifies what type of detection is used to compute the metric. - * BOXES: xyxy bounding boxes - * MASKS: Binary masks - * ORIENTED_BOUNDING_BOXES: Oriented bounding boxes (OBB) + Attributes: + BOXES: xyxy bounding boxes + MASKS: Binary masks + ORIENTED_BOUNDING_BOXES: Oriented bounding boxes (OBB) """ BOXES = "boxes" @@ -54,15 +55,16 @@ class AveragingMethod(Enum): Suppose, before returning the final result, a metric is computed for each class. How do you combine those to get the final number? - * MACRO: Calculate the metric for each class and average the results. The simplest - averaging method, but it does not take class imbalance into account. - * MICRO: Calculate the metric globally by counting the total true positives, false - positives, and false negatives. Micro averaging is useful when you want to give - more importance to classes with more samples. It's also more appropriate if you - have an imbalance in the number of instances per class. - * WEIGHTED: Calculate the metric for each class and average the results, weighted by - the number of true instances of each class. Use weighted averaging if you want - to take class imbalance into account. + Attributes: + MACRO: Calculate the metric for each class and average the results. The simplest + averaging method, but it does not take class imbalance into account. 
+ MICRO: Calculate the metric globally by counting the total true positives, false + positives, and false negatives. Micro averaging is useful when you want to + give more importance to classes with more samples. It's also more + appropriate if you have an imbalance in the number of instances per class. + WEIGHTED: Calculate the metric for each class and average the results, weighted + by the number of true instances of each class. Use weighted averaging if + you want to take class imbalance into account. """ MACRO = "macro" diff --git a/supervision/metrics/f1_score.py b/supervision/metrics/f1_score.py index 2ca5bca5c..ba4fcd59a 100644 --- a/supervision/metrics/f1_score.py +++ b/supervision/metrics/f1_score.py @@ -23,11 +23,45 @@ class F1Score(Metric): + """ + F1 Score is a metric used to evaluate object detection models. It is the harmonic + mean of precision and recall, calculated at different IoU thresholds. + + In simple terms, F1 Score is a measure of a model's balance between precision and + recall (accuracy and completeness), calculated as: + + `F1 = 2 * (precision * recall) / (precision + recall)` + + Example: + ```python + import supervision as sv + from supervision.metrics import F1Score + + predictions = sv.Detections(...) + targets = sv.Detections(...) + + f1_metric = F1Score() + f1_result = f1_metric.update(predictions, targets).compute() + + print(f1_result) + print(f1_result.f1_50) + print(f1_result.small_objects.f1_50) + ``` + """ + def __init__( self, metric_target: MetricTarget = MetricTarget.BOXES, averaging_method: AveragingMethod = AveragingMethod.WEIGHTED, ): + """ + Initialize the F1Score metric. + + Args: + metric_target (MetricTarget): The type of detection data to use. + averaging_method (AveragingMethod): The averaging method used to compute the + F1 scores. Determines how the F1 scores are aggregated across classes. 
+ """ self._metric_target = metric_target if self._metric_target == MetricTarget.ORIENTED_BOUNDING_BOXES: raise NotImplementedError( @@ -40,6 +74,9 @@ def __init__( self._targets_list: List[Detections] = [] def reset(self) -> None: + """ + Reset the metric to its initial state, clearing all stored data. + """ self._predictions_list = [] self._targets_list = [] @@ -48,6 +85,16 @@ def update( predictions: Union[Detections, List[Detections]], targets: Union[Detections, List[Detections]], ) -> F1Score: + """ + Add new predictions and targets to the metric, but do not compute the result. + + Args: + predictions (Union[Detections, List[Detections]]): The predicted detections. + targets (Union[Detections, List[Detections]]): The target detections. + + Returns: + (F1Score): The updated metric instance. + """ if not isinstance(predictions, list): predictions = [predictions] if not isinstance(targets, list): @@ -65,6 +112,13 @@ def update( return self def compute(self) -> F1ScoreResult: + """ + Calculate the F1 score metric based on the stored predictions and ground-truth + data, at different IoU thresholds. + + Returns: + (F1ScoreResult): The F1 score metric result. + """ result = self._compute(self._predictions_list, self._targets_list) small_predictions, small_targets = self._filter_predictions_and_targets_by_size( @@ -373,7 +427,6 @@ class F1ScoreResult: The results of the F1 score metric calculation. Defaults to `0` if no detections or targets were provided. - Provides a custom `__str__` method for pretty printing. 
Attributes: metric_target (MetricTarget): the type of data used for the metric - diff --git a/supervision/metrics/mean_average_precision.py b/supervision/metrics/mean_average_precision.py index dbd60b2e7..8cec50c85 100644 --- a/supervision/metrics/mean_average_precision.py +++ b/supervision/metrics/mean_average_precision.py @@ -23,6 +23,27 @@ class MeanAveragePrecision(Metric): + """ + Mean Average Precision (mAP) is a metric used to evaluate object detection models. + It is the average of the precision-recall curves at different IoU thresholds. + + Example: + ```python + import supervision as sv + from supervision.metrics import MeanAveragePrecision + + predictions = sv.Detections(...) + targets = sv.Detections(...) + + map_metric = MeanAveragePrecision() + map_result = map_metric.update(predictions, targets).compute() + + print(map_result) + print(map_result.map50_95) + map_result.plot() + ``` + """ + def __init__( self, metric_target: MetricTarget = MetricTarget.BOXES, @@ -47,6 +68,9 @@ def __init__( self._targets_list: List[Detections] = [] def reset(self) -> None: + """ + Reset the metric to its initial state, clearing all stored data. + """ self._predictions_list = [] self._targets_list = [] @@ -95,26 +119,10 @@ def compute( ) -> MeanAveragePrecisionResult: """ Calculate Mean Average Precision based on predicted and ground-truth - detections at different thresholds. + detections at different thresholds. Returns: - (MeanAveragePrecisionResult): New instance of MeanAveragePrecision. - - Example: - ```python - import supervision as sv - from supervision.metrics import MeanAveragePrecision - - predictions = sv.Detections(...) - targets = sv.Detections(...) - - map_metric = MeanAveragePrecision() - map_result = map_metric.update(predictions, targets).compute() - - print(map_result) - print(map_result.map50_95) - map_result.plot() - ``` + (MeanAveragePrecisionResult): The Mean Average Precision result. 
""" result = self._compute(self._predictions_list, self._targets_list) diff --git a/supervision/metrics/precision.py b/supervision/metrics/precision.py index ba441831a..d915e1f49 100644 --- a/supervision/metrics/precision.py +++ b/supervision/metrics/precision.py @@ -23,11 +23,48 @@ class Precision(Metric): + """ + Precision is a metric used to evaluate object detection models. It is the ratio of + true positive detections to the total number of predicted detections. We calculate + it at different IoU thresholds. + + In simple terms, Precision is a measure of a model's accuracy, calculated as: + + `Precision = TP / (TP + FP)` + + Here, `TP` is the number of true positives (correct detections), and `FP` is the + number of false positive detections (detected, but incorrectly). + + Example: + ```python + import supervision as sv + from supervision.metrics import Precision + + predictions = sv.Detections(...) + targets = sv.Detections(...) + + precision_metric = Precision() + precision_result = precision_metric.update(predictions, targets).compute() + + print(precision_result) + print(precision_result.precision_at_50) + print(precision_result.small_objects.precision_at_50) + ``` + """ + def __init__( self, metric_target: MetricTarget = MetricTarget.BOXES, averaging_method: AveragingMethod = AveragingMethod.WEIGHTED, ): + """ + Initialize the Precision metric. + + Args: + metric_target (MetricTarget): The type of detection data to use. + averaging_method (AveragingMethod): The averaging method used to compute the + precision. Determines how the precision is aggregated across classes. + """ self._metric_target = metric_target if self._metric_target == MetricTarget.ORIENTED_BOUNDING_BOXES: raise NotImplementedError( @@ -40,6 +77,9 @@ def __init__( self._targets_list: List[Detections] = [] def reset(self) -> None: + """ + Reset the metric to its initial state, clearing all stored data. 
+ """ self._predictions_list = [] self._targets_list = [] @@ -48,6 +88,16 @@ def update( predictions: Union[Detections, List[Detections]], targets: Union[Detections, List[Detections]], ) -> Precision: + """ + Add new predictions and targets to the metric, but do not compute the result. + + Args: + predictions (Union[Detections, List[Detections]]): The predicted detections. + targets (Union[Detections, List[Detections]]): The target detections. + + Returns: + (Precision): The updated metric instance. + """ if not isinstance(predictions, list): predictions = [predictions] if not isinstance(targets, list): @@ -65,6 +115,13 @@ def update( return self def compute(self) -> PrecisionResult: + """ + Calculate the precision metric based on the stored predictions and ground-truth + data, at different IoU thresholds. + + Returns: + (PrecisionResult): The precision metric result. + """ result = self._compute(self._predictions_list, self._targets_list) small_predictions, small_targets = self._filter_predictions_and_targets_by_size( @@ -373,7 +430,6 @@ class PrecisionResult: The results of the precision metric calculation. Defaults to `0` if no detections or targets were provided. - Provides a custom `__str__` method for pretty printing. Attributes: metric_target (MetricTarget): the type of data used for the metric - diff --git a/supervision/metrics/recall.py b/supervision/metrics/recall.py index 7c90859cc..9eae24f8e 100644 --- a/supervision/metrics/recall.py +++ b/supervision/metrics/recall.py @@ -23,11 +23,48 @@ class Recall(Metric): + """ + Recall is a metric used to evaluate object detection models. It is the ratio of + true positive detections to the total number of ground truth instances. We calculate + it at different IoU thresholds. 
+ + In simple terms, Recall is a measure of a model's completeness, calculated as: + + `Recall = TP / (TP + FN)` + + Here, `TP` is the number of true positives (correct detections), and `FN` is the + number of false negatives (missed detections). + + Example: + ```python + import supervision as sv + from supervision.metrics import Recall + + predictions = sv.Detections(...) + targets = sv.Detections(...) + + recall_metric = Recall() + recall_result = recall_metric.update(predictions, targets).compute() + + print(recall_result) + print(recall_result.recall_at_50) + print(recall_result.small_objects.recall_at_50) + ``` + """ + def __init__( self, metric_target: MetricTarget = MetricTarget.BOXES, averaging_method: AveragingMethod = AveragingMethod.WEIGHTED, ): + """ + Initialize the Recall metric. + + Args: + metric_target (MetricTarget): The type of detection data to use. + averaging_method (AveragingMethod): The averaging method used to compute the + recall. Determines how the recall is aggregated across classes. + """ self._metric_target = metric_target if self._metric_target == MetricTarget.ORIENTED_BOUNDING_BOXES: raise NotImplementedError( @@ -40,6 +77,9 @@ def __init__( self._targets_list: List[Detections] = [] def reset(self) -> None: + """ + Reset the metric to its initial state, clearing all stored data. + """ self._predictions_list = [] self._targets_list = [] @@ -48,6 +88,16 @@ def update( predictions: Union[Detections, List[Detections]], targets: Union[Detections, List[Detections]], ) -> Recall: + """ + Add new predictions and targets to the metric, but do not compute the result. + + Args: + predictions (Union[Detections, List[Detections]]): The predicted detections. + targets (Union[Detections, List[Detections]]): The target detections. + + Returns: + (Recall): The updated metric instance. 
+ """ if not isinstance(predictions, list): predictions = [predictions] if not isinstance(targets, list): @@ -65,6 +115,13 @@ def update( return self def compute(self) -> RecallResult: + """ + Calculate the precision metric based on the stored predictions and ground-truth + data, at different IoU thresholds. + + Returns: + (RecallResult): The precision metric result. + """ result = self._compute(self._predictions_list, self._targets_list) small_predictions, small_targets = self._filter_predictions_and_targets_by_size( @@ -371,7 +428,6 @@ class RecallResult: The results of the recall metric calculation. Defaults to `0` if no detections or targets were provided. - Provides a custom `__str__` method for pretty printing. Attributes: metric_target (MetricTarget): the type of data used for the metric - From 3e8a88a8d4ba3d31850788af839e469410a86961 Mon Sep 17 00:00:00 2001 From: LinasKo Date: Fri, 18 Oct 2024 15:28:45 +0300 Subject: [PATCH 074/161] Add Precision and Recall to metrics __init__ --- supervision/metrics/__init__.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/supervision/metrics/__init__.py b/supervision/metrics/__init__.py index 8ae33e639..90fc17b47 100644 --- a/supervision/metrics/__init__.py +++ b/supervision/metrics/__init__.py @@ -8,6 +8,8 @@ MeanAveragePrecision, MeanAveragePrecisionResult, ) +from supervision.metrics.precision import Precision, PrecisionResult +from supervision.metrics.recall import Recall, RecallResult from supervision.metrics.utils.object_size import ( ObjectSizeCategory, get_detection_size_category, From 74ae1eec87aa27d7109900169f3e7eeea3f797c0 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 21 Oct 2024 00:48:47 +0000 Subject: [PATCH 075/161] :arrow_up: Bump mypy from 1.12.0 to 1.12.1 Bumps [mypy](https://github.com/python/mypy) from 1.12.0 to 1.12.1. 
- [Changelog](https://github.com/python/mypy/blob/master/CHANGELOG.md) - [Commits](https://github.com/python/mypy/compare/v1.12.0...v1.12.1) --- updated-dependencies: - dependency-name: mypy dependency-type: direct:development update-type: version-update:semver-patch ... Signed-off-by: dependabot[bot] --- poetry.lock | 66 ++++++++++++++++++++++++++--------------------------- 1 file changed, 33 insertions(+), 33 deletions(-) diff --git a/poetry.lock b/poetry.lock index e1767ce1e..f43db4828 100644 --- a/poetry.lock +++ b/poetry.lock @@ -2509,43 +2509,43 @@ files = [ [[package]] name = "mypy" -version = "1.12.0" +version = "1.12.1" description = "Optional static typing for Python" optional = false python-versions = ">=3.8" files = [ - {file = "mypy-1.12.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:4397081e620dc4dc18e2f124d5e1d2c288194c2c08df6bdb1db31c38cd1fe1ed"}, - {file = "mypy-1.12.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:684a9c508a283f324804fea3f0effeb7858eb03f85c4402a967d187f64562469"}, - {file = "mypy-1.12.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:6cabe4cda2fa5eca7ac94854c6c37039324baaa428ecbf4de4567279e9810f9e"}, - {file = "mypy-1.12.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:060a07b10e999ac9e7fa249ce2bdcfa9183ca2b70756f3bce9df7a92f78a3c0a"}, - {file = "mypy-1.12.0-cp310-cp310-win_amd64.whl", hash = "sha256:0eff042d7257f39ba4ca06641d110ca7d2ad98c9c1fb52200fe6b1c865d360ff"}, - {file = "mypy-1.12.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:4b86de37a0da945f6d48cf110d5206c5ed514b1ca2614d7ad652d4bf099c7de7"}, - {file = "mypy-1.12.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:20c7c5ce0c1be0b0aea628374e6cf68b420bcc772d85c3c974f675b88e3e6e57"}, - {file = "mypy-1.12.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:a64ee25f05fc2d3d8474985c58042b6759100a475f8237da1f4faf7fcd7e6309"}, - {file = 
"mypy-1.12.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:faca7ab947c9f457a08dcb8d9a8664fd438080e002b0fa3e41b0535335edcf7f"}, - {file = "mypy-1.12.0-cp311-cp311-win_amd64.whl", hash = "sha256:5bc81701d52cc8767005fdd2a08c19980de9ec61a25dbd2a937dfb1338a826f9"}, - {file = "mypy-1.12.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:8462655b6694feb1c99e433ea905d46c478041a8b8f0c33f1dab00ae881b2164"}, - {file = "mypy-1.12.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:923ea66d282d8af9e0f9c21ffc6653643abb95b658c3a8a32dca1eff09c06475"}, - {file = "mypy-1.12.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:1ebf9e796521f99d61864ed89d1fb2926d9ab6a5fab421e457cd9c7e4dd65aa9"}, - {file = "mypy-1.12.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:e478601cc3e3fa9d6734d255a59c7a2e5c2934da4378f3dd1e3411ea8a248642"}, - {file = "mypy-1.12.0-cp312-cp312-win_amd64.whl", hash = "sha256:c72861b7139a4f738344faa0e150834467521a3fba42dc98264e5aa9507dd601"}, - {file = "mypy-1.12.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:52b9e1492e47e1790360a43755fa04101a7ac72287b1a53ce817f35899ba0521"}, - {file = "mypy-1.12.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:48d3e37dd7d9403e38fa86c46191de72705166d40b8c9f91a3de77350daa0893"}, - {file = "mypy-1.12.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:2f106db5ccb60681b622ac768455743ee0e6a857724d648c9629a9bd2ac3f721"}, - {file = "mypy-1.12.0-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:233e11b3f73ee1f10efada2e6da0f555b2f3a5316e9d8a4a1224acc10e7181d3"}, - {file = "mypy-1.12.0-cp313-cp313-win_amd64.whl", hash = "sha256:4ae8959c21abcf9d73aa6c74a313c45c0b5a188752bf37dace564e29f06e9c1b"}, - {file = "mypy-1.12.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:eafc1b7319b40ddabdc3db8d7d48e76cfc65bbeeafaa525a4e0fa6b76175467f"}, - {file = "mypy-1.12.0-cp38-cp38-macosx_11_0_arm64.whl", hash = 
"sha256:9b9ce1ad8daeb049c0b55fdb753d7414260bad8952645367e70ac91aec90e07e"}, - {file = "mypy-1.12.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:bfe012b50e1491d439172c43ccb50db66d23fab714d500b57ed52526a1020bb7"}, - {file = "mypy-1.12.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:2c40658d4fa1ab27cb53d9e2f1066345596af2f8fe4827defc398a09c7c9519b"}, - {file = "mypy-1.12.0-cp38-cp38-win_amd64.whl", hash = "sha256:dee78a8b9746c30c1e617ccb1307b351ded57f0de0d287ca6276378d770006c0"}, - {file = "mypy-1.12.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:6b5df6c8a8224f6b86746bda716bbe4dbe0ce89fd67b1fa4661e11bfe38e8ec8"}, - {file = "mypy-1.12.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:5feee5c74eb9749e91b77f60b30771563327329e29218d95bedbe1257e2fe4b0"}, - {file = "mypy-1.12.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:77278e8c6ffe2abfba6db4125de55f1024de9a323be13d20e4f73b8ed3402bd1"}, - {file = "mypy-1.12.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:dcfb754dea911039ac12434d1950d69a2f05acd4d56f7935ed402be09fad145e"}, - {file = "mypy-1.12.0-cp39-cp39-win_amd64.whl", hash = "sha256:06de0498798527451ffb60f68db0d368bd2bae2bbfb5237eae616d4330cc87aa"}, - {file = "mypy-1.12.0-py3-none-any.whl", hash = "sha256:fd313226af375d52e1e36c383f39bf3836e1f192801116b31b090dfcd3ec5266"}, - {file = "mypy-1.12.0.tar.gz", hash = "sha256:65a22d87e757ccd95cbbf6f7e181e6caa87128255eb2b6be901bb71b26d8a99d"}, + {file = "mypy-1.12.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:3d7d4371829184e22fda4015278fbfdef0327a4b955a483012bd2d423a788801"}, + {file = "mypy-1.12.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:f59f1dfbf497d473201356966e353ef09d4daec48caeacc0254db8ef633a28a5"}, + {file = "mypy-1.12.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:b947097fae68004b8328c55161ac9db7d3566abfef72d9d41b47a021c2fba6b1"}, + 
{file = "mypy-1.12.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:96af62050971c5241afb4701c15189ea9507db89ad07794a4ee7b4e092dc0627"}, + {file = "mypy-1.12.1-cp310-cp310-win_amd64.whl", hash = "sha256:d90da248f4c2dba6c44ddcfea94bb361e491962f05f41990ff24dbd09969ce20"}, + {file = "mypy-1.12.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:1230048fec1380faf240be6385e709c8570604d2d27ec6ca7e573e3bc09c3735"}, + {file = "mypy-1.12.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:02dcfe270c6ea13338210908f8cadc8d31af0f04cee8ca996438fe6a97b4ec66"}, + {file = "mypy-1.12.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:a5a437c9102a6a252d9e3a63edc191a3aed5f2fcb786d614722ee3f4472e33f6"}, + {file = "mypy-1.12.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:186e0c8346efc027ee1f9acf5ca734425fc4f7dc2b60144f0fbe27cc19dc7931"}, + {file = "mypy-1.12.1-cp311-cp311-win_amd64.whl", hash = "sha256:673ba1140a478b50e6d265c03391702fa11a5c5aff3f54d69a62a48da32cb811"}, + {file = "mypy-1.12.1-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:9fb83a7be97c498176fb7486cafbb81decccaef1ac339d837c377b0ce3743a7f"}, + {file = "mypy-1.12.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:389e307e333879c571029d5b93932cf838b811d3f5395ed1ad05086b52148fb0"}, + {file = "mypy-1.12.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:94b2048a95a21f7a9ebc9fbd075a4fcd310410d078aa0228dbbad7f71335e042"}, + {file = "mypy-1.12.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:4ee5932370ccf7ebf83f79d1c157a5929d7ea36313027b0d70a488493dc1b179"}, + {file = "mypy-1.12.1-cp312-cp312-win_amd64.whl", hash = "sha256:19bf51f87a295e7ab2894f1d8167622b063492d754e69c3c2fed6563268cb42a"}, + {file = "mypy-1.12.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:d34167d43613ffb1d6c6cdc0cc043bb106cac0aa5d6a4171f77ab92a3c758bcc"}, + {file = "mypy-1.12.1-cp313-cp313-macosx_11_0_arm64.whl", hash = 
"sha256:427878aa54f2e2c5d8db31fa9010c599ed9f994b3b49e64ae9cd9990c40bd635"}, + {file = "mypy-1.12.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:5fcde63ea2c9f69d6be859a1e6dd35955e87fa81de95bc240143cf00de1f7f81"}, + {file = "mypy-1.12.1-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:d54d840f6c052929f4a3d2aab2066af0f45a020b085fe0e40d4583db52aab4e4"}, + {file = "mypy-1.12.1-cp313-cp313-win_amd64.whl", hash = "sha256:20db6eb1ca3d1de8ece00033b12f793f1ea9da767334b7e8c626a4872090cf02"}, + {file = "mypy-1.12.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:b16fe09f9c741d85a2e3b14a5257a27a4f4886c171d562bc5a5e90d8591906b8"}, + {file = "mypy-1.12.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:0dcc1e843d58f444fce19da4cce5bd35c282d4bde232acdeca8279523087088a"}, + {file = "mypy-1.12.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:e10ba7de5c616e44ad21005fa13450cd0de7caaa303a626147d45307492e4f2d"}, + {file = "mypy-1.12.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:0e6fe449223fa59fbee351db32283838a8fee8059e0028e9e6494a03802b4004"}, + {file = "mypy-1.12.1-cp38-cp38-win_amd64.whl", hash = "sha256:dc6e2a2195a290a7fd5bac3e60b586d77fc88e986eba7feced8b778c373f9afe"}, + {file = "mypy-1.12.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:de5b2a8988b4e1269a98beaf0e7cc71b510d050dce80c343b53b4955fff45f19"}, + {file = "mypy-1.12.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:843826966f1d65925e8b50d2b483065c51fc16dc5d72647e0236aae51dc8d77e"}, + {file = "mypy-1.12.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:9fe20f89da41a95e14c34b1ddb09c80262edcc295ad891f22cc4b60013e8f78d"}, + {file = "mypy-1.12.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:8135ffec02121a75f75dc97c81af7c14aa4ae0dda277132cfcd6abcd21551bfd"}, + {file = "mypy-1.12.1-cp39-cp39-win_amd64.whl", hash = 
"sha256:a7b76fa83260824300cc4834a3ab93180db19876bce59af921467fd03e692810"}, + {file = "mypy-1.12.1-py3-none-any.whl", hash = "sha256:ce561a09e3bb9863ab77edf29ae3a50e65685ad74bba1431278185b7e5d5486e"}, + {file = "mypy-1.12.1.tar.gz", hash = "sha256:f5b3936f7a6d0e8280c9bdef94c7ce4847f5cdfc258fbb2c29a8c1711e8bb96d"}, ] [package.dependencies] From 2efe3030cb498ee277ab7bb072fe41e45295774d Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 21 Oct 2024 00:52:04 +0000 Subject: [PATCH 076/161] :arrow_up: Bump mkdocs-material from 9.5.41 to 9.5.42 Bumps [mkdocs-material](https://github.com/squidfunk/mkdocs-material) from 9.5.41 to 9.5.42. - [Release notes](https://github.com/squidfunk/mkdocs-material/releases) - [Changelog](https://github.com/squidfunk/mkdocs-material/blob/master/CHANGELOG) - [Commits](https://github.com/squidfunk/mkdocs-material/compare/9.5.41...9.5.42) --- updated-dependencies: - dependency-name: mkdocs-material dependency-type: direct:development update-type: version-update:semver-patch ... 
Signed-off-by: dependabot[bot] --- poetry.lock | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/poetry.lock b/poetry.lock index e1767ce1e..548de46a9 100644 --- a/poetry.lock +++ b/poetry.lock @@ -2412,13 +2412,13 @@ pygments = ">2.12.0" [[package]] name = "mkdocs-material" -version = "9.5.41" +version = "9.5.42" description = "Documentation that simply works" optional = false python-versions = ">=3.8" files = [ - {file = "mkdocs_material-9.5.41-py3-none-any.whl", hash = "sha256:990bc138c33342b5b73e7545915ebc0136e501bfbd8e365735144f5120891d83"}, - {file = "mkdocs_material-9.5.41.tar.gz", hash = "sha256:30fa5d459b4b8130848ecd8e1c908878345d9d8268f7ddbc31eebe88d462d97b"}, + {file = "mkdocs_material-9.5.42-py3-none-any.whl", hash = "sha256:452a7c5d21284b373f36b981a2cbebfff59263feebeede1bc28652e9c5bbe316"}, + {file = "mkdocs_material-9.5.42.tar.gz", hash = "sha256:92779b5e9b5934540c574c11647131d217dc540dce72b05feeda088c8eb1b8f2"}, ] [package.dependencies] From 40ef90c76e3b91aef47253ccfc5e86199b510119 Mon Sep 17 00:00:00 2001 From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Mon, 21 Oct 2024 17:55:36 +0000 Subject: [PATCH 077/161] =?UTF-8?q?chore(pre=5Fcommit):=20=E2=AC=86=20pre?= =?UTF-8?q?=5Fcommit=20autoupdate?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit updates: - [github.com/astral-sh/ruff-pre-commit: v0.6.9 → v0.7.0](https://github.com/astral-sh/ruff-pre-commit/compare/v0.6.9...v0.7.0) --- .pre-commit-config.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 2ca4357cb..8ddaa08e9 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -32,7 +32,7 @@ repos: additional_dependencies: ["bandit[toml]"] - repo: https://github.com/astral-sh/ruff-pre-commit - rev: v0.6.9 + rev: v0.7.0 hooks: - id: ruff args: [--fix, --exit-non-zero-on-fix] From 
abd149fa1a6f65cd883e5328650a7b9ef6f77eb1 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Wed, 23 Oct 2024 00:36:13 +0000 Subject: [PATCH 078/161] :arrow_up: Bump tox from 4.23.0 to 4.23.2 Bumps [tox](https://github.com/tox-dev/tox) from 4.23.0 to 4.23.2. - [Release notes](https://github.com/tox-dev/tox/releases) - [Changelog](https://github.com/tox-dev/tox/blob/main/docs/changelog.rst) - [Commits](https://github.com/tox-dev/tox/compare/4.23.0...4.23.2) --- updated-dependencies: - dependency-name: tox dependency-type: direct:development update-type: version-update:semver-patch ... Signed-off-by: dependabot[bot] --- poetry.lock | 9 ++++++--- 1 file changed, 6 insertions(+), 3 deletions(-) diff --git a/poetry.lock b/poetry.lock index 088b34e51..8d73d1bb6 100644 --- a/poetry.lock +++ b/poetry.lock @@ -4377,13 +4377,13 @@ files = [ [[package]] name = "tox" -version = "4.23.0" +version = "4.23.2" description = "tox is a generic virtualenv management and test command line tool" optional = false python-versions = ">=3.8" files = [ - {file = "tox-4.23.0-py3-none-any.whl", hash = "sha256:46da40afb660e46238c251280eb910bdaf00b390c7557c8e4bb611f422e9db12"}, - {file = "tox-4.23.0.tar.gz", hash = "sha256:a6bd7d54231d755348d3c3a7b450b5bf6563833716d1299a1619587a1b77a3bf"}, + {file = "tox-4.23.2-py3-none-any.whl", hash = "sha256:452bc32bb031f2282881a2118923176445bac783ab97c874b8770ab4c3b76c38"}, + {file = "tox-4.23.2.tar.gz", hash = "sha256:86075e00e555df6e82e74cfc333917f91ecb47ffbc868dcafbd2672e332f4a2c"}, ] [package.dependencies] @@ -4399,6 +4399,9 @@ tomli = {version = ">=2.0.1", markers = "python_version < \"3.11\""} typing-extensions = {version = ">=4.12.2", markers = "python_version < \"3.11\""} virtualenv = ">=20.26.6" +[package.extras] +test = ["devpi-process (>=1.0.2)", "pytest (>=8.3.3)", "pytest-mock (>=3.14)"] + [[package]] name = "tqdm" version = "4.66.5" From 6c7ee3722536f01c8c15b02e81c64e1459881467 Mon Sep 17 
00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Wed, 23 Oct 2024 00:37:04 +0000 Subject: [PATCH 079/161] :arrow_up: Bump mypy from 1.12.1 to 1.13.0 Bumps [mypy](https://github.com/python/mypy) from 1.12.1 to 1.13.0. - [Changelog](https://github.com/python/mypy/blob/master/CHANGELOG.md) - [Commits](https://github.com/python/mypy/compare/v1.12.1...v1.13.0) --- updated-dependencies: - dependency-name: mypy dependency-type: direct:development update-type: version-update:semver-minor ... Signed-off-by: dependabot[bot] --- poetry.lock | 67 +++++++++++++++++++++++++++-------------------------- 1 file changed, 34 insertions(+), 33 deletions(-) diff --git a/poetry.lock b/poetry.lock index 088b34e51..29649128a 100644 --- a/poetry.lock +++ b/poetry.lock @@ -2509,43 +2509,43 @@ files = [ [[package]] name = "mypy" -version = "1.12.1" +version = "1.13.0" description = "Optional static typing for Python" optional = false python-versions = ">=3.8" files = [ - {file = "mypy-1.12.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:3d7d4371829184e22fda4015278fbfdef0327a4b955a483012bd2d423a788801"}, - {file = "mypy-1.12.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:f59f1dfbf497d473201356966e353ef09d4daec48caeacc0254db8ef633a28a5"}, - {file = "mypy-1.12.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:b947097fae68004b8328c55161ac9db7d3566abfef72d9d41b47a021c2fba6b1"}, - {file = "mypy-1.12.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:96af62050971c5241afb4701c15189ea9507db89ad07794a4ee7b4e092dc0627"}, - {file = "mypy-1.12.1-cp310-cp310-win_amd64.whl", hash = "sha256:d90da248f4c2dba6c44ddcfea94bb361e491962f05f41990ff24dbd09969ce20"}, - {file = "mypy-1.12.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:1230048fec1380faf240be6385e709c8570604d2d27ec6ca7e573e3bc09c3735"}, - {file = "mypy-1.12.1-cp311-cp311-macosx_11_0_arm64.whl", hash = 
"sha256:02dcfe270c6ea13338210908f8cadc8d31af0f04cee8ca996438fe6a97b4ec66"}, - {file = "mypy-1.12.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:a5a437c9102a6a252d9e3a63edc191a3aed5f2fcb786d614722ee3f4472e33f6"}, - {file = "mypy-1.12.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:186e0c8346efc027ee1f9acf5ca734425fc4f7dc2b60144f0fbe27cc19dc7931"}, - {file = "mypy-1.12.1-cp311-cp311-win_amd64.whl", hash = "sha256:673ba1140a478b50e6d265c03391702fa11a5c5aff3f54d69a62a48da32cb811"}, - {file = "mypy-1.12.1-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:9fb83a7be97c498176fb7486cafbb81decccaef1ac339d837c377b0ce3743a7f"}, - {file = "mypy-1.12.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:389e307e333879c571029d5b93932cf838b811d3f5395ed1ad05086b52148fb0"}, - {file = "mypy-1.12.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:94b2048a95a21f7a9ebc9fbd075a4fcd310410d078aa0228dbbad7f71335e042"}, - {file = "mypy-1.12.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:4ee5932370ccf7ebf83f79d1c157a5929d7ea36313027b0d70a488493dc1b179"}, - {file = "mypy-1.12.1-cp312-cp312-win_amd64.whl", hash = "sha256:19bf51f87a295e7ab2894f1d8167622b063492d754e69c3c2fed6563268cb42a"}, - {file = "mypy-1.12.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:d34167d43613ffb1d6c6cdc0cc043bb106cac0aa5d6a4171f77ab92a3c758bcc"}, - {file = "mypy-1.12.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:427878aa54f2e2c5d8db31fa9010c599ed9f994b3b49e64ae9cd9990c40bd635"}, - {file = "mypy-1.12.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:5fcde63ea2c9f69d6be859a1e6dd35955e87fa81de95bc240143cf00de1f7f81"}, - {file = "mypy-1.12.1-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:d54d840f6c052929f4a3d2aab2066af0f45a020b085fe0e40d4583db52aab4e4"}, - {file = "mypy-1.12.1-cp313-cp313-win_amd64.whl", hash = 
"sha256:20db6eb1ca3d1de8ece00033b12f793f1ea9da767334b7e8c626a4872090cf02"}, - {file = "mypy-1.12.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:b16fe09f9c741d85a2e3b14a5257a27a4f4886c171d562bc5a5e90d8591906b8"}, - {file = "mypy-1.12.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:0dcc1e843d58f444fce19da4cce5bd35c282d4bde232acdeca8279523087088a"}, - {file = "mypy-1.12.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:e10ba7de5c616e44ad21005fa13450cd0de7caaa303a626147d45307492e4f2d"}, - {file = "mypy-1.12.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:0e6fe449223fa59fbee351db32283838a8fee8059e0028e9e6494a03802b4004"}, - {file = "mypy-1.12.1-cp38-cp38-win_amd64.whl", hash = "sha256:dc6e2a2195a290a7fd5bac3e60b586d77fc88e986eba7feced8b778c373f9afe"}, - {file = "mypy-1.12.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:de5b2a8988b4e1269a98beaf0e7cc71b510d050dce80c343b53b4955fff45f19"}, - {file = "mypy-1.12.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:843826966f1d65925e8b50d2b483065c51fc16dc5d72647e0236aae51dc8d77e"}, - {file = "mypy-1.12.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:9fe20f89da41a95e14c34b1ddb09c80262edcc295ad891f22cc4b60013e8f78d"}, - {file = "mypy-1.12.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:8135ffec02121a75f75dc97c81af7c14aa4ae0dda277132cfcd6abcd21551bfd"}, - {file = "mypy-1.12.1-cp39-cp39-win_amd64.whl", hash = "sha256:a7b76fa83260824300cc4834a3ab93180db19876bce59af921467fd03e692810"}, - {file = "mypy-1.12.1-py3-none-any.whl", hash = "sha256:ce561a09e3bb9863ab77edf29ae3a50e65685ad74bba1431278185b7e5d5486e"}, - {file = "mypy-1.12.1.tar.gz", hash = "sha256:f5b3936f7a6d0e8280c9bdef94c7ce4847f5cdfc258fbb2c29a8c1711e8bb96d"}, + {file = "mypy-1.13.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:6607e0f1dd1fb7f0aca14d936d13fd19eba5e17e1cd2a14f808fa5f8f6d8f60a"}, + {file = 
"mypy-1.13.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:8a21be69bd26fa81b1f80a61ee7ab05b076c674d9b18fb56239d72e21d9f4c80"}, + {file = "mypy-1.13.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:7b2353a44d2179846a096e25691d54d59904559f4232519d420d64da6828a3a7"}, + {file = "mypy-1.13.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:0730d1c6a2739d4511dc4253f8274cdd140c55c32dfb0a4cf8b7a43f40abfa6f"}, + {file = "mypy-1.13.0-cp310-cp310-win_amd64.whl", hash = "sha256:c5fc54dbb712ff5e5a0fca797e6e0aa25726c7e72c6a5850cfd2adbc1eb0a372"}, + {file = "mypy-1.13.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:581665e6f3a8a9078f28d5502f4c334c0c8d802ef55ea0e7276a6e409bc0d82d"}, + {file = "mypy-1.13.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:3ddb5b9bf82e05cc9a627e84707b528e5c7caaa1c55c69e175abb15a761cec2d"}, + {file = "mypy-1.13.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:20c7ee0bc0d5a9595c46f38beb04201f2620065a93755704e141fcac9f59db2b"}, + {file = "mypy-1.13.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:3790ded76f0b34bc9c8ba4def8f919dd6a46db0f5a6610fb994fe8efdd447f73"}, + {file = "mypy-1.13.0-cp311-cp311-win_amd64.whl", hash = "sha256:51f869f4b6b538229c1d1bcc1dd7d119817206e2bc54e8e374b3dfa202defcca"}, + {file = "mypy-1.13.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:5c7051a3461ae84dfb5dd15eff5094640c61c5f22257c8b766794e6dd85e72d5"}, + {file = "mypy-1.13.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:39bb21c69a5d6342f4ce526e4584bc5c197fd20a60d14a8624d8743fffb9472e"}, + {file = "mypy-1.13.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:164f28cb9d6367439031f4c81e84d3ccaa1e19232d9d05d37cb0bd880d3f93c2"}, + {file = "mypy-1.13.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:a4c1bfcdbce96ff5d96fc9b08e3831acb30dc44ab02671eca5953eadad07d6d0"}, + {file = 
"mypy-1.13.0-cp312-cp312-win_amd64.whl", hash = "sha256:a0affb3a79a256b4183ba09811e3577c5163ed06685e4d4b46429a271ba174d2"}, + {file = "mypy-1.13.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:a7b44178c9760ce1a43f544e595d35ed61ac2c3de306599fa59b38a6048e1aa7"}, + {file = "mypy-1.13.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:5d5092efb8516d08440e36626f0153b5006d4088c1d663d88bf79625af3d1d62"}, + {file = "mypy-1.13.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:de2904956dac40ced10931ac967ae63c5089bd498542194b436eb097a9f77bc8"}, + {file = "mypy-1.13.0-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:7bfd8836970d33c2105562650656b6846149374dc8ed77d98424b40b09340ba7"}, + {file = "mypy-1.13.0-cp313-cp313-win_amd64.whl", hash = "sha256:9f73dba9ec77acb86457a8fc04b5239822df0c14a082564737833d2963677dbc"}, + {file = "mypy-1.13.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:100fac22ce82925f676a734af0db922ecfea991e1d7ec0ceb1e115ebe501301a"}, + {file = "mypy-1.13.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:7bcb0bb7f42a978bb323a7c88f1081d1b5dee77ca86f4100735a6f541299d8fb"}, + {file = "mypy-1.13.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:bde31fc887c213e223bbfc34328070996061b0833b0a4cfec53745ed61f3519b"}, + {file = "mypy-1.13.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:07de989f89786f62b937851295ed62e51774722e5444a27cecca993fc3f9cd74"}, + {file = "mypy-1.13.0-cp38-cp38-win_amd64.whl", hash = "sha256:4bde84334fbe19bad704b3f5b78c4abd35ff1026f8ba72b29de70dda0916beb6"}, + {file = "mypy-1.13.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:0246bcb1b5de7f08f2826451abd947bf656945209b140d16ed317f65a17dc7dc"}, + {file = "mypy-1.13.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:7f5b7deae912cf8b77e990b9280f170381fdfbddf61b4ef80927edd813163732"}, + {file = 
"mypy-1.13.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:7029881ec6ffb8bc233a4fa364736789582c738217b133f1b55967115288a2bc"}, + {file = "mypy-1.13.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:3e38b980e5681f28f033f3be86b099a247b13c491f14bb8b1e1e134d23bb599d"}, + {file = "mypy-1.13.0-cp39-cp39-win_amd64.whl", hash = "sha256:a6789be98a2017c912ae6ccb77ea553bbaf13d27605d2ca20a76dfbced631b24"}, + {file = "mypy-1.13.0-py3-none-any.whl", hash = "sha256:9c250883f9fd81d212e0952c92dbfcc96fc237f4b7c92f56ac81fd48460b3e5a"}, + {file = "mypy-1.13.0.tar.gz", hash = "sha256:0291a61b6fbf3e6673e3405cfcc0e7650bebc7939659fdca2702958038bd835e"}, ] [package.dependencies] @@ -2555,6 +2555,7 @@ typing-extensions = ">=4.6.0" [package.extras] dmypy = ["psutil (>=4.0)"] +faster-cache = ["orjson"] install-types = ["pip"] mypyc = ["setuptools (>=50)"] reports = ["lxml"] From cf3c1e4e1022a48396a902107d9f3c0e852d423d Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Wed, 23 Oct 2024 00:37:54 +0000 Subject: [PATCH 080/161] :arrow_up: Bump mkdocs-git-revision-date-localized-plugin Bumps [mkdocs-git-revision-date-localized-plugin](https://github.com/timvink/mkdocs-git-revision-date-localized-plugin) from 1.2.9 to 1.3.0. - [Release notes](https://github.com/timvink/mkdocs-git-revision-date-localized-plugin/releases) - [Commits](https://github.com/timvink/mkdocs-git-revision-date-localized-plugin/compare/v1.2.9...v1.3.0) --- updated-dependencies: - dependency-name: mkdocs-git-revision-date-localized-plugin dependency-type: direct:development update-type: version-update:semver-minor ... 
Signed-off-by: dependabot[bot] --- poetry.lock | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/poetry.lock b/poetry.lock index 088b34e51..792bc8360 100644 --- a/poetry.lock +++ b/poetry.lock @@ -2371,13 +2371,13 @@ requests = "*" [[package]] name = "mkdocs-git-revision-date-localized-plugin" -version = "1.2.9" +version = "1.3.0" description = "Mkdocs plugin that enables displaying the localized date of the last git modification of a markdown file." optional = false python-versions = ">=3.8" files = [ - {file = "mkdocs_git_revision_date_localized_plugin-1.2.9-py3-none-any.whl", hash = "sha256:dea5c8067c23df30275702a1708885500fadf0abfb595b60e698bffc79c7a423"}, - {file = "mkdocs_git_revision_date_localized_plugin-1.2.9.tar.gz", hash = "sha256:df9a50873fba3a42ce9123885f8c53d589e90ef6c2443fe3280ef1e8d33c8f65"}, + {file = "mkdocs_git_revision_date_localized_plugin-1.3.0-py3-none-any.whl", hash = "sha256:c99377ee119372d57a9e47cff4e68f04cce634a74831c06bc89b33e456e840a1"}, + {file = "mkdocs_git_revision_date_localized_plugin-1.3.0.tar.gz", hash = "sha256:439e2f14582204050a664c258861c325064d97cdc848c541e48bb034a6c4d0cb"}, ] [package.dependencies] From dbd4aa7798c10e3acf73cd5ee01008614a07dc7d Mon Sep 17 00:00:00 2001 From: Kader Miyanyedi Date: Fri, 18 Oct 2024 16:07:42 +0300 Subject: [PATCH 081/161] feat(ByteTrack): Allow ByteTrack to track detection without class ids --- supervision/tracker/byte_tracker/core.py | 7 ++++++- 1 file changed, 6 insertions(+), 1 deletion(-) diff --git a/supervision/tracker/byte_tracker/core.py b/supervision/tracker/byte_tracker/core.py index d1d567fab..f453e5237 100644 --- a/supervision/tracker/byte_tracker/core.py +++ b/supervision/tracker/byte_tracker/core.py @@ -276,11 +276,16 @@ def callback(frame: np.ndarray, index: int) -> np.ndarray: ``` """ + num_rows = detections.xyxy.shape[0] + class_ids = np.full(num_rows, -5) + if detections.class_id is not None: + class_ids = detections.class_id + tensors = np.hstack( ( 
detections.xyxy, detections.confidence[:, np.newaxis], - detections.class_id[:, np.newaxis], + class_ids[:, np.newaxis], ) ) tracks = self.update_with_tensors(tensors=tensors) From 40e16118f8efe7f8cdce455f96fd5d436ed07b92 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Fri, 25 Oct 2024 00:37:54 +0000 Subject: [PATCH 082/161] :arrow_up: Bump ruff from 0.7.0 to 0.7.1 Bumps [ruff](https://github.com/astral-sh/ruff) from 0.7.0 to 0.7.1. - [Release notes](https://github.com/astral-sh/ruff/releases) - [Changelog](https://github.com/astral-sh/ruff/blob/main/CHANGELOG.md) - [Commits](https://github.com/astral-sh/ruff/compare/0.7.0...0.7.1) --- updated-dependencies: - dependency-name: ruff dependency-type: direct:development update-type: version-update:semver-patch ... Signed-off-by: dependabot[bot] --- poetry.lock | 38 +++++++++++++++++++------------------- 1 file changed, 19 insertions(+), 19 deletions(-) diff --git a/poetry.lock b/poetry.lock index 0a010aa4d..82ab2f08b 100644 --- a/poetry.lock +++ b/poetry.lock @@ -4037,29 +4037,29 @@ files = [ [[package]] name = "ruff" -version = "0.7.0" +version = "0.7.1" description = "An extremely fast Python linter and code formatter, written in Rust." 
optional = false python-versions = ">=3.7" files = [ - {file = "ruff-0.7.0-py3-none-linux_armv6l.whl", hash = "sha256:0cdf20c2b6ff98e37df47b2b0bd3a34aaa155f59a11182c1303cce79be715628"}, - {file = "ruff-0.7.0-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:496494d350c7fdeb36ca4ef1c9f21d80d182423718782222c29b3e72b3512737"}, - {file = "ruff-0.7.0-py3-none-macosx_11_0_arm64.whl", hash = "sha256:214b88498684e20b6b2b8852c01d50f0651f3cc6118dfa113b4def9f14faaf06"}, - {file = "ruff-0.7.0-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:630fce3fefe9844e91ea5bbf7ceadab4f9981f42b704fae011bb8efcaf5d84be"}, - {file = "ruff-0.7.0-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:211d877674e9373d4bb0f1c80f97a0201c61bcd1e9d045b6e9726adc42c156aa"}, - {file = "ruff-0.7.0-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:194d6c46c98c73949a106425ed40a576f52291c12bc21399eb8f13a0f7073495"}, - {file = "ruff-0.7.0-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:82c2579b82b9973a110fab281860403b397c08c403de92de19568f32f7178598"}, - {file = "ruff-0.7.0-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9af971fe85dcd5eaed8f585ddbc6bdbe8c217fb8fcf510ea6bca5bdfff56040e"}, - {file = "ruff-0.7.0-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b641c7f16939b7d24b7bfc0be4102c56562a18281f84f635604e8a6989948914"}, - {file = "ruff-0.7.0-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d71672336e46b34e0c90a790afeac8a31954fd42872c1f6adaea1dff76fd44f9"}, - {file = "ruff-0.7.0-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:ab7d98c7eed355166f367597e513a6c82408df4181a937628dbec79abb2a1fe4"}, - {file = "ruff-0.7.0-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:1eb54986f770f49edb14f71d33312d79e00e629a57387382200b1ef12d6a4ef9"}, - {file = "ruff-0.7.0-py3-none-musllinux_1_2_i686.whl", hash = 
"sha256:dc452ba6f2bb9cf8726a84aa877061a2462afe9ae0ea1d411c53d226661c601d"}, - {file = "ruff-0.7.0-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:4b406c2dce5be9bad59f2de26139a86017a517e6bcd2688da515481c05a2cb11"}, - {file = "ruff-0.7.0-py3-none-win32.whl", hash = "sha256:f6c968509f767776f524a8430426539587d5ec5c662f6addb6aa25bc2e8195ec"}, - {file = "ruff-0.7.0-py3-none-win_amd64.whl", hash = "sha256:ff4aabfbaaba880e85d394603b9e75d32b0693152e16fa659a3064a85df7fce2"}, - {file = "ruff-0.7.0-py3-none-win_arm64.whl", hash = "sha256:10842f69c245e78d6adec7e1db0a7d9ddc2fff0621d730e61657b64fa36f207e"}, - {file = "ruff-0.7.0.tar.gz", hash = "sha256:47a86360cf62d9cd53ebfb0b5eb0e882193fc191c6d717e8bef4462bc3b9ea2b"}, + {file = "ruff-0.7.1-py3-none-linux_armv6l.whl", hash = "sha256:cb1bc5ed9403daa7da05475d615739cc0212e861b7306f314379d958592aaa89"}, + {file = "ruff-0.7.1-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:27c1c52a8d199a257ff1e5582d078eab7145129aa02721815ca8fa4f9612dc35"}, + {file = "ruff-0.7.1-py3-none-macosx_11_0_arm64.whl", hash = "sha256:588a34e1ef2ea55b4ddfec26bbe76bc866e92523d8c6cdec5e8aceefeff02d99"}, + {file = "ruff-0.7.1-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:94fc32f9cdf72dc75c451e5f072758b118ab8100727168a3df58502b43a599ca"}, + {file = "ruff-0.7.1-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:985818742b833bffa543a84d1cc11b5e6871de1b4e0ac3060a59a2bae3969250"}, + {file = "ruff-0.7.1-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:32f1e8a192e261366c702c5fb2ece9f68d26625f198a25c408861c16dc2dea9c"}, + {file = "ruff-0.7.1-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:699085bf05819588551b11751eff33e9ca58b1b86a6843e1b082a7de40da1565"}, + {file = "ruff-0.7.1-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:344cc2b0814047dc8c3a8ff2cd1f3d808bb23c6658db830d25147339d9bf9ea7"}, + {file = 
"ruff-0.7.1-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:4316bbf69d5a859cc937890c7ac7a6551252b6a01b1d2c97e8fc96e45a7c8b4a"}, + {file = "ruff-0.7.1-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:79d3af9dca4c56043e738a4d6dd1e9444b6d6c10598ac52d146e331eb155a8ad"}, + {file = "ruff-0.7.1-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:c5c121b46abde94a505175524e51891f829414e093cd8326d6e741ecfc0a9112"}, + {file = "ruff-0.7.1-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:8422104078324ea250886954e48f1373a8fe7de59283d747c3a7eca050b4e378"}, + {file = "ruff-0.7.1-py3-none-musllinux_1_2_i686.whl", hash = "sha256:56aad830af8a9db644e80098fe4984a948e2b6fc2e73891538f43bbe478461b8"}, + {file = "ruff-0.7.1-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:658304f02f68d3a83c998ad8bf91f9b4f53e93e5412b8f2388359d55869727fd"}, + {file = "ruff-0.7.1-py3-none-win32.whl", hash = "sha256:b517a2011333eb7ce2d402652ecaa0ac1a30c114fbbd55c6b8ee466a7f600ee9"}, + {file = "ruff-0.7.1-py3-none-win_amd64.whl", hash = "sha256:f38c41fcde1728736b4eb2b18850f6d1e3eedd9678c914dede554a70d5241307"}, + {file = "ruff-0.7.1-py3-none-win_arm64.whl", hash = "sha256:19aa200ec824c0f36d0c9114c8ec0087082021732979a359d6f3c390a6ff2a37"}, + {file = "ruff-0.7.1.tar.gz", hash = "sha256:9d8a41d4aa2dad1575adb98a82870cf5db5f76b2938cf2206c22c940034a36f4"}, ] [[package]] From ba559c2658d467697cb6fe3322280ec6c77731dd Mon Sep 17 00:00:00 2001 From: KshitijAucharmal Date: Mon, 28 Oct 2024 08:59:30 +0530 Subject: [PATCH 083/161] Added smart positioning (non overlapping labels) to VertexLabelAnnotator --- supervision/detection/utils.py | 83 ++++++++++++++++++++++++++++++ supervision/keypoint/annotators.py | 12 ++++- 2 files changed, 94 insertions(+), 1 deletion(-) diff --git a/supervision/detection/utils.py b/supervision/detection/utils.py index 43fcec5a0..308797eab 100644 --- a/supervision/detection/utils.py +++ b/supervision/detection/utils.py @@ -1,3 +1,4 @@ +import 
math from itertools import chain from typing import Dict, List, Optional, Tuple, Union @@ -1039,3 +1040,85 @@ def cross_product(anchors: np.ndarray, vector: Vector) -> np.ndarray: ) vector_start = np.array([vector.start.x, vector.start.y]) return np.cross(vector_at_zero, anchors - vector_start) + + +# Intelligent padding functions +def get_intersection_center( + xyxy_1: np.ndarray, xyxy_2: np.ndarray +) -> Optional[Tuple[float, float]]: + overlap_xmin = max(xyxy_1[0], xyxy_2[0]) + overlap_ymin = max(xyxy_1[1], xyxy_2[1]) + overlap_xmax = min(xyxy_1[2], xyxy_2[2]) + overlap_ymax = min(xyxy_1[3], xyxy_2[3]) + + if overlap_xmin < overlap_xmax and overlap_ymin < overlap_ymax: + x_center = (overlap_xmin + overlap_xmax) / 2 + y_center = (overlap_ymin + overlap_ymax) / 2 + return (x_center, y_center) + else: + return None + + +def get_box_center(xyxy: np.ndarray) -> Tuple[float, float]: + x_center = (xyxy[0] + xyxy[2]) / 2 + y_center = (xyxy[1] + xyxy[3]) / 2 + return (x_center, y_center) + + +def vector_with_length( + xy_1: Tuple[float, float], xy_2: Tuple[float, float], n: float +) -> Tuple[float, float]: + x1, y1 = xy_1 + x2, y2 = xy_2 + + dx = x2 - x1 + dy = y2 - y1 + + if dx == 0 and dy == 0: + return 0, 0 + + magnitude = math.sqrt(dx**2 + dy**2) + + unit_dx = dx / magnitude + unit_dy = dy / magnitude + + v1 = unit_dx * n + v2 = unit_dy * n + + return (v1, v2) + + +def pad(xyxy: np.ndarray, px: int, py: Optional[int] = None): + if py is None: + py = px + + result = xyxy.copy() + result[:, [0, 1]] -= [px, py] + result[:, [2, 3]] += [px, py] + + return result + + +def spread_out(xyxy: np.ndarray, step) -> np.ndarray: + xyxy_padded = pad(xyxy, px=step) + while True: + iou = box_iou_batch(xyxy_padded, xyxy_padded) + np.fill_diagonal(iou, 0) + + if np.all(iou == 0): + return pad(xyxy_padded, px=-step) + + i, j = np.unravel_index(np.argmax(iou), iou.shape) + + xyxy_i, xyxy_j = xyxy_padded[i], xyxy_padded[j] + intersection_center = get_intersection_center(xyxy_i, xyxy_j) + 
xyxy_i_center = get_box_center(xyxy_i) + xyxy_j_center = get_box_center(xyxy_j) + + vector_i = vector_with_length(intersection_center, xyxy_i_center, step) + vector_j = vector_with_length(intersection_center, xyxy_j_center, step) + + xyxy_padded[i, [0, 2]] += int(vector_i[0]) + xyxy_padded[i, [1, 3]] += int(vector_i[1]) + xyxy_padded[j, [0, 2]] += int(vector_j[0]) + xyxy_padded[j, [1, 3]] += int(vector_j[1]) diff --git a/supervision/keypoint/annotators.py b/supervision/keypoint/annotators.py index 559bfa921..e82acbc65 100644 --- a/supervision/keypoint/annotators.py +++ b/supervision/keypoint/annotators.py @@ -7,6 +7,7 @@ from supervision import Rect, pad_boxes from supervision.annotators.base import ImageType +from supervision.detection.utils import pad, spread_out from supervision.draw.color import Color from supervision.draw.utils import draw_rounded_rectangle from supervision.keypoint.core import KeyPoints @@ -201,6 +202,7 @@ def __init__( text_thickness: int = 1, text_padding: int = 10, border_radius: int = 0, + use_smart_positioning: bool = False, ): """ Args: @@ -215,7 +217,10 @@ def __init__( text_padding (int): The padding around the text. border_radius (int): The radius of the rounded corners of the boxes. Set to a high value to produce circles. + use_smart_positioning (bool): Whether to use smart positioning to prevent + label overlapping or not. 
""" + self.use_smart_positioning = use_smart_positioning self.border_radius: int = border_radius self.color: Union[Color, List[Color]] = color self.text_color: Union[Color, List[Color]] = text_color @@ -357,7 +362,12 @@ def annotate( ] ) - xyxy_padded = pad_boxes(xyxy=xyxy, px=self.text_padding) + if self.use_smart_positioning: + xyxy_padded = pad(xyxy=xyxy, px=self.text_padding) + xyxy_padded = spread_out(xyxy_padded, step=2) + xyxy = pad(xyxy=xyxy_padded, px=-self.text_padding) + else: + xyxy_padded = pad_boxes(xyxy=xyxy, px=self.text_padding) for text, color, text_color, box, box_padded in zip( labels, colors, text_colors, xyxy, xyxy_padded From a2e2e3725a17de63d5d2c7e97f79722f9880753d Mon Sep 17 00:00:00 2001 From: KshitijAucharmal Date: Mon, 28 Oct 2024 19:05:39 +0530 Subject: [PATCH 084/161] Added smart positioning (non overlapping labels) to LabelAnnotator --- supervision/annotators/core.py | 176 ++++++++++++++++++++++++--------- 1 file changed, 129 insertions(+), 47 deletions(-) diff --git a/supervision/annotators/core.py b/supervision/annotators/core.py index 1910ac9f4..9d08cf1df 100644 --- a/supervision/annotators/core.py +++ b/supervision/annotators/core.py @@ -16,7 +16,7 @@ ) from supervision.config import CLASS_NAME_DATA_FIELD, ORIENTED_BOX_COORDINATES from supervision.detection.core import Detections -from supervision.detection.utils import clip_boxes, mask_to_polygons +from supervision.detection.utils import clip_boxes, mask_to_polygons, spread_out from supervision.draw.color import Color, ColorPalette from supervision.draw.utils import draw_polygon from supervision.geometry.core import Position @@ -1054,6 +1054,7 @@ def __init__( text_position: Position = Position.TOP_LEFT, color_lookup: ColorLookup = ColorLookup.CLASS, border_radius: int = 0, + use_smart_positioning: bool = False, ): """ Args: @@ -1070,7 +1071,10 @@ def __init__( Options are `INDEX`, `CLASS`, `TRACK`. border_radius (int): The radius to apply round edges. 
If the selected value is higher than the lower dimension, width or height, is clipped. + use_smart_positioning (bool): Whether to use smart positioning to prevent + label overlapping or not. """ + self.use_smart_positioning: bool = use_smart_positioning self.border_radius: int = border_radius self.color: Union[Color, ColorPalette] = color self.text_color: Union[Color, ColorPalette] = text_color @@ -1128,11 +1132,35 @@ def annotate( ![label-annotator-example](https://media.roboflow.com/ supervision-annotator-examples/label-annotator-example-purple.png) """ + assert isinstance(scene, np.ndarray) - font = cv2.FONT_HERSHEY_SIMPLEX - anchors_coordinates = detections.get_anchors_coordinates( - anchor=self.text_anchor - ).astype(int) + self._validate_labels(labels, detections) + + # Get text properties for all detections + text_props = self._get_text_properties(detections, labels) + + # Calculate background coordinates for all labels + xyxy = self._calculate_label_backgrounds( + detections, text_props, self.text_anchor, self.text_padding + ) + + # Adjust positions if smart positioning is enabled + if self.use_smart_positioning: + xyxy = spread_out(xyxy, step=2) + + # Draw all labels + self._draw_labels( + scene=scene, + xyxy=xyxy, + text_props=text_props, + detections=detections, + custom_color_lookup=custom_color_lookup, + ) + + return scene + + def _validate_labels(self, labels: Optional[List[str]], detections: Detections): + """Validates that the number of labels matches the number of detections.""" if labels is not None and len(labels) != len(detections): raise ValueError( f"The number of labels ({len(labels)}) does not match the " @@ -1140,64 +1168,119 @@ def annotate( f"should have exactly 1 label." 
) - for detection_idx, center_coordinates in enumerate(anchors_coordinates): - color = resolve_color( - color=self.color, - detections=detections, - detection_idx=detection_idx, - color_lookup=( - self.color_lookup - if custom_color_lookup is None - else custom_color_lookup - ), - ) - - text_color = resolve_color( - color=self.text_color, - detections=detections, - detection_idx=detection_idx, - color_lookup=( - self.color_lookup - if custom_color_lookup is None - else custom_color_lookup - ), - ) + def _get_text_properties( + self, detections: Detections, custom_labels: Optional[List[str]] + ) -> List[dict]: + """Gets text content and dimensions for all detections.""" + text_props = [] + font = cv2.FONT_HERSHEY_SIMPLEX - if labels is not None: - text = labels[detection_idx] - elif CLASS_NAME_DATA_FIELD in detections.data: - text = detections.data[CLASS_NAME_DATA_FIELD][detection_idx] - elif detections.class_id is not None: - text = str(detections.class_id[detection_idx]) - else: - text = str(detection_idx) + for idx in range(len(detections)): + # Determine label text + text = self._get_label_text(detections, custom_labels, idx) - text_w, text_h = cv2.getTextSize( + # Calculate text dimensions + (text_w, text_h) = cv2.getTextSize( text=text, fontFace=font, fontScale=self.text_scale, thickness=self.text_thickness, )[0] - text_w_padded = text_w + 2 * self.text_padding - text_h_padded = text_h + 2 * self.text_padding + + text_props.append( + { + "text": text, + "width": text_w, + "height": text_h, + "width_padded": text_w + 2 * self.text_padding, + "height_padded": text_h + 2 * self.text_padding, + } + ) + + return text_props + + def _get_label_text( + self, detections: Detections, custom_labels: Optional[List[str]], idx: int + ) -> str: + """Determines the label text for a given detection.""" + if custom_labels is not None: + return custom_labels[idx] + elif CLASS_NAME_DATA_FIELD in detections.data: + return detections.data[CLASS_NAME_DATA_FIELD][idx] + elif 
detections.class_id is not None: + return str(detections.class_id[idx]) + return str(idx) + + def _calculate_label_backgrounds( + self, + detections: Detections, + text_props: List[dict], + text_anchor: str, + text_padding: int, + ) -> np.ndarray: + """Calculates background coordinates for all labels.""" + anchors_coordinates = detections.get_anchors_coordinates( + anchor=text_anchor + ).astype(int) + + xyxy = [] + for idx, center_coords in enumerate(anchors_coordinates): text_background_xyxy = resolve_text_background_xyxy( - center_coordinates=tuple(center_coordinates), - text_wh=(text_w_padded, text_h_padded), - position=self.text_anchor, + center_coordinates=tuple(center_coords), + text_wh=( + text_props[idx]["width_padded"], + text_props[idx]["height_padded"], + ), + position=text_anchor, + ) + xyxy.append(text_background_xyxy) + + return np.array(xyxy) + + def _draw_labels( + self, + scene: ImageType, + xyxy: np.ndarray, + text_props: List[dict], + detections: Detections, + custom_color_lookup: Optional[np.ndarray], + ) -> None: + """Draws all labels and their backgrounds on the scene.""" + if custom_color_lookup is not None: + color_lookup = custom_color_lookup + else: + color_lookup = self.color_lookup + font = cv2.FONT_HERSHEY_SIMPLEX + + for idx, coordinates in enumerate(xyxy): + # Resolve colors + bg_color = resolve_color( + color=self.color, + detections=detections, + detection_idx=idx, + color_lookup=color_lookup, + ) + text_color = resolve_color( + color=self.text_color, + detections=detections, + detection_idx=idx, + color_lookup=color_lookup, ) - text_x = text_background_xyxy[0] + self.text_padding - text_y = text_background_xyxy[1] + self.text_padding + text_h + # Calculate text position + text_x = coordinates[0] + self.text_padding + text_y = coordinates[1] + self.text_padding + text_props[idx]["height"] + # Draw background and text self.draw_rounded_rectangle( scene=scene, - xyxy=text_background_xyxy, - color=color.as_bgr(), + xyxy=coordinates, + 
color=bg_color.as_bgr(), border_radius=self.border_radius, ) cv2.putText( img=scene, - text=text, + text=text_props[idx]["text"], org=(text_x, text_y), fontFace=font, fontScale=self.text_scale, @@ -1205,7 +1288,6 @@ def annotate( thickness=self.text_thickness, lineType=cv2.LINE_AA, ) - return scene @staticmethod def draw_rounded_rectangle( From b4d93ce6c90310c97cea561b0b3b5d881a6204e5 Mon Sep 17 00:00:00 2001 From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Mon, 28 Oct 2024 18:02:23 +0000 Subject: [PATCH 085/161] =?UTF-8?q?chore(pre=5Fcommit):=20=E2=AC=86=20pre?= =?UTF-8?q?=5Fcommit=20autoupdate?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit updates: - [github.com/astral-sh/ruff-pre-commit: v0.7.0 → v0.7.1](https://github.com/astral-sh/ruff-pre-commit/compare/v0.7.0...v0.7.1) --- .pre-commit-config.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 8ddaa08e9..39c50e823 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -32,7 +32,7 @@ repos: additional_dependencies: ["bandit[toml]"] - repo: https://github.com/astral-sh/ruff-pre-commit - rev: v0.7.0 + rev: v0.7.1 hooks: - id: ruff args: [--fix, --exit-non-zero-on-fix] From b26e8f58144fa75cc14032d11bff1e3fbaa77262 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue, 29 Oct 2024 00:12:10 +0000 Subject: [PATCH 086/161] :arrow_up: Bump tqdm from 4.66.5 to 4.66.6 Bumps [tqdm](https://github.com/tqdm/tqdm) from 4.66.5 to 4.66.6. - [Release notes](https://github.com/tqdm/tqdm/releases) - [Commits](https://github.com/tqdm/tqdm/compare/v4.66.5...v4.66.6) --- updated-dependencies: - dependency-name: tqdm dependency-type: direct:production update-type: version-update:semver-patch ... 
Signed-off-by: dependabot[bot] --- poetry.lock | 8 ++++---- pyproject.toml | 2 +- 2 files changed, 5 insertions(+), 5 deletions(-) diff --git a/poetry.lock b/poetry.lock index 82ab2f08b..69775617e 100644 --- a/poetry.lock +++ b/poetry.lock @@ -4405,13 +4405,13 @@ test = ["devpi-process (>=1.0.2)", "pytest (>=8.3.3)", "pytest-mock (>=3.14)"] [[package]] name = "tqdm" -version = "4.66.5" +version = "4.66.6" description = "Fast, Extensible Progress Meter" optional = true python-versions = ">=3.7" files = [ - {file = "tqdm-4.66.5-py3-none-any.whl", hash = "sha256:90279a3770753eafc9194a0364852159802111925aa30eb3f9d85b0e805ac7cd"}, - {file = "tqdm-4.66.5.tar.gz", hash = "sha256:e1020aef2e5096702d8a025ac7d16b1577279c9d63f8375b63083e9a5f0fcbad"}, + {file = "tqdm-4.66.6-py3-none-any.whl", hash = "sha256:223e8b5359c2efc4b30555531f09e9f2f3589bcd7fdd389271191031b49b7a63"}, + {file = "tqdm-4.66.6.tar.gz", hash = "sha256:4bdd694238bef1485ce839d67967ab50af8f9272aab687c0d7702a01da0be090"}, ] [package.dependencies] @@ -4781,4 +4781,4 @@ metrics = ["pandas", "pandas-stubs"] [metadata] lock-version = "2.0" python-versions = "^3.8" -content-hash = "fe9efd8caf098dc0301e1d7007ac3b00647b48cff6b060cc0eff57656d082099" +content-hash = "8f7dad5406a294901e3f489cf0d09e8217a80597ba9cd82695822a3fb5c13034" diff --git a/pyproject.toml b/pyproject.toml index 6464346b7..713bd838e 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -71,7 +71,7 @@ pyyaml = ">=5.3" defusedxml = "^0.7.1" pillow = ">=9.4" requests = { version = ">=2.26.0,<=2.32.3", optional = true } -tqdm = { version = ">=4.62.3,<=4.66.5", optional = true } +tqdm = { version = ">=4.62.3,<=4.66.6", optional = true } # pandas: picked lowest major version that supports Python 3.8 pandas = { version = ">=2.0.0", optional = true } pandas-stubs = { version = ">=2.0.0.230412", optional = true } From ecf5b1334fb1042565a945ed48285147cfc9dbd1 Mon Sep 17 00:00:00 2001 From: KshitijAucharmal Date: Tue, 29 Oct 2024 16:06:54 +0530 Subject: [PATCH 
087/161] Added smart positioning (non overlapping labels) to RichLabelAnnotator --- supervision/annotators/core.py | 157 +++++++++++++++++++++++---------- 1 file changed, 112 insertions(+), 45 deletions(-) diff --git a/supervision/annotators/core.py b/supervision/annotators/core.py index 9d08cf1df..167782bb6 100644 --- a/supervision/annotators/core.py +++ b/supervision/annotators/core.py @@ -1348,6 +1348,7 @@ def __init__( text_position: Position = Position.TOP_LEFT, color_lookup: ColorLookup = ColorLookup.CLASS, border_radius: int = 0, + use_smart_positioning: bool = False, ): """ Args: @@ -1364,6 +1365,8 @@ def __init__( Options are `INDEX`, `CLASS`, `TRACK`. border_radius (int): The radius to apply round edges. If the selected value is higher than the lower dimension, width or height, is clipped. + use_smart_positioning (bool): Whether to use smart positioning to prevent + label overlapping or not. """ self.color = color self.text_color = text_color @@ -1371,6 +1374,7 @@ def __init__( self.text_anchor = text_position self.color_lookup = color_lookup self.border_radius = border_radius + self.use_smart_positioning: bool = use_smart_positioning if font_path is not None: try: self.font = ImageFont.truetype(font_path, font_size) @@ -1429,72 +1433,135 @@ def annotate( """ assert isinstance(scene, Image.Image) draw = ImageDraw.Draw(scene) - anchors_coordinates = detections.get_anchors_coordinates( - anchor=self.text_anchor - ).astype(int) + + # Input validation if labels is not None and len(labels) != len(detections): raise ValueError( - f"The number of labels provided ({len(labels)}) does not match the " - f"number of detections ({len(detections)}). Each detection should have " - f"a corresponding label." 
+ f"Label count ({len(labels)}) != detection count ({len(detections)})" ) - for detection_idx, center_coordinates in enumerate(anchors_coordinates): - color = resolve_color( + + # Get anchor coordinates for all detections + detection_anchor_coordinates = detections.get_anchors_coordinates( + anchor=self.text_anchor + ).astype(int) + + # Use the appropriate color lookup table + effective_color_lookup = ( + custom_color_lookup + if custom_color_lookup is not None + else self.color_lookup + ) + + def _get_detection_label_text(detection_index: int) -> str: + """ + Determine the appropriate label text for a detection. + Args: + detection_index: Index of the current detection + Returns: + str: The label text to display + """ + if labels is not None: + return labels[detection_index] + if CLASS_NAME_DATA_FIELD in detections.data: + return detections.data[CLASS_NAME_DATA_FIELD][detection_index] + if detections.class_id is not None: + return str(detections.class_id[detection_index]) + return str(detection_index) + + def _calculate_text_dimensions(label_text: str) -> tuple: + """ + Calculate text dimensions and offsets for the given label text. 
+ Args: + label_text: The text to measure + Returns: + tuple: ((left_offset, top_offset), (padded_width, padded_height)) + """ + text_left, text_top, text_right, text_bottom = draw.textbbox( + (0, 0), label_text, font=self.font + ) + text_width = text_right - text_left + text_height = text_bottom - text_top + padded_width = text_width + 2 * self.text_padding + padded_height = text_height + 2 * self.text_padding + return (text_left, text_top), (padded_width, padded_height) + + # Prepare all annotation data + annotation_collection = [] + for detection_index, center_coordinate in enumerate( + detection_anchor_coordinates + ): + # Get colors once per detection + background_color = resolve_color( color=self.color, detections=detections, - detection_idx=detection_idx, - color_lookup=( - self.color_lookup - if custom_color_lookup is None - else custom_color_lookup - ), + detection_idx=detection_index, + color_lookup=effective_color_lookup, ) - - text_color = resolve_color( + label_text_color = resolve_color( color=self.text_color, detections=detections, - detection_idx=detection_idx, - color_lookup=( - self.color_lookup - if custom_color_lookup is None - else custom_color_lookup - ), + detection_idx=detection_index, + color_lookup=effective_color_lookup, ) - if labels is not None: - text = labels[detection_idx] - elif CLASS_NAME_DATA_FIELD in detections.data: - text = detections.data[CLASS_NAME_DATA_FIELD][detection_idx] - elif detections.class_id is not None: - text = str(detections.class_id[detection_idx]) - else: - text = str(detection_idx) + # Get text and calculate dimensions + label_text = _get_detection_label_text(detection_index) + text_offset_coordinates, padded_dimensions = _calculate_text_dimensions( + label_text + ) - left, top, right, bottom = draw.textbbox((0, 0), text, font=self.font) - text_width = right - left - text_height = bottom - top - text_w_padded = text_width + 2 * self.text_padding - text_h_padded = text_height + 2 * self.text_padding - 
text_background_xyxy = resolve_text_background_xyxy( - center_coordinates=tuple(center_coordinates), - text_wh=(text_w_padded, text_h_padded), + # Calculate background coordinates + background_coordinates = resolve_text_background_xyxy( + center_coordinates=tuple(center_coordinate), + text_wh=padded_dimensions, position=self.text_anchor, ) - text_x = text_background_xyxy[0] + self.text_padding - left - text_y = text_background_xyxy[1] + self.text_padding - top + # Store all data for this annotation + annotation_collection.append( + { + "label_text": label_text, + "background_color": background_color, + "text_color": label_text_color, + "text_offset": text_offset_coordinates, + "background_coordinates": background_coordinates, + } + ) + + # Convert coordinates to numpy array for processing + background_coordinate_array = np.array( + [data["background_coordinates"] for data in annotation_collection] + ) + + # Apply smart positioning if enabled + if self.use_smart_positioning: + background_coordinate_array = spread_out( + background_coordinate_array, step=2 + ) + + # Draw annotations + for annotation_index, coordinates in enumerate(background_coordinate_array): + annotation_data = annotation_collection[annotation_index] + + # Calculate final text position + label_x_position = ( + coordinates[0] + self.text_padding - annotation_data["text_offset"][0] + ) + label_y_position = ( + coordinates[1] + self.text_padding - annotation_data["text_offset"][1] + ) draw.rounded_rectangle( - text_background_xyxy, + tuple(coordinates), radius=self.border_radius, - fill=color.as_rgb(), + fill=annotation_data["background_color"].as_rgb(), outline=None, ) draw.text( - xy=(text_x, text_y), - text=text, + xy=(label_x_position, label_y_position), + text=annotation_data["label_text"], font=self.font, - fill=text_color.as_rgb(), + fill=annotation_data["text_color"].as_rgb(), ) return scene From 02735848b5cf19224842255017a8282c2e80114c Mon Sep 17 00:00:00 2001 From: LinasKo Date: Thu, 31 Oct 
2024 15:42:04 +0200 Subject: [PATCH 088/161] Undo style, class agnostic, mAP regressions. Fix empty OBB object shape --- supervision/metrics/mean_average_precision.py | 13 ++++++++----- 1 file changed, 8 insertions(+), 5 deletions(-) diff --git a/supervision/metrics/mean_average_precision.py b/supervision/metrics/mean_average_precision.py index 87de73271..d5f66103e 100644 --- a/supervision/metrics/mean_average_precision.py +++ b/supervision/metrics/mean_average_precision.py @@ -61,9 +61,6 @@ def __init__( class_agnostic (bool): Whether to treat all data as a single class. """ self._metric_target = metric_target - if self._metric_target == MetricTarget.ORIENTED_BOUNDING_BOXES: - pass - self._class_agnostic = class_agnostic self._predictions_list: List[Detections] = [] @@ -203,6 +200,7 @@ def _compute( matches = self._match_detection_batch( predictions.class_id, targets.class_id, iou, iou_thresholds ) + stats.append( ( matches, @@ -226,6 +224,7 @@ def _compute( return MeanAveragePrecisionResult( metric_target=self._metric_target, + is_class_agnostic=self._class_agnostic, mAP_scores=mAP_scores, iou_thresholds=iou_thresholds, matched_classes=unique_classes, @@ -253,7 +252,7 @@ def _compute_average_precision(recall: np.ndarray, precision: np.ndarray) -> flo for r, p in zip(recall[::-1], precision[::-1]): precision_levels[recall_levels <= r] = p - average_precision = (1 / 100 * precision_levels).sum() + average_precision = (1 / 101 * precision_levels).sum() return average_precision @staticmethod @@ -367,7 +366,7 @@ def _make_empty_content(self) -> np.ndarray: if self._metric_target == MetricTarget.MASKS: return np.empty((0, 0, 0), dtype=bool) if self._metric_target == MetricTarget.ORIENTED_BOUNDING_BOXES: - return np.empty((0, 8), dtype=np.float32) + return np.empty((0, 4, 2), dtype=np.float32) raise ValueError(f"Invalid metric target: {self._metric_target}") def _filter_detections_by_size( @@ -407,6 +406,8 @@ class MeanAveragePrecisionResult: Attributes: metric_target 
(MetricTarget): the type of data used for the metric - boxes, masks or oriented bounding boxes. + class_agnostic (bool): When computing class-agnostic results, class ID + is set to `-1`. mAP_map50_95 (float): the mAP score at IoU thresholds from `0.5` to `0.95`. mAP_map50 (float): the mAP score at IoU threshold of `0.5`. mAP_map75 (float): the mAP score at IoU threshold of `0.75`. @@ -426,6 +427,7 @@ class and IoU threshold. Shape: `(num_target_classes, num_iou_thresholds)` """ metric_target: MetricTarget + is_class_agnostic: bool @property def map50_95(self) -> float: @@ -460,6 +462,7 @@ def __str__(self) -> str: out_str = ( f"{self.__class__.__name__}:\n" f"Metric target: {self.metric_target}\n" + f"Class agnostic: {self.is_class_agnostic}\n" f"mAP @ 50:95: {self.map50_95:.4f}\n" f"mAP @ 50: {self.map50:.4f}\n" f"mAP @ 75: {self.map75:.4f}\n" From 37f0ad8b1432401704b79f48311724a19d73aaca Mon Sep 17 00:00:00 2001 From: LinasKo Date: Thu, 31 Oct 2024 15:57:31 +0200 Subject: [PATCH 089/161] Add OBB support to F1 Score --- supervision/metrics/f1_score.py | 37 +++++++++++++------ supervision/metrics/mean_average_precision.py | 2 +- 2 files changed, 26 insertions(+), 13 deletions(-) diff --git a/supervision/metrics/f1_score.py b/supervision/metrics/f1_score.py index ba4fcd59a..cc8c87a2c 100644 --- a/supervision/metrics/f1_score.py +++ b/supervision/metrics/f1_score.py @@ -9,7 +9,11 @@ from supervision.config import ORIENTED_BOX_COORDINATES from supervision.detection.core import Detections -from supervision.detection.utils import box_iou_batch, mask_iou_batch +from supervision.detection.utils import ( + box_iou_batch, + mask_iou_batch, + oriented_box_iou_batch, +) from supervision.draw.color import LEGACY_COLOR_PALETTE from supervision.metrics.core import AveragingMethod, Metric, MetricTarget from supervision.metrics.utils.object_size import ( @@ -62,14 +66,9 @@ def __init__( averaging_method (AveragingMethod): The averaging method used to compute the F1 scores. 
Determines how the F1 scores are aggregated across classes. """ - self._metric_target = metric_target - if self._metric_target == MetricTarget.ORIENTED_BOUNDING_BOXES: - raise NotImplementedError( - "F1 score is not implemented for oriented bounding boxes." - ) - self._metric_target = metric_target self.averaging_method = averaging_method + self._predictions_list: List[Detections] = [] self._targets_list: List[Detections] = [] @@ -166,8 +165,12 @@ def _compute( iou = box_iou_batch(target_contents, prediction_contents) elif self._metric_target == MetricTarget.MASKS: iou = mask_iou_batch(target_contents, prediction_contents) + elif self._metric_target == MetricTarget.ORIENTED_BOUNDING_BOXES: + iou = oriented_box_iou_batch( + target_contents, prediction_contents + ) else: - raise NotImplementedError( + raise ValueError( "Unsupported metric target for IoU calculation" ) @@ -366,12 +369,22 @@ def _detections_content(self, detections: Detections) -> np.ndarray: return ( detections.mask if detections.mask is not None - else np.empty((0, 0, 0), dtype=bool) + else self._make_empty_content() ) if self._metric_target == MetricTarget.ORIENTED_BOUNDING_BOXES: - if obb := detections.data.get(ORIENTED_BOX_COORDINATES): - return np.ndarray(obb, dtype=np.float32) - return np.empty((0, 8), dtype=np.float32) + obb = detections.data.get(ORIENTED_BOX_COORDINATES) + if obb is not None and len(obb) > 0: + return np.array(obb, dtype=np.float32) + return self._make_empty_content() + raise ValueError(f"Invalid metric target: {self._metric_target}") + + def _make_empty_content(self) -> np.ndarray: + if self._metric_target == MetricTarget.BOXES: + return np.empty((0, 4), dtype=np.float32) + if self._metric_target == MetricTarget.MASKS: + return np.empty((0, 0, 0), dtype=bool) + if self._metric_target == MetricTarget.ORIENTED_BOUNDING_BOXES: + return np.empty((0, 4, 2), dtype=np.float32) raise ValueError(f"Invalid metric target: {self._metric_target}") def _filter_detections_by_size( diff 
--git a/supervision/metrics/mean_average_precision.py b/supervision/metrics/mean_average_precision.py index d5f66103e..ba37837b3 100644 --- a/supervision/metrics/mean_average_precision.py +++ b/supervision/metrics/mean_average_precision.py @@ -193,7 +193,7 @@ def _compute( target_contents, prediction_contents ) else: - raise NotImplementedError( + raise ValueError( "Unsupported metric target for IoU calculation" ) From 6f3491f03ce922d33f36765d2f4ecb2da93840e9 Mon Sep 17 00:00:00 2001 From: LinasKo Date: Thu, 31 Oct 2024 17:18:08 +0200 Subject: [PATCH 090/161] Fix issue in polygon_to_mask - expects a np.int32 datatype --- supervision/detection/utils.py | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/supervision/detection/utils.py b/supervision/detection/utils.py index a0a4d3e21..69cdacc7e 100644 --- a/supervision/detection/utils.py +++ b/supervision/detection/utils.py @@ -25,8 +25,7 @@ def polygon_to_mask(polygon: np.ndarray, resolution_wh: Tuple[int, int]) -> np.n """ width, height = map(int, resolution_wh) mask = np.zeros((height, width), dtype=np.uint8) - - cv2.fillPoly(mask, [polygon], color=1) + cv2.fillPoly(mask, [polygon.astype(np.int32)], color=1) return mask From d1c07bc106d9f8fe24cad933fed10382748cfa32 Mon Sep 17 00:00:00 2001 From: LinasKo Date: Thu, 31 Oct 2024 18:05:54 +0200 Subject: [PATCH 091/161] Add OBB support to precision and recall --- supervision/metrics/precision.py | 37 +++++++++++++++++++++----------- supervision/metrics/recall.py | 37 +++++++++++++++++++++----------- 2 files changed, 50 insertions(+), 24 deletions(-) diff --git a/supervision/metrics/precision.py b/supervision/metrics/precision.py index d915e1f49..fa6cf2b1a 100644 --- a/supervision/metrics/precision.py +++ b/supervision/metrics/precision.py @@ -9,7 +9,11 @@ from supervision.config import ORIENTED_BOX_COORDINATES from supervision.detection.core import Detections -from supervision.detection.utils import box_iou_batch, mask_iou_batch +from 
supervision.detection.utils import ( + box_iou_batch, + mask_iou_batch, + oriented_box_iou_batch, +) from supervision.draw.color import LEGACY_COLOR_PALETTE from supervision.metrics.core import AveragingMethod, Metric, MetricTarget from supervision.metrics.utils.object_size import ( @@ -65,14 +69,9 @@ def __init__( averaging_method (AveragingMethod): The averaging method used to compute the precision. Determines how the precision is aggregated across classes. """ - self._metric_target = metric_target - if self._metric_target == MetricTarget.ORIENTED_BOUNDING_BOXES: - raise NotImplementedError( - "Precision is not implemented for oriented bounding boxes." - ) - self._metric_target = metric_target self.averaging_method = averaging_method + self._predictions_list: List[Detections] = [] self._targets_list: List[Detections] = [] @@ -169,8 +168,12 @@ def _compute( iou = box_iou_batch(target_contents, prediction_contents) elif self._metric_target == MetricTarget.MASKS: iou = mask_iou_batch(target_contents, prediction_contents) + elif self._metric_target == MetricTarget.ORIENTED_BOUNDING_BOXES: + iou = oriented_box_iou_batch( + target_contents, prediction_contents + ) else: - raise NotImplementedError( + raise ValueError( "Unsupported metric target for IoU calculation" ) @@ -369,12 +372,22 @@ def _detections_content(self, detections: Detections) -> np.ndarray: return ( detections.mask if detections.mask is not None - else np.empty((0, 0, 0), dtype=bool) + else self._make_empty_content() ) if self._metric_target == MetricTarget.ORIENTED_BOUNDING_BOXES: - if obb := detections.data.get(ORIENTED_BOX_COORDINATES): - return np.ndarray(obb, dtype=np.float32) - return np.empty((0, 8), dtype=np.float32) + obb = detections.data.get(ORIENTED_BOX_COORDINATES) + if obb is not None and len(obb) > 0: + return np.array(obb, dtype=np.float32) + return self._make_empty_content() + raise ValueError(f"Invalid metric target: {self._metric_target}") + + def _make_empty_content(self) -> 
np.ndarray: + if self._metric_target == MetricTarget.BOXES: + return np.empty((0, 4), dtype=np.float32) + if self._metric_target == MetricTarget.MASKS: + return np.empty((0, 0, 0), dtype=bool) + if self._metric_target == MetricTarget.ORIENTED_BOUNDING_BOXES: + return np.empty((0, 4, 2), dtype=np.float32) raise ValueError(f"Invalid metric target: {self._metric_target}") def _filter_detections_by_size( diff --git a/supervision/metrics/recall.py b/supervision/metrics/recall.py index 9eae24f8e..1848502b7 100644 --- a/supervision/metrics/recall.py +++ b/supervision/metrics/recall.py @@ -9,7 +9,11 @@ from supervision.config import ORIENTED_BOX_COORDINATES from supervision.detection.core import Detections -from supervision.detection.utils import box_iou_batch, mask_iou_batch +from supervision.detection.utils import ( + box_iou_batch, + mask_iou_batch, + oriented_box_iou_batch, +) from supervision.draw.color import LEGACY_COLOR_PALETTE from supervision.metrics.core import AveragingMethod, Metric, MetricTarget from supervision.metrics.utils.object_size import ( @@ -65,14 +69,9 @@ def __init__( averaging_method (AveragingMethod): The averaging method used to compute the recall. Determines how the recall is aggregated across classes. """ - self._metric_target = metric_target - if self._metric_target == MetricTarget.ORIENTED_BOUNDING_BOXES: - raise NotImplementedError( - "Recall is not implemented for oriented bounding boxes." 
- ) - self._metric_target = metric_target self.averaging_method = averaging_method + self._predictions_list: List[Detections] = [] self._targets_list: List[Detections] = [] @@ -169,8 +168,12 @@ def _compute( iou = box_iou_batch(target_contents, prediction_contents) elif self._metric_target == MetricTarget.MASKS: iou = mask_iou_batch(target_contents, prediction_contents) + elif self._metric_target == MetricTarget.ORIENTED_BOUNDING_BOXES: + iou = oriented_box_iou_batch( + target_contents, prediction_contents + ) else: - raise NotImplementedError( + raise ValueError( "Unsupported metric target for IoU calculation" ) @@ -367,12 +370,22 @@ def _detections_content(self, detections: Detections) -> np.ndarray: return ( detections.mask if detections.mask is not None - else np.empty((0, 0, 0), dtype=bool) + else self._make_empty_content() ) if self._metric_target == MetricTarget.ORIENTED_BOUNDING_BOXES: - if obb := detections.data.get(ORIENTED_BOX_COORDINATES): - return np.ndarray(obb, dtype=np.float32) - return np.empty((0, 8), dtype=np.float32) + obb = detections.data.get(ORIENTED_BOX_COORDINATES) + if obb is not None and len(obb) > 0: + return np.array(obb, dtype=np.float32) + return self._make_empty_content() + raise ValueError(f"Invalid metric target: {self._metric_target}") + + def _make_empty_content(self) -> np.ndarray: + if self._metric_target == MetricTarget.BOXES: + return np.empty((0, 4), dtype=np.float32) + if self._metric_target == MetricTarget.MASKS: + return np.empty((0, 0, 0), dtype=bool) + if self._metric_target == MetricTarget.ORIENTED_BOUNDING_BOXES: + return np.empty((0, 4, 2), dtype=np.float32) raise ValueError(f"Invalid metric target: {self._metric_target}") def _filter_detections_by_size( From 353fa93305bf1d0dd778a1694cb412f14d6be785 Mon Sep 17 00:00:00 2001 From: LinasKo Date: Thu, 31 Oct 2024 19:41:13 +0200 Subject: [PATCH 092/161] Bugfix: Tracker reset fails --- supervision/tracker/byte_tracker/core.py | 1 - 1 file changed, 1 deletion(-) diff --git 
a/supervision/tracker/byte_tracker/core.py b/supervision/tracker/byte_tracker/core.py index f453e5237..34a9501ad 100644 --- a/supervision/tracker/byte_tracker/core.py +++ b/supervision/tracker/byte_tracker/core.py @@ -323,7 +323,6 @@ def reset(self): ensuring the tracker starts with a clean state for each new video. """ self.frame_id = 0 - BaseTrack.reset_counter() self.internal_id_counter.reset() self.external_id_counter.reset() self.tracked_tracks: List[STrack] = [] From 7229409fabfd5cb0c76054a82c77ec177e252d60 Mon Sep 17 00:00:00 2001 From: LinasKo Date: Thu, 31 Oct 2024 19:43:26 +0200 Subject: [PATCH 093/161] Revert "feat(ByteTrack): Allow ByteTrack to track detection without class ids" --- supervision/tracker/byte_tracker/core.py | 7 +------ 1 file changed, 1 insertion(+), 6 deletions(-) diff --git a/supervision/tracker/byte_tracker/core.py b/supervision/tracker/byte_tracker/core.py index f453e5237..d1d567fab 100644 --- a/supervision/tracker/byte_tracker/core.py +++ b/supervision/tracker/byte_tracker/core.py @@ -276,16 +276,11 @@ def callback(frame: np.ndarray, index: int) -> np.ndarray: ``` """ - num_rows = detections.xyxy.shape[0] - class_ids = np.full(num_rows, -5) - if detections.class_id is not None: - class_ids = detections.class_id - tensors = np.hstack( ( detections.xyxy, detections.confidence[:, np.newaxis], - class_ids[:, np.newaxis], + detections.class_id[:, np.newaxis], ) ) tracks = self.update_with_tensors(tensors=tensors) From ab99c3615de105b9339ff01e5e45a5315f20efda Mon Sep 17 00:00:00 2001 From: Kader Miyanyedi Date: Thu, 31 Oct 2024 20:11:03 +0300 Subject: [PATCH 094/161] feat(ByteTrack): Remove unnecessary class id from bytetrack --- supervision/annotators/core.py | 2 +- supervision/tracker/byte_tracker/core.py | 13 ++----------- 2 files changed, 3 insertions(+), 12 deletions(-) diff --git a/supervision/annotators/core.py b/supervision/annotators/core.py index 1910ac9f4..73e0cdd22 100644 --- a/supervision/annotators/core.py +++ 
b/supervision/annotators/core.py @@ -1609,7 +1609,7 @@ def __init__( position: Position = Position.CENTER, trace_length: int = 30, thickness: int = 2, - color_lookup: ColorLookup = ColorLookup.CLASS, + color_lookup: ColorLookup = ColorLookup.INDEX, ): """ Args: diff --git a/supervision/tracker/byte_tracker/core.py b/supervision/tracker/byte_tracker/core.py index 5ce81960f..e1139ea98 100644 --- a/supervision/tracker/byte_tracker/core.py +++ b/supervision/tracker/byte_tracker/core.py @@ -32,7 +32,6 @@ def __init__( self, tlwh, score, - class_ids, minimum_consecutive_frames, internal_id_counter: IdCounter, external_id_counter: IdCounter, @@ -45,7 +44,6 @@ def __init__( self.is_activated = False self.score = score - self.class_ids = class_ids self.tracklet_len = 0 self.minimum_consecutive_frames = minimum_consecutive_frames @@ -280,7 +278,6 @@ def callback(frame: np.ndarray, index: int) -> np.ndarray: ( detections.xyxy, detections.confidence[:, np.newaxis], - detections.class_id[:, np.newaxis], ) ) tracks = self.update_with_tensors(tensors=tensors) @@ -340,7 +337,6 @@ def update_with_tensors(self, tensors: np.ndarray) -> List[STrack]: lost_stracks = [] removed_stracks = [] - class_ids = tensors[:, 5] scores = tensors[:, 4] bboxes = tensors[:, :4] @@ -354,21 +350,17 @@ def update_with_tensors(self, tensors: np.ndarray) -> List[STrack]: scores_keep = scores[remain_inds] scores_second = scores[inds_second] - class_ids_keep = class_ids[remain_inds] - class_ids_second = class_ids[inds_second] - if len(dets) > 0: """Detections""" detections = [ STrack( STrack.tlbr_to_tlwh(tlbr), s, - c, self.minimum_consecutive_frames, self.internal_id_counter, self.external_id_counter, ) - for (tlbr, s, c) in zip(dets, scores_keep, class_ids_keep) + for (tlbr, s) in zip(dets, scores_keep) ] else: detections = [] @@ -412,12 +404,11 @@ def update_with_tensors(self, tensors: np.ndarray) -> List[STrack]: STrack( STrack.tlbr_to_tlwh(tlbr), s, - c, self.minimum_consecutive_frames, 
self.internal_id_counter, self.external_id_counter, ) - for (tlbr, s, c) in zip(dets_second, scores_second, class_ids_second) + for (tlbr, s) in zip(dets_second, scores_second) ] else: detections_second = [] From 895bcfbddbfacfe50845675e0814c0fd884fe70e Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Fri, 1 Nov 2024 01:03:55 +0000 Subject: [PATCH 095/161] :arrow_up: Bump mkdocs-material from 9.5.42 to 9.5.43 Bumps [mkdocs-material](https://github.com/squidfunk/mkdocs-material) from 9.5.42 to 9.5.43. - [Release notes](https://github.com/squidfunk/mkdocs-material/releases) - [Changelog](https://github.com/squidfunk/mkdocs-material/blob/master/CHANGELOG) - [Commits](https://github.com/squidfunk/mkdocs-material/compare/9.5.42...9.5.43) --- updated-dependencies: - dependency-name: mkdocs-material dependency-type: direct:development update-type: version-update:semver-patch ... Signed-off-by: dependabot[bot] --- poetry.lock | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/poetry.lock b/poetry.lock index 69775617e..6e2817a5d 100644 --- a/poetry.lock +++ b/poetry.lock @@ -2412,13 +2412,13 @@ pygments = ">2.12.0" [[package]] name = "mkdocs-material" -version = "9.5.42" +version = "9.5.43" description = "Documentation that simply works" optional = false python-versions = ">=3.8" files = [ - {file = "mkdocs_material-9.5.42-py3-none-any.whl", hash = "sha256:452a7c5d21284b373f36b981a2cbebfff59263feebeede1bc28652e9c5bbe316"}, - {file = "mkdocs_material-9.5.42.tar.gz", hash = "sha256:92779b5e9b5934540c574c11647131d217dc540dce72b05feeda088c8eb1b8f2"}, + {file = "mkdocs_material-9.5.43-py3-none-any.whl", hash = "sha256:4aae0664c456fd12837a3192e0225c17960ba8bf55d7f0a7daef7e4b0b914a34"}, + {file = "mkdocs_material-9.5.43.tar.gz", hash = "sha256:83be7ff30b65a1e4930dfa4ab911e75780a3afc9583d162692e434581cb46979"}, ] [package.dependencies] From b8daa408bfb60b06ae68654fa4bace4e471f96d4 Mon Sep 17 00:00:00 
2001 From: LinasKo Date: Fri, 1 Nov 2024 18:14:56 +0200 Subject: [PATCH 096/161] Reset default color selection to CLASS for TraceAnnotator --- supervision/annotators/core.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/supervision/annotators/core.py b/supervision/annotators/core.py index 73e0cdd22..1910ac9f4 100644 --- a/supervision/annotators/core.py +++ b/supervision/annotators/core.py @@ -1609,7 +1609,7 @@ def __init__( position: Position = Position.CENTER, trace_length: int = 30, thickness: int = 2, - color_lookup: ColorLookup = ColorLookup.INDEX, + color_lookup: ColorLookup = ColorLookup.CLASS, ): """ Args: From 4678dfc74a5c5f0e61f1d71dadc465d2edc9c70e Mon Sep 17 00:00:00 2001 From: LinasKo Date: Fri, 1 Nov 2024 19:29:31 +0200 Subject: [PATCH 097/161] Fix regression where all uninitialized tracks are connected * Apparently, external track ID must start with `1` and not `0`. Not sure where this coupling comes from. --- supervision/tracker/byte_tracker/core.py | 16 +++++++++++----- 1 file changed, 11 insertions(+), 5 deletions(-) diff --git a/supervision/tracker/byte_tracker/core.py b/supervision/tracker/byte_tracker/core.py index e1139ea98..2dbf5ab50 100644 --- a/supervision/tracker/byte_tracker/core.py +++ b/supervision/tracker/byte_tracker/core.py @@ -10,19 +10,23 @@ class IdCounter: - def __init__(self): + def __init__(self, start_id: int = 0): + self.start_id = start_id + if self.start_id <= self.NO_ID: + raise ValueError("start_id must be greater than -1") self.reset() def reset(self) -> None: - self._id = self.NO_ID + self._id = self.start_id def new_id(self) -> int: + returned_id = self._id self._id += 1 - return self._id + return returned_id @property def NO_ID(self) -> int: - return 0 + return -1 class STrack(BaseTrack): @@ -231,8 +235,10 @@ def __init__( self.lost_tracks: List[STrack] = [] self.removed_tracks: List[STrack] = [] + # Warning, possible bug: If you also set internal_id to start at 1, + # all traces will be connected 
across objects. self.internal_id_counter = IdCounter() - self.external_id_counter = IdCounter() + self.external_id_counter = IdCounter(start_id=1) def update_with_detections(self, detections: Detections) -> Detections: """ From 1f5b1c7e01db169490ae3a58584a4d3e0aae60a7 Mon Sep 17 00:00:00 2001 From: LinasKo Date: Fri, 18 Oct 2024 01:39:53 +0300 Subject: [PATCH 098/161] BaseTrack: Move TrackState to STrack --- supervision/tracker/byte_tracker/basetrack.py | 32 +------------------ supervision/tracker/byte_tracker/core.py | 19 ++++++++--- 2 files changed, 15 insertions(+), 36 deletions(-) diff --git a/supervision/tracker/byte_tracker/basetrack.py b/supervision/tracker/byte_tracker/basetrack.py index e2bc66d57..079bab228 100644 --- a/supervision/tracker/byte_tracker/basetrack.py +++ b/supervision/tracker/byte_tracker/basetrack.py @@ -1,42 +1,18 @@ -from collections import OrderedDict -from enum import Enum - -import numpy as np - - -class TrackState(Enum): - New = 0 - Tracked = 1 - Lost = 2 - Removed = 3 - - class BaseTrack: def __init__(self): - self.track_id = 0 self.is_activated = False - self.state = TrackState.New - self.history = OrderedDict() - self.features = [] - self.curr_feature = None self.score = 0 self.start_frame = 0 self.frame_id = 0 - self.time_since_update = 0 - - # multi-camera - self.location = (np.inf, np.inf) @property def end_frame(self) -> int: return self.frame_id def reset_counter(self): - self.track_id = 0 - self.start_frame = 0 self.frame_id = 0 - self.time_since_update = 0 + self.start_frame = 0 def activate(self, *args, **kwargs): raise NotImplementedError @@ -46,9 +22,3 @@ def predict(self): def update(self, *args, **kwargs): raise NotImplementedError - - def mark_lost(self): - self.state = TrackState.Lost - - def mark_removed(self): - self.state = TrackState.Removed diff --git a/supervision/tracker/byte_tracker/core.py b/supervision/tracker/byte_tracker/core.py index 2dbf5ab50..362d9d07e 100644 --- 
a/supervision/tracker/byte_tracker/core.py +++ b/supervision/tracker/byte_tracker/core.py @@ -1,3 +1,4 @@ +from enum import Enum from typing import List, Tuple import numpy as np @@ -5,10 +6,17 @@ from supervision.detection.core import Detections from supervision.detection.utils import box_iou_batch from supervision.tracker.byte_tracker import matching -from supervision.tracker.byte_tracker.basetrack import BaseTrack, TrackState +from supervision.tracker.byte_tracker.basetrack import BaseTrack from supervision.tracker.byte_tracker.kalman_filter import KalmanFilter +class TrackState(Enum): + New = 0 + Tracked = 1 + Lost = 2 + Removed = 3 + + class IdCounter: def __init__(self, start_id: int = 0): self.start_id = start_id @@ -41,7 +49,8 @@ def __init__( external_id_counter: IdCounter, ): super().__init__() - # wait activate + self.state = TrackState.New + self._tlwh = np.asarray(tlwh, dtype=np.float32) self.kalman_filter = None self.mean, self.covariance = None, None @@ -440,7 +449,7 @@ def update_with_tensors(self, tensors: np.ndarray) -> List[STrack]: for it in u_track: track = r_tracked_stracks[it] if not track.state == TrackState.Lost: - track.mark_lost() + track.state = TrackState.Lost lost_stracks.append(track) """Deal with unconfirmed tracks, usually tracks with only one beginning frame""" @@ -456,7 +465,7 @@ def update_with_tensors(self, tensors: np.ndarray) -> List[STrack]: activated_starcks.append(unconfirmed[itracked]) for it in u_unconfirmed: track = unconfirmed[it] - track.mark_removed() + track.state = TrackState.Removed removed_stracks.append(track) """ Step 4: Init new stracks""" @@ -469,7 +478,7 @@ def update_with_tensors(self, tensors: np.ndarray) -> List[STrack]: """ Step 5: Update state""" for track in self.lost_tracks: if self.frame_id - track.end_frame > self.max_time_lost: - track.mark_removed() + track.state = TrackState.Removed removed_stracks.append(track) self.tracked_tracks = [ From e8983bf4271739ade352b128fe51c0ad13ca2c86 Mon Sep 17 
00:00:00 2001 From: LinasKo Date: Fri, 18 Oct 2024 01:50:40 +0300 Subject: [PATCH 099/161] BaseTrack: Move trivial values used in STrack to STrack --- supervision/tracker/byte_tracker/basetrack.py | 3 --- supervision/tracker/byte_tracker/core.py | 1 + supervision/tracker/byte_tracker/matching.py | 9 ++++++--- 3 files changed, 7 insertions(+), 6 deletions(-) diff --git a/supervision/tracker/byte_tracker/basetrack.py b/supervision/tracker/byte_tracker/basetrack.py index 079bab228..8ae511b76 100644 --- a/supervision/tracker/byte_tracker/basetrack.py +++ b/supervision/tracker/byte_tracker/basetrack.py @@ -1,8 +1,5 @@ class BaseTrack: def __init__(self): - self.is_activated = False - - self.score = 0 self.start_frame = 0 self.frame_id = 0 diff --git a/supervision/tracker/byte_tracker/core.py b/supervision/tracker/byte_tracker/core.py index 362d9d07e..37d3a8545 100644 --- a/supervision/tracker/byte_tracker/core.py +++ b/supervision/tracker/byte_tracker/core.py @@ -50,6 +50,7 @@ def __init__( ): super().__init__() self.state = TrackState.New + self.is_activated = False self._tlwh = np.asarray(tlwh, dtype=np.float32) self.kalman_filter = None diff --git a/supervision/tracker/byte_tracker/matching.py b/supervision/tracker/byte_tracker/matching.py index f791e518f..4a95bc2cd 100644 --- a/supervision/tracker/byte_tracker/matching.py +++ b/supervision/tracker/byte_tracker/matching.py @@ -1,10 +1,13 @@ -from typing import List, Tuple +from typing import TYPE_CHECKING, List, Tuple import numpy as np from scipy.optimize import linear_sum_assignment from supervision.detection.utils import box_iou_batch +if TYPE_CHECKING: + from supervision.tracker.byte_tracker.core import STrack + def indices_to_matches( cost_matrix: np.ndarray, indices: np.ndarray, thresh: float @@ -53,11 +56,11 @@ def iou_distance(atracks: List, btracks: List) -> np.ndarray: return cost_matrix -def fuse_score(cost_matrix: np.ndarray, detections: List) -> np.ndarray: +def fuse_score(cost_matrix: np.ndarray, 
stracks: List["STrack"]) -> np.ndarray: if cost_matrix.size == 0: return cost_matrix iou_sim = 1 - cost_matrix - det_scores = np.array([det.score for det in detections]) + det_scores = np.array([strack.score for strack in stracks]) det_scores = np.expand_dims(det_scores, axis=0).repeat(cost_matrix.shape[0], axis=0) fuse_sim = iou_sim * det_scores fuse_cost = 1 - fuse_sim From 37cc16e7a871f5ee0e63fa9683126aa353d5b068 Mon Sep 17 00:00:00 2001 From: LinasKo Date: Fri, 1 Nov 2024 20:30:21 +0200 Subject: [PATCH 100/161] Bugfix: remove reset start frame when tracker is reset --- supervision/tracker/byte_tracker/basetrack.py | 4 ---- 1 file changed, 4 deletions(-) diff --git a/supervision/tracker/byte_tracker/basetrack.py b/supervision/tracker/byte_tracker/basetrack.py index 8ae511b76..bd945653f 100644 --- a/supervision/tracker/byte_tracker/basetrack.py +++ b/supervision/tracker/byte_tracker/basetrack.py @@ -7,10 +7,6 @@ def __init__(self): def end_frame(self) -> int: return self.frame_id - def reset_counter(self): - self.frame_id = 0 - self.start_frame = 0 - def activate(self, *args, **kwargs): raise NotImplementedError From 89be6f92d2e5896c8b413792668071dae3c1990a Mon Sep 17 00:00:00 2001 From: LinasKo Date: Fri, 18 Oct 2024 02:02:27 +0300 Subject: [PATCH 101/161] Get rid of BaseTrack completely * 85%-90% of it was used in STrack * 10% was used by ByteTrack --- supervision/tracker/byte_tracker/basetrack.py | 17 ----------------- supervision/tracker/byte_tracker/core.py | 11 ++++++----- 2 files changed, 6 insertions(+), 22 deletions(-) delete mode 100644 supervision/tracker/byte_tracker/basetrack.py diff --git a/supervision/tracker/byte_tracker/basetrack.py b/supervision/tracker/byte_tracker/basetrack.py deleted file mode 100644 index bd945653f..000000000 --- a/supervision/tracker/byte_tracker/basetrack.py +++ /dev/null @@ -1,17 +0,0 @@ -class BaseTrack: - def __init__(self): - self.start_frame = 0 - self.frame_id = 0 - - @property - def end_frame(self) -> int: - return 
self.frame_id - - def activate(self, *args, **kwargs): - raise NotImplementedError - - def predict(self): - raise NotImplementedError - - def update(self, *args, **kwargs): - raise NotImplementedError diff --git a/supervision/tracker/byte_tracker/core.py b/supervision/tracker/byte_tracker/core.py index 37d3a8545..a1ae63f44 100644 --- a/supervision/tracker/byte_tracker/core.py +++ b/supervision/tracker/byte_tracker/core.py @@ -6,7 +6,6 @@ from supervision.detection.core import Detections from supervision.detection.utils import box_iou_batch from supervision.tracker.byte_tracker import matching -from supervision.tracker.byte_tracker.basetrack import BaseTrack from supervision.tracker.byte_tracker.kalman_filter import KalmanFilter @@ -37,7 +36,7 @@ def NO_ID(self) -> int: return -1 -class STrack(BaseTrack): +class STrack: shared_kalman = KalmanFilter() def __init__( @@ -48,9 +47,10 @@ def __init__( internal_id_counter: IdCounter, external_id_counter: IdCounter, ): - super().__init__() self.state = TrackState.New self.is_activated = False + self.start_frame = 0 + self.frame_id = 0 self._tlwh = np.asarray(tlwh, dtype=np.float32) self.kalman_filter = None @@ -193,7 +193,7 @@ def tlwh_to_tlbr(tlwh): def __repr__(self): return "OT_{}_({}-{})".format( - self.internal_track_id, self.start_frame, self.end_frame + self.internal_track_id, self.start_frame, self.frame_id ) @@ -236,6 +236,7 @@ def __init__( self.minimum_matching_threshold = minimum_matching_threshold self.frame_id = 0 + self.start_frame = 0 self.det_thresh = self.track_activation_threshold + 0.1 self.max_time_lost = int(frame_rate / 30.0 * lost_track_buffer) self.minimum_consecutive_frames = minimum_consecutive_frames @@ -478,7 +479,7 @@ def update_with_tensors(self, tensors: np.ndarray) -> List[STrack]: activated_starcks.append(track) """ Step 5: Update state""" for track in self.lost_tracks: - if self.frame_id - track.end_frame > self.max_time_lost: + if self.frame_id - track.frame_id > self.max_time_lost: 
track.state = TrackState.Removed removed_stracks.append(track) From b380bd2df79a167ffd2b768896eda49eb048c7b5 Mon Sep 17 00:00:00 2001 From: LinasKo Date: Fri, 18 Oct 2024 02:23:08 +0300 Subject: [PATCH 102/161] Correct but useless: Moved shared kalman to ByteTrack * It's not stateful! All it keeps is the initial params - motion and update matrices --- supervision/tracker/byte_tracker/core.py | 13 ++++++++----- 1 file changed, 8 insertions(+), 5 deletions(-) diff --git a/supervision/tracker/byte_tracker/core.py b/supervision/tracker/byte_tracker/core.py index a1ae63f44..019ce890e 100644 --- a/supervision/tracker/byte_tracker/core.py +++ b/supervision/tracker/byte_tracker/core.py @@ -37,13 +37,12 @@ def NO_ID(self) -> int: class STrack: - shared_kalman = KalmanFilter() - def __init__( self, tlwh, score, minimum_consecutive_frames, + shared_kalman: KalmanFilter, internal_id_counter: IdCounter, external_id_counter: IdCounter, ): @@ -54,6 +53,7 @@ def __init__( self._tlwh = np.asarray(tlwh, dtype=np.float32) self.kalman_filter = None + self.shared_kalman = shared_kalman self.mean, self.covariance = None, None self.is_activated = False @@ -76,7 +76,7 @@ def predict(self): ) @staticmethod - def multi_predict(stracks): + def multi_predict(stracks, shared_kalman: KalmanFilter): if len(stracks) > 0: multi_mean = [] multi_covariance = [] @@ -86,7 +86,7 @@ def multi_predict(stracks): if st.state != TrackState.Tracked: multi_mean[i][7] = 0 - multi_mean, multi_covariance = STrack.shared_kalman.multi_predict( + multi_mean, multi_covariance = shared_kalman.multi_predict( np.asarray(multi_mean), np.asarray(multi_covariance) ) for i, (mean, cov) in enumerate(zip(multi_mean, multi_covariance)): @@ -241,6 +241,7 @@ def __init__( self.max_time_lost = int(frame_rate / 30.0 * lost_track_buffer) self.minimum_consecutive_frames = minimum_consecutive_frames self.kalman_filter = KalmanFilter() + self.shared_kalman = KalmanFilter() self.tracked_tracks: List[STrack] = [] self.lost_tracks: 
List[STrack] = [] @@ -374,6 +375,7 @@ def update_with_tensors(self, tensors: np.ndarray) -> List[STrack]: STrack.tlbr_to_tlwh(tlbr), s, self.minimum_consecutive_frames, + self.shared_kalman, self.internal_id_counter, self.external_id_counter, ) @@ -395,7 +397,7 @@ def update_with_tensors(self, tensors: np.ndarray) -> List[STrack]: """ Step 2: First association, with high score detection boxes""" strack_pool = joint_tracks(tracked_stracks, self.lost_tracks) # Predict the current location with KF - STrack.multi_predict(strack_pool) + STrack.multi_predict(strack_pool, self.shared_kalman) dists = matching.iou_distance(strack_pool, detections) dists = matching.fuse_score(dists, detections) @@ -422,6 +424,7 @@ def update_with_tensors(self, tensors: np.ndarray) -> List[STrack]: STrack.tlbr_to_tlwh(tlbr), s, self.minimum_consecutive_frames, + self.shared_kalman, self.internal_id_counter, self.external_id_counter, ) From fd1277f29a6cf503991f254648bb9d3feb7d2978 Mon Sep 17 00:00:00 2001 From: LinasKo Date: Fri, 1 Nov 2024 20:33:46 +0200 Subject: [PATCH 103/161] Add types, move STrack into a new file --- supervision/tracker/byte_tracker/core.py | 206 +----------------- supervision/tracker/byte_tracker/matching.py | 6 +- .../byte_tracker/single_object_track.py | 177 +++++++++++++++ supervision/tracker/byte_tracker/utils.py | 18 ++ 4 files changed, 209 insertions(+), 198 deletions(-) create mode 100644 supervision/tracker/byte_tracker/single_object_track.py create mode 100644 supervision/tracker/byte_tracker/utils.py diff --git a/supervision/tracker/byte_tracker/core.py b/supervision/tracker/byte_tracker/core.py index 019ce890e..ecd844adb 100644 --- a/supervision/tracker/byte_tracker/core.py +++ b/supervision/tracker/byte_tracker/core.py @@ -1,4 +1,3 @@ -from enum import Enum from typing import List, Tuple import numpy as np @@ -7,194 +6,8 @@ from supervision.detection.utils import box_iou_batch from supervision.tracker.byte_tracker import matching from 
supervision.tracker.byte_tracker.kalman_filter import KalmanFilter - - -class TrackState(Enum): - New = 0 - Tracked = 1 - Lost = 2 - Removed = 3 - - -class IdCounter: - def __init__(self, start_id: int = 0): - self.start_id = start_id - if self.start_id <= self.NO_ID: - raise ValueError("start_id must be greater than -1") - self.reset() - - def reset(self) -> None: - self._id = self.start_id - - def new_id(self) -> int: - returned_id = self._id - self._id += 1 - return returned_id - - @property - def NO_ID(self) -> int: - return -1 - - -class STrack: - def __init__( - self, - tlwh, - score, - minimum_consecutive_frames, - shared_kalman: KalmanFilter, - internal_id_counter: IdCounter, - external_id_counter: IdCounter, - ): - self.state = TrackState.New - self.is_activated = False - self.start_frame = 0 - self.frame_id = 0 - - self._tlwh = np.asarray(tlwh, dtype=np.float32) - self.kalman_filter = None - self.shared_kalman = shared_kalman - self.mean, self.covariance = None, None - self.is_activated = False - - self.score = score - self.tracklet_len = 0 - - self.minimum_consecutive_frames = minimum_consecutive_frames - - self.internal_id_counter = internal_id_counter - self.external_id_counter = external_id_counter - self.internal_track_id = self.internal_id_counter.NO_ID - self.external_track_id = self.external_id_counter.NO_ID - - def predict(self): - mean_state = self.mean.copy() - if self.state != TrackState.Tracked: - mean_state[7] = 0 - self.mean, self.covariance = self.kalman_filter.predict( - mean_state, self.covariance - ) - - @staticmethod - def multi_predict(stracks, shared_kalman: KalmanFilter): - if len(stracks) > 0: - multi_mean = [] - multi_covariance = [] - for i, st in enumerate(stracks): - multi_mean.append(st.mean.copy()) - multi_covariance.append(st.covariance) - if st.state != TrackState.Tracked: - multi_mean[i][7] = 0 - - multi_mean, multi_covariance = shared_kalman.multi_predict( - np.asarray(multi_mean), np.asarray(multi_covariance) - ) - for 
i, (mean, cov) in enumerate(zip(multi_mean, multi_covariance)): - stracks[i].mean = mean - stracks[i].covariance = cov - - def activate(self, kalman_filter, frame_id): - """Start a new tracklet""" - self.kalman_filter = kalman_filter - self.internal_track_id = self.internal_id_counter.new_id() - self.mean, self.covariance = self.kalman_filter.initiate( - self.tlwh_to_xyah(self._tlwh) - ) - - self.tracklet_len = 0 - self.state = TrackState.Tracked - if frame_id == 1: - self.is_activated = True - - if self.minimum_consecutive_frames == 1: - self.external_track_id = self.external_id_counter.new_id() - - self.frame_id = frame_id - self.start_frame = frame_id - - def re_activate(self, new_track, frame_id): - self.mean, self.covariance = self.kalman_filter.update( - self.mean, self.covariance, self.tlwh_to_xyah(new_track.tlwh) - ) - self.tracklet_len = 0 - self.state = TrackState.Tracked - - self.frame_id = frame_id - self.score = new_track.score - - def update(self, new_track, frame_id): - """ - Update a matched track - :type new_track: STrack - :type frame_id: int - :type update_feature: bool - :return: - """ - self.frame_id = frame_id - self.tracklet_len += 1 - - new_tlwh = new_track.tlwh - self.mean, self.covariance = self.kalman_filter.update( - self.mean, self.covariance, self.tlwh_to_xyah(new_tlwh) - ) - self.state = TrackState.Tracked - if self.tracklet_len == self.minimum_consecutive_frames: - self.is_activated = True - if self.external_track_id == self.external_id_counter.NO_ID: - self.external_track_id = self.external_id_counter.new_id() - - self.score = new_track.score - - @property - def tlwh(self): - """Get current position in bounding box format `(top left x, top left y, - width, height)`. 
- """ - if self.mean is None: - return self._tlwh.copy() - ret = self.mean[:4].copy() - ret[2] *= ret[3] - ret[:2] -= ret[2:] / 2 - return ret - - @property - def tlbr(self): - """Convert bounding box to format `(min x, min y, max x, max y)`, i.e., - `(top left, bottom right)`. - """ - ret = self.tlwh.copy() - ret[2:] += ret[:2] - return ret - - @staticmethod - def tlwh_to_xyah(tlwh): - """Convert bounding box to format `(center x, center y, aspect ratio, - height)`, where the aspect ratio is `width / height`. - """ - ret = np.asarray(tlwh).copy() - ret[:2] += ret[2:] / 2 - ret[2] /= ret[3] - return ret - - def to_xyah(self): - return self.tlwh_to_xyah(self.tlwh) - - @staticmethod - def tlbr_to_tlwh(tlbr): - ret = np.asarray(tlbr).copy() - ret[2:] -= ret[:2] - return ret - - @staticmethod - def tlwh_to_tlbr(tlwh): - ret = np.asarray(tlwh).copy() - ret[2:] += ret[:2] - return ret - - def __repr__(self): - return "OT_{}_({}-{})".format( - self.internal_track_id, self.start_frame, self.frame_id - ) +from supervision.tracker.byte_tracker.single_object_track import STrack, TrackState +from supervision.tracker.byte_tracker.utils import IdCounter class ByteTrack: @@ -291,7 +104,6 @@ def callback(frame: np.ndarray, index: int) -> np.ndarray: ) ``` """ - tensors = np.hstack( ( detections.xyxy, @@ -323,7 +135,7 @@ def callback(frame: np.ndarray, index: int) -> np.ndarray: return detections - def reset(self): + def reset(self) -> None: """ Resets the internal state of the ByteTrack tracker. 
@@ -335,9 +147,9 @@ def reset(self): self.frame_id = 0 self.internal_id_counter.reset() self.external_id_counter.reset() - self.tracked_tracks: List[STrack] = [] - self.lost_tracks: List[STrack] = [] - self.removed_tracks: List[STrack] = [] + self.tracked_tracks = [] + self.lost_tracks = [] + self.removed_tracks = [] def update_with_tensors(self, tensors: np.ndarray) -> List[STrack]: """ @@ -529,7 +341,7 @@ def joint_tracks( return result -def sub_tracks(track_list_a: List, track_list_b: List) -> List[int]: +def sub_tracks(track_list_a: List[STrack], track_list_b: List[STrack]) -> List[int]: """ Returns a list of tracks from track_list_a after removing any tracks that share the same internal_track_id with tracks in track_list_b. @@ -550,7 +362,9 @@ def sub_tracks(track_list_a: List, track_list_b: List) -> List[int]: return list(tracks.values()) -def remove_duplicate_tracks(tracks_a: List, tracks_b: List) -> Tuple[List, List]: +def remove_duplicate_tracks( + tracks_a: List[STrack], tracks_b: List[STrack] +) -> Tuple[List[STrack], List[STrack]]: pairwise_distance = matching.iou_distance(tracks_a, tracks_b) matching_pairs = np.where(pairwise_distance < 0.15) diff --git a/supervision/tracker/byte_tracker/matching.py b/supervision/tracker/byte_tracker/matching.py index 4a95bc2cd..eb774d4c4 100644 --- a/supervision/tracker/byte_tracker/matching.py +++ b/supervision/tracker/byte_tracker/matching.py @@ -1,3 +1,5 @@ +from __future__ import annotations + from typing import TYPE_CHECKING, List, Tuple import numpy as np @@ -38,7 +40,7 @@ def linear_assignment( return indices_to_matches(cost_matrix, indices, thresh) -def iou_distance(atracks: List, btracks: List) -> np.ndarray: +def iou_distance(atracks: List[STrack], btracks: List[STrack]) -> np.ndarray: if (len(atracks) > 0 and isinstance(atracks[0], np.ndarray)) or ( len(btracks) > 0 and isinstance(btracks[0], np.ndarray) ): @@ -56,7 +58,7 @@ def iou_distance(atracks: List, btracks: List) -> np.ndarray: return cost_matrix 
-def fuse_score(cost_matrix: np.ndarray, stracks: List["STrack"]) -> np.ndarray: +def fuse_score(cost_matrix: np.ndarray, stracks: List[STrack]) -> np.ndarray: if cost_matrix.size == 0: return cost_matrix iou_sim = 1 - cost_matrix diff --git a/supervision/tracker/byte_tracker/single_object_track.py b/supervision/tracker/byte_tracker/single_object_track.py new file mode 100644 index 000000000..685b86e3e --- /dev/null +++ b/supervision/tracker/byte_tracker/single_object_track.py @@ -0,0 +1,177 @@ +from __future__ import annotations +from enum import Enum +from typing import List + +import numpy as np +import numpy.typing as npt + +from supervision.tracker.byte_tracker.kalman_filter import KalmanFilter +from supervision.tracker.byte_tracker.utils import IdCounter + + +class TrackState(Enum): + New = 0 + Tracked = 1 + Lost = 2 + Removed = 3 + +class STrack: + def __init__( + self, + tlwh: npt.NDArray[np.float32], + score: npt.NDArray[np.float32], + minimum_consecutive_frames: int, + shared_kalman: KalmanFilter, + internal_id_counter: IdCounter, + external_id_counter: IdCounter, + ): + self.state = TrackState.New + self.is_activated = False + self.start_frame = 0 + self.frame_id = 0 + + self._tlwh = np.asarray(tlwh, dtype=np.float32) + self.kalman_filter = None + self.shared_kalman = shared_kalman + self.mean, self.covariance = None, None + self.is_activated = False + + self.score = score + self.tracklet_len = 0 + + self.minimum_consecutive_frames = minimum_consecutive_frames + + self.internal_id_counter = internal_id_counter + self.external_id_counter = external_id_counter + self.internal_track_id = self.internal_id_counter.NO_ID + self.external_track_id = self.external_id_counter.NO_ID + + def predict(self) -> None: + mean_state = self.mean.copy() + if self.state != TrackState.Tracked: + mean_state[7] = 0 + self.mean, self.covariance = self.kalman_filter.predict( + mean_state, self.covariance + ) + + @staticmethod + def multi_predict(stracks: List[STrack], 
shared_kalman: KalmanFilter) -> None: + if len(stracks) > 0: + multi_mean = [] + multi_covariance = [] + for i, st in enumerate(stracks): + multi_mean.append(st.mean.copy()) + multi_covariance.append(st.covariance) + if st.state != TrackState.Tracked: + multi_mean[i][7] = 0 + + multi_mean, multi_covariance = shared_kalman.multi_predict( + np.asarray(multi_mean), np.asarray(multi_covariance) + ) + for i, (mean, cov) in enumerate(zip(multi_mean, multi_covariance)): + stracks[i].mean = mean + stracks[i].covariance = cov + + def activate(self, kalman_filter: KalmanFilter, frame_id: int) -> None: + """Start a new tracklet""" + self.kalman_filter = kalman_filter + self.internal_track_id = self.internal_id_counter.new_id() + self.mean, self.covariance = self.kalman_filter.initiate( + self.tlwh_to_xyah(self._tlwh) + ) + + self.tracklet_len = 0 + self.state = TrackState.Tracked + if frame_id == 1: + self.is_activated = True + + if self.minimum_consecutive_frames == 1: + self.external_track_id = self.external_id_counter.new_id() + + self.frame_id = frame_id + self.start_frame = frame_id + + def re_activate(self, new_track: STrack, frame_id: int) -> None: + self.mean, self.covariance = self.kalman_filter.update( + self.mean, self.covariance, self.tlwh_to_xyah(new_track.tlwh) + ) + self.tracklet_len = 0 + self.state = TrackState.Tracked + + self.frame_id = frame_id + self.score = new_track.score + + def update(self, new_track: STrack, frame_id: int) -> None: + """ + Update a matched track + :type new_track: STrack + :type frame_id: int + :type update_feature: bool + :return: + """ + self.frame_id = frame_id + self.tracklet_len += 1 + + new_tlwh = new_track.tlwh + self.mean, self.covariance = self.kalman_filter.update( + self.mean, self.covariance, self.tlwh_to_xyah(new_tlwh) + ) + self.state = TrackState.Tracked + if self.tracklet_len == self.minimum_consecutive_frames: + self.is_activated = True + if self.external_track_id == self.external_id_counter.NO_ID: + 
self.external_track_id = self.external_id_counter.new_id() + + self.score = new_track.score + + @property + def tlwh(self) -> npt.NDArray[np.float32]: + """Get current position in bounding box format `(top left x, top left y, + width, height)`. + """ + if self.mean is None: + return self._tlwh.copy() + ret = self.mean[:4].copy() + ret[2] *= ret[3] + ret[:2] -= ret[2:] / 2 + return ret + + @property + def tlbr(self) -> npt.NDArray[np.float32]: + """Convert bounding box to format `(min x, min y, max x, max y)`, i.e., + `(top left, bottom right)`. + """ + ret = self.tlwh.copy() + ret[2:] += ret[:2] + return ret + + @staticmethod + def tlwh_to_xyah(tlwh) -> npt.NDArray[np.float32]: + """Convert bounding box to format `(center x, center y, aspect ratio, + height)`, where the aspect ratio is `width / height`. + """ + ret = np.asarray(tlwh).copy() + ret[:2] += ret[2:] / 2 + ret[2] /= ret[3] + return ret + + def to_xyah(self) -> npt.NDArray[np.float32]: + return self.tlwh_to_xyah(self.tlwh) + + @staticmethod + def tlbr_to_tlwh(tlbr) -> npt.NDArray[np.float32]: + ret = np.asarray(tlbr).copy() + ret[2:] -= ret[:2] + return ret + + @staticmethod + def tlwh_to_tlbr(tlwh) -> npt.NDArray[np.float32]: + ret = np.asarray(tlwh).copy() + ret[2:] += ret[:2] + return ret + + def __repr__(self) -> str: + return "OT_{}_({}-{})".format( + self.internal_track_id, self.start_frame, self.frame_id + ) + diff --git a/supervision/tracker/byte_tracker/utils.py b/supervision/tracker/byte_tracker/utils.py new file mode 100644 index 000000000..b7a62645d --- /dev/null +++ b/supervision/tracker/byte_tracker/utils.py @@ -0,0 +1,18 @@ +class IdCounter: + def __init__(self, start_id: int = 0): + self.start_id = start_id + if self.start_id <= self.NO_ID: + raise ValueError("start_id must be greater than -1") + self.reset() + + def reset(self) -> None: + self._id = self.start_id + + def new_id(self) -> int: + returned_id = self._id + self._id += 1 + return returned_id + + @property + def NO_ID(self) -> 
int: + return -1 \ No newline at end of file From cfd02c8c5cd485206a0ac2c395eb0cfc83d20937 Mon Sep 17 00:00:00 2001 From: LinasKo Date: Fri, 1 Nov 2024 20:34:58 +0200 Subject: [PATCH 104/161] =?UTF-8?q?fix(pre=5Fcommit):=20=F0=9F=8E=A8=20aut?= =?UTF-8?q?o=20format=20pre-commit=20hooks?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- supervision/tracker/byte_tracker/single_object_track.py | 3 ++- supervision/tracker/byte_tracker/utils.py | 2 +- 2 files changed, 3 insertions(+), 2 deletions(-) diff --git a/supervision/tracker/byte_tracker/single_object_track.py b/supervision/tracker/byte_tracker/single_object_track.py index 685b86e3e..3b9bfdf2d 100644 --- a/supervision/tracker/byte_tracker/single_object_track.py +++ b/supervision/tracker/byte_tracker/single_object_track.py @@ -1,4 +1,5 @@ from __future__ import annotations + from enum import Enum from typing import List @@ -15,6 +16,7 @@ class TrackState(Enum): Lost = 2 Removed = 3 + class STrack: def __init__( self, @@ -174,4 +176,3 @@ def __repr__(self) -> str: return "OT_{}_({}-{})".format( self.internal_track_id, self.start_frame, self.frame_id ) - diff --git a/supervision/tracker/byte_tracker/utils.py b/supervision/tracker/byte_tracker/utils.py index b7a62645d..b25797ce9 100644 --- a/supervision/tracker/byte_tracker/utils.py +++ b/supervision/tracker/byte_tracker/utils.py @@ -15,4 +15,4 @@ def new_id(self) -> int: @property def NO_ID(self) -> int: - return -1 \ No newline at end of file + return -1 From d3210ed0cc5d330c9443380ef86144111aa49cab Mon Sep 17 00:00:00 2001 From: LinasKo Date: Fri, 1 Nov 2024 20:36:00 +0200 Subject: [PATCH 105/161] Minor rename to avoid name shadowing --- supervision/tracker/byte_tracker/core.py | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/supervision/tracker/byte_tracker/core.py b/supervision/tracker/byte_tracker/core.py index ecd844adb..f5232cac7 100644 --- a/supervision/tracker/byte_tracker/core.py +++ 
b/supervision/tracker/byte_tracker/core.py @@ -185,13 +185,13 @@ def update_with_tensors(self, tensors: np.ndarray) -> List[STrack]: detections = [ STrack( STrack.tlbr_to_tlwh(tlbr), - s, + score_keep, self.minimum_consecutive_frames, self.shared_kalman, self.internal_id_counter, self.external_id_counter, ) - for (tlbr, s) in zip(dets, scores_keep) + for (tlbr, score_keep) in zip(dets, scores_keep) ] else: detections = [] @@ -234,13 +234,13 @@ def update_with_tensors(self, tensors: np.ndarray) -> List[STrack]: detections_second = [ STrack( STrack.tlbr_to_tlwh(tlbr), - s, + score_second, self.minimum_consecutive_frames, self.shared_kalman, self.internal_id_counter, self.external_id_counter, ) - for (tlbr, s) in zip(dets_second, scores_second) + for (tlbr, score_second) in zip(dets_second, scores_second) ] else: detections_second = [] From 9eee76a6d1dccef41eebd36d71089b88396c151a Mon Sep 17 00:00:00 2001 From: LinasKo Date: Fri, 1 Nov 2024 20:14:34 +0200 Subject: [PATCH 106/161] Remove start_frame from ByteTrack --- supervision/tracker/byte_tracker/core.py | 1 - 1 file changed, 1 deletion(-) diff --git a/supervision/tracker/byte_tracker/core.py b/supervision/tracker/byte_tracker/core.py index f5232cac7..cb46af733 100644 --- a/supervision/tracker/byte_tracker/core.py +++ b/supervision/tracker/byte_tracker/core.py @@ -49,7 +49,6 @@ def __init__( self.minimum_matching_threshold = minimum_matching_threshold self.frame_id = 0 - self.start_frame = 0 self.det_thresh = self.track_activation_threshold + 0.1 self.max_time_lost = int(frame_rate / 30.0 * lost_track_buffer) self.minimum_consecutive_frames = minimum_consecutive_frames From 22d827fac41a150a5f8955b062acb071819332ab Mon Sep 17 00:00:00 2001 From: LinasKo Date: Fri, 1 Nov 2024 20:41:38 +0200 Subject: [PATCH 107/161] Minor fix: show current NO_ID value of the ID counter when printing error --- supervision/tracker/byte_tracker/utils.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git 
a/supervision/tracker/byte_tracker/utils.py b/supervision/tracker/byte_tracker/utils.py index b25797ce9..cd2a1036b 100644 --- a/supervision/tracker/byte_tracker/utils.py +++ b/supervision/tracker/byte_tracker/utils.py @@ -2,7 +2,7 @@ class IdCounter: def __init__(self, start_id: int = 0): self.start_id = start_id if self.start_id <= self.NO_ID: - raise ValueError("start_id must be greater than -1") + raise ValueError(f"start_id must be greater than {self.NO_ID}") self.reset() def reset(self) -> None: From 62d869ca878d6930ee152f2e3b67e5074952fe05 Mon Sep 17 00:00:00 2001 From: Onuralp SEZER Date: Fri, 1 Nov 2024 22:57:17 +0300 Subject: [PATCH 108/161] =?UTF-8?q?fix:=20=F0=9F=90=9E=20Remove=20lowercas?= =?UTF-8?q?e=20conversion=20for=20unique=20class=20names=20in=20merge=5Fcl?= =?UTF-8?q?ass=5Flists=20function?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Signed-off-by: Onuralp SEZER --- supervision/dataset/utils.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/supervision/dataset/utils.py b/supervision/dataset/utils.py index 20b80978f..6c30eeab0 100644 --- a/supervision/dataset/utils.py +++ b/supervision/dataset/utils.py @@ -55,7 +55,7 @@ def merge_class_lists(class_lists: List[List[str]]) -> List[str]: for class_list in class_lists: for class_name in class_list: - unique_classes.add(class_name.lower()) + unique_classes.add(class_name) return sorted(list(unique_classes)) From 6d032533706770150e63c75e6ddc6b666b8bc687 Mon Sep 17 00:00:00 2001 From: Onuralp SEZER Date: Sat, 2 Nov 2024 06:37:26 +0300 Subject: [PATCH 109/161] =?UTF-8?q?docs:=20=F0=9F=93=9D=20hacktoberfest=20?= =?UTF-8?q?picture=20removed?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- README.md | 2 -- 1 file changed, 2 deletions(-) diff --git a/README.md b/README.md index dd340ae53..fe569183e 100644 --- a/README.md +++ b/README.md @@ -34,8 +34,6 @@ **We write your reusable computer vision 
tools.** Whether you need to load your dataset from your hard drive, draw detections on an image or video, or count how many detections are in a zone. You can count on us! 🤝 -[![supervision-hackfest](https://media.roboflow.com/supervision/supervision-hacktoberfest-banner-2024.png)](https://github.com/roboflow/supervision/issues?q=is%3Aissue+is%3Aopen+label%3Ahacktoberfest) - ## 💻 install Pip install the supervision package in a From 940243fc18aec2989327397051c036319f2ca831 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 4 Nov 2024 00:48:54 +0000 Subject: [PATCH 110/161] :arrow_up: Bump ruff from 0.7.1 to 0.7.2 Bumps [ruff](https://github.com/astral-sh/ruff) from 0.7.1 to 0.7.2. - [Release notes](https://github.com/astral-sh/ruff/releases) - [Changelog](https://github.com/astral-sh/ruff/blob/main/CHANGELOG.md) - [Commits](https://github.com/astral-sh/ruff/compare/0.7.1...0.7.2) --- updated-dependencies: - dependency-name: ruff dependency-type: direct:development update-type: version-update:semver-patch ... Signed-off-by: dependabot[bot] --- poetry.lock | 38 +++++++++++++++++++------------------- 1 file changed, 19 insertions(+), 19 deletions(-) diff --git a/poetry.lock b/poetry.lock index 6e2817a5d..d7e29a430 100644 --- a/poetry.lock +++ b/poetry.lock @@ -4037,29 +4037,29 @@ files = [ [[package]] name = "ruff" -version = "0.7.1" +version = "0.7.2" description = "An extremely fast Python linter and code formatter, written in Rust." 
optional = false python-versions = ">=3.7" files = [ - {file = "ruff-0.7.1-py3-none-linux_armv6l.whl", hash = "sha256:cb1bc5ed9403daa7da05475d615739cc0212e861b7306f314379d958592aaa89"}, - {file = "ruff-0.7.1-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:27c1c52a8d199a257ff1e5582d078eab7145129aa02721815ca8fa4f9612dc35"}, - {file = "ruff-0.7.1-py3-none-macosx_11_0_arm64.whl", hash = "sha256:588a34e1ef2ea55b4ddfec26bbe76bc866e92523d8c6cdec5e8aceefeff02d99"}, - {file = "ruff-0.7.1-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:94fc32f9cdf72dc75c451e5f072758b118ab8100727168a3df58502b43a599ca"}, - {file = "ruff-0.7.1-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:985818742b833bffa543a84d1cc11b5e6871de1b4e0ac3060a59a2bae3969250"}, - {file = "ruff-0.7.1-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:32f1e8a192e261366c702c5fb2ece9f68d26625f198a25c408861c16dc2dea9c"}, - {file = "ruff-0.7.1-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:699085bf05819588551b11751eff33e9ca58b1b86a6843e1b082a7de40da1565"}, - {file = "ruff-0.7.1-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:344cc2b0814047dc8c3a8ff2cd1f3d808bb23c6658db830d25147339d9bf9ea7"}, - {file = "ruff-0.7.1-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:4316bbf69d5a859cc937890c7ac7a6551252b6a01b1d2c97e8fc96e45a7c8b4a"}, - {file = "ruff-0.7.1-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:79d3af9dca4c56043e738a4d6dd1e9444b6d6c10598ac52d146e331eb155a8ad"}, - {file = "ruff-0.7.1-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:c5c121b46abde94a505175524e51891f829414e093cd8326d6e741ecfc0a9112"}, - {file = "ruff-0.7.1-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:8422104078324ea250886954e48f1373a8fe7de59283d747c3a7eca050b4e378"}, - {file = "ruff-0.7.1-py3-none-musllinux_1_2_i686.whl", hash = 
"sha256:56aad830af8a9db644e80098fe4984a948e2b6fc2e73891538f43bbe478461b8"}, - {file = "ruff-0.7.1-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:658304f02f68d3a83c998ad8bf91f9b4f53e93e5412b8f2388359d55869727fd"}, - {file = "ruff-0.7.1-py3-none-win32.whl", hash = "sha256:b517a2011333eb7ce2d402652ecaa0ac1a30c114fbbd55c6b8ee466a7f600ee9"}, - {file = "ruff-0.7.1-py3-none-win_amd64.whl", hash = "sha256:f38c41fcde1728736b4eb2b18850f6d1e3eedd9678c914dede554a70d5241307"}, - {file = "ruff-0.7.1-py3-none-win_arm64.whl", hash = "sha256:19aa200ec824c0f36d0c9114c8ec0087082021732979a359d6f3c390a6ff2a37"}, - {file = "ruff-0.7.1.tar.gz", hash = "sha256:9d8a41d4aa2dad1575adb98a82870cf5db5f76b2938cf2206c22c940034a36f4"}, + {file = "ruff-0.7.2-py3-none-linux_armv6l.whl", hash = "sha256:b73f873b5f52092e63ed540adefc3c36f1f803790ecf2590e1df8bf0a9f72cb8"}, + {file = "ruff-0.7.2-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:5b813ef26db1015953daf476202585512afd6a6862a02cde63f3bafb53d0b2d4"}, + {file = "ruff-0.7.2-py3-none-macosx_11_0_arm64.whl", hash = "sha256:853277dbd9675810c6826dad7a428d52a11760744508340e66bf46f8be9701d9"}, + {file = "ruff-0.7.2-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:21aae53ab1490a52bf4e3bf520c10ce120987b047c494cacf4edad0ba0888da2"}, + {file = "ruff-0.7.2-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:ccc7e0fc6e0cb3168443eeadb6445285abaae75142ee22b2b72c27d790ab60ba"}, + {file = "ruff-0.7.2-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:fd77877a4e43b3a98e5ef4715ba3862105e299af0c48942cc6d51ba3d97dc859"}, + {file = "ruff-0.7.2-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:e00163fb897d35523c70d71a46fbaa43bf7bf9af0f4534c53ea5b96b2e03397b"}, + {file = "ruff-0.7.2-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f3c54b538633482dc342e9b634d91168fe8cc56b30a4b4f99287f4e339103e88"}, + {file = 
"ruff-0.7.2-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7b792468e9804a204be221b14257566669d1db5c00d6bb335996e5cd7004ba80"}, + {file = "ruff-0.7.2-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dba53ed84ac19ae4bfb4ea4bf0172550a2285fa27fbb13e3746f04c80f7fa088"}, + {file = "ruff-0.7.2-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:b19fafe261bf741bca2764c14cbb4ee1819b67adb63ebc2db6401dcd652e3748"}, + {file = "ruff-0.7.2-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:28bd8220f4d8f79d590db9e2f6a0674f75ddbc3847277dd44ac1f8d30684b828"}, + {file = "ruff-0.7.2-py3-none-musllinux_1_2_i686.whl", hash = "sha256:9fd67094e77efbea932e62b5d2483006154794040abb3a5072e659096415ae1e"}, + {file = "ruff-0.7.2-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:576305393998b7bd6c46018f8104ea3a9cb3fa7908c21d8580e3274a3b04b691"}, + {file = "ruff-0.7.2-py3-none-win32.whl", hash = "sha256:fa993cfc9f0ff11187e82de874dfc3611df80852540331bc85c75809c93253a8"}, + {file = "ruff-0.7.2-py3-none-win_amd64.whl", hash = "sha256:dd8800cbe0254e06b8fec585e97554047fb82c894973f7ff18558eee33d1cb88"}, + {file = "ruff-0.7.2-py3-none-win_arm64.whl", hash = "sha256:bb8368cd45bba3f57bb29cbb8d64b4a33f8415d0149d2655c5c8539452ce7760"}, + {file = "ruff-0.7.2.tar.gz", hash = "sha256:2b14e77293380e475b4e3a7a368e14549288ed2931fce259a6f99978669e844f"}, ] [[package]] From 5cce8a159934b4c97d390dfdd30bf1c0a54bb4d0 Mon Sep 17 00:00:00 2001 From: LinasKo Date: Mon, 4 Nov 2024 12:23:56 +0200 Subject: [PATCH 111/161] Remoe Detections.empty metadata field --- supervision/detection/core.py | 16 ++++++---------- supervision/detection/utils.py | 14 +++++++++++--- 2 files changed, 17 insertions(+), 13 deletions(-) diff --git a/supervision/detection/core.py b/supervision/detection/core.py index 8e2d06e2d..32753a30a 100644 --- a/supervision/detection/core.py +++ b/supervision/detection/core.py @@ -965,7 +965,7 @@ def from_ncnn(cls, ncnn_results) -> Detections: ) @classmethod - 
def empty(cls, metadata: Optional[Dict[str, Any]] = None) -> Detections: + def empty(cls) -> Detections: """ Create an empty Detections object with no bounding boxes, confidences, or class IDs. @@ -980,14 +980,10 @@ def empty(cls, metadata: Optional[Dict[str, Any]] = None) -> Detections: empty_detections = Detections.empty() ``` """ - if metadata is not None and not isinstance(metadata, dict): - raise TypeError("Metadata must be a dictionary.") - return cls( xyxy=np.empty((0, 4), dtype=np.float32), confidence=np.array([], dtype=np.float32), class_id=np.array([], dtype=int), - metadata=metadata if metadata is not None else {}, ) def is_empty(self) -> bool: @@ -996,6 +992,7 @@ def is_empty(self) -> bool: """ empty_detections = Detections.empty() empty_detections.data = self.data + empty_detections.metadata = self.metadata return self == empty_detections @classmethod @@ -1052,16 +1049,12 @@ def merge(cls, detections_list: List[Detections]) -> Detections: array([0.1, 0.2, 0.3]) ``` """ - metadata_list = [detections.metadata for detections in detections_list] - detections_list = [ detections for detections in detections_list if not detections.is_empty() ] - metadata = merge_metadata(metadata_list) - if len(detections_list) == 0: - return Detections.empty(metadata=metadata) + return Detections.empty() for detections in detections_list: validate_detections_fields( @@ -1093,6 +1086,9 @@ def stack_or_none(name: str): data = merge_data([d.data for d in detections_list]) + metadata_list = [detections.metadata for detections in detections_list] + metadata = merge_metadata(metadata_list) + return cls( xyxy=xyxy, mask=mask, diff --git a/supervision/detection/utils.py b/supervision/detection/utils.py index d80a4dcb0..c5990b42d 100644 --- a/supervision/detection/utils.py +++ b/supervision/detection/utils.py @@ -820,8 +820,10 @@ def is_metadata_equal(metadata_a: Dict[str, Any], metadata_b: Dict[str, Any]) -> """ return set(metadata_a.keys()) == set(metadata_b.keys()) and all( 
np.array_equal(metadata_a[key], metadata_b[key]) - if isinstance(metadata_a[key], np.ndarray) - and isinstance(metadata_b[key], np.ndarray) + if ( + isinstance(metadata_a[key], np.ndarray) + and isinstance(metadata_b[key], np.ndarray) + ) else metadata_a[key] == metadata_b[key] for key in metadata_a ) @@ -833,6 +835,9 @@ def merge_data( """ Merges the data payloads of a list of Detections instances. + Warning: Assumes that empty detections were filtered-out before passing data to + this function. + Args: data_list: The data payloads of the Detections instances. Each data payload is a dictionary with the same keys, and the values are either lists or @@ -892,6 +897,9 @@ def merge_metadata(metadata_list: List[Dict[str, Any]]) -> Dict[str, Any]: This function combines the metadata dictionaries. If a key appears in more than one dictionary, the values must be identical for the merge to succeed. + Warning: Assumes that empty detections were filtered-out before passing metadata to + this function. + Args: metadata_list (List[Dict[str, Any]]): A list of metadata dictionaries to merge. 
@@ -909,7 +917,7 @@ def merge_metadata(metadata_list: List[Dict[str, Any]]) -> Dict[str, Any]: if not all(keys_set == all_keys_sets[0] for keys_set in all_keys_sets): raise ValueError("All metadata dictionaries must have the same keys to merge.") - merged_metadata = {} + merged_metadata: Dict[str, Any] = {} for metadata in metadata_list: for key, value in metadata.items(): if key in merged_metadata: From d645bd816e454594adf972594ae2dea980392cd9 Mon Sep 17 00:00:00 2001 From: LinasKo Date: Mon, 4 Nov 2024 12:41:16 +0200 Subject: [PATCH 112/161] Fix: Add metadata to public var retrieval test --- test/utils/test_internal.py | 7 +++++-- 1 file changed, 5 insertions(+), 2 deletions(-) diff --git a/test/utils/test_internal.py b/test/utils/test_internal.py index eee614e6c..ff9825e2e 100644 --- a/test/utils/test_internal.py +++ b/test/utils/test_internal.py @@ -121,7 +121,7 @@ def __private_property(self): ( Detections.empty(), False, - {"xyxy", "class_id", "confidence", "mask", "tracker_id", "data"}, + {"xyxy", "class_id", "confidence", "mask", "tracker_id", "data", "metadata"}, DoesNotRaise(), ), ( @@ -134,6 +134,7 @@ def __private_property(self): "mask", "tracker_id", "data", + "metadata", "area", "box_area", }, @@ -149,6 +150,7 @@ def __private_property(self): "mask", "tracker_id", "data", + "metadata" }, DoesNotRaise(), ), @@ -169,13 +171,14 @@ def __private_property(self): "mask", "tracker_id", "data", + "metadata" }, DoesNotRaise(), ), ( Detections.empty(), False, - {"xyxy", "class_id", "confidence", "mask", "tracker_id", "data"}, + {"xyxy", "class_id", "confidence", "mask", "tracker_id", "data", "metadata"}, DoesNotRaise(), ), ], From 9ee1b5dc5ab691a552f0e67e6f4ad5bfea9c2ea2 Mon Sep 17 00:00:00 2001 From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Mon, 4 Nov 2024 10:41:52 +0000 Subject: [PATCH 113/161] =?UTF-8?q?fix(pre=5Fcommit):=20=F0=9F=8E=A8=20aut?= =?UTF-8?q?o=20format=20pre-commit=20hooks?= MIME-Version: 1.0 
Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- test/utils/test_internal.py | 24 ++++++++++++++++++++---- 1 file changed, 20 insertions(+), 4 deletions(-) diff --git a/test/utils/test_internal.py b/test/utils/test_internal.py index ff9825e2e..872822a7c 100644 --- a/test/utils/test_internal.py +++ b/test/utils/test_internal.py @@ -121,7 +121,15 @@ def __private_property(self): ( Detections.empty(), False, - {"xyxy", "class_id", "confidence", "mask", "tracker_id", "data", "metadata"}, + { + "xyxy", + "class_id", + "confidence", + "mask", + "tracker_id", + "data", + "metadata", + }, DoesNotRaise(), ), ( @@ -150,7 +158,7 @@ def __private_property(self): "mask", "tracker_id", "data", - "metadata" + "metadata", }, DoesNotRaise(), ), @@ -171,14 +179,22 @@ def __private_property(self): "mask", "tracker_id", "data", - "metadata" + "metadata", }, DoesNotRaise(), ), ( Detections.empty(), False, - {"xyxy", "class_id", "confidence", "mask", "tracker_id", "data", "metadata"}, + { + "xyxy", + "class_id", + "confidence", + "mask", + "tracker_id", + "data", + "metadata", + }, DoesNotRaise(), ), ], From 5391671838c23409b51d7bf9c49b396bb685bcc7 Mon Sep 17 00:00:00 2001 From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Mon, 4 Nov 2024 18:02:20 +0000 Subject: [PATCH 114/161] =?UTF-8?q?chore(pre=5Fcommit):=20=E2=AC=86=20pre?= =?UTF-8?q?=5Fcommit=20autoupdate?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit updates: - [github.com/astral-sh/ruff-pre-commit: v0.7.1 → v0.7.2](https://github.com/astral-sh/ruff-pre-commit/compare/v0.7.1...v0.7.2) --- .pre-commit-config.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 39c50e823..de50d5e0b 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -32,7 +32,7 @@ repos: additional_dependencies: ["bandit[toml]"] - repo: 
https://github.com/astral-sh/ruff-pre-commit - rev: v0.7.1 + rev: v0.7.2 hooks: - id: ruff args: [--fix, --exit-non-zero-on-fix] From d1e11684e75475543af6d96b8d193b09d9c01803 Mon Sep 17 00:00:00 2001 From: LinasKo Date: Wed, 6 Nov 2024 00:24:09 +0200 Subject: [PATCH 115/161] Fixed bugs, heavily refactored crossing confirmation in LineZone --- supervision/detection/line_zone.py | 261 +++++++++++++++------------- test/detection/test_line_counter.py | 27 ++- 2 files changed, 154 insertions(+), 134 deletions(-) diff --git a/supervision/detection/line_zone.py b/supervision/detection/line_zone.py index 235ecfd4f..da69ed454 100644 --- a/supervision/detection/line_zone.py +++ b/supervision/detection/line_zone.py @@ -1,11 +1,12 @@ import math import warnings -from collections import Counter, deque +from collections import Counter, defaultdict, deque from functools import lru_cache from typing import Any, Deque, Dict, Iterable, List, Literal, Optional, Tuple import cv2 import numpy as np +import numpy.typing as npt from supervision.config import CLASS_NAME_DATA_FIELD from supervision.detection.core import Detections @@ -74,7 +75,7 @@ def __init__( Position.BOTTOM_LEFT, Position.BOTTOM_RIGHT, ), - max_linger: int = 1, + crossing_acceptance_threshold: int = 1, ): """ Args: @@ -85,17 +86,18 @@ def __init__( to consider when deciding on whether the detection has passed the line counter or not. By default, this contains the four corners of the detection's bounding box - max_linger: An integer indicating the number of consecutive frames - detections should stay away from the line after crossing it - to consider crossing completed. This configuration option - is useful when dealing with unstable bounding boxes or when - detections may linger on the line + crossing_acceptance_threshold (int): Detection needs to be seen + on the other side of the line for this many frames to be + considered as having crossed the line. 
This is useful when + dealing with unstable bounding boxes or when detections + may linger on the line. """ self.vector = Vector(start=start, end=end) - self.limits = self.calculate_region_of_interest_limits(vector=self.vector) - self.max_linger = max(1, max_linger) - self.crossing_state: Dict[str, Tuple[List[Any], Deque[bool]]] = {} - self.tracker_state: Dict[str, bool] = {} + self.limits = self._calculate_region_of_interest_limits(vector=self.vector) + self.crossing_history_length = max(2, crossing_acceptance_threshold + 1) + self.crossing_state_history: Dict[int, Deque[bool]] = defaultdict( + lambda: deque(maxlen=self.crossing_history_length) + ) self._in_count_per_class: Counter = Counter() self._out_count_per_class: Counter = Counter() self.triggering_anchors = triggering_anchors @@ -135,8 +137,82 @@ def out_count_per_class(self) -> Dict[int, int]: """ return dict(self._out_count_per_class) + def trigger(self, detections: Detections) -> Tuple[np.ndarray, np.ndarray]: + """ + Update the `in_count` and `out_count` based on the objects that cross the line. + + Args: + detections (Detections): A list of detections for which to update the + counts. + + Returns: + A tuple of two boolean NumPy arrays. The first array indicates which + detections have crossed the line from outside to inside. The second + array indicates which detections have crossed the line from inside to + outside. + """ + crossed_in = np.full(len(detections), False) + crossed_out = np.full(len(detections), False) + + if len(detections) == 0: + return crossed_in, crossed_out + + if detections.tracker_id is None: + warnings.warn( + "Line zone counting skipped. LineZone requires tracker_id. 
Refer to " + "https://supervision.roboflow.com/latest/trackers for more " + "information.", + category=SupervisionWarnings, + ) + return crossed_in, crossed_out + + self._update_class_id_to_name(detections) + + in_limits, has_any_left_trigger, has_any_right_trigger = ( + self._compute_anchor_sides(detections) + ) + + class_ids: List[Optional[int]] = ( + list(detections.class_id) + if detections.class_id is not None + else [None] * len(detections) + ) + + for i, (class_id, tracker_id) in enumerate( + zip(class_ids, detections.tracker_id) + ): + if not in_limits[i]: + continue + + if has_any_left_trigger[i] and has_any_right_trigger[i]: + continue + + tracker_state: bool = has_any_left_trigger[i] + crossing_history = self.crossing_state_history[tracker_id] + crossing_history.append(tracker_state) + + if len(crossing_history) < self.crossing_history_length: + continue + + # TODO: Account for incorrect class_id. + # Most likely this would involve indexing self.crossing_state_history + # with (tracker_id, class_id). 
+ + oldest_state = crossing_history[0] + if crossing_history.count(oldest_state) > 1: + continue + + if tracker_state: + self._in_count_per_class[class_id] += 1 + crossed_in[i] = True + else: + self._out_count_per_class[class_id] += 1 + crossed_out[i] = True + + return crossed_in, crossed_out + @staticmethod - def calculate_region_of_interest_limits(vector: Vector) -> Tuple[Vector, Vector]: + def _calculate_region_of_interest_limits(vector: Vector) -> Tuple[Vector, Vector]: magnitude = vector.magnitude if magnitude == 0: @@ -167,40 +243,45 @@ def calculate_region_of_interest_limits(vector: Vector) -> Tuple[Vector, Vector] ) return start_region_limit, end_region_limit - @staticmethod - def is_point_in_limits(point: Point, limits: Tuple[Vector, Vector]) -> bool: - cross_product_1 = limits[0].cross_product(point) - cross_product_2 = limits[1].cross_product(point) - return (cross_product_1 > 0) == (cross_product_2 > 0) - - def trigger(self, detections: Detections) -> Tuple[np.ndarray, np.ndarray]: + def _compute_anchor_sides( + self, detections: Detections + ) -> Tuple[npt.NDArray[np.bool_], npt.NDArray[np.bool_], npt.NDArray[np.bool_]]: """ - Update the `in_count` and `out_count` based on the objects that cross the line. + Find if detections' anchors are within the limit of the line + zone and which anchors are on its left and right side. + + Assumes: + * At least 1 detection is provided + * Detections have `tracker_id` + + The limit is defined as the region between the two lines, + perpendicular to the line zone, and passing through its start + and end points, as shown below: + + Limits: + ``` + | IN ↑ + | | + OUT o---LINE---o OUT + | | + ↓ IN | + ``` Args: - detections (Detections): A list of detections for which to update the - counts. + detections (Detections): The detections to check. Returns: - A tuple of two boolean NumPy arrays. The first array indicates which - detections have crossed the line from outside to inside. 
The second - array indicates which detections have crossed the line from inside to - outside. + result (Tuple[np.ndarray, np.ndarray, np.ndarray]): + All 3 arrays are boolean arrays of shape (N, ) where N is the + number of detections. The first array, `in_limits`, indicates + if the detection's anchor is within the line zone limits. + The second array, `has_any_left_trigger`, indicates if the + detection's anchor is on the left side of the line zone. + The third array, `has_any_right_trigger`, indicates if the + detection's anchor is on the right side of the line zone. """ - crossed_in = np.full(len(detections), False) - crossed_out = np.full(len(detections), False) - - if len(detections) == 0: - return crossed_in, crossed_out - - if detections.tracker_id is None: - warnings.warn( - "Line zone counting skipped. LineZone requires tracker_id. Refer to " - "https://supervision.roboflow.com/latest/trackers for more " - "information.", - category=SupervisionWarnings, - ) - return crossed_in, crossed_out + assert len(detections) > 0 + assert detections.tracker_id is not None all_anchors = np.array( [ @@ -211,96 +292,38 @@ def trigger(self, detections: Detections) -> Tuple[np.ndarray, np.ndarray]: cross_products_1 = cross_product(all_anchors, self.limits[0]) cross_products_2 = cross_product(all_anchors, self.limits[1]) + + # Works because limit vectors are pointing in opposite directions in_limits = (cross_products_1 > 0) == (cross_products_2 > 0) in_limits = np.all(in_limits, axis=0) triggers = cross_product(all_anchors, self.vector) < 0 has_any_left_trigger = np.any(triggers, axis=0) has_any_right_trigger = np.any(~triggers, axis=0) - is_uniformly_triggered = ~(has_any_left_trigger & has_any_right_trigger) - class_ids = ( - list(detections.class_id) - if detections.class_id is not None - else [None] * len(detections) - ) - tracker_ids = list(detections.tracker_id) - - if CLASS_NAME_DATA_FIELD in detections.data: - class_names = detections.data[CLASS_NAME_DATA_FIELD] - 
for class_id, class_name in zip(class_ids, class_names): - if class_id is None: - class_name = "No class" - self.class_id_to_name[class_id] = class_name + return in_limits, has_any_left_trigger, has_any_right_trigger - for i, (class_ids, tracker_id) in enumerate(zip(class_ids, tracker_ids)): - if not in_limits[i]: - continue - - if not is_uniformly_triggered[i]: - continue - - tracker_state = has_any_left_trigger[i] - if tracker_id not in self.crossing_state: - self.crossing_state[tracker_id] = ( - class_ids, - deque([tracker_state], maxlen=self.max_linger), - ) - continue - - crossing_state_class_ids, crossing_state = self.crossing_state[tracker_id] - prev_frame_tracker_state = crossing_state[-1] - if self.max_linger == 1 and prev_frame_tracker_state == tracker_state: - continue - - crossing_in_progress = ( - crossing_state.count(True) != 0 and crossing_state.count(False) != 0 - ) - crossing_state.appendleft(tracker_state) - all_on_same_side = crossing_state.count(not tracker_state) == 0 - if class_ids: - if len(class_ids) != len(crossing_state_class_ids) or not all( - class_ids == crossing_state_class_ids - ): - self.crossing_state[tracker_id] = (class_ids, tracker_state) - if not all_on_same_side: - continue - else: - if self.max_linger > 1 and not crossing_in_progress: - continue - - if tracker_state: - self._in_count_per_class[class_ids] += 1 - crossed_in[i] = True - else: - self._out_count_per_class[class_ids] += 1 - crossed_out[i] = True - - if self.max_linger == 1: - return crossed_in, crossed_out + def _update_class_id_to_name(self, detections: Detections) -> None: + """ + Update the attribute keeping track of which class + IDs correspond to which class names. 
- this_frame_trackers = set(detections.tracker_id) - for tracker_id in list(self.crossing_state.keys()): - if tracker_id in this_frame_trackers: - continue - crossing_state_class_ids, crossing_state = self.crossing_state[tracker_id] - crossing_in_progress = ( - crossing_state.count(True) != 0 and crossing_state.count(False) != 0 - ) - if not crossing_in_progress: - continue - tracker_state = crossing_state[0] - crossing_state.appendleft(tracker_state) - all_on_same_side = crossing_state.count(not tracker_state) == 0 - if not all_on_same_side: - continue + Assumes that class_names are only provided when class_ids are. + """ + class_names = detections.data.get(CLASS_NAME_DATA_FIELD) + assert class_names is None or detections.class_id is not None - if tracker_state: - self._in_count_per_class[crossing_state_class_ids] += 1 - else: - self._out_count_per_class[crossing_state_class_ids] += 1 + if detections.class_id is None: + return - return crossed_in, crossed_out + if class_names is None: + new_names = {class_id: str(class_id) for class_id in detections.class_id} + else: + new_names = { + class_id: class_name + for class_id, class_name in zip(detections.class_id, class_names) + } + self.class_id_to_name.update(new_names) class LineZoneAnnotator: diff --git a/test/detection/test_line_counter.py b/test/detection/test_line_counter.py index f483ab186..d0ec5fbd2 100644 --- a/test/detection/test_line_counter.py +++ b/test/detection/test_line_counter.py @@ -69,7 +69,7 @@ def test_calculate_region_of_interest_limits( exception: Exception, ) -> None: with exception: - result = LineZone.calculate_region_of_interest_limits(vector=vector) + result = LineZone._calculate_region_of_interest_limits(vector=vector) assert result == expected_result @@ -493,8 +493,8 @@ def test_line_zone_multiple_detections( @pytest.mark.parametrize( - "vector, xyxy_sequence, triggering_anchors, max_linger, expected_crossed_in, " - "expected_crossed_out", + "vector, xyxy_sequence, triggering_anchors, 
crossing_acceptance_threshold, " + "expected_crossed_in, expected_crossed_out", [ ( # Detection lingers around line, all crosses counted Vector(Point(0, 0), Point(10, 0)), @@ -578,7 +578,7 @@ def test_line_zone_one_detection_long_horizon( vector: Vector, xyxy_sequence: List[List[float]], triggering_anchors: List[Position], - max_linger: int, + crossing_acceptance_threshold: int, expected_crossed_in: List[bool], expected_crossed_out: List[bool], ) -> None: @@ -586,7 +586,7 @@ def test_line_zone_one_detection_long_horizon( start=vector.start, end=vector.end, triggering_anchors=triggering_anchors, - max_linger=max_linger, + crossing_acceptance_threshold=crossing_acceptance_threshold, ) crossed_in_list = [] @@ -609,8 +609,9 @@ def test_line_zone_one_detection_long_horizon( @pytest.mark.parametrize( - "vector, xyxy_sequence, anchors, max_linger, expected_crossed_in, " - "expected_crossed_out, expected_count_in, expected_count_out, exception", + "vector, xyxy_sequence, anchors, crossing_acceptance_threshold, " + "expected_crossed_in, expected_crossed_out, expected_count_in, " + "expected_count_out, exception", [ ( # One stays, one crosses, one disappears before crossing Vector(Point(0, 0), Point(10, 0)), @@ -659,11 +660,7 @@ def test_line_zone_one_detection_long_horizon( [ [False, False, False], [False, True, False], - [ - False, - False, - True, - ], + [False, False, True], [False, True], [False, False], ], @@ -736,7 +733,7 @@ def test_line_zone_one_detection_long_horizon( [False, False], [False, False], ], - [0, 0, 0, 1, 2], + [0, 0, 0, 0, 1], [0, 0, 0, 0, 0], DoesNotRaise(), ), @@ -746,7 +743,7 @@ def test_line_zone_long_horizon_disappearing_detections( vector: Vector, xyxy_sequence: List[List[Optional[List[float]]]], anchors: List[Position], - max_linger: int, + crossing_acceptance_threshold: int, expected_crossed_in: List[List[bool]], expected_crossed_out: List[List[bool]], expected_count_in: List[int], @@ -758,7 +755,7 @@ def 
test_line_zone_long_horizon_disappearing_detections( start=vector.start, end=vector.end, triggering_anchors=anchors, - max_linger=max_linger, + crossing_acceptance_threshold=crossing_acceptance_threshold, ) crossed_in_list = [] crossed_out_list = [] From 62c2c6a6681d000f66b0ada086aeed7058d72d54 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Wed, 6 Nov 2024 00:28:19 +0000 Subject: [PATCH 116/161] :arrow_up: Bump mkdocs-material from 9.5.43 to 9.5.44 Bumps [mkdocs-material](https://github.com/squidfunk/mkdocs-material) from 9.5.43 to 9.5.44. - [Release notes](https://github.com/squidfunk/mkdocs-material/releases) - [Changelog](https://github.com/squidfunk/mkdocs-material/blob/master/CHANGELOG) - [Commits](https://github.com/squidfunk/mkdocs-material/compare/9.5.43...9.5.44) --- updated-dependencies: - dependency-name: mkdocs-material dependency-type: direct:development update-type: version-update:semver-patch ... Signed-off-by: dependabot[bot] --- poetry.lock | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/poetry.lock b/poetry.lock index d7e29a430..fddfa23fb 100644 --- a/poetry.lock +++ b/poetry.lock @@ -2412,13 +2412,13 @@ pygments = ">2.12.0" [[package]] name = "mkdocs-material" -version = "9.5.43" +version = "9.5.44" description = "Documentation that simply works" optional = false python-versions = ">=3.8" files = [ - {file = "mkdocs_material-9.5.43-py3-none-any.whl", hash = "sha256:4aae0664c456fd12837a3192e0225c17960ba8bf55d7f0a7daef7e4b0b914a34"}, - {file = "mkdocs_material-9.5.43.tar.gz", hash = "sha256:83be7ff30b65a1e4930dfa4ab911e75780a3afc9583d162692e434581cb46979"}, + {file = "mkdocs_material-9.5.44-py3-none-any.whl", hash = "sha256:47015f9c167d58a5ff5e682da37441fc4d66a1c79334bfc08d774763cacf69ca"}, + {file = "mkdocs_material-9.5.44.tar.gz", hash = "sha256:f3a6c968e524166b3f3ed1fb97d3ed3e0091183b0545cedf7156a2a6804c56c0"}, ] [package.dependencies] From 
659ddec3604cddd209497dafbc21e84eb20c9a18 Mon Sep 17 00:00:00 2001 From: LinasKo Date: Wed, 6 Nov 2024 14:07:26 +0200 Subject: [PATCH 117/161] VideoSink automatically infers video_info --- supervision/utils/video.py | 15 ++++++++++++--- 1 file changed, 12 insertions(+), 3 deletions(-) diff --git a/supervision/utils/video.py b/supervision/utils/video.py index 2e502cf2b..9d67dbfb1 100644 --- a/supervision/utils/video.py +++ b/supervision/utils/video.py @@ -65,8 +65,9 @@ class VideoSink: Attributes: target_path (str): The path to the output file where the video will be saved. - video_info (VideoInfo): Information about the video resolution, fps, - and total frame count. + video_info (Optional[VideoInfo]): Information about the output video resolution, + fps, and total frame count. If not provided, the information will be inferred + from the video path. codec (str): FOURCC code for video format Example: @@ -82,8 +83,16 @@ class VideoSink: ``` """ # noqa: E501 // docs - def __init__(self, target_path: str, video_info: VideoInfo, codec: str = "mp4v"): + def __init__( + self, + target_path: str, + video_info: Optional[VideoInfo] = None, + codec: str = "mp4v", + ): self.target_path = target_path + + if video_info is None: + video_info = VideoInfo.from_video_path(target_path) self.video_info = video_info self.__codec = codec self.__writer = None From b39ead2ef0fc9ca88cf50ab6f2b8f292f1a0c72a Mon Sep 17 00:00:00 2001 From: LinasKo Date: Wed, 6 Nov 2024 14:21:52 +0200 Subject: [PATCH 118/161] Revert "VideoSink automatically infers video_info" --- supervision/utils/video.py | 15 +++------------ 1 file changed, 3 insertions(+), 12 deletions(-) diff --git a/supervision/utils/video.py b/supervision/utils/video.py index 9d67dbfb1..2e502cf2b 100644 --- a/supervision/utils/video.py +++ b/supervision/utils/video.py @@ -65,9 +65,8 @@ class VideoSink: Attributes: target_path (str): The path to the output file where the video will be saved.
- video_info (Optional[VideoInfo]): Information about the output video resolution, - fps, and total frame count. If not provided, the information will be inferred - from the video path. + video_info (VideoInfo): Information about the video resolution, fps, + and total frame count. codec (str): FOURCC code for video format Example: @@ -83,16 +82,8 @@ class VideoSink: ``` """ # noqa: E501 // docs - def __init__( - self, - target_path: str, - video_info: Optional[VideoInfo] = None, - codec: str = "mp4v", - ): + def __init__(self, target_path: str, video_info: VideoInfo, codec: str = "mp4v"): self.target_path = target_path - - if video_info is None: - video_info = VideoInfo.from_video_path(target_path) self.video_info = video_info self.__codec = codec self.__writer = None From e1af717c59cd0b718d83db84705612519dfae8ee Mon Sep 17 00:00:00 2001 From: LinasKo Date: Wed, 6 Nov 2024 15:35:49 +0200 Subject: [PATCH 119/161] Add environment to publish.yml, publish-test.yml * Similar issue: https://github.com/pypa/gh-action-pypi-publish/issues/217#issuecomment-1965733177 * Minor stylistic edits to the file --- .github/workflows/publish-test.yml | 7 ++++--- .github/workflows/publish.yml | 5 +++-- 2 files changed, 7 insertions(+), 5 deletions(-) diff --git a/.github/workflows/publish-test.yml b/.github/workflows/publish-test.yml index 567a6aba6..2cd05bf13 100644 --- a/.github/workflows/publish-test.yml +++ b/.github/workflows/publish-test.yml @@ -2,9 +2,9 @@ name: Publish Supervision Pre-Releases to PyPI and TestPyPI on: push: tags: - - '[0-9]+.[0-9]+[0-9]+.[0-9]+a[0-9]' - - '[0-9]+.[0-9]+[0-9]+.[0-9]+b[0-9]' - - '[0-9]+.[0-9]+[0-9]+.[0-9]+rc[0-9]' + - "[0-9]+.[0-9]+[0-9]+.[0-9]+a[0-9]" + - "[0-9]+.[0-9]+[0-9]+.[0-9]+b[0-9]" + - "[0-9]+.[0-9]+[0-9]+.[0-9]+rc[0-9]" workflow_dispatch: @@ -12,6 +12,7 @@ jobs: build-and-publish-pre-release-pypi: name: Build and publish to PyPI runs-on: ubuntu-latest + environment: test permissions: id-token: write strategy: diff --git 
a/.github/workflows/publish.yml b/.github/workflows/publish.yml index f9a14d225..17c2629a8 100644 --- a/.github/workflows/publish.yml +++ b/.github/workflows/publish.yml @@ -2,13 +2,14 @@ name: Publish Supervision Releases to PyPI and TestPyPI on: push: tags: - - '[0-9]+.[0-9]+[0-9]+.[0-9]' + - "[0-9]+.[0-9]+[0-9]+.[0-9]" workflow_dispatch: jobs: build-and-publish-pre-release: runs-on: ubuntu-latest + environment: release permissions: id-token: write strategy: @@ -24,7 +25,7 @@ jobs: with: python-version: ${{ matrix.python-version }} - - name: 🏗️ Build source and wheel distributions + - name: 🏗️ Build source and wheel distributions run: | python -m pip install --upgrade build twine python -m build From c54b4be4f05d53b0887c3ffe2c5540998146cf1e Mon Sep 17 00:00:00 2001 From: LinasKo Date: Wed, 6 Nov 2024 19:49:36 +0200 Subject: [PATCH 120/161] Add skiing video asset --- supervision/assets/list.py | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/supervision/assets/list.py b/supervision/assets/list.py index 8a01b7585..926adeba8 100644 --- a/supervision/assets/list.py +++ b/supervision/assets/list.py @@ -20,6 +20,7 @@ class VideoAssets(Enum): | `PEOPLE_WALKING` | `people-walking.mp4` | [Link](https://media.roboflow.com/supervision/video-examples/people-walking.mp4) | | `BEACH` | `beach-1.mp4` | [Link](https://media.roboflow.com/supervision/video-examples/beach-1.mp4) | | `BASKETBALL` | `basketball-1.mp4` | [Link](https://media.roboflow.com/supervision/video-examples/basketball-1.mp4) | + | `SKIING` | `skiing.mp4` | [Link](https://media.roboflow.com/supervision/video-examples/skiing.mp4) | """ # noqa: E501 // docs VEHICLES = "vehicles.mp4" @@ -31,6 +32,7 @@ class VideoAssets(Enum): PEOPLE_WALKING = "people-walking.mp4" BEACH = "beach-1.mp4" BASKETBALL = "basketball-1.mp4" + SKIING = "skiing.mp4" @classmethod def list(cls): @@ -74,4 +76,8 @@ def list(cls): f"{BASE_VIDEO_URL}{VideoAssets.BASKETBALL.value}", "60d94a3c7c47d16f09d342b088012ecc", ), + 
VideoAssets.SKIING.value: ( + f"{BASE_VIDEO_URL}{VideoAssets.SKIING.value}", + "d30987cbab1bbc5934199cdd1b293119", + ), } From 6d7dba757259a1f45debf95932da0f8e5a12f6e2 Mon Sep 17 00:00:00 2001 From: LinasKo Date: Wed, 6 Nov 2024 20:02:11 +0200 Subject: [PATCH 121/161] Typo: LineZone in PolygonZone --- supervision/detection/tools/polygon_zone.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/supervision/detection/tools/polygon_zone.py b/supervision/detection/tools/polygon_zone.py index f69f3c9fe..5cd976b1e 100644 --- a/supervision/detection/tools/polygon_zone.py +++ b/supervision/detection/tools/polygon_zone.py @@ -19,7 +19,7 @@ class PolygonZone: !!! warning - LineZone uses the `tracker_id`. Read + PolygonZone uses the `tracker_id`. Read [here](/latest/trackers/) to learn how to plug tracking into your inference pipeline. From 698c0855187d1e4fbdb50922e52c0c599eff7c53 Mon Sep 17 00:00:00 2001 From: LinasKo Date: Wed, 6 Nov 2024 20:02:31 +0200 Subject: [PATCH 122/161] Add KeyPoints.is_empty() --- supervision/keypoint/core.py | 8 ++++++++ 1 file changed, 8 insertions(+) diff --git a/supervision/keypoint/core.py b/supervision/keypoint/core.py index 36d6a5968..252fb63f3 100644 --- a/supervision/keypoint/core.py +++ b/supervision/keypoint/core.py @@ -612,3 +612,11 @@ def empty(cls) -> KeyPoints: ``` """ return cls(xy=np.empty((0, 0, 2), dtype=np.float32)) + + def is_empty(self) -> bool: + """ + Returns `True` if the `KeyPoints` object is considered empty. 
+ """ + empty_keypoints = KeyPoints.empty() + empty_keypoints.data = self.data + return self == empty_keypoints From a0e909669ded277dd3cd08e7ae6110aef758c20e Mon Sep 17 00:00:00 2001 From: LinasKo Date: Wed, 6 Nov 2024 20:03:48 +0200 Subject: [PATCH 123/161] Add keypoints_to_detections --- docs/utils/datatypes.md | 12 +++++++ mkdocs.yml | 1 + supervision/__init__.py | 1 + supervision/utils/datatypes.py | 65 ++++++++++++++++++++++++++++++++++ 4 files changed, 79 insertions(+) create mode 100644 docs/utils/datatypes.md create mode 100644 supervision/utils/datatypes.py diff --git a/docs/utils/datatypes.md b/docs/utils/datatypes.md new file mode 100644 index 000000000..5e0560bd3 --- /dev/null +++ b/docs/utils/datatypes.md @@ -0,0 +1,12 @@ +--- +comments: true +status: new +--- + +# Data Types Utils + + + +:::supervision.utils.datatypes.keypoints_to_detections diff --git a/mkdocs.yml b/mkdocs.yml index b30dbcfcc..8c939cf07 100644 --- a/mkdocs.yml +++ b/mkdocs.yml @@ -79,6 +79,7 @@ nav: - File: utils/file.md - Draw: utils/draw.md - Geometry: utils/geometry.md + - Datatypes: utils/datatypes.md - Assets: assets.md - Cookbooks: cookbooks.md - Cheatsheet: https://roboflow.github.io/cheatsheet-supervision/ diff --git a/supervision/__init__.py b/supervision/__init__.py index 746b2f67d..04813ef22 100644 --- a/supervision/__init__.py +++ b/supervision/__init__.py @@ -100,6 +100,7 @@ from supervision.metrics.detection import ConfusionMatrix, MeanAveragePrecision from supervision.tracker.byte_tracker.core import ByteTrack from supervision.utils.conversion import cv2_to_pillow, pillow_to_cv2 +from supervision.utils.datatypes import keypoints_to_detections from supervision.utils.file import list_files_with_extensions from supervision.utils.image import ( ImageSink, diff --git a/supervision/utils/datatypes.py b/supervision/utils/datatypes.py new file mode 100644 index 000000000..dafbc4cb6 --- /dev/null +++ b/supervision/utils/datatypes.py @@ -0,0 +1,65 @@ +from typing import 
Iterable, Optional + +import numpy as np + +from supervision.detection.core import Detections +from supervision.keypoint.core import KeyPoints + + +def keypoints_to_detections( + keypoints: KeyPoints, selected_keypoint_indices: Optional[Iterable[int]] = None +) -> Detections: + """ + Convert a KeyPoints object to a Detections object. This + approximates the bounding box of the detected object by + taking the bounding box that fits all keypoints. + + Arguments: + keypoints (KeyPoints): The keypoints to convert to detections. + selected_keypoint_indices (Optional[Iterable[int]]): The + indices of the keypoints to include in the bounding box + calculation. This helps focus on a subset of keypoints, + e.g. when some are occluded. Captures all keypoints by default. + + Returns: + detections (Detections): The converted detections object. + + Example: + ```python + keypoints = sv.KeyPoints.from_inference(...) + detections = keypoints_to_detections(keypoints) + ``` + """ + if keypoints.is_empty(): + return Detections.empty() + + detections_list = [] + for i, xy in enumerate(keypoints.xy): + if selected_keypoint_indices: + xy = xy[selected_keypoint_indices] + x_min = xy[:, 0].min() + x_max = xy[:, 0].max() + y_min = xy[:, 1].min() + y_max = xy[:, 1].max() + xyxy = np.array([[x_min, y_min, x_max, y_max]], dtype=np.float32) + + if keypoints.confidence is None: + confidence = None + else: + confidence = keypoints.confidence[i] + if selected_keypoint_indices: + confidence = confidence[selected_keypoint_indices] + confidence = np.array([confidence.mean()], dtype=np.float32) + + detections_list.append( + Detections( + xyxy=xyxy, + confidence=confidence, + ) + ) + + detections = Detections.merge(detections_list) + detections.class_id = keypoints.class_id + detections.data = keypoints.data + + return detections From 763be226a41c3e79cc3697f31c66abfb6af0fdc9 Mon Sep 17 00:00:00 2001 From: LinasKo Date: Wed, 6 Nov 2024 21:00:31 +0200 Subject: [PATCH 124/161] Fix: unaccounted for 
missing keypoints returned as [0, 0] --- supervision/utils/datatypes.py | 17 ++++++++++++----- 1 file changed, 12 insertions(+), 5 deletions(-) diff --git a/supervision/utils/datatypes.py b/supervision/utils/datatypes.py index dafbc4cb6..6f2e8879c 100644 --- a/supervision/utils/datatypes.py +++ b/supervision/utils/datatypes.py @@ -37,11 +37,17 @@ def keypoints_to_detections( for i, xy in enumerate(keypoints.xy): if selected_keypoint_indices: xy = xy[selected_keypoint_indices] - x_min = xy[:, 0].min() - x_max = xy[:, 0].max() - y_min = xy[:, 1].min() - y_max = xy[:, 1].max() - xyxy = np.array([[x_min, y_min, x_max, y_max]], dtype=np.float32) + + # [0, 0] used by some frameworks to indicate missing keypoints + xy = xy[~np.all(xy == 0, axis=1)] + if len(xy) == 0: + xyxy = np.array([[0, 0, 0, 0]], dtype=np.float32) + else: + x_min = xy[:, 0].min() + x_max = xy[:, 0].max() + y_min = xy[:, 1].min() + y_max = xy[:, 1].max() + xyxy = np.array([[x_min, y_min, x_max, y_max]], dtype=np.float32) if keypoints.confidence is None: confidence = None @@ -61,5 +67,6 @@ def keypoints_to_detections( detections = Detections.merge(detections_list) detections.class_id = keypoints.class_id detections.data = keypoints.data + detections = detections[detections.area > 0] return detections From 89b911d5f93633a0f3f0924ad2dc19b0e6f31635 Mon Sep 17 00:00:00 2001 From: LinasKo Date: Wed, 6 Nov 2024 21:27:37 +0200 Subject: [PATCH 125/161] Guide: New section in "Track Objects on Video", for keypoints --- docs/how_to/track_objects.md | 106 ++++++++++++++++++++++++++++++++++- 1 file changed, 105 insertions(+), 1 deletion(-) diff --git a/docs/how_to/track_objects.md b/docs/how_to/track_objects.md index 464f6b8d9..784cb7bf1 100644 --- a/docs/how_to/track_objects.md +++ b/docs/how_to/track_objects.md @@ -6,7 +6,7 @@ comments: true Leverage Supervision's advanced capabilities for enhancing your video analysis by seamlessly [tracking](/latest/trackers/) objects recognized by -a multitude of object 
detection and segmentation models. This comprehensive guide will +a multitude of object detection, segmentation and keypoint models. This comprehensive guide will take you through the steps to perform inference using the YOLOv8 model via either the [Inference](https://github.com/roboflow/inference) or [Ultralytics](https://github.com/ultralytics/ultralytics) packages. Following this, @@ -21,6 +21,7 @@ example. You can do this using from supervision.assets import download_assets, VideoAssets download_assets(VideoAssets.PEOPLE_WALKING) +download_assets(VideoAssets.SKIING) ``` +## Tracking Key Points + +Keypoint tracking is currently supported via the conversion of `KeyPoints` to `Detections`. This is achieved with the [`keypoints_to_detections`](/latest/utils/datatypes/#supervision.utils.datatypes.keypoints_to_detections) function. We'll use a different video as well as [`DetectionsSmoother`](/latest/detection/tools/smoother/) to stabilize the boxes. + +!!! tip + + You may use the `selected_keypoint_indices` argument to specify a subset of keypoints to convert. This is useful when some keypoints could be occluded. For example: a person might swing their arm, causing the elbow to be occluded by the torso sometimes. 
+ +=== "Ultralytics" + + ```{ .py hl_lines="5 7 14-15 17 33" } + import numpy as np + import supervision as sv + from ultralytics import YOLO + + model = YOLO("yolov8m-pose.pt") + tracker = sv.ByteTrack() + smoother = sv.DetectionsSmoother() + box_annotator = sv.BoundingBoxAnnotator() + label_annotator = sv.LabelAnnotator() + trace_annotator = sv.TraceAnnotator() + + def callback(frame: np.ndarray, _: int) -> np.ndarray: + results = model(frame)[0] + keypoints = sv.KeyPoints.from_ultralytics(results) + detections = sv.keypoints_to_detections(keypoints) + detections = tracker.update_with_detections(detections) + detections = smoother.update_with_detections(detections) + + labels = [ + f"#{tracker_id} {results.names[class_id]}" + for class_id, tracker_id + in zip(detections.class_id, detections.tracker_id) + ] + + annotated_frame = box_annotator.annotate( + frame.copy(), detections=detections) + annotated_frame = label_annotator.annotate( + annotated_frame, detections=detections, labels=labels) + return trace_annotator.annotate( + annotated_frame, detections=detections) + + sv.process_video( + source_path="skiing.mp4", + target_path="result.mp4", + callback=callback + ) + ``` + +=== "Inference" + + ```{ .py hl_lines="5-6 8 15-16 18 34" } + import numpy as np + import supervision as sv + from inference.models.utils import get_roboflow_model + + model = get_roboflow_model( + model_id="yolov8m-pose-640", api_key=) + tracker = sv.ByteTrack() + smoother = sv.DetectionsSmoother() + box_annotator = sv.BoundingBoxAnnotator() + label_annotator = sv.LabelAnnotator() + trace_annotator = sv.TraceAnnotator() + + def callback(frame: np.ndarray, _: int) -> np.ndarray: + results = model.infer(frame)[0] + keypoints = sv.KeyPoints.from_inference(results) + detections = sv.keypoints_to_detections(keypoints) + detections = tracker.update_with_detections(detections) + detections = smoother.update_with_detections(detections) + + labels = [ + f"#{tracker_id} {results.names[class_id]}" + 
for class_id, tracker_id + in zip(detections.class_id, detections.tracker_id) + ] + + annotated_frame = box_annotator.annotate( + frame.copy(), detections=detections) + annotated_frame = label_annotator.annotate( + annotated_frame, detections=detections, labels=labels) + return trace_annotator.annotate( + annotated_frame, detections=detections) + + sv.process_video( + source_path="skiing.mp4", + target_path="result.mp4", + callback=callback + ) + ``` + + + This structured walkthrough should give a detailed pathway to annotate videos effectively using Supervision’s various functionalities, including object tracking and trace annotations. From 11978410f08a7e429710b4df3993b56680861044 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Thu, 7 Nov 2024 00:11:34 +0000 Subject: [PATCH 126/161] :arrow_up: Bump tqdm from 4.66.6 to 4.67.0 Bumps [tqdm](https://github.com/tqdm/tqdm) from 4.66.6 to 4.67.0. - [Release notes](https://github.com/tqdm/tqdm/releases) - [Commits](https://github.com/tqdm/tqdm/compare/v4.66.6...v4.67.0) --- updated-dependencies: - dependency-name: tqdm dependency-type: direct:production update-type: version-update:semver-minor ... 
Signed-off-by: dependabot[bot] --- poetry.lock | 9 +++++---- pyproject.toml | 2 +- 2 files changed, 6 insertions(+), 5 deletions(-) diff --git a/poetry.lock b/poetry.lock index fddfa23fb..e78ab20b6 100644 --- a/poetry.lock +++ b/poetry.lock @@ -4405,13 +4405,13 @@ test = ["devpi-process (>=1.0.2)", "pytest (>=8.3.3)", "pytest-mock (>=3.14)"] [[package]] name = "tqdm" -version = "4.66.6" +version = "4.67.0" description = "Fast, Extensible Progress Meter" optional = true python-versions = ">=3.7" files = [ - {file = "tqdm-4.66.6-py3-none-any.whl", hash = "sha256:223e8b5359c2efc4b30555531f09e9f2f3589bcd7fdd389271191031b49b7a63"}, - {file = "tqdm-4.66.6.tar.gz", hash = "sha256:4bdd694238bef1485ce839d67967ab50af8f9272aab687c0d7702a01da0be090"}, + {file = "tqdm-4.67.0-py3-none-any.whl", hash = "sha256:0cd8af9d56911acab92182e88d763100d4788bdf421d251616040cc4d44863be"}, + {file = "tqdm-4.67.0.tar.gz", hash = "sha256:fe5a6f95e6fe0b9755e9469b77b9c3cf850048224ecaa8293d7d2d31f97d869a"}, ] [package.dependencies] @@ -4419,6 +4419,7 @@ colorama = {version = "*", markers = "platform_system == \"Windows\""} [package.extras] dev = ["pytest (>=6)", "pytest-cov", "pytest-timeout", "pytest-xdist"] +discord = ["requests"] notebook = ["ipywidgets (>=6)"] slack = ["slack-sdk"] telegram = ["requests"] @@ -4781,4 +4782,4 @@ metrics = ["pandas", "pandas-stubs"] [metadata] lock-version = "2.0" python-versions = "^3.8" -content-hash = "8f7dad5406a294901e3f489cf0d09e8217a80597ba9cd82695822a3fb5c13034" +content-hash = "51d7489c6d66912ca4946850539636cfedb1c08b1a71747384ff732c4519cd8e" diff --git a/pyproject.toml b/pyproject.toml index 713bd838e..fc25a1cd7 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -71,7 +71,7 @@ pyyaml = ">=5.3" defusedxml = "^0.7.1" pillow = ">=9.4" requests = { version = ">=2.26.0,<=2.32.3", optional = true } -tqdm = { version = ">=4.62.3,<=4.66.6", optional = true } +tqdm = { version = ">=4.62.3,<=4.67.0", optional = true } # pandas: picked lowest major version that 
supports Python 3.8 pandas = { version = ">=2.0.0", optional = true } pandas-stubs = { version = ">=2.0.0.230412", optional = true } From 01caa1f5289ceb3de186c72e0a4df42a9af326ce Mon Sep 17 00:00:00 2001 From: SkalskiP Date: Thu, 7 Nov 2024 14:05:19 +0100 Subject: [PATCH 127/161] bump version from `0.24.0` to `0.25.0rc1` --- pyproject.toml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pyproject.toml b/pyproject.toml index 713bd838e..7c6b3d171 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,6 +1,6 @@ [tool.poetry] name = "supervision" -version = "0.24.0" +version = "0.25.0rc1" description = "A set of easy-to-use utils that will come in handy in any Computer Vision project" authors = ["Piotr Skalski "] maintainers = [ From 19b7ca0a3d153f3b4f97dcc114c4fbe36439775b Mon Sep 17 00:00:00 2001 From: SkalskiP Date: Thu, 7 Nov 2024 14:15:05 +0100 Subject: [PATCH 128/161] bump version from `0.25.0rc1` to `0.25.0rc2` --- pyproject.toml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pyproject.toml b/pyproject.toml index 7c6b3d171..b2a1ee31f 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,6 +1,6 @@ [tool.poetry] name = "supervision" -version = "0.25.0rc1" +version = "0.25.0rc2" description = "A set of easy-to-use utils that will come in handy in any Computer Vision project" authors = ["Piotr Skalski "] maintainers = [ From 6728527f36b6ecc728d35232c58240f2ffd3cb98 Mon Sep 17 00:00:00 2001 From: LinasKo Date: Thu, 7 Nov 2024 17:23:23 +0200 Subject: [PATCH 129/161] Smart annotators are now called via "spread_out" param. 
* Alos, refactored RichLabelAnnotator so its structure matches LabelAnnotator --- supervision/annotators/core.py | 432 ++++++++++++++++------------- supervision/detection/utils.py | 86 ++---- supervision/keypoint/annotators.py | 49 ++-- 3 files changed, 287 insertions(+), 280 deletions(-) diff --git a/supervision/annotators/core.py b/supervision/annotators/core.py index 167782bb6..2864bfdd9 100644 --- a/supervision/annotators/core.py +++ b/supervision/annotators/core.py @@ -1,6 +1,7 @@ +from dataclasses import dataclass from functools import lru_cache from math import sqrt -from typing import List, Optional, Tuple, Union +from typing import Any, Dict, List, Optional, Tuple, Union import cv2 import numpy as np @@ -16,7 +17,7 @@ ) from supervision.config import CLASS_NAME_DATA_FIELD, ORIENTED_BOX_COORDINATES from supervision.detection.core import Detections -from supervision.detection.utils import clip_boxes, mask_to_polygons, spread_out +from supervision.detection.utils import clip_boxes, mask_to_polygons, spread_out_boxes from supervision.draw.color import Color, ColorPalette from supervision.draw.utils import draw_polygon from supervision.geometry.core import Position @@ -1044,6 +1045,16 @@ class LabelAnnotator(BaseAnnotator): A class for annotating labels on an image using provided detections. """ + @dataclass + class _TextProperties: + text: str + width: int + height: int + width_padded: int + height_padded: int + + _FONT = cv2.FONT_HERSHEY_SIMPLEX + def __init__( self, color: Union[Color, ColorPalette] = ColorPalette.DEFAULT, @@ -1054,7 +1065,7 @@ def __init__( text_position: Position = Position.TOP_LEFT, color_lookup: ColorLookup = ColorLookup.CLASS, border_radius: int = 0, - use_smart_positioning: bool = False, + spread_out: bool = False, ): """ Args: @@ -1071,10 +1082,8 @@ def __init__( Options are `INDEX`, `CLASS`, `TRACK`. border_radius (int): The radius to apply round edges. 
If the selected value is higher than the lower dimension, width or height, is clipped. - use_smart_positioning (bool): Whether to use smart positioning to prevent - label overlapping or not. + spread_out (bool): Spread out the labels to avoid overlapping. """ - self.use_smart_positioning: bool = use_smart_positioning self.border_radius: int = border_radius self.color: Union[Color, ColorPalette] = color self.text_color: Union[Color, ColorPalette] = text_color @@ -1083,6 +1092,7 @@ def __init__( self.text_padding: int = text_padding self.text_anchor: Position = text_position self.color_lookup: ColorLookup = color_lookup + self.spread_out = spread_out @ensure_cv2_image_for_annotation def annotate( @@ -1136,23 +1146,23 @@ def annotate( assert isinstance(scene, np.ndarray) self._validate_labels(labels, detections) - # Get text properties for all detections - text_props = self._get_text_properties(detections, labels) + labels = self._get_labels_text(detections, labels) + text_properties = self._get_text_properties(labels) - # Calculate background coordinates for all labels - xyxy = self._calculate_label_backgrounds( - detections, text_props, self.text_anchor, self.text_padding + xyxy = self._calculate_label_positions( + detections, text_properties, self.text_anchor ) - # Adjust positions if smart positioning is enabled - if self.use_smart_positioning: - xyxy = spread_out(xyxy, step=2) + if self.spread_out: + xyxy = spread_out_boxes( + xyxy, + step=2, + max_iterations=len(xyxy) * 20) - # Draw all labels self._draw_labels( scene=scene, xyxy=xyxy, - text_props=text_props, + text_properties=text_properties, detections=detections, custom_color_lookup=custom_color_lookup, ) @@ -1160,7 +1170,6 @@ def annotate( return scene def _validate_labels(self, labels: Optional[List[str]], detections: Detections): - """Validates that the number of labels matches the number of detections.""" if labels is not None and len(labels) != len(detections): raise ValueError( f"The number of labels 
({len(labels)}) does not match the " @@ -1168,57 +1177,51 @@ def _validate_labels(self, labels: Optional[List[str]], detections: Detections): f"should have exactly 1 label." ) - def _get_text_properties( - self, detections: Detections, custom_labels: Optional[List[str]] - ) -> List[dict]: + def _get_text_properties(self, labels: List[str]) -> List[_TextProperties]: """Gets text content and dimensions for all detections.""" - text_props = [] - font = cv2.FONT_HERSHEY_SIMPLEX - - for idx in range(len(detections)): - # Determine label text - text = self._get_label_text(detections, custom_labels, idx) + text_properties = [] - # Calculate text dimensions + for label in labels: (text_w, text_h) = cv2.getTextSize( - text=text, - fontFace=font, + text=label, + fontFace=self._FONT, fontScale=self.text_scale, thickness=self.text_thickness, )[0] - text_props.append( - { - "text": text, - "width": text_w, - "height": text_h, - "width_padded": text_w + 2 * self.text_padding, - "height_padded": text_h + 2 * self.text_padding, - } - ) + text_properties.append(self._TextProperties( + text=label, + width=text_w, + height=text_h, + width_padded=text_w + 2 * self.text_padding, + height_padded=text_h + 2 * self.text_padding, + )) - return text_props + return text_properties - def _get_label_text( - self, detections: Detections, custom_labels: Optional[List[str]], idx: int - ) -> str: - """Determines the label text for a given detection.""" + @staticmethod + def _get_labels_text( + detections: Detections, custom_labels: Optional[List[str]]) -> List[str]: + if custom_labels is not None: - return custom_labels[idx] - elif CLASS_NAME_DATA_FIELD in detections.data: - return detections.data[CLASS_NAME_DATA_FIELD][idx] - elif detections.class_id is not None: - return str(detections.class_id[idx]) - return str(idx) - - def _calculate_label_backgrounds( + return custom_labels + + labels = [] + for idx in range(len(detections)): + if CLASS_NAME_DATA_FIELD in detections.data: + 
labels.append(detections.data[CLASS_NAME_DATA_FIELD][idx]) + elif detections.class_id is not None: + labels.append(str(detections.class_id[idx])) + else: + labels.append(str(idx)) + return labels + + def _calculate_label_positions( self, detections: Detections, - text_props: List[dict], - text_anchor: str, - text_padding: int, + text_properties: List[_TextProperties], + text_anchor: Position, ) -> np.ndarray: - """Calculates background coordinates for all labels.""" anchors_coordinates = detections.get_anchors_coordinates( anchor=text_anchor ).astype(int) @@ -1228,8 +1231,8 @@ def _calculate_label_backgrounds( text_background_xyxy = resolve_text_background_xyxy( center_coordinates=tuple(center_coords), text_wh=( - text_props[idx]["width_padded"], - text_props[idx]["height_padded"], + text_properties[idx].width_padded, + text_properties[idx].height_padded, ), position=text_anchor, ) @@ -1239,22 +1242,24 @@ def _calculate_label_backgrounds( def _draw_labels( self, - scene: ImageType, + scene: np.ndarray, xyxy: np.ndarray, - text_props: List[dict], + text_properties: List[_TextProperties], detections: Detections, custom_color_lookup: Optional[np.ndarray], ) -> None: - """Draws all labels and their backgrounds on the scene.""" - if custom_color_lookup is not None: - color_lookup = custom_color_lookup - else: - color_lookup = self.color_lookup - font = cv2.FONT_HERSHEY_SIMPLEX + assert len(xyxy) == len(text_properties) == len(detections), ( + f"Number of text properties ({len(text_properties)}), xyxy ({len(xyxy)}) and detections ({len(detections)}) do not match." 
+ ) - for idx, coordinates in enumerate(xyxy): - # Resolve colors - bg_color = resolve_color( + color_lookup = ( + custom_color_lookup + if custom_color_lookup is not None + else self.color_lookup + ) + + for idx, box_xyxy in enumerate(xyxy): + background_color = resolve_color( color=self.color, detections=detections, detection_idx=idx, @@ -1267,22 +1272,20 @@ def _draw_labels( color_lookup=color_lookup, ) - # Calculate text position - text_x = coordinates[0] + self.text_padding - text_y = coordinates[1] + self.text_padding + text_props[idx]["height"] - - # Draw background and text self.draw_rounded_rectangle( scene=scene, - xyxy=coordinates, - color=bg_color.as_bgr(), + xyxy=box_xyxy, + color=background_color.as_bgr(), border_radius=self.border_radius, ) + + text_x = box_xyxy[0] + self.text_padding + text_y = box_xyxy[1] + self.text_padding + text_properties[idx].height cv2.putText( img=scene, - text=text_props[idx]["text"], + text=text_properties[idx].text, org=(text_x, text_y), - fontFace=font, + fontFace=self._FONT, fontScale=self.text_scale, color=text_color.as_bgr(), thickness=self.text_thickness, @@ -1338,6 +1341,16 @@ class RichLabelAnnotator(BaseAnnotator): with support for Unicode characters by using a custom font. """ + @dataclass + class _TextProperties: + text: str + width: int + height: int + width_padded: int + height_padded: int + text_left: int + text_top: int + def __init__( self, color: Union[Color, ColorPalette] = ColorPalette.DEFAULT, @@ -1348,7 +1361,7 @@ def __init__( text_position: Position = Position.TOP_LEFT, color_lookup: ColorLookup = ColorLookup.CLASS, border_radius: int = 0, - use_smart_positioning: bool = False, + spread_out: bool = False, ): """ Args: @@ -1365,8 +1378,7 @@ def __init__( Options are `INDEX`, `CLASS`, `TRACK`. border_radius (int): The radius to apply round edges. If the selected value is higher than the lower dimension, width or height, is clipped. 
- use_smart_positioning (bool): Whether to use smart positioning to prevent - label overlapping or not. + spread_out (bool): Spread out the labels to avoid overlapping. """ self.color = color self.text_color = text_color @@ -1374,15 +1386,8 @@ def __init__( self.text_anchor = text_position self.color_lookup = color_lookup self.border_radius = border_radius - self.use_smart_positioning: bool = use_smart_positioning - if font_path is not None: - try: - self.font = ImageFont.truetype(font_path, font_size) - except OSError: - print(f"Font path '{font_path}' not found. Using PIL's default font.") - self.font = self._load_default_font(font_size) - else: - self.font = self._load_default_font(font_size) + self.spread_out = spread_out + self.font = self._load_font(font_size, font_path) @ensure_pil_image_for_annotation def annotate( @@ -1432,152 +1437,185 @@ def annotate( """ assert isinstance(scene, Image.Image) + self._validate_labels(labels, detections) + draw = ImageDraw.Draw(scene) + labels = self._get_labels_text(detections, labels) + text_properties = self._get_text_properties(draw, labels) + + labels_text = self._get_labels_text(detections, labels) + xyxy = self._calculate_label_positions( + detections, text_properties, self.text_anchor + ) - # Input validation + if self.spread_out: + xyxy = spread_out_boxes( + xyxy, step=2, max_iterations=len(xyxy) * 20 + ) + + self._draw_labels( + draw=draw, + xyxy=xyxy, + labels=labels_text, + text_properties=text_properties, + detections=detections, + custom_color_lookup=custom_color_lookup + ) + + return scene + + def _validate_labels(self, labels: Optional[List[str]], detections: Detections): if labels is not None and len(labels) != len(detections): raise ValueError( - f"Label count ({len(labels)}) != detection count ({len(detections)})" + f"The number of labels ({len(labels)}) does not match the " + f"number of detections ({len(detections)}). Each detection " + f"should have exactly 1 label." 
) - # Get anchor coordinates for all detections - detection_anchor_coordinates = detections.get_anchors_coordinates( + def _get_text_properties(self, draw, labels: List[str]) -> List[_TextProperties]: + """Gets text content and dimensions for all detections.""" + text_properties = [] + + for label in labels: + text_left, text_top, text_right, text_bottom = draw.textbbox( + (0, 0), label, font=self.font + ) + text_width = text_right - text_left + text_height = text_bottom - text_top + width_padded = text_width + 2 * self.text_padding + height_padded = text_height + 2 * self.text_padding + + text_properties.append(self._TextProperties( + text=label, + width=text_width, + height=text_height, + width_padded=width_padded, + height_padded=height_padded, + text_left=text_left, + text_top=text_top, + )) + + return text_properties + + def _calculate_label_positions(self, detections: Detections, text_properties: List[_TextProperties], text_anchor: Position) -> np.ndarray: + anchor_coordinates = detections.get_anchors_coordinates( anchor=self.text_anchor ).astype(int) - # Use the appropriate color lookup table - effective_color_lookup = ( + xyxy = [] + for idx, center_coords in enumerate(anchor_coordinates): + text_background_xyxy = resolve_text_background_xyxy( + center_coordinates=tuple(center_coords), + text_wh=( + text_properties[idx].width_padded, + text_properties[idx].height_padded, + ), + position=text_anchor, + ) + xyxy.append(text_background_xyxy) + + return np.array(xyxy) + + def _calculate_text_dimensions(self, draw, label_text: str) -> Tuple[Tuple[int, int], Tuple[int, int]]: + """ + Calculate text dimensions and offsets for the given label text. 
+ Args: + label_text: The text to measure + Returns: + (Tuple[int, int]): ((left_offset, top_offset), (padded_width, padded_height)) + """ + text_left, text_top, text_right, text_bottom = draw.textbbox( + (0, 0), label_text, font=self.font + ) + text_width = text_right - text_left + text_height = text_bottom - text_top + padded_width = text_width + 2 * self.text_padding + padded_height = text_height + 2 * self.text_padding + return (text_left, text_top), (padded_width, padded_height) + + @staticmethod + def _get_labels_text( + detections: Detections, custom_labels: Optional[List[str]]) -> List[str]: + + if custom_labels is not None: + return custom_labels + + labels = [] + for idx in range(len(detections)): + if CLASS_NAME_DATA_FIELD in detections.data: + labels.append(detections.data[CLASS_NAME_DATA_FIELD][idx]) + elif detections.class_id is not None: + labels.append(str(detections.class_id[idx])) + else: + labels.append(str(idx)) + return labels + + def _draw_labels( + self, + draw, + xyxy: np.ndarray, + labels: List[str], + text_properties: List[_TextProperties], + detections: Detections, + custom_color_lookup: Optional[np.ndarray], + ) -> None: + color_lookup = ( custom_color_lookup if custom_color_lookup is not None else self.color_lookup ) - def _get_detection_label_text(detection_index: int) -> str: - """ - Determine the appropriate label text for a detection. - Args: - detection_index: Index of the current detection - Returns: - str: The label text to display - """ - if labels is not None: - return labels[detection_index] - if CLASS_NAME_DATA_FIELD in detections.data: - return detections.data[CLASS_NAME_DATA_FIELD][detection_index] - if detections.class_id is not None: - return str(detections.class_id[detection_index]) - return str(detection_index) - - def _calculate_text_dimensions(label_text: str) -> tuple: - """ - Calculate text dimensions and offsets for the given label text. 
- Args: - label_text: The text to measure - Returns: - tuple: ((left_offset, top_offset), (padded_width, padded_height)) - """ - text_left, text_top, text_right, text_bottom = draw.textbbox( - (0, 0), label_text, font=self.font - ) - text_width = text_right - text_left - text_height = text_bottom - text_top - padded_width = text_width + 2 * self.text_padding - padded_height = text_height + 2 * self.text_padding - return (text_left, text_top), (padded_width, padded_height) - - # Prepare all annotation data - annotation_collection = [] - for detection_index, center_coordinate in enumerate( - detection_anchor_coordinates - ): - # Get colors once per detection + for idx, box_xyxy in enumerate(xyxy): background_color = resolve_color( color=self.color, detections=detections, - detection_idx=detection_index, - color_lookup=effective_color_lookup, + detection_idx=idx, + color_lookup=color_lookup, ) - label_text_color = resolve_color( + text_color = resolve_color( color=self.text_color, detections=detections, - detection_idx=detection_index, - color_lookup=effective_color_lookup, - ) - - # Get text and calculate dimensions - label_text = _get_detection_label_text(detection_index) - text_offset_coordinates, padded_dimensions = _calculate_text_dimensions( - label_text - ) - - # Calculate background coordinates - background_coordinates = resolve_text_background_xyxy( - center_coordinates=tuple(center_coordinate), - text_wh=padded_dimensions, - position=self.text_anchor, - ) - - # Store all data for this annotation - annotation_collection.append( - { - "label_text": label_text, - "background_color": background_color, - "text_color": label_text_color, - "text_offset": text_offset_coordinates, - "background_coordinates": background_coordinates, - } - ) - - # Convert coordinates to numpy array for processing - background_coordinate_array = np.array( - [data["background_coordinates"] for data in annotation_collection] - ) - - # Apply smart positioning if enabled - if 
self.use_smart_positioning: - background_coordinate_array = spread_out( - background_coordinate_array, step=2 + detection_idx=idx, + color_lookup=color_lookup, ) - # Draw annotations - for annotation_index, coordinates in enumerate(background_coordinate_array): - annotation_data = annotation_collection[annotation_index] - - # Calculate final text position label_x_position = ( - coordinates[0] + self.text_padding - annotation_data["text_offset"][0] + box_xyxy[0] + self.text_padding - text_properties[idx].text_left ) label_y_position = ( - coordinates[1] + self.text_padding - annotation_data["text_offset"][1] + box_xyxy[1] + self.text_padding - text_properties[idx].text_top ) draw.rounded_rectangle( - tuple(coordinates), + tuple(box_xyxy), radius=self.border_radius, - fill=annotation_data["background_color"].as_rgb(), + fill=background_color.as_rgb(), outline=None, ) draw.text( xy=(label_x_position, label_y_position), - text=annotation_data["label_text"], + text=labels[idx], font=self.font, - fill=annotation_data["text_color"].as_rgb(), + fill=text_color.as_rgb(), ) - return scene + @staticmethod - def _load_default_font(size): - """ - PIL either loads a font that accepts a size (e.g. on my machine) - or raises an error saying `load_default` does not accept arguments - (e.g. in Colab). - """ + def _load_font(font_size: int, font_path: Optional[str]): + def load_default_font(size): + try: + return ImageFont.load_default(size) + except TypeError: + return ImageFont.load_default() + + if font_path is None: + return load_default_font(font_size) + try: - font = ImageFont.load_default(size) - except TypeError: - font = ImageFont.load_default() - return font - + return ImageFont.truetype(font_path, font_size) + except OSError: + print(f"Font path '{font_path}' not found. 
Using PIL's default font.") + return load_default_font(font_size) class IconAnnotator(BaseAnnotator): """ diff --git a/supervision/detection/utils.py b/supervision/detection/utils.py index 308797eab..b874953df 100644 --- a/supervision/detection/utils.py +++ b/supervision/detection/utils.py @@ -1042,83 +1042,53 @@ def cross_product(anchors: np.ndarray, vector: Vector) -> np.ndarray: return np.cross(vector_at_zero, anchors - vector_start) -# Intelligent padding functions -def get_intersection_center( +def get_box_intersection( xyxy_1: np.ndarray, xyxy_2: np.ndarray -) -> Optional[Tuple[float, float]]: +) -> Optional[np.ndarray]: overlap_xmin = max(xyxy_1[0], xyxy_2[0]) overlap_ymin = max(xyxy_1[1], xyxy_2[1]) overlap_xmax = min(xyxy_1[2], xyxy_2[2]) overlap_ymax = min(xyxy_1[3], xyxy_2[3]) if overlap_xmin < overlap_xmax and overlap_ymin < overlap_ymax: - x_center = (overlap_xmin + overlap_xmax) / 2 - y_center = (overlap_ymin + overlap_ymax) / 2 - return (x_center, y_center) + return np.array([overlap_xmin, overlap_ymin, overlap_xmax, overlap_ymax]) else: return None +def get_unit_vector(xy_1: np.ndarray, xy_2: np.ndarray) -> np.ndarray: + direction = xy_2 - xy_1 + magnitude = np.linalg.norm(direction) + return direction / magnitude if magnitude > 0 else np.zeros(2) -def get_box_center(xyxy: np.ndarray) -> Tuple[float, float]: - x_center = (xyxy[0] + xyxy[2]) / 2 - y_center = (xyxy[1] + xyxy[3]) / 2 - return (x_center, y_center) +def spread_out_boxes(xyxy: np.ndarray, step: int, max_iterations: int = 100) -> np.ndarray: + if len(xyxy) == 0: + return xyxy - -def vector_with_length( - xy_1: Tuple[float, float], xy_2: Tuple[float, float], n: float -) -> Tuple[float, float]: - x1, y1 = xy_1 - x2, y2 = xy_2 - - dx = x2 - x1 - dy = y2 - y1 - - if dx == 0 and dy == 0: - return 0, 0 - - magnitude = math.sqrt(dx**2 + dy**2) - - unit_dx = dx / magnitude - unit_dy = dy / magnitude - - v1 = unit_dx * n - v2 = unit_dy * n - - return (v1, v2) - - -def pad(xyxy: np.ndarray, px: int, 
py: Optional[int] = None): - if py is None: - py = px - - result = xyxy.copy() - result[:, [0, 1]] -= [px, py] - result[:, [2, 3]] += [px, py] - - return result - - -def spread_out(xyxy: np.ndarray, step) -> np.ndarray: - xyxy_padded = pad(xyxy, px=step) - while True: + xyxy_padded = pad_boxes(xyxy, px=step) + for _ in range(max_iterations): iou = box_iou_batch(xyxy_padded, xyxy_padded) np.fill_diagonal(iou, 0) if np.all(iou == 0): - return pad(xyxy_padded, px=-step) + break i, j = np.unravel_index(np.argmax(iou), iou.shape) xyxy_i, xyxy_j = xyxy_padded[i], xyxy_padded[j] - intersection_center = get_intersection_center(xyxy_i, xyxy_j) - xyxy_i_center = get_box_center(xyxy_i) - xyxy_j_center = get_box_center(xyxy_j) + box_intersection = get_box_intersection(xyxy_i, xyxy_j) + assert box_intersection is not None, \ + "Since we checked IoU already, boxes should always intersect" + + intersection_center = (box_intersection[:2] + box_intersection[2:]) / 2 + xyxy_i_center = (xyxy_i[:2] + xyxy_i[2:]) / 2 + xyxy_j_center = (xyxy_j[:2] + xyxy_j[2:]) / 2 + + unit_vector_i = get_unit_vector(intersection_center, xyxy_i_center) + unit_vector_j = get_unit_vector(intersection_center, xyxy_j_center) - vector_i = vector_with_length(intersection_center, xyxy_i_center, step) - vector_j = vector_with_length(intersection_center, xyxy_j_center, step) + xyxy_padded[i, [0, 2]] += int(unit_vector_i[0] * step) + xyxy_padded[i, [1, 3]] += int(unit_vector_i[1] * step) + xyxy_padded[j, [0, 2]] += int(unit_vector_j[0] * step) + xyxy_padded[j, [1, 3]] += int(unit_vector_j[1] * step) - xyxy_padded[i, [0, 2]] += int(vector_i[0]) - xyxy_padded[i, [1, 3]] += int(vector_i[1]) - xyxy_padded[j, [0, 2]] += int(vector_j[0]) - xyxy_padded[j, [1, 3]] += int(vector_j[1]) + return pad_boxes(xyxy_padded, px=-step) diff --git a/supervision/keypoint/annotators.py b/supervision/keypoint/annotators.py index e82acbc65..ba1c5448d 100644 --- a/supervision/keypoint/annotators.py +++ 
b/supervision/keypoint/annotators.py @@ -5,9 +5,9 @@ import cv2 import numpy as np -from supervision import Rect, pad_boxes +from supervision.geometry.core import Rect from supervision.annotators.base import ImageType -from supervision.detection.utils import pad, spread_out +from supervision.detection.utils import pad_boxes, spread_out_boxes from supervision.draw.color import Color from supervision.draw.utils import draw_rounded_rectangle from supervision.keypoint.core import KeyPoints @@ -202,7 +202,7 @@ def __init__( text_thickness: int = 1, text_padding: int = 10, border_radius: int = 0, - use_smart_positioning: bool = False, + spread_out: bool = False, ): """ Args: @@ -217,16 +217,15 @@ def __init__( text_padding (int): The padding around the text. border_radius (int): The radius of the rounded corners of the boxes. Set to a high value to produce circles. - use_smart_positioning (bool): Whether to use smart positioning to prevent - label overlapping or not. + spread_out (bool): Spread out the labels to avoid overlap. 
""" - self.use_smart_positioning = use_smart_positioning self.border_radius: int = border_radius self.color: Union[Color, List[Color]] = color self.text_color: Union[Color, List[Color]] = text_color self.text_scale: float = text_scale self.text_thickness: int = text_thickness self.text_padding: int = text_padding + self.spread_out = spread_out def annotate( self, @@ -349,25 +348,25 @@ def annotate( text_colors = text_colors[mask] labels = labels[mask] - xyxy = np.array( - [ - self.get_text_bounding_box( - text=label, - font=font, - text_scale=self.text_scale, - text_thickness=self.text_thickness, - center_coordinates=tuple(anchor), - ) - for anchor, label in zip(anchors, labels) - ] - ) - - if self.use_smart_positioning: - xyxy_padded = pad(xyxy=xyxy, px=self.text_padding) - xyxy_padded = spread_out(xyxy_padded, step=2) - xyxy = pad(xyxy=xyxy_padded, px=-self.text_padding) - else: - xyxy_padded = pad_boxes(xyxy=xyxy, px=self.text_padding) + xyxy = np.array([ + self.get_text_bounding_box( + text=label, + font=font, + text_scale=self.text_scale, + text_thickness=self.text_thickness, + center_coordinates=tuple(anchor), + ) + for anchor, label in zip(anchors, labels) + ]) + xyxy_padded = pad_boxes(xyxy=xyxy, px=self.text_padding) + + if self.spread_out: + xyxy_padded = spread_out_boxes( + xyxy_padded, + step=2, + max_iterations=len(xyxy_padded) * 20 + ) + xyxy = pad_boxes(xyxy=xyxy_padded, px=-self.text_padding) for text, color, text_color, box, box_padded in zip( labels, colors, text_colors, xyxy, xyxy_padded From 920a2c0cda8251872339d49dc59180e27c50b20d Mon Sep 17 00:00:00 2001 From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Thu, 7 Nov 2024 15:27:30 +0000 Subject: [PATCH 130/161] =?UTF-8?q?fix(pre=5Fcommit):=20=F0=9F=8E=A8=20aut?= =?UTF-8?q?o=20format=20pre-commit=20hooks?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- supervision/annotators/core.py | 86 
++++++++++++++++-------------- supervision/detection/utils.py | 12 +++-- supervision/keypoint/annotators.py | 28 +++++----- 3 files changed, 68 insertions(+), 58 deletions(-) diff --git a/supervision/annotators/core.py b/supervision/annotators/core.py index 2864bfdd9..144e7c7f8 100644 --- a/supervision/annotators/core.py +++ b/supervision/annotators/core.py @@ -1,7 +1,7 @@ from dataclasses import dataclass from functools import lru_cache from math import sqrt -from typing import Any, Dict, List, Optional, Tuple, Union +from typing import List, Optional, Tuple, Union import cv2 import numpy as np @@ -1154,10 +1154,7 @@ def annotate( ) if self.spread_out: - xyxy = spread_out_boxes( - xyxy, - step=2, - max_iterations=len(xyxy) * 20) + xyxy = spread_out_boxes(xyxy, step=2, max_iterations=len(xyxy) * 20) self._draw_labels( scene=scene, @@ -1189,23 +1186,25 @@ def _get_text_properties(self, labels: List[str]) -> List[_TextProperties]: thickness=self.text_thickness, )[0] - text_properties.append(self._TextProperties( - text=label, - width=text_w, - height=text_h, - width_padded=text_w + 2 * self.text_padding, - height_padded=text_h + 2 * self.text_padding, - )) + text_properties.append( + self._TextProperties( + text=label, + width=text_w, + height=text_h, + width_padded=text_w + 2 * self.text_padding, + height_padded=text_h + 2 * self.text_padding, + ) + ) return text_properties @staticmethod def _get_labels_text( - detections: Detections, custom_labels: Optional[List[str]]) -> List[str]: - + detections: Detections, custom_labels: Optional[List[str]] + ) -> List[str]: if custom_labels is not None: return custom_labels - + labels = [] for idx in range(len(detections)): if CLASS_NAME_DATA_FIELD in detections.data: @@ -1248,9 +1247,9 @@ def _draw_labels( detections: Detections, custom_color_lookup: Optional[np.ndarray], ) -> None: - assert len(xyxy) == len(text_properties) == len(detections), ( - f"Number of text properties ({len(text_properties)}), xyxy ({len(xyxy)}) and 
detections ({len(detections)}) do not match." - ) + assert ( + len(xyxy) == len(text_properties) == len(detections) + ), f"Number of text properties ({len(text_properties)}), xyxy ({len(xyxy)}) and detections ({len(detections)}) do not match." color_lookup = ( custom_color_lookup @@ -1449,9 +1448,7 @@ def annotate( ) if self.spread_out: - xyxy = spread_out_boxes( - xyxy, step=2, max_iterations=len(xyxy) * 20 - ) + xyxy = spread_out_boxes(xyxy, step=2, max_iterations=len(xyxy) * 20) self._draw_labels( draw=draw, @@ -1459,7 +1456,7 @@ def annotate( labels=labels_text, text_properties=text_properties, detections=detections, - custom_color_lookup=custom_color_lookup + custom_color_lookup=custom_color_lookup, ) return scene @@ -1485,19 +1482,26 @@ def _get_text_properties(self, draw, labels: List[str]) -> List[_TextProperties] width_padded = text_width + 2 * self.text_padding height_padded = text_height + 2 * self.text_padding - text_properties.append(self._TextProperties( - text=label, - width=text_width, - height=text_height, - width_padded=width_padded, - height_padded=height_padded, - text_left=text_left, - text_top=text_top, - )) + text_properties.append( + self._TextProperties( + text=label, + width=text_width, + height=text_height, + width_padded=width_padded, + height_padded=height_padded, + text_left=text_left, + text_top=text_top, + ) + ) return text_properties - - def _calculate_label_positions(self, detections: Detections, text_properties: List[_TextProperties], text_anchor: Position) -> np.ndarray: + + def _calculate_label_positions( + self, + detections: Detections, + text_properties: List[_TextProperties], + text_anchor: Position, + ) -> np.ndarray: anchor_coordinates = detections.get_anchors_coordinates( anchor=self.text_anchor ).astype(int) @@ -1516,7 +1520,9 @@ def _calculate_label_positions(self, detections: Detections, text_properties: Li return np.array(xyxy) - def _calculate_text_dimensions(self, draw, label_text: str) -> Tuple[Tuple[int, int], 
Tuple[int, int]]: + def _calculate_text_dimensions( + self, draw, label_text: str + ) -> Tuple[Tuple[int, int], Tuple[int, int]]: """ Calculate text dimensions and offsets for the given label text. Args: @@ -1535,11 +1541,11 @@ def _calculate_text_dimensions(self, draw, label_text: str) -> Tuple[Tuple[int, @staticmethod def _get_labels_text( - detections: Detections, custom_labels: Optional[List[str]]) -> List[str]: - + detections: Detections, custom_labels: Optional[List[str]] + ) -> List[str]: if custom_labels is not None: return custom_labels - + labels = [] for idx in range(len(detections)): if CLASS_NAME_DATA_FIELD in detections.data: @@ -1599,7 +1605,6 @@ def _draw_labels( fill=text_color.as_rgb(), ) - @staticmethod def _load_font(font_size: int, font_path: Optional[str]): def load_default_font(size): @@ -1607,16 +1612,17 @@ def load_default_font(size): return ImageFont.load_default(size) except TypeError: return ImageFont.load_default() - + if font_path is None: return load_default_font(font_size) - + try: return ImageFont.truetype(font_path, font_size) except OSError: print(f"Font path '{font_path}' not found. Using PIL's default font.") return load_default_font(font_size) + class IconAnnotator(BaseAnnotator): """ A class for drawing an icon on an image, using provided detections. 
diff --git a/supervision/detection/utils.py b/supervision/detection/utils.py index b874953df..ede1ed355 100644 --- a/supervision/detection/utils.py +++ b/supervision/detection/utils.py @@ -1,4 +1,3 @@ -import math from itertools import chain from typing import Dict, List, Optional, Tuple, Union @@ -1055,12 +1054,16 @@ def get_box_intersection( else: return None + def get_unit_vector(xy_1: np.ndarray, xy_2: np.ndarray) -> np.ndarray: direction = xy_2 - xy_1 magnitude = np.linalg.norm(direction) return direction / magnitude if magnitude > 0 else np.zeros(2) -def spread_out_boxes(xyxy: np.ndarray, step: int, max_iterations: int = 100) -> np.ndarray: + +def spread_out_boxes( + xyxy: np.ndarray, step: int, max_iterations: int = 100 +) -> np.ndarray: if len(xyxy) == 0: return xyxy @@ -1076,8 +1079,9 @@ def spread_out_boxes(xyxy: np.ndarray, step: int, max_iterations: int = 100) -> xyxy_i, xyxy_j = xyxy_padded[i], xyxy_padded[j] box_intersection = get_box_intersection(xyxy_i, xyxy_j) - assert box_intersection is not None, \ - "Since we checked IoU already, boxes should always intersect" + assert ( + box_intersection is not None + ), "Since we checked IoU already, boxes should always intersect" intersection_center = (box_intersection[:2] + box_intersection[2:]) / 2 xyxy_i_center = (xyxy_i[:2] + xyxy_i[2:]) / 2 diff --git a/supervision/keypoint/annotators.py b/supervision/keypoint/annotators.py index ba1c5448d..4d3196b39 100644 --- a/supervision/keypoint/annotators.py +++ b/supervision/keypoint/annotators.py @@ -5,11 +5,11 @@ import cv2 import numpy as np -from supervision.geometry.core import Rect from supervision.annotators.base import ImageType from supervision.detection.utils import pad_boxes, spread_out_boxes from supervision.draw.color import Color from supervision.draw.utils import draw_rounded_rectangle +from supervision.geometry.core import Rect from supervision.keypoint.core import KeyPoints from supervision.keypoint.skeletons import SKELETONS_BY_VERTEX_COUNT from 
supervision.utils.conversion import ensure_cv2_image_for_annotation @@ -348,23 +348,23 @@ def annotate( text_colors = text_colors[mask] labels = labels[mask] - xyxy = np.array([ - self.get_text_bounding_box( - text=label, - font=font, - text_scale=self.text_scale, - text_thickness=self.text_thickness, - center_coordinates=tuple(anchor), - ) - for anchor, label in zip(anchors, labels) - ]) + xyxy = np.array( + [ + self.get_text_bounding_box( + text=label, + font=font, + text_scale=self.text_scale, + text_thickness=self.text_thickness, + center_coordinates=tuple(anchor), + ) + for anchor, label in zip(anchors, labels) + ] + ) xyxy_padded = pad_boxes(xyxy=xyxy, px=self.text_padding) if self.spread_out: xyxy_padded = spread_out_boxes( - xyxy_padded, - step=2, - max_iterations=len(xyxy_padded) * 20 + xyxy_padded, step=2, max_iterations=len(xyxy_padded) * 20 ) xyxy = pad_boxes(xyxy=xyxy_padded, px=-self.text_padding) From b66702c5ffae69ecf51bb1c6b9da44dce39b5af8 Mon Sep 17 00:00:00 2001 From: LinasKo Date: Thu, 7 Nov 2024 17:32:09 +0200 Subject: [PATCH 131/161] Remove dead code, surplus function args --- supervision/annotators/core.py | 32 ++++++-------------------------- 1 file changed, 6 insertions(+), 26 deletions(-) diff --git a/supervision/annotators/core.py b/supervision/annotators/core.py index 144e7c7f8..bfc20503b 100644 --- a/supervision/annotators/core.py +++ b/supervision/annotators/core.py @@ -1247,9 +1247,11 @@ def _draw_labels( detections: Detections, custom_color_lookup: Optional[np.ndarray], ) -> None: - assert ( - len(xyxy) == len(text_properties) == len(detections) - ), f"Number of text properties ({len(text_properties)}), xyxy ({len(xyxy)}) and detections ({len(detections)}) do not match." + assert len(xyxy) == len(text_properties) == len(detections), ( + f"Number of text properties ({len(text_properties)}), " + f"xyxy ({len(xyxy)}) and detections ({len(detections)}) " + "do not match." 
+ ) color_lookup = ( custom_color_lookup @@ -1442,7 +1444,6 @@ def annotate( labels = self._get_labels_text(detections, labels) text_properties = self._get_text_properties(draw, labels) - labels_text = self._get_labels_text(detections, labels) xyxy = self._calculate_label_positions( detections, text_properties, self.text_anchor ) @@ -1453,7 +1454,6 @@ def annotate( self._draw_labels( draw=draw, xyxy=xyxy, - labels=labels_text, text_properties=text_properties, detections=detections, custom_color_lookup=custom_color_lookup, @@ -1520,25 +1520,6 @@ def _calculate_label_positions( return np.array(xyxy) - def _calculate_text_dimensions( - self, draw, label_text: str - ) -> Tuple[Tuple[int, int], Tuple[int, int]]: - """ - Calculate text dimensions and offsets for the given label text. - Args: - label_text: The text to measure - Returns: - (Tuple[int, int]): ((left_offset, top_offset), (padded_width, padded_height)) - """ - text_left, text_top, text_right, text_bottom = draw.textbbox( - (0, 0), label_text, font=self.font - ) - text_width = text_right - text_left - text_height = text_bottom - text_top - padded_width = text_width + 2 * self.text_padding - padded_height = text_height + 2 * self.text_padding - return (text_left, text_top), (padded_width, padded_height) - @staticmethod def _get_labels_text( detections: Detections, custom_labels: Optional[List[str]] @@ -1560,7 +1541,6 @@ def _draw_labels( self, draw, xyxy: np.ndarray, - labels: List[str], text_properties: List[_TextProperties], detections: Detections, custom_color_lookup: Optional[np.ndarray], @@ -1600,7 +1580,7 @@ def _draw_labels( ) draw.text( xy=(label_x_position, label_y_position), - text=labels[idx], + text=text_properties[idx].text, font=self.font, fill=text_color.as_rgb(), ) From c6c447c52710628c9f9c4deffeb66183a290377b Mon Sep 17 00:00:00 2001 From: LinasKo Date: Thu, 7 Nov 2024 17:44:59 +0200 Subject: [PATCH 132/161] Imporved Discord shield-badge --- README.md | 140 
+++++++++++++++++++++++++++--------------------------- 1 file changed, 70 insertions(+), 70 deletions(-) diff --git a/README.md b/README.md index fe569183e..464fa4a4d 100644 --- a/README.md +++ b/README.md @@ -21,7 +21,7 @@ [![python-version](https://img.shields.io/pypi/pyversions/supervision)](https://badge.fury.io/py/supervision) [![colab](https://colab.research.google.com/assets/colab-badge.svg)](https://colab.research.google.com/github/roboflow/supervision/blob/main/demo.ipynb) [![gradio](https://img.shields.io/badge/%F0%9F%A4%97%20Hugging%20Face-Spaces-blue)](https://huggingface.co/spaces/Roboflow/Annotators) -[![discord](https://img.shields.io/discord/1159501506232451173)](https://discord.gg/GbfgXGJ8Bk) +[![discord](https://img.shields.io/discord/1159501506232451173?logo=discord&label=discord)](https://discord.gg/GbfgXGJ8Bk) [![built-with-material-for-mkdocs](https://img.shields.io/badge/Material_for_MkDocs-526CFE?logo=MaterialForMkDocs&logoColor=white)](https://squidfunk.github.io/mkdocs-material/)
@@ -135,88 +135,88 @@ for path, image, annotation in ds: - load - ```python - dataset = sv.DetectionDataset.from_yolo( - images_directory_path=..., - annotations_directory_path=..., - data_yaml_path=... - ) - - dataset = sv.DetectionDataset.from_pascal_voc( - images_directory_path=..., - annotations_directory_path=... - ) - - dataset = sv.DetectionDataset.from_coco( - images_directory_path=..., - annotations_path=... - ) - ``` + ```python + dataset = sv.DetectionDataset.from_yolo( + images_directory_path=..., + annotations_directory_path=..., + data_yaml_path=... + ) + + dataset = sv.DetectionDataset.from_pascal_voc( + images_directory_path=..., + annotations_directory_path=... + ) + + dataset = sv.DetectionDataset.from_coco( + images_directory_path=..., + annotations_path=... + ) + ``` - split - ```python - train_dataset, test_dataset = dataset.split(split_ratio=0.7) - test_dataset, valid_dataset = test_dataset.split(split_ratio=0.5) + ```python + train_dataset, test_dataset = dataset.split(split_ratio=0.7) + test_dataset, valid_dataset = test_dataset.split(split_ratio=0.5) - len(train_dataset), len(test_dataset), len(valid_dataset) - # (700, 150, 150) - ``` + len(train_dataset), len(test_dataset), len(valid_dataset) + # (700, 150, 150) + ``` - merge - ```python - ds_1 = sv.DetectionDataset(...) - len(ds_1) - # 100 - ds_1.classes - # ['dog', 'person'] - - ds_2 = sv.DetectionDataset(...) - len(ds_2) - # 200 - ds_2.classes - # ['cat'] - - ds_merged = sv.DetectionDataset.merge([ds_1, ds_2]) - len(ds_merged) - # 300 - ds_merged.classes - # ['cat', 'dog', 'person'] - ``` + ```python + ds_1 = sv.DetectionDataset(...) + len(ds_1) + # 100 + ds_1.classes + # ['dog', 'person'] + + ds_2 = sv.DetectionDataset(...) 
+ len(ds_2) + # 200 + ds_2.classes + # ['cat'] + + ds_merged = sv.DetectionDataset.merge([ds_1, ds_2]) + len(ds_merged) + # 300 + ds_merged.classes + # ['cat', 'dog', 'person'] + ``` - save - ```python - dataset.as_yolo( - images_directory_path=..., - annotations_directory_path=..., - data_yaml_path=... - ) - - dataset.as_pascal_voc( - images_directory_path=..., - annotations_directory_path=... - ) - - dataset.as_coco( - images_directory_path=..., - annotations_path=... - ) - ``` + ```python + dataset.as_yolo( + images_directory_path=..., + annotations_directory_path=..., + data_yaml_path=... + ) + + dataset.as_pascal_voc( + images_directory_path=..., + annotations_directory_path=... + ) + + dataset.as_coco( + images_directory_path=..., + annotations_path=... + ) + ``` - convert - ```python - sv.DetectionDataset.from_yolo( - images_directory_path=..., - annotations_directory_path=..., - data_yaml_path=... - ).as_pascal_voc( - images_directory_path=..., - annotations_directory_path=... - ) - ``` + ```python + sv.DetectionDataset.from_yolo( + images_directory_path=..., + annotations_directory_path=..., + data_yaml_path=... + ).as_pascal_voc( + images_directory_path=..., + annotations_directory_path=... + ) + ``` From c3d7dc0ee3d652f56cc144daee1bccc6763abe1d Mon Sep 17 00:00:00 2001 From: LinasKo Date: Thu, 7 Nov 2024 17:59:02 +0200 Subject: [PATCH 133/161] Move keypoint to detections converstion to sv.KeyPoints. 
* Test colab: https://colab.research.google.com/drive/10PMuW0IyaksofqI70NLnB_-Fnr4oKVmk?usp=sharing --- docs/how_to/track_objects.md | 7 ++-- docs/keypoint/core.md | 1 + docs/utils/datatypes.md | 12 ------ mkdocs.yml | 1 - supervision/__init__.py | 1 - supervision/keypoint/core.py | 67 ++++++++++++++++++++++++++++++- supervision/utils/datatypes.py | 72 ---------------------------------- 7 files changed, 71 insertions(+), 90 deletions(-) delete mode 100644 docs/utils/datatypes.md delete mode 100644 supervision/utils/datatypes.py diff --git a/docs/how_to/track_objects.md b/docs/how_to/track_objects.md index 784cb7bf1..29fa83984 100644 --- a/docs/how_to/track_objects.md +++ b/docs/how_to/track_objects.md @@ -1,5 +1,6 @@ --- comments: true +status: new --- # Track Objects @@ -317,7 +318,7 @@ movement patterns and interactions between objects in the video. ## Tracking Key Points -Keypoint tracking is currently supported via the conversion of `KeyPoints` to `Detections`. This is achieved with the [`keypoints_to_detections`](/latest/utils/datatypes/#supervision.utils.datatypes.keypoints_to_detections) function. We'll use a different video as well as [`DetectionsSmoother`](/latest/detection/tools/smoother/) to stabilize the boxes. +Keypoint tracking is currently supported via the conversion of `KeyPoints` to `Detections`. This is achieved with the [`KeyPoints.as_detections()`](/latest/keypoint/core/#supervision.keypoint.core.KeyPoints.as_detections) function. We'll use a different video as well as [`DetectionsSmoother`](/latest/detection/tools/smoother/) to stabilize the boxes. !!! 
tip @@ -340,7 +341,7 @@ Keypoint tracking is currently supported via the conversion of `KeyPoints` to `D def callback(frame: np.ndarray, _: int) -> np.ndarray: results = model(frame)[0] keypoints = sv.KeyPoints.from_ultralytics(results) - detections = sv.keypoints_to_detections(keypoints) + detections = keypoints.as_detections() detections = tracker.update_with_detections(detections) detections = smoother.update_with_detections(detections) @@ -382,7 +383,7 @@ Keypoint tracking is currently supported via the conversion of `KeyPoints` to `D def callback(frame: np.ndarray, _: int) -> np.ndarray: results = model.infer(frame)[0] keypoints = sv.KeyPoints.from_inference(results) - detections = sv.keypoints_to_detections(keypoints) + detections = keypoints.as_detections() detections = tracker.update_with_detections(detections) detections = smoother.update_with_detections(detections) diff --git a/docs/keypoint/core.md b/docs/keypoint/core.md index 6f42c254d..7354babab 100644 --- a/docs/keypoint/core.md +++ b/docs/keypoint/core.md @@ -1,5 +1,6 @@ --- comments: true +status: new --- # Keypoint Detection diff --git a/docs/utils/datatypes.md b/docs/utils/datatypes.md deleted file mode 100644 index 5e0560bd3..000000000 --- a/docs/utils/datatypes.md +++ /dev/null @@ -1,12 +0,0 @@ ---- -comments: true -status: new ---- - -# Data Types Utils - - - -:::supervision.utils.datatypes.keypoints_to_detections diff --git a/mkdocs.yml b/mkdocs.yml index 8c939cf07..b30dbcfcc 100644 --- a/mkdocs.yml +++ b/mkdocs.yml @@ -79,7 +79,6 @@ nav: - File: utils/file.md - Draw: utils/draw.md - Geometry: utils/geometry.md - - Datatypes: utils/datatypes.md - Assets: assets.md - Cookbooks: cookbooks.md - Cheatsheet: https://roboflow.github.io/cheatsheet-supervision/ diff --git a/supervision/__init__.py b/supervision/__init__.py index 04813ef22..746b2f67d 100644 --- a/supervision/__init__.py +++ b/supervision/__init__.py @@ -100,7 +100,6 @@ from supervision.metrics.detection import ConfusionMatrix, 
MeanAveragePrecision from supervision.tracker.byte_tracker.core import ByteTrack from supervision.utils.conversion import cv2_to_pillow, pillow_to_cv2 -from supervision.utils.datatypes import keypoints_to_detections from supervision.utils.file import list_files_with_extensions from supervision.utils.image import ( ImageSink, diff --git a/supervision/keypoint/core.py b/supervision/keypoint/core.py index 252fb63f3..4b8e9d55b 100644 --- a/supervision/keypoint/core.py +++ b/supervision/keypoint/core.py @@ -2,12 +2,13 @@ from contextlib import suppress from dataclasses import dataclass, field -from typing import Any, Dict, Iterator, List, Optional, Tuple, Union +from typing import Any, Dict, Iterable, Iterator, List, Optional, Tuple, Union import numpy as np import numpy.typing as npt from supervision.config import CLASS_NAME_DATA_FIELD +from supervision.detection.core import Detections from supervision.detection.utils import get_data_item, is_data_equal from supervision.validators import validate_keypoints_fields @@ -620,3 +621,67 @@ def is_empty(self) -> bool: empty_keypoints = KeyPoints.empty() empty_keypoints.data = self.data return self == empty_keypoints + + def as_detections( + self, selected_keypoint_indices: Optional[Iterable[int]] = None + ) -> Detections: + """ + Convert a KeyPoints object to a Detections object. This + approximates the bounding box of the detected object by + taking the bounding box that fits all keypoints. + + Arguments: + selected_keypoint_indices (Optional[Iterable[int]]): The + indices of the keypoints to include in the bounding box + calculation. This helps focus on a subset of keypoints, + e.g. when some are occluded. Captures all keypoints by default. + + Returns: + detections (Detections): The converted detections object. + + Example: + ```python + keypoints = sv.KeyPoints.from_inference(...) 
+ detections = keypoints.as_detections() + ``` + """ + if self.is_empty(): + return Detections.empty() + + detections_list = [] + for i, xy in enumerate(self.xy): + if selected_keypoint_indices: + xy = xy[selected_keypoint_indices] + + # [0, 0] used by some frameworks to indicate missing keypoints + xy = xy[~np.all(xy == 0, axis=1)] + if len(xy) == 0: + xyxy = np.array([[0, 0, 0, 0]], dtype=np.float32) + else: + x_min = xy[:, 0].min() + x_max = xy[:, 0].max() + y_min = xy[:, 1].min() + y_max = xy[:, 1].max() + xyxy = np.array([[x_min, y_min, x_max, y_max]], dtype=np.float32) + + if self.confidence is None: + confidence = None + else: + confidence = self.confidence[i] + if selected_keypoint_indices: + confidence = confidence[selected_keypoint_indices] + confidence = np.array([confidence.mean()], dtype=np.float32) + + detections_list.append( + Detections( + xyxy=xyxy, + confidence=confidence, + ) + ) + + detections = Detections.merge(detections_list) + detections.class_id = self.class_id + detections.data = self.data + detections = detections[detections.area > 0] + + return detections diff --git a/supervision/utils/datatypes.py b/supervision/utils/datatypes.py deleted file mode 100644 index 6f2e8879c..000000000 --- a/supervision/utils/datatypes.py +++ /dev/null @@ -1,72 +0,0 @@ -from typing import Iterable, Optional - -import numpy as np - -from supervision.detection.core import Detections -from supervision.keypoint.core import KeyPoints - - -def keypoints_to_detections( - keypoints: KeyPoints, selected_keypoint_indices: Optional[Iterable[int]] = None -) -> Detections: - """ - Convert a KeyPoints object to a Detections object. This - approximates the bounding box of the detected object by - taking the bounding box that fits all keypoints. - - Arguments: - keypoints (KeyPoints): The keypoints to convert to detections. - selected_keypoint_indices (Optional[Iterable[int]]): The - indices of the keypoints to include in the bounding box - calculation. 
This helps focus on a subset of keypoints, - e.g. when some are occluded. Captures all keypoints by default. - - Returns: - detections (Detections): The converted detections object. - - Example: - ```python - keypoints = sv.KeyPoints.from_inference(...) - detections = keypoints_to_detections(keypoints) - ``` - """ - if keypoints.is_empty(): - return Detections.empty() - - detections_list = [] - for i, xy in enumerate(keypoints.xy): - if selected_keypoint_indices: - xy = xy[selected_keypoint_indices] - - # [0, 0] used by some frameworks to indicate missing keypoints - xy = xy[~np.all(xy == 0, axis=1)] - if len(xy) == 0: - xyxy = np.array([[0, 0, 0, 0]], dtype=np.float32) - else: - x_min = xy[:, 0].min() - x_max = xy[:, 0].max() - y_min = xy[:, 1].min() - y_max = xy[:, 1].max() - xyxy = np.array([[x_min, y_min, x_max, y_max]], dtype=np.float32) - - if keypoints.confidence is None: - confidence = None - else: - confidence = keypoints.confidence[i] - if selected_keypoint_indices: - confidence = confidence[selected_keypoint_indices] - confidence = np.array([confidence.mean()], dtype=np.float32) - - detections_list.append( - Detections( - xyxy=xyxy, - confidence=confidence, - ) - ) - - detections = Detections.merge(detections_list) - detections.class_id = keypoints.class_id - detections.data = keypoints.data - detections = detections[detections.area > 0] - - return detections From abcb055815b5ec3e9999dd12eb1dd3eae7b7ab84 Mon Sep 17 00:00:00 2001 From: LinasKo Date: Thu, 7 Nov 2024 18:28:34 +0200 Subject: [PATCH 134/161] Removed LineZone property docstrings; added 2 properties to Attributes --- supervision/detection/line_zone.py | 20 ++++---------------- 1 file changed, 4 insertions(+), 16 deletions(-) diff --git a/supervision/detection/line_zone.py b/supervision/detection/line_zone.py index 422bc9c5c..5f393d881 100644 --- a/supervision/detection/line_zone.py +++ b/supervision/detection/line_zone.py @@ -42,6 +42,10 @@ class LineZone: to inside. 
out_count (int): The number of objects that have crossed the line from inside to outside. + in_count_per_class (Dict[int, int]): Number of objects of each class that have + crossed the line from outside to inside. + out_count_per_class (Dict[int, int]): Number of objects of each class that have + crossed the line from inside to outside. Example: ```python @@ -107,34 +111,18 @@ def __init__( @property def in_count(self) -> int: - """ - Number of objects that have crossed the line from - outside to inside. - """ return sum(self._in_count_per_class.values()) @property def out_count(self) -> int: - """ - Number of objects that have crossed the line from - inside to outside. - """ return sum(self._out_count_per_class.values()) @property def in_count_per_class(self) -> Dict[int, int]: - """ - Number of objects of each class that have crossed - the line from outside to inside. - """ return dict(self._in_count_per_class) @property def out_count_per_class(self) -> Dict[int, int]: - """ - Number of objects of each class that have crossed the line - from inside to outside. 
- """ return dict(self._out_count_per_class) def trigger(self, detections: Detections) -> Tuple[np.ndarray, np.ndarray]: From 1b101b49ac2cf0ecfdaf283219d99e8755c1ba8e Mon Sep 17 00:00:00 2001 From: LinasKo Date: Thu, 7 Nov 2024 18:32:58 +0200 Subject: [PATCH 135/161] Renamed crossing_acceptance_threshold to minimum_crossing_threshold --- supervision/detection/line_zone.py | 6 +++--- test/detection/test_line_counter.py | 12 ++++++------ 2 files changed, 9 insertions(+), 9 deletions(-) diff --git a/supervision/detection/line_zone.py b/supervision/detection/line_zone.py index 5f393d881..b63660bc4 100644 --- a/supervision/detection/line_zone.py +++ b/supervision/detection/line_zone.py @@ -79,7 +79,7 @@ def __init__( Position.BOTTOM_LEFT, Position.BOTTOM_RIGHT, ), - crossing_acceptance_threshold: int = 1, + minimum_crossing_threshold: int = 1, ): """ Args: @@ -90,7 +90,7 @@ def __init__( to consider when deciding on whether the detection has passed the line counter or not. By default, this contains the four corners of the detection's bounding box - crossing_acceptance_threshold (int): Detection needs to be seen + minimum_crossing_threshold (int): Detection needs to be seen on the other side of the line for this many frames to be considered as having crossed the line. 
This is useful when dealing with unstable bounding boxes or when detections @@ -98,7 +98,7 @@ def __init__( """ self.vector = Vector(start=start, end=end) self.limits = self._calculate_region_of_interest_limits(vector=self.vector) - self.crossing_history_length = max(2, crossing_acceptance_threshold + 1) + self.crossing_history_length = max(2, minimum_crossing_threshold + 1) self.crossing_state_history: Dict[int, Deque[bool]] = defaultdict( lambda: deque(maxlen=self.crossing_history_length) ) diff --git a/test/detection/test_line_counter.py b/test/detection/test_line_counter.py index d0ec5fbd2..a140add55 100644 --- a/test/detection/test_line_counter.py +++ b/test/detection/test_line_counter.py @@ -493,7 +493,7 @@ def test_line_zone_multiple_detections( @pytest.mark.parametrize( - "vector, xyxy_sequence, triggering_anchors, crossing_acceptance_threshold, " + "vector, xyxy_sequence, triggering_anchors, minimum_crossing_threshold, " "expected_crossed_in, expected_crossed_out", [ ( # Detection lingers around line, all crosses counted @@ -578,7 +578,7 @@ def test_line_zone_one_detection_long_horizon( vector: Vector, xyxy_sequence: List[List[float]], triggering_anchors: List[Position], - crossing_acceptance_threshold: int, + minimum_crossing_threshold: int, expected_crossed_in: List[bool], expected_crossed_out: List[bool], ) -> None: @@ -586,7 +586,7 @@ def test_line_zone_one_detection_long_horizon( start=vector.start, end=vector.end, triggering_anchors=triggering_anchors, - crossing_acceptance_threshold=crossing_acceptance_threshold, + minimum_crossing_threshold=minimum_crossing_threshold, ) crossed_in_list = [] @@ -609,7 +609,7 @@ def test_line_zone_one_detection_long_horizon( @pytest.mark.parametrize( - "vector, xyxy_sequence, anchors, crossing_acceptance_threshold, " + "vector, xyxy_sequence, anchors, minimum_crossing_threshold, " "expected_crossed_in, expected_crossed_out, expected_count_in, " "expected_count_out, exception", [ @@ -743,7 +743,7 @@ def 
test_line_zone_long_horizon_disappearing_detections( vector: Vector, xyxy_sequence: List[List[Optional[List[float]]]], anchors: List[Position], - crossing_acceptance_threshold: int, + minimum_crossing_threshold: int, expected_crossed_in: List[List[bool]], expected_crossed_out: List[List[bool]], expected_count_in: List[int], @@ -755,7 +755,7 @@ def test_line_zone_long_horizon_disappearing_detections( start=vector.start, end=vector.end, triggering_anchors=anchors, - crossing_acceptance_threshold=crossing_acceptance_threshold, + minimum_crossing_threshold=minimum_crossing_threshold, ) crossed_in_list = [] crossed_out_list = [] From 2084f57d76420fb8de1f31c7b09f8db1fde4639c Mon Sep 17 00:00:00 2001 From: LinasKo Date: Thu, 7 Nov 2024 20:26:29 +0200 Subject: [PATCH 136/161] Split How To Track Keypoints into multiple sections --- docs/how_to/track_objects.md | 318 ++++++++++++++++++++++++++++++----- 1 file changed, 279 insertions(+), 39 deletions(-) diff --git a/docs/how_to/track_objects.md b/docs/how_to/track_objects.md index 29fa83984..1b321e7fe 100644 --- a/docs/how_to/track_objects.md +++ b/docs/how_to/track_objects.md @@ -14,6 +14,8 @@ take you through the steps to perform inference using the YOLOv8 model via eithe you'll discover how to track these objects efficiently and annotate your video content for a deeper analysis. +## Object Detection & Segmentation + To make it easier for you to follow our tutorial download the video we will use as an example. You can do this using [`supervision[assets]`](/latest/assets/) extension. @@ -22,14 +24,13 @@ example. You can do this using from supervision.assets import download_assets, VideoAssets download_assets(VideoAssets.PEOPLE_WALKING) -download_assets(VideoAssets.SKIING) ``` -## Run Inference +### Run Inference First, you'll need to obtain predictions from your object detection or segmentation model. In this tutorial, we are using the YOLOv8 model as an example. 
However, @@ -42,6 +43,10 @@ by obtaining model predictions and then annotating the frame based on these pred This `callback` function will be essential in the subsequent steps of the tutorial, as it will be modified to include tracking, labeling, and trace annotations. +!!! tip + + Both object detection and segmentation models are supported. Try it with `yolov8n.pt` or `yolov8n-640-seg`! + === "Ultralytics" ```{ .py } @@ -90,7 +95,7 @@ it will be modified to include tracking, labeling, and trace annotations. -## Tracking +### Tracking After running inference and obtaining predictions, the next step is to track the detected objects throughout the video. Utilizing Supervision’s @@ -146,7 +151,7 @@ enabling the continuous following of the object's motion path across different f ) ``` -## Annotate Video with Tracking IDs +### Annotate Video with Tracking IDs Annotating the video with tracking IDs helps in distinguishing and following each object distinctly. With the @@ -228,7 +233,7 @@ offering a clear visual representation of each object's class and unique identif -## Annotate Video with Traces +### Annotate Video with Traces Adding traces to the video involves overlaying the historical paths of the detected objects. This feature, powered by the @@ -316,9 +321,96 @@ movement patterns and interactions between objects in the video. -## Tracking Key Points +## Keypoints + +Models aren't limited to object detection and segmentation. Keypoint detection allows for detailed analysis of body joints and connections, especially valuable for applications like human pose estimation. This section introduces keypoint tracking. We'll walk through the steps of annotating keypoints, converting them into bounding box detections compatible with `ByteTrack`, and applying detection smoothing for enhanced stability. + +To make it easier for you to follow our tutorial, let's download the video we will use as an +example. 
You can do this using [`supervision[assets]`](/latest/assets/) extension. + +```python +from supervision.assets import download_assets, VideoAssets + +download_assets(VideoAssets.SKIING) +``` + + + +### Keypoint Detection + +First, you'll need to obtain predictions from your keypoint detection model. In this tutorial, we are using the YOLOv8 model as an example. However, +Supervision is versatile and compatible with various models. Check this [link](/latest/keypoint/core/) for guidance on how to plug in other models. + +We will define a `callback` function, which will process each frame of the video by obtaining model predictions and then annotating the frame based on these predictions. + +Let's immediately visualize the results with our [`EdgeAnnotator`](/latest/keypoint/annotators/#supervision.keypoint.annotators.EdgeAnnotator) and [`VertexAnnotator`](https://supervision.roboflow.com/latest/keypoint/annotators/#supervision.keypoint.annotators.VertexAnnotator). + +=== "Ultralytics" + + ```{ .py hl_lines="5 10-11" } + import numpy as np + import supervision as sv + from ultralytics import YOLO -Keypoint tracking is currently supported via the conversion of `KeyPoints` to `Detections`. This is achieved with the [`KeyPoints.as_detections()`](/latest/keypoint/core/#supervision.keypoint.core.KeyPoints.as_detections) function. We'll use a different video as well as [`DetectionsSmoother`](/latest/detection/tools/smoother/) to stabilize the boxes. 
+ model = YOLO("yolov8m-pose.pt") + edge_annotator = sv.EdgeAnnotator() + vertex_annotator = sv.VertexAnnotator() + + def callback(frame: np.ndarray, _: int) -> np.ndarray: + results = model(frame)[0] + key_points = sv.KeyPoints.from_ultralytics(results) + + annotated_frame = edge_annotator.annotate( + frame.copy(), key_points=key_points) + return vertex_annotator.annotate( + annotated_frame, key_points=key_points) + + sv.process_video( + source_path="skiing.mp4", + target_path="result.mp4", + callback=callback + ) + ``` + +=== "Inference" + + ```{ .py hl_lines="5-6 11-12" } + import numpy as np + import supervision as sv + from inference.models.utils import get_roboflow_model + + model = get_roboflow_model( + model_id="yolov8m-pose-640", api_key=) + edge_annotator = sv.EdgeAnnotator() + vertex_annotator = sv.VertexAnnotator() + + def callback(frame: np.ndarray, _: int) -> np.ndarray: + results = model.infer(frame)[0] + key_points = sv.KeyPoints.from_inference(results) + + annotated_frame = edge_annotator.annotate( + frame.copy(), key_points=key_points) + return vertex_annotator.annotate( + annotated_frame, key_points=key_points) + + sv.process_video( + source_path="skiing.mp4", + target_path="result.mp4", + callback=callback + ) + ``` + + + +### Convert to Detections + +Keypoint tracking is currently supported via the conversion of `KeyPoints` to `Detections`. This is achieved with the [`KeyPoints.as_detections()`](/latest/keypoint/core/#supervision.keypoint.core.KeyPoints.as_detections) function. + +Let's convert to detections and visualize the results with our [`BoxAnnotator`](/latest/detection/annotators/#supervision.annotators.core.BoxAnnotator). !!! 
tip @@ -326,35 +418,187 @@ Keypoint tracking is currently supported via the conversion of `KeyPoints` to `D === "Ultralytics" - ```{ .py hl_lines="5 7 14-15 17 33" } + ```{ .py hl_lines="8 13 19-20" } import numpy as np import supervision as sv from ultralytics import YOLO model = YOLO("yolov8m-pose.pt") + edge_annotator = sv.EdgeAnnotator() + vertex_annotator = sv.VertexAnnotator() + box_annotator = sv.BoxAnnotator() + + def callback(frame: np.ndarray, _: int) -> np.ndarray: + results = model(frame)[0] + key_points = sv.KeyPoints.from_ultralytics(results) + detections = key_points.as_detections() + + annotated_frame = edge_annotator.annotate( + frame.copy(), key_points=key_points) + annotated_frame = vertex_annotator.annotate( + annotated_frame, key_points=key_points) + return box_annotator.annotate( + annotated_frame, detections=detections) + + sv.process_video( + source_path="skiing.mp4", + target_path="result.mp4", + callback=callback + ) + ``` + +=== "Inference" + + ```{ .py hl_lines="9 14 20-21" } + import numpy as np + import supervision as sv + from inference.models.utils import get_roboflow_model + + model = get_roboflow_model( + model_id="yolov8m-pose-640", api_key=) + edge_annotator = sv.EdgeAnnotator() + vertex_annotator = sv.VertexAnnotator() + box_annotator = sv.BoxAnnotator() + + def callback(frame: np.ndarray, _: int) -> np.ndarray: + results = model.infer(frame)[0] + key_points = sv.KeyPoints.from_inference(results) + detections = key_points.as_detections() + + annotated_frame = edge_annotator.annotate( + frame.copy(), key_points=key_points) + annotated_frame = vertex_annotator.annotate( + annotated_frame, key_points=key_points) + return box_annotator.annotate( + annotated_frame, detections=detections) + + sv.process_video( + source_path="skiing.mp4", + target_path="result.mp4", + callback=callback + ) + ``` + + + +### Keypoint Tracking + +Now that we have a `Detections` object, we can track it throughout the video. 
Utilizing Supervision’s [`sv.ByteTrack`](/latest/trackers/#supervision.tracker.byte_tracker.core.ByteTrack) functionality, each detected object is assigned a unique tracker ID, enabling the continuous following of the object's motion path across different frames. We shall visualize the result with `TraceAnnotator`. + +=== "Ultralytics" + + ```{ .py hl_lines="10-11 17 25-26" } + import numpy as np + import supervision as sv + from ultralytics import YOLO + + model = YOLO("yolov8m-pose.pt") + edge_annotator = sv.EdgeAnnotator() + vertex_annotator = sv.VertexAnnotator() + box_annotator = sv.BoxAnnotator() + + tracker = sv.ByteTrack() + trace_annotator = sv.TraceAnnotator() + + def callback(frame: np.ndarray, _: int) -> np.ndarray: + results = model(frame)[0] + key_points = sv.KeyPoints.from_ultralytics(results) + detections = key_points.as_detections() + detections = tracker.update_with_detections(detections) + + annotated_frame = edge_annotator.annotate( + frame.copy(), key_points=key_points) + annotated_frame = vertex_annotator.annotate( + annotated_frame, key_points=key_points) + annotated_frame = box_annotator.annotate( + annotated_frame, detections=detections) + return trace_annotator.annotate( + annotated_frame, detections=detections) + + sv.process_video( + source_path="skiing.mp4", + target_path="result.mp4", + callback=callback + ) + ``` + +=== "Inference" + + ```{ .py hl_lines="11-12 18 26-27" } + import numpy as np + import supervision as sv + from inference.models.utils import get_roboflow_model + + model = get_roboflow_model( + model_id="yolov8m-pose-640", api_key=) + edge_annotator = sv.EdgeAnnotator() + vertex_annotator = sv.VertexAnnotator() + box_annotator = sv.BoxAnnotator() + + tracker = sv.ByteTrack() + trace_annotator = sv.TraceAnnotator() + + def callback(frame: np.ndarray, _: int) -> np.ndarray: + results = model.infer(frame)[0] + key_points = sv.KeyPoints.from_inference(results) + detections = key_points.as_detections() + detections = 
tracker.update_with_detections(detections) + + annotated_frame = edge_annotator.annotate( + frame.copy(), key_points=key_points) + annotated_frame = vertex_annotator.annotate( + annotated_frame, key_points=key_points) + annotated_frame = box_annotator.annotate( + annotated_frame, detections=detections) + return trace_annotator.annotate( + annotated_frame, detections=detections) + + sv.process_video( + source_path="skiing.mp4", + target_path="result.mp4", + callback=callback + ) + ``` + + + +### Bonus: Smoothing + +We could stop here as we have successfully tracked the object detected by the keypoint model. However, we can further enhance the stability of the boxes by applying [`DetectionsSmoother`](/latest/detection/tools/smoother/). This tool helps in stabilizing the boxes by smoothing the bounding box coordinates across frames. It is very simple to use: + +=== "Ultralytics" + + ```{ .py hl_lines="11 19" } + import numpy as np + import supervision as sv + from ultralytics import YOLO + + model = YOLO("yolov8m-pose.pt") + edge_annotator = sv.EdgeAnnotator() + vertex_annotator = sv.VertexAnnotator() + box_annotator = sv.BoxAnnotator() + tracker = sv.ByteTrack() smoother = sv.DetectionsSmoother() - box_annotator = sv.BoundingBoxAnnotator() - label_annotator = sv.LabelAnnotator() trace_annotator = sv.TraceAnnotator() def callback(frame: np.ndarray, _: int) -> np.ndarray: results = model(frame)[0] - keypoints = sv.KeyPoints.from_ultralytics(results) - detections = keypoints.as_detections() + key_points = sv.KeyPoints.from_ultralytics(results) + detections = key_points.as_detections() detections = tracker.update_with_detections(detections) detections = smoother.update_with_detections(detections) - labels = [ - f"#{tracker_id} {results.names[class_id]}" - for class_id, tracker_id - in zip(detections.class_id, detections.tracker_id) - ] - + annotated_frame = edge_annotator.annotate( + frame.copy(), key_points=key_points) + annotated_frame = vertex_annotator.annotate( + 
annotated_frame, key_points=key_points) annotated_frame = box_annotator.annotate( - frame.copy(), detections=detections) - annotated_frame = label_annotator.annotate( - annotated_frame, detections=detections, labels=labels) + annotated_frame, detections=detections) return trace_annotator.annotate( annotated_frame, detections=detections) @@ -367,36 +611,34 @@ Keypoint tracking is currently supported via the conversion of `KeyPoints` to `D === "Inference" - ```{ .py hl_lines="5-6 8 15-16 18 34" } + ```{ .py hl_lines="12 20" } import numpy as np import supervision as sv from inference.models.utils import get_roboflow_model model = get_roboflow_model( model_id="yolov8m-pose-640", api_key=) + edge_annotator = sv.EdgeAnnotator() + vertex_annotator = sv.VertexAnnotator() + box_annotator = sv.BoxAnnotator() + tracker = sv.ByteTrack() smoother = sv.DetectionsSmoother() - box_annotator = sv.BoundingBoxAnnotator() - label_annotator = sv.LabelAnnotator() trace_annotator = sv.TraceAnnotator() def callback(frame: np.ndarray, _: int) -> np.ndarray: results = model.infer(frame)[0] - keypoints = sv.KeyPoints.from_inference(results) - detections = keypoints.as_detections() + key_points = sv.KeyPoints.from_inference(results) + detections = key_points.as_detections() detections = tracker.update_with_detections(detections) detections = smoother.update_with_detections(detections) - labels = [ - f"#{tracker_id} {results.names[class_id]}" - for class_id, tracker_id - in zip(detections.class_id, detections.tracker_id) - ] - + annotated_frame = edge_annotator.annotate( + frame.copy(), key_points=key_points) + annotated_frame = vertex_annotator.annotate( + annotated_frame, key_points=key_points) annotated_frame = box_annotator.annotate( - frame.copy(), detections=detections) - annotated_frame = label_annotator.annotate( - annotated_frame, detections=detections, labels=labels) + annotated_frame, detections=detections) return trace_annotator.annotate( annotated_frame, detections=detections) @@ 
-408,9 +650,7 @@ Keypoint tracking is currently supported via the conversion of `KeyPoints` to `D ``` -This structured walkthrough should give a detailed pathway to annotate videos -effectively using Supervision’s various functionalities, including object tracking and -trace annotations. +This structured walkthrough should give a detailed pathway to annotate videos effectively using Supervision’s various functionalities, including object tracking and trace annotations. From 4ed05e76fd50755351a0bbcf4ab5a01bc7adc3f6 Mon Sep 17 00:00:00 2001 From: LinasKo Date: Thu, 7 Nov 2024 20:58:18 +0200 Subject: [PATCH 137/161] Add example images for metrics --- supervision/metrics/f1_score.py | 4 ++++ supervision/metrics/mean_average_precision.py | 4 ++++ supervision/metrics/precision.py | 4 ++++ supervision/metrics/recall.py | 4 ++++ 4 files changed, 16 insertions(+) diff --git a/supervision/metrics/f1_score.py b/supervision/metrics/f1_score.py index cc8c87a2c..e53c1752c 100644 --- a/supervision/metrics/f1_score.py +++ b/supervision/metrics/f1_score.py @@ -51,6 +51,10 @@ class F1Score(Metric): print(f1_result.f1_50) print(f1_result.small_objects.f1_50) ``` + + ![example_plot](\ + https://media.roboflow.com/supervision-docs/metrics/f1_plot_example.png\ + ){ align=center width="800" } """ def __init__( diff --git a/supervision/metrics/mean_average_precision.py b/supervision/metrics/mean_average_precision.py index ba37837b3..64c432922 100644 --- a/supervision/metrics/mean_average_precision.py +++ b/supervision/metrics/mean_average_precision.py @@ -46,6 +46,10 @@ class MeanAveragePrecision(Metric): print(map_result.map50_95) map_result.plot() ``` + + ![example_plot](\ + https://media.roboflow.com/supervision-docs/metrics/mAP_plot_example.png\ + ){ align=center width="800" } """ def __init__( diff --git a/supervision/metrics/precision.py b/supervision/metrics/precision.py index fa6cf2b1a..8bb41f80d 100644 --- a/supervision/metrics/precision.py +++ b/supervision/metrics/precision.py 
@@ -54,6 +54,10 @@ class Precision(Metric): print(precision_result.precision_at_50) print(precision_result.small_objects.precision_at_50) ``` + + ![example_plot](\ + https://media.roboflow.com/supervision-docs/metrics/precision_plot_example.png\ + ){ align=center width="800" } """ def __init__( diff --git a/supervision/metrics/recall.py b/supervision/metrics/recall.py index 1848502b7..1cd66b79a 100644 --- a/supervision/metrics/recall.py +++ b/supervision/metrics/recall.py @@ -54,6 +54,10 @@ class Recall(Metric): print(recall_result.recall_at_50) print(recall_result.small_objects.recall_at_50) ``` + + ![example_plot](\ + https://media.roboflow.com/supervision-docs/metrics/recall_plot_example.png\ + ){ align=center width="800" } """ def __init__( From 42bb4c2865273744d77004680401466955d9733b Mon Sep 17 00:00:00 2001 From: LinasKo Date: Thu, 7 Nov 2024 21:16:45 +0200 Subject: [PATCH 138/161] Add print results and plot examples to plot() --- supervision/metrics/f1_score.py | 37 +++++++++++++++++- supervision/metrics/mean_average_precision.py | 37 +++++++++++++++++- supervision/metrics/precision.py | 35 ++++++++++++++++- supervision/metrics/recall.py | 38 ++++++++++++++++++- 4 files changed, 141 insertions(+), 6 deletions(-) diff --git a/supervision/metrics/f1_score.py b/supervision/metrics/f1_score.py index e53c1752c..bd89e1297 100644 --- a/supervision/metrics/f1_score.py +++ b/supervision/metrics/f1_score.py @@ -47,9 +47,25 @@ class F1Score(Metric): f1_metric = F1Score() f1_result = f1_metric.update(predictions, targets).compute() - print(f1_result) print(f1_result.f1_50) - print(f1_result.small_objects.f1_50) + # 0.7618 + + print(f1_result) + # F1ScoreResult: + # Metric target: MetricTarget.BOXES + # Averaging method: AveragingMethod.WEIGHTED + # F1 @ 50: 0.7618 + # F1 @ 75: 0.7487 + # F1 @ thresh: [0.76175 0.76068 0.76068] + # IoU thresh: [0.5 0.55 0.6 ...] + # F1 per class: + # 0: [0.70968 0.70968 0.70968 ...] + # ... + # Small objects: ... + # Medium objects: ... 
+ # Large objects: ... + + f1_result.plot() ``` ![example_plot](\ @@ -494,6 +510,19 @@ def __str__(self) -> str: Example: ```python print(f1_result) + # F1ScoreResult: + # Metric target: MetricTarget.BOXES + # Averaging method: AveragingMethod.WEIGHTED + # F1 @ 50: 0.7618 + # F1 @ 75: 0.7487 + # F1 @ thresh: [0.76175 0.76068 0.76068] + # IoU thresh: [0.5 0.55 0.6 ...] + # F1 per class: + # 0: [0.70968 0.70968 0.70968 ...] + # ... + # Small objects: ... + # Medium objects: ... + # Large objects: ... ``` """ out_str = ( @@ -557,6 +586,10 @@ def to_pandas(self) -> "pd.DataFrame": def plot(self): """ Plot the F1 results. + + ![example_plot](\ + https://media.roboflow.com/supervision-docs/metrics/f1_plot_example.png\ + ){ align=center width="800" } """ labels = ["F1@50", "F1@75"] diff --git a/supervision/metrics/mean_average_precision.py b/supervision/metrics/mean_average_precision.py index 64c432922..ac12767a7 100644 --- a/supervision/metrics/mean_average_precision.py +++ b/supervision/metrics/mean_average_precision.py @@ -42,8 +42,25 @@ class MeanAveragePrecision(Metric): map_metric = MeanAveragePrecision() map_result = map_metric.update(predictions, targets).compute() - print(map_result) print(map_result.map50_95) + # 0.4674 + + print(map_result) + # MeanAveragePrecisionResult: + # Metric target: MetricTarget.BOXES + # Class agnostic: False + # mAP @ 50:95: 0.4674 + # mAP @ 50: 0.5048 + # mAP @ 75: 0.4796 + # mAP scores: [0.50485 0.50377 0.50377 ...] + # IoU thresh: [0.5 0.55 0.6 ...] + # AP per class: + # 0: [0.67699 0.67699 0.67699 ...] + # ... + # Small objects: ... + # Medium objects: ... + # Large objects: ... + map_result.plot() ``` @@ -460,6 +477,20 @@ def __str__(self) -> str: Example: ```python print(map_result) + # MeanAveragePrecisionResult: + # Metric target: MetricTarget.BOXES + # Class agnostic: False + # mAP @ 50:95: 0.4674 + # mAP @ 50: 0.5048 + # mAP @ 75: 0.4796 + # mAP scores: [0.50485 0.50377 0.50377 ...] + # IoU thresh: [0.5 0.55 0.6 ...] 
+ # AP per class: + # 0: [0.67699 0.67699 0.67699 ...] + # ... + # Small objects: ... + # Medium objects: ... + # Large objects: ... ``` """ @@ -531,6 +562,10 @@ def to_pandas(self) -> "pd.DataFrame": def plot(self): """ Plot the mAP results. + + ![example_plot](\ + https://media.roboflow.com/supervision-docs/metrics/mAP_plot_example.png\ + ){ align=center width="800" } """ labels = ["mAP@50:95", "mAP@50", "mAP@75"] diff --git a/supervision/metrics/precision.py b/supervision/metrics/precision.py index 8bb41f80d..235110862 100644 --- a/supervision/metrics/precision.py +++ b/supervision/metrics/precision.py @@ -50,8 +50,24 @@ class Precision(Metric): precision_metric = Precision() precision_result = precision_metric.update(predictions, targets).compute() - print(precision_result) print(precision_result.precision_at_50) + # 0.8099 + + print(precision_result) + # PrecisionResult: + # Metric target: MetricTarget.BOXES + # Averaging method: AveragingMethod.WEIGHTED + # P @ 50: 0.8099 + # P @ 75: 0.7969 + # P @ thresh: [0.80992 0.80905 0.80905 ...] + # IoU thresh: [0.5 0.55 0.6 ...] + # Precision per class: + # 0: [0.64706 0.64706 0.64706 ...] + # ... + # Small objects: ... + # Medium objects: ... + # Large objects: ... + print(precision_result.small_objects.precision_at_50) ``` @@ -497,6 +513,19 @@ def __str__(self) -> str: Example: ```python print(precision_result) + # PrecisionResult: + # Metric target: MetricTarget.BOXES + # Averaging method: AveragingMethod.WEIGHTED + # P @ 50: 0.8099 + # P @ 75: 0.7969 + # P @ thresh: [0.80992 0.80905 0.80905 ...] + # IoU thresh: [0.5 0.55 0.6 ...] + # Precision per class: + # 0: [0.64706 0.64706 0.64706 ...] + # ... + # Small objects: ... + # Medium objects: ... + # Large objects: ... ``` """ out_str = ( @@ -562,6 +591,10 @@ def to_pandas(self) -> "pd.DataFrame": def plot(self): """ Plot the precision results. 
+ + ![example_plot](\ + https://media.roboflow.com/supervision-docs/metrics/precision_plot_example.png\ + ){ align=center width="800" } """ labels = ["Precision@50", "Precision@75"] diff --git a/supervision/metrics/recall.py b/supervision/metrics/recall.py index 1cd66b79a..e297861b8 100644 --- a/supervision/metrics/recall.py +++ b/supervision/metrics/recall.py @@ -50,9 +50,26 @@ class Recall(Metric): recall_metric = Recall() recall_result = recall_metric.update(predictions, targets).compute() - print(recall_result) print(recall_result.recall_at_50) - print(recall_result.small_objects.recall_at_50) + # 0.7615 + + print(recall_result) + # RecallResult: + # Metric target: MetricTarget.BOXES + # Averaging method: AveragingMethod.WEIGHTED + # R @ 50: 0.7615 + # R @ 75: 0.7462 + # R @ thresh: [0.76151 0.76011 0.76011 0.75732 ...] + # IoU thresh: [0.5 0.55 0.6 ...] + # Recall per class: + # 0: [0.78571 0.78571 0.78571 ...] + # ... + # Small objects: ... + # Medium objects: ... + # Large objects: ... + + recall_result.plot() + ``` ![example_plot](\ @@ -495,6 +512,19 @@ def __str__(self) -> str: Example: ```python print(recall_result) + # RecallResult: + # Metric target: MetricTarget.BOXES + # Averaging method: AveragingMethod.WEIGHTED + # R @ 50: 0.7615 + # R @ 75: 0.7462 + # R @ thresh: [0.76151 0.76011 0.76011 0.75732 ...] + # IoU thresh: [0.5 0.55 0.6 ...] + # Recall per class: + # 0: [0.78571 0.78571 0.78571 ...] + # ... + # Small objects: ... + # Medium objects: ... + # Large objects: ... ``` """ out_str = ( @@ -560,6 +590,10 @@ def to_pandas(self) -> "pd.DataFrame": def plot(self): """ Plot the recall results. 
+ + ![example_plot](\ + https://media.roboflow.com/supervision-docs/metrics/recall_plot_example.png\ + ){ align=center width="800" } """ labels = ["Recall@50", "Recall@75"] From 83d4386fa25e6bd4dd30d1174d5ca111d075b429 Mon Sep 17 00:00:00 2001 From: LinasKo Date: Fri, 8 Nov 2024 00:09:55 +0200 Subject: [PATCH 139/161] Implemented mAR metric --- supervision/metrics/__init__.py | 4 + supervision/metrics/mean_average_recall.py | 512 +++++++++++++++++++++ 2 files changed, 516 insertions(+) create mode 100644 supervision/metrics/mean_average_recall.py diff --git a/supervision/metrics/__init__.py b/supervision/metrics/__init__.py index 90fc17b47..12b243ce6 100644 --- a/supervision/metrics/__init__.py +++ b/supervision/metrics/__init__.py @@ -8,6 +8,10 @@ MeanAveragePrecision, MeanAveragePrecisionResult, ) +from supervision.metrics.mean_average_recall import ( + MeanAverageRecall, + MeanAverageRecallResult, +) from supervision.metrics.precision import Precision, PrecisionResult from supervision.metrics.recall import Recall, RecallResult from supervision.metrics.utils.object_size import ( diff --git a/supervision/metrics/mean_average_recall.py b/supervision/metrics/mean_average_recall.py new file mode 100644 index 000000000..5f0274aec --- /dev/null +++ b/supervision/metrics/mean_average_recall.py @@ -0,0 +1,512 @@ +from __future__ import annotations + +from copy import deepcopy +from dataclasses import dataclass +from typing import TYPE_CHECKING, List, Optional, Tuple, Union + +import numpy as np +from matplotlib import pyplot as plt + +from supervision.config import ORIENTED_BOX_COORDINATES +from supervision.detection.core import Detections +from supervision.detection.utils import ( + box_iou_batch, + mask_iou_batch, + oriented_box_iou_batch, +) +from supervision.draw.color import LEGACY_COLOR_PALETTE +from supervision.metrics.core import Metric, MetricTarget +from supervision.metrics.utils.object_size import ( + ObjectSizeCategory, + get_detection_size_category, +) +from 
supervision.metrics.utils.utils import ensure_pandas_installed + +if TYPE_CHECKING: + import pandas as pd + + +class MeanAverageRecall(Metric): + def __init__( + self, + metric_target: MetricTarget = MetricTarget.BOXES, + ): + self._metric_target = metric_target + + self._predictions_list: List[Detections] = [] + self._targets_list: List[Detections] = [] + + self.max_detections = np.array([1, 10, 100]) + + def reset(self) -> None: + self._predictions_list = [] + self._targets_list = [] + + def update( + self, + predictions: Union[Detections, List[Detections]], + targets: Union[Detections, List[Detections]], + ) -> MeanAverageRecall: + if not isinstance(predictions, list): + predictions = [predictions] + if not isinstance(targets, list): + targets = [targets] + + if len(predictions) != len(targets): + raise ValueError( + f"The number of predictions ({len(predictions)}) and" + f" targets ({len(targets)}) during the update must be the same." + ) + + self._predictions_list.extend(predictions) + self._targets_list.extend(targets) + + return self + + def compute(self) -> MeanAverageRecallResult: + result = self._compute(self._predictions_list, self._targets_list) + + small_predictions, small_targets = self._filter_predictions_and_targets_by_size( + self._predictions_list, self._targets_list, ObjectSizeCategory.SMALL + ) + result.small_objects = self._compute(small_predictions, small_targets) + + medium_predictions, medium_targets = ( + self._filter_predictions_and_targets_by_size( + self._predictions_list, self._targets_list, ObjectSizeCategory.MEDIUM + ) + ) + result.medium_objects = self._compute(medium_predictions, medium_targets) + + large_predictions, large_targets = self._filter_predictions_and_targets_by_size( + self._predictions_list, self._targets_list, ObjectSizeCategory.LARGE + ) + result.large_objects = self._compute(large_predictions, large_targets) + + return result + + def _compute( + self, predictions_list: List[Detections], targets_list: List[Detections] 
+ ) -> MeanAverageRecallResult: + iou_thresholds = np.linspace(0.5, 0.95, 10) + stats = [] + + for predictions, targets in zip(predictions_list, targets_list): + prediction_contents = self._detections_content(predictions) + target_contents = self._detections_content(targets) + + if len(targets) > 0: + if len(predictions) == 0: + stats.append( + ( + np.zeros((0, iou_thresholds.size), dtype=bool), + np.zeros((0,), dtype=np.float32), + np.zeros((0,), dtype=int), + targets.class_id, + ) + ) + + else: + if self._metric_target == MetricTarget.BOXES: + iou = box_iou_batch(target_contents, prediction_contents) + elif self._metric_target == MetricTarget.MASKS: + iou = mask_iou_batch(target_contents, prediction_contents) + elif self._metric_target == MetricTarget.ORIENTED_BOUNDING_BOXES: + iou = oriented_box_iou_batch( + target_contents, prediction_contents + ) + else: + raise ValueError( + "Unsupported metric target for IoU calculation" + ) + + matches = self._match_detection_batch( + predictions.class_id, targets.class_id, iou, iou_thresholds + ) + stats.append( + ( + matches, + predictions.confidence, + predictions.class_id, + targets.class_id, + ) + ) + + if not stats: + return MeanAverageRecallResult( + metric_target=self._metric_target, + recall_scores=np.zeros(iou_thresholds.shape[0]), + recall_per_class=np.zeros((0, iou_thresholds.shape[0])), + max_detections=self.max_detections, + iou_thresholds=iou_thresholds, + matched_classes=np.array([], dtype=int), + small_objects=None, + medium_objects=None, + large_objects=None, + ) + + concatenated_stats = [np.concatenate(items, 0) for items in zip(*stats)] + recall_scores_per_k, recall_per_class, unique_classes = ( + self._compute_average_recall_for_classes(*concatenated_stats) + ) + + return MeanAverageRecallResult( + metric_target=self._metric_target, + recall_scores=recall_scores_per_k, + recall_per_class=recall_per_class, + max_detections=self.max_detections, + iou_thresholds=iou_thresholds, + 
matched_classes=unique_classes, + small_objects=None, + medium_objects=None, + large_objects=None, + ) + + def _compute_average_recall_for_classes( + self, + matches: np.ndarray, + prediction_confidence: np.ndarray, + prediction_class_ids: np.ndarray, + true_class_ids: np.ndarray, + ) -> Tuple[np.ndarray, np.ndarray, np.ndarray]: + sorted_indices = np.argsort(-prediction_confidence) + matches = matches[sorted_indices] + prediction_class_ids = prediction_class_ids[sorted_indices] + unique_classes, class_counts = np.unique(true_class_ids, return_counts=True) + + recalls_at_k = [] + for max_detections in self.max_detections: + # Shape: PxTh,P,C,C -> CxThx3 + confusion_matrix = self._compute_confusion_matrix( + matches, + prediction_class_ids, + unique_classes, + class_counts, + max_detections=max_detections, + ) + + # Shape: CxThx3 -> CxTh + recall_per_class = self._compute_recall(confusion_matrix) + recalls_at_k.append(recall_per_class) + + # Shape: KxCxTh -> KxC + recalls_at_k = np.array(recalls_at_k) + average_recall_per_class = np.mean(recalls_at_k, axis=2) + + # Shape: KxC -> K + recall_scores = np.mean(average_recall_per_class, axis=1) + + return recall_scores, recall_per_class, unique_classes + + @staticmethod + def _match_detection_batch( + predictions_classes: np.ndarray, + target_classes: np.ndarray, + iou: np.ndarray, + iou_thresholds: np.ndarray, + ) -> np.ndarray: + num_predictions, num_iou_levels = ( + predictions_classes.shape[0], + iou_thresholds.shape[0], + ) + correct = np.zeros((num_predictions, num_iou_levels), dtype=bool) + correct_class = target_classes[:, None] == predictions_classes + + for i, iou_level in enumerate(iou_thresholds): + matched_indices = np.where((iou >= iou_level) & correct_class) + + if matched_indices[0].shape[0]: + combined_indices = np.stack(matched_indices, axis=1) + iou_values = iou[matched_indices][:, None] + matches = np.hstack([combined_indices, iou_values]) + + if matched_indices[0].shape[0] > 1: + matches = 
matches[matches[:, 2].argsort()[::-1]] + matches = matches[np.unique(matches[:, 1], return_index=True)[1]] + matches = matches[np.unique(matches[:, 0], return_index=True)[1]] + + correct[matches[:, 1].astype(int), i] = True + + return correct + + @staticmethod + def _compute_confusion_matrix( + sorted_matches: np.ndarray, + sorted_prediction_class_ids: np.ndarray, + unique_classes: np.ndarray, + class_counts: np.ndarray, + max_detections: Optional[int] = None, + ) -> np.ndarray: + num_thresholds = sorted_matches.shape[1] + num_classes = unique_classes.shape[0] + + confusion_matrix = np.zeros((num_classes, num_thresholds, 3)) + for class_idx, class_id in enumerate(unique_classes): + is_class = sorted_prediction_class_ids == class_id + num_true = class_counts[class_idx] + num_predictions = is_class.sum() + + if num_predictions == 0: + true_positives = np.zeros(num_thresholds) + false_positives = np.zeros(num_thresholds) + false_negatives = np.full(num_thresholds, num_true) + elif num_true == 0: + true_positives = np.zeros(num_thresholds) + false_positives = np.full(num_thresholds, num_predictions) + false_negatives = np.zeros(num_thresholds) + else: + limited_matches = sorted_matches[is_class][slice(max_detections)] + true_positives = limited_matches.sum(0) + + false_positives = (1 - limited_matches).sum(0) + false_negatives = num_true - true_positives + false_negatives = num_true - true_positives + confusion_matrix[class_idx] = np.stack( + [true_positives, false_positives, false_negatives], axis=1 + ) + + return confusion_matrix + + @staticmethod + def _compute_recall(confusion_matrix: np.ndarray) -> np.ndarray: + """ + Broadcastable function, computing the recall from the confusion matrix. + + Arguments: + confusion_matrix: np.ndarray, shape (N, ..., 3), where the last dimension + contains the true positives, false positives, and false negatives. + + Returns: + np.ndarray, shape (N, ...), containing the recall for each element. 
+ """ + if not confusion_matrix.shape[-1] == 3: + raise ValueError( + f"Confusion matrix must have shape (..., 3), got " + f"{confusion_matrix.shape}" + ) + true_positives = confusion_matrix[..., 0] + false_negatives = confusion_matrix[..., 2] + + denominator = true_positives + false_negatives + recall = np.where(denominator == 0, 0, true_positives / denominator) + + return recall + + def _detections_content(self, detections: Detections) -> np.ndarray: + """Return boxes, masks or oriented bounding boxes from detections.""" + if self._metric_target == MetricTarget.BOXES: + return detections.xyxy + if self._metric_target == MetricTarget.MASKS: + return ( + detections.mask + if detections.mask is not None + else self._make_empty_content() + ) + if self._metric_target == MetricTarget.ORIENTED_BOUNDING_BOXES: + obb = detections.data.get(ORIENTED_BOX_COORDINATES) + if obb is not None and len(obb) > 0: + return np.array(obb, dtype=np.float32) + return self._make_empty_content() + raise ValueError(f"Invalid metric target: {self._metric_target}") + + def _make_empty_content(self) -> np.ndarray: + if self._metric_target == MetricTarget.BOXES: + return np.empty((0, 4), dtype=np.float32) + if self._metric_target == MetricTarget.MASKS: + return np.empty((0, 0, 0), dtype=bool) + if self._metric_target == MetricTarget.ORIENTED_BOUNDING_BOXES: + return np.empty((0, 4, 2), dtype=np.float32) + raise ValueError(f"Invalid metric target: {self._metric_target}") + + def _filter_detections_by_size( + self, detections: Detections, size_category: ObjectSizeCategory + ) -> Detections: + """Return a copy of detections with contents filtered by object size.""" + new_detections = deepcopy(detections) + if detections.is_empty() or size_category == ObjectSizeCategory.ANY: + return new_detections + + sizes = get_detection_size_category(new_detections, self._metric_target) + size_mask = sizes == size_category.value + + new_detections.xyxy = new_detections.xyxy[size_mask] + if new_detections.mask 
is not None: + new_detections.mask = new_detections.mask[size_mask] + if new_detections.class_id is not None: + new_detections.class_id = new_detections.class_id[size_mask] + if new_detections.confidence is not None: + new_detections.confidence = new_detections.confidence[size_mask] + if new_detections.tracker_id is not None: + new_detections.tracker_id = new_detections.tracker_id[size_mask] + if new_detections.data is not None: + for key, value in new_detections.data.items(): + new_detections.data[key] = np.array(value)[size_mask] + + return new_detections + + def _filter_predictions_and_targets_by_size( + self, + predictions_list: List[Detections], + targets_list: List[Detections], + size_category: ObjectSizeCategory, + ) -> Tuple[List[Detections], List[Detections]]: + new_predictions_list = [] + new_targets_list = [] + for predictions, targets in zip(predictions_list, targets_list): + new_predictions_list.append( + self._filter_detections_by_size(predictions, size_category) + ) + new_targets_list.append( + self._filter_detections_by_size(targets, size_category) + ) + return new_predictions_list, new_targets_list + + +@dataclass +class MeanAverageRecallResult: + metric_target: MetricTarget + + @property + def mAR_at_1(self) -> float: + return self.recall_scores[0] + + @property + def mAR_at_10(self) -> float: + return self.recall_scores[1] + + @property + def mAR_at_100(self) -> float: + return self.recall_scores[2] + + recall_scores: np.ndarray + recall_per_class: np.ndarray + max_detections: np.ndarray + iou_thresholds: np.ndarray + matched_classes: np.ndarray + + small_objects: Optional[MeanAverageRecallResult] + medium_objects: Optional[MeanAverageRecallResult] + large_objects: Optional[MeanAverageRecallResult] + + def __str__(self) -> str: + out_str = ( + f"{self.__class__.__name__}:\n" + f"Metric target: {self.metric_target}\n" + f"mAR @ 1: {self.mAR_at_1:.4f}\n" + f"mAR @ 10: {self.mAR_at_10:.4f}\n" + f"mAR @ 100: {self.mAR_at_100:.4f}\n" + f"max 
detections: {self.max_detections}\n" + f"IoU thresh: {self.iou_thresholds}\n" + f"mAR per class:\n" + ) + if self.recall_per_class.size == 0: + out_str += " No results\n" + for class_id, recall_of_class in zip( + self.matched_classes, self.recall_per_class + ): + out_str += f" {class_id}: {recall_of_class}\n" + + indent = " " + if self.small_objects is not None: + indented = indent + str(self.small_objects).replace("\n", f"\n{indent}") + out_str += f"\nSmall objects:\n{indented}" + if self.medium_objects is not None: + indented = indent + str(self.medium_objects).replace("\n", f"\n{indent}") + out_str += f"\nMedium objects:\n{indented}" + if self.large_objects is not None: + indented = indent + str(self.large_objects).replace("\n", f"\n{indent}") + out_str += f"\nLarge objects:\n{indented}" + + return out_str + + def to_pandas(self) -> "pd.DataFrame": + ensure_pandas_installed() + import pandas as pd + + pandas_data = { + "mAR @ 1": self.mAR_at_1, + "mAR @ 10": self.mAR_at_10, + "mAR @ 100": self.mAR_at_100, + } + + if self.small_objects is not None: + small_objects_df = self.small_objects.to_pandas() + for key, value in small_objects_df.items(): + pandas_data[f"small_objects_{key}"] = value + if self.medium_objects is not None: + medium_objects_df = self.medium_objects.to_pandas() + for key, value in medium_objects_df.items(): + pandas_data[f"medium_objects_{key}"] = value + if self.large_objects is not None: + large_objects_df = self.large_objects.to_pandas() + for key, value in large_objects_df.items(): + pandas_data[f"large_objects_{key}"] = value + + return pd.DataFrame(pandas_data, index=[0]) + + def plot(self): + labels = ["mAR @ 1", "mAR @ 10", "mAR @ 100"] + values = [self.mAR_at_1, self.mAR_at_10, self.mAR_at_100] + colors = [LEGACY_COLOR_PALETTE[0]] * 3 + + if self.small_objects is not None: + small_objects = self.small_objects + labels += ["Small: mAR @ 1", "Small: mAR @ 10", "Small: mAR @ 100"] + values += [ + small_objects.mAR_at_1, + 
small_objects.mAR_at_10, + small_objects.mAR_at_100, + ] + colors += [LEGACY_COLOR_PALETTE[3]] * 3 + + if self.medium_objects is not None: + medium_objects = self.medium_objects + labels += ["Medium: mAR @ 1", "Medium: mAR @ 10", "Medium: mAR @ 100"] + values += [ + medium_objects.mAR_at_1, + medium_objects.mAR_at_10, + medium_objects.mAR_at_100, + ] + colors += [LEGACY_COLOR_PALETTE[2]] * 3 + + if self.large_objects is not None: + large_objects = self.large_objects + labels += ["Large: mAR @ 1", "Large: mAR @ 10", "Large: mAR @ 100"] + values += [ + large_objects.mAR_at_1, + large_objects.mAR_at_10, + large_objects.mAR_at_100, + ] + colors += [LEGACY_COLOR_PALETTE[4]] * 3 + + plt.rcParams["font.family"] = "monospace" + + _, ax = plt.subplots(figsize=(10, 6)) + ax.set_ylim(0, 1) + ax.set_ylabel("Value", fontweight="bold") + title = ( + f"Mean Average Recall, by Object Size" + f"\n(target: {self.metric_target.value}" + ) + ax.set_title(title, fontweight="bold") + + x_positions = range(len(labels)) + bars = ax.bar(x_positions, values, color=colors, align="center") + + ax.set_xticks(x_positions) + ax.set_xticklabels(labels, rotation=45, ha="right") + + for bar in bars: + y_value = bar.get_height() + ax.text( + bar.get_x() + bar.get_width() / 2, + y_value + 0.02, + f"{y_value:.2f}", + ha="center", + va="bottom", + ) + + plt.rcParams["font.family"] = "sans-serif" + + plt.tight_layout() + plt.show() From 0bfcdeab6f19ae282f9cf448f45aa9fca9ddd663 Mon Sep 17 00:00:00 2001 From: LinasKo Date: Fri, 8 Nov 2024 01:01:52 +0200 Subject: [PATCH 140/161] Add mAR docstrings & examples --- docs/metrics/f1_score.md | 1 - docs/metrics/mean_average_precision.md | 1 - docs/metrics/mean_average_recall.md | 18 ++ mkdocs.yml | 1 + supervision/metrics/mean_average_recall.py | 189 ++++++++++++++++++++- supervision/metrics/recall.py | 4 +- 6 files changed, 208 insertions(+), 6 deletions(-) create mode 100644 docs/metrics/mean_average_recall.md diff --git a/docs/metrics/f1_score.md 
b/docs/metrics/f1_score.md index e31a95577..5cf0cd77f 100644 --- a/docs/metrics/f1_score.md +++ b/docs/metrics/f1_score.md @@ -1,6 +1,5 @@ --- comments: true -status: new --- # F1 Score diff --git a/docs/metrics/mean_average_precision.md b/docs/metrics/mean_average_precision.md index 817591a19..9e23045e4 100644 --- a/docs/metrics/mean_average_precision.md +++ b/docs/metrics/mean_average_precision.md @@ -1,6 +1,5 @@ --- comments: true -status: new --- # Mean Average Precision diff --git a/docs/metrics/mean_average_recall.md b/docs/metrics/mean_average_recall.md new file mode 100644 index 000000000..5cc0bf0a2 --- /dev/null +++ b/docs/metrics/mean_average_recall.md @@ -0,0 +1,18 @@ +--- +comments: true +status: new +--- + +# Mean Average Recall + + + +:::supervision.metrics.mean_average_recall.MeanAverageRecall + + + +:::supervision.metrics.mean_average_recall.MeanAverageRecallResult diff --git a/mkdocs.yml b/mkdocs.yml index 8c939cf07..1ed9fafa3 100644 --- a/mkdocs.yml +++ b/mkdocs.yml @@ -66,6 +66,7 @@ nav: - Utils: datasets/utils.md - Metrics: - mAP: metrics/mean_average_precision.md + - mAR: metrics/mean_average_recall.md - Precision: metrics/precision.md - Recall: metrics/recall.md - F1 Score: metrics/f1_score.md diff --git a/supervision/metrics/mean_average_recall.py b/supervision/metrics/mean_average_recall.py index 5f0274aec..9c3a40718 100644 --- a/supervision/metrics/mean_average_recall.py +++ b/supervision/metrics/mean_average_recall.py @@ -27,10 +27,64 @@ class MeanAverageRecall(Metric): + """ + Mean Average Recall (mAR) measures how well the model detects + and retrieves relevant objects by averaging recall over multiple + IoU thresholds, classes and detection limits. + + Intuitively, while Recall measures the ability to find all relevant + objects, mAR narrows down how many detections are considered for each + class. For example, mAR @ 100 considers the top 100 highest confidence + detections for each class. 
mAR @ 1 considers only the highest + confidence detection for each class. + + Example: + ```python + import supervision as sv + from supervision.metrics import MeanAverageRecall + + predictions = sv.Detections(...) + targets = sv.Detections(...) + + map_metric = MeanAverageRecall() + map_result = map_metric.update(predictions, targets).compute() + + print(mar_results.mar_at_100) + # 0.5241 + + print(mar_results) + # MeanAverageRecallResult: + # Metric target: MetricTarget.BOXES + # mAR @ 1: 0.1362 + # mAR @ 10: 0.4239 + # mAR @ 100: 0.5241 + # max detections: [1 10 100] + # IoU thresh: [0.5 0.55 0.6 ...] + # mAR per class: + # 0: [0.78571 0.78571 0.78571 ...] + # ... + # Small objects: ... + # Medium objects: ... + # Large objects: ... + + mar_results.plot() + ``` + + ![example_plot](\ + https://media.roboflow.com/supervision-docs/metrics/mAR_plot_example.png\ + ){ align=center width="800" } + """ + def __init__( self, metric_target: MetricTarget = MetricTarget.BOXES, ): + """ + Initialize the Mean Average Recall metric. + + Args: + metric_target (MetricTarget): The type of detection data to use. + """ self._metric_target = metric_target self._predictions_list: List[Detections] = [] @@ -39,6 +93,9 @@ def __init__( self.max_detections = np.array([1, 10, 100]) def reset(self) -> None: + """ + Reset the metric to its initial state, clearing all stored data. + """ self._predictions_list = [] self._targets_list = [] @@ -47,6 +104,16 @@ def update( predictions: Union[Detections, List[Detections]], targets: Union[Detections, List[Detections]], ) -> MeanAverageRecall: + """ + Add new predictions and targets to the metric, but do not compute the result. + + Args: + predictions (Union[Detections, List[Detections]]): The predicted detections. + targets (Union[Detections, List[Detections]]): The target detections. + + Returns: + (Recall): The updated metric instance. 
+ """ if not isinstance(predictions, list): predictions = [predictions] if not isinstance(targets, list): @@ -64,6 +131,13 @@ def update( return self def compute(self) -> MeanAverageRecallResult: + """ + Calculate the Mean Average Recall metric based on the stored predictions + and ground-truth, at different IoU thresholds and maximum detection counts. + + Returns: + (MeanAverageRecallResult): The Mean Average Recall metric result. + """ result = self._compute(self._predictions_list, self._targets_list) small_predictions, small_targets = self._filter_predictions_and_targets_by_size( @@ -237,6 +311,29 @@ def _compute_confusion_matrix( class_counts: np.ndarray, max_detections: Optional[int] = None, ) -> np.ndarray: + """ + Compute the confusion matrix for each class and IoU threshold. + + Assumes the matches and prediction_class_ids are sorted by confidence + in descending order. + + Args: + sorted_matches: np.ndarray, bool, shape (P, Th), that is True + if the prediction is a true positive at the given IoU threshold. + sorted_prediction_class_ids: np.ndarray, int, shape (P,), containing + the class id for each prediction. + unique_classes: np.ndarray, int, shape (C,), containing the unique + class ids. + class_counts: np.ndarray, int, shape (C,), containing the number + of true instances for each class. + max_detections: Optional[int], the maximum number of detections to + consider for each class. Extra detections are considered false + positives. By default, all detections are considered. + + Returns: + np.ndarray, shape (C, Th, 3), containing the true positives, false + positives, and false negatives for each class and IoU threshold. + """ num_thresholds = sorted_matches.shape[1] num_classes = unique_classes.shape[0] @@ -364,6 +461,61 @@ def _filter_predictions_and_targets_by_size( @dataclass class MeanAverageRecallResult: + # """ + # The results of the recall metric calculation. + + # Defaults to `0` if no detections or targets were provided. 
+ + # Attributes: + # metric_target (MetricTarget): the type of data used for the metric - + # boxes, masks or oriented bounding boxes. + # averaging_method (AveragingMethod): the averaging method used to compute the + # recall. Determines how the recall is aggregated across classes. + # recall_at_50 (float): the recall at IoU threshold of `0.5`. + # recall_at_75 (float): the recall at IoU threshold of `0.75`. + # recall_scores (np.ndarray): the recall scores at each IoU threshold. + # Shape: `(num_iou_thresholds,)` + # recall_per_class (np.ndarray): the recall scores per class and IoU threshold. + # Shape: `(num_target_classes, num_iou_thresholds)` + # iou_thresholds (np.ndarray): the IoU thresholds used in the calculations. + # matched_classes (np.ndarray): the class IDs of all matched classes. + # Corresponds to the rows of `recall_per_class`. + # small_objects (Optional[RecallResult]): the Recall metric results + # for small objects. + # medium_objects (Optional[RecallResult]): the Recall metric results + # for medium objects. + # large_objects (Optional[RecallResult]): the Recall metric results + # for large objects. + # """ + """ + The results of the Mean Average Recall metric calculation. + + Defaults to `0` if no detections or targets were provided. + + Attributes: + metric_target (MetricTarget): the type of data used for the metric - + boxes, masks or oriented bounding boxes. + mAR_at_1 (float): the Mean Average Recall, when considering only the top + highest confidence detection for each class. + mAR_at_10 (float): the Mean Average Recall, when considering top 10 + highest confidence detections for each class. + mAR_at_100 (float): the Mean Average Recall, when considering top 100 + highest confidence detections for each class. + recall_per_class (np.ndarray): the recall scores per class and IoU threshold. + Shape: `(num_target_classes, num_iou_thresholds)` + max_detections (np.ndarray): the array with maximum number of detections + considered. 
+ iou_thresholds (np.ndarray): the IoU thresholds used in the calculations. + matched_classes (np.ndarray): the class IDs of all matched classes. + Corresponds to the rows of `recall_per_class`. + small_objects (Optional[MeanAverageRecallResult]): the Mean Average Recall + metric results for small objects (area < 32²). + medium_objects (Optional[MeanAverageRecallResult]): the Mean Average Recall + metric results for medium objects (32² ≤ area < 96²). + large_objects (Optional[MeanAverageRecallResult]): the Mean Average Recall + metric results for large objects (area ≥ 96²). + """ + metric_target: MetricTarget @property @@ -389,9 +541,29 @@ def mAR_at_100(self) -> float: large_objects: Optional[MeanAverageRecallResult] def __str__(self) -> str: + """ + Format as a pretty string. + + Example: + ```python + # MeanAverageRecallResult: + # Metric target: MetricTarget.BOXES + # mAR @ 1: 0.1362 + # mAR @ 10: 0.4239 + # mAR @ 100: 0.5241 + # max detections: [1 10 100] + # IoU thresh: [0.5 0.55 0.6 ...] + # mAR per class: + # 0: [0.78571 0.78571 0.78571 ...] + # ... + # Small objects: ... + # Medium objects: ... + # Large objects: ... + ``` + """ out_str = ( f"{self.__class__.__name__}:\n" - f"Metric target: {self.metric_target}\n" + f"Metric target: {self.metric_target}\n" f"mAR @ 1: {self.mAR_at_1:.4f}\n" f"mAR @ 10: {self.mAR_at_10:.4f}\n" f"mAR @ 100: {self.mAR_at_100:.4f}\n" @@ -420,6 +592,12 @@ def __str__(self) -> str: return out_str def to_pandas(self) -> "pd.DataFrame": + """ + Convert the result to a pandas DataFrame. + + Returns: + (pd.DataFrame): The result as a DataFrame. + """ ensure_pandas_installed() import pandas as pd @@ -445,6 +623,13 @@ def to_pandas(self) -> "pd.DataFrame": return pd.DataFrame(pandas_data, index=[0]) def plot(self): + """ + Plot the Mean Average Recall results. 
+ + ![example_plot](\ + https://media.roboflow.com/supervision-docs/metrics/mAR_plot_example.png\ + ){ align=center width="800" } + """ labels = ["mAR @ 1", "mAR @ 10", "mAR @ 100"] values = [self.mAR_at_1, self.mAR_at_10, self.mAR_at_100] colors = [LEGACY_COLOR_PALETTE[0]] * 3 @@ -486,7 +671,7 @@ def plot(self): ax.set_ylabel("Value", fontweight="bold") title = ( f"Mean Average Recall, by Object Size" - f"\n(target: {self.metric_target.value}" + f"\n(target: {self.metric_target.value})" ) ax.set_title(title, fontweight="bold") diff --git a/supervision/metrics/recall.py b/supervision/metrics/recall.py index 1848502b7..21bba1a6e 100644 --- a/supervision/metrics/recall.py +++ b/supervision/metrics/recall.py @@ -115,11 +115,11 @@ def update( def compute(self) -> RecallResult: """ - Calculate the precision metric based on the stored predictions and ground-truth + Calculate the recall metric based on the stored predictions and ground-truth data, at different IoU thresholds. Returns: - (RecallResult): The precision metric result. + (RecallResult): The recall metric result. """ result = self._compute(self._predictions_list, self._targets_list) From e4cf743c1cce048d2c96a87e26fbf1688d7c030c Mon Sep 17 00:00:00 2001 From: LinasKo Date: Fri, 8 Nov 2024 01:36:56 +0200 Subject: [PATCH 141/161] Add explanations for small/medium/large in metric results --- supervision/metrics/f1_score.py | 6 +++--- supervision/metrics/mean_average_precision.py | 6 +++--- supervision/metrics/precision.py | 6 +++--- supervision/metrics/recall.py | 6 +++--- 4 files changed, 12 insertions(+), 12 deletions(-) diff --git a/supervision/metrics/f1_score.py b/supervision/metrics/f1_score.py index bd89e1297..98cb5f265 100644 --- a/supervision/metrics/f1_score.py +++ b/supervision/metrics/f1_score.py @@ -476,11 +476,11 @@ class F1ScoreResult: matched_classes (np.ndarray): the class IDs of all matched classes. Corresponds to the rows of `f1_per_class`. 
small_objects (Optional[F1ScoreResult]): the F1 metric results - for small objects. + for small objects (area < 32²). medium_objects (Optional[F1ScoreResult]): the F1 metric results - for medium objects. + for medium objects (32² ≤ area < 96²). large_objects (Optional[F1ScoreResult]): the F1 metric results - for large objects. + for large objects (area ≥ 96²). """ metric_target: MetricTarget diff --git a/supervision/metrics/mean_average_precision.py b/supervision/metrics/mean_average_precision.py index ac12767a7..9e7a30d0e 100644 --- a/supervision/metrics/mean_average_precision.py +++ b/supervision/metrics/mean_average_precision.py @@ -440,11 +440,11 @@ class and IoU threshold. Shape: `(num_target_classes, num_iou_thresholds)` matched_classes (np.ndarray): the class IDs of all matched classes. Corresponds to the rows of `ap_per_class`. small_objects (Optional[MeanAveragePrecisionResult]): the mAP results - for small objects. + for small objects (area < 32²). medium_objects (Optional[MeanAveragePrecisionResult]): the mAP results - for medium objects. + for medium objects (32² ≤ area < 96²). large_objects (Optional[MeanAveragePrecisionResult]): the mAP results - for large objects. + for large objects (area ≥ 96²). """ metric_target: MetricTarget diff --git a/supervision/metrics/precision.py b/supervision/metrics/precision.py index 235110862..a5d4011e8 100644 --- a/supervision/metrics/precision.py +++ b/supervision/metrics/precision.py @@ -479,11 +479,11 @@ class PrecisionResult: matched_classes (np.ndarray): the class IDs of all matched classes. Corresponds to the rows of `precision_per_class`. small_objects (Optional[PrecisionResult]): the Precision metric results - for small objects. + for small objects (area < 32²). medium_objects (Optional[PrecisionResult]): the Precision metric results - for medium objects. + for medium objects (32² ≤ area < 96²). large_objects (Optional[PrecisionResult]): the Precision metric results - for large objects. 
+ for large objects (area ≥ 96²). """ metric_target: MetricTarget diff --git a/supervision/metrics/recall.py b/supervision/metrics/recall.py index 45382da11..b3586ff7d 100644 --- a/supervision/metrics/recall.py +++ b/supervision/metrics/recall.py @@ -478,11 +478,11 @@ class RecallResult: matched_classes (np.ndarray): the class IDs of all matched classes. Corresponds to the rows of `recall_per_class`. small_objects (Optional[RecallResult]): the Recall metric results - for small objects. + for small objects (area < 32²). medium_objects (Optional[RecallResult]): the Recall metric results - for medium objects. + for medium objects (32² ≤ area < 96²). large_objects (Optional[RecallResult]): the Recall metric results - for large objects. + for large objects (area ≥ 96²). """ metric_target: MetricTarget From b72ad4656ba4a5301d4738633b0888eaf9845ded Mon Sep 17 00:00:00 2001 From: LinasKo Date: Fri, 8 Nov 2024 11:52:24 +0200 Subject: [PATCH 142/161] Unit tests for merge_metadata, fixes involving numpy arrays --- supervision/detection/utils.py | 22 ++++- test/detection/test_utils.py | 161 +++++++++++++++++++++++++++++++++ 2 files changed, 180 insertions(+), 3 deletions(-) diff --git a/supervision/detection/utils.py b/supervision/detection/utils.py index fc4458fa2..a7ad3a389 100644 --- a/supervision/detection/utils.py +++ b/supervision/detection/utils.py @@ -919,11 +919,27 @@ def merge_metadata(metadata_list: List[Dict[str, Any]]) -> Dict[str, Any]: merged_metadata: Dict[str, Any] = {} for metadata in metadata_list: for key, value in metadata.items(): - if key in merged_metadata: + if key not in merged_metadata: + merged_metadata[key] = value + continue + + other_value = merged_metadata[key] + if isinstance(value, np.ndarray) and isinstance(other_value, np.ndarray): + if not np.array_equal(merged_metadata[key], value): + raise ValueError( + f"Conflicting metadata for key: '{key}': " + "{type(value)}, {type(other_value)}." 
+ ) + elif isinstance(value, np.ndarray) or isinstance(other_value, np.ndarray): + # Since [] == np.array([]). + raise ValueError( + f"Conflicting metadata for key: '{key}': " + "{type(value)}, {type(other_value)}." + ) + else: + print("hm") if merged_metadata[key] != value: raise ValueError(f"Conflicting metadata for key: '{key}'.") - else: - merged_metadata[key] = value return merged_metadata diff --git a/test/detection/test_utils.py b/test/detection/test_utils.py index 77c4cea54..87e50f6a4 100644 --- a/test/detection/test_utils.py +++ b/test/detection/test_utils.py @@ -14,6 +14,7 @@ filter_polygons_by_area, get_data_item, merge_data, + merge_metadata, move_boxes, process_roboflow_result, scale_boxes, @@ -1138,3 +1139,163 @@ def test_xywh_to_xyxy(xywh: np.ndarray, expected_result: np.ndarray) -> None: def test_xcycwh_to_xyxy(xcycwh: np.ndarray, expected_result: np.ndarray) -> None: result = xcycwh_to_xyxy(xcycwh) np.testing.assert_array_equal(result, expected_result) + + +@pytest.mark.parametrize( + "metadata_list, expected_result, exception", + [ + # Identical metadata with a single key + ([{"key1": "value1"}, {"key1": "value1"}], {"key1": "value1"}, DoesNotRaise()), + # Identical metadata with multiple keys + ( + [ + {"key1": "value1", "key2": "value2"}, + {"key1": "value1", "key2": "value2"}, + ], + {"key1": "value1", "key2": "value2"}, + DoesNotRaise(), + ), + # Conflicting values for the same key + ([{"key1": "value1"}, {"key1": "value2"}], None, pytest.raises(ValueError)), + # Different sets of keys across dictionaries + ([{"key1": "value1"}, {"key2": "value2"}], None, pytest.raises(ValueError)), + # Empty metadata list + ([], {}, DoesNotRaise()), + # Empty metadata dictionaries + ([{}, {}], {}, DoesNotRaise()), + # Different declaration order for keys + ( + [ + {"key1": "value1", "key2": "value2"}, + {"key2": "value2", "key1": "value1"}, + ], + {"key1": "value1", "key2": "value2"}, + DoesNotRaise(), + ), + # Nested metadata dictionaries + ( + [{"key1": 
{"sub_key": "sub_value"}}, {"key1": {"sub_key": "sub_value"}}], + {"key1": {"sub_key": "sub_value"}}, + DoesNotRaise(), + ), + # Large metadata dictionaries with many keys + ( + [ + {f"key{i}": f"value{i}" for i in range(100)}, + {f"key{i}": f"value{i}" for i in range(100)}, + ], + {f"key{i}": f"value{i}" for i in range(100)}, + DoesNotRaise(), + ), + # Mixed types in list metadata values + ( + [{"key1": ["value1", 2, True]}, {"key1": ["value1", 2, True]}], + {"key1": ["value1", 2, True]}, + DoesNotRaise(), + ), + # Identical lists across metadata dictionaries + ( + [{"key1": [1, 2, 3]}, {"key1": [1, 2, 3]}], + {"key1": [1, 2, 3]}, + DoesNotRaise(), + ), + # Identical numpy arrays across metadata dictionaries + ( + [{"key1": np.array([1, 2, 3])}, {"key1": np.array([1, 2, 3])}], + {"key1": np.array([1, 2, 3])}, + DoesNotRaise(), + ), + # Identical numpy arrays across metadata dictionaries, different datatype + ( + [ + {"key1": np.array([1, 2, 3], dtype=np.int32)}, + {"key1": np.array([1, 2, 3], dtype=np.int64)}, + ], + {"key1": np.array([1, 2, 3])}, + DoesNotRaise(), + ), + # Conflicting lists for the same key + ([{"key1": [1, 2, 3]}, {"key1": [4, 5, 6]}], None, pytest.raises(ValueError)), + # Conflicting numpy arrays for the same key + ( + [{"key1": np.array([1, 2, 3])}, {"key1": np.array([4, 5, 6])}], + None, + pytest.raises(ValueError), + ), + # Mixed data types: list and numpy array for the same key + ( + [{"key1": [1, 2, 3]}, {"key1": np.array([1, 2, 3])}], + None, + pytest.raises(ValueError), + ), + # Empty lists and numpy arrays for the same key + ([{"key1": []}, {"key1": np.array([])}], None, pytest.raises(ValueError)), + # Identical multi-dimensional lists across metadata dictionaries + ( + [{"key1": [[1, 2], [3, 4]]}, {"key1": [[1, 2], [3, 4]]}], + {"key1": [[1, 2], [3, 4]]}, + DoesNotRaise(), + ), + # Identical multi-dimensional numpy arrays across metadata dictionaries + ( + [ + {"key1": np.arange(4).reshape(2, 2)}, + {"key1": np.arange(4).reshape(2, 
2)}, + ], + {"key1": np.arange(4).reshape(2, 2)}, + DoesNotRaise(), + ), + # Conflicting multi-dimensional lists for the same key + ( + [{"key1": [[1, 2], [3, 4]]}, {"key1": [[5, 6], [7, 8]]}], + None, + pytest.raises(ValueError), + ), + # Conflicting multi-dimensional numpy arrays for the same key + ( + [ + {"key1": np.arange(4).reshape(2, 2)}, + {"key1": np.arange(4, 8).reshape(2, 2)}, + ], + None, + pytest.raises(ValueError), + ), + # Mixed types with multi-dimensional list and array for the same key + ( + [{"key1": [[1, 2], [3, 4]]}, {"key1": np.arange(4).reshape(2, 2)}], + None, + pytest.raises(ValueError), + ), + # Identical higher-dimensional (3D) numpy arrays across + # metadata dictionaries + ( + [ + {"key1": np.arange(8).reshape(2, 2, 2)}, + {"key1": np.arange(8).reshape(2, 2, 2)}, + ], + {"key1": np.arange(8).reshape(2, 2, 2)}, + DoesNotRaise(), + ), + # Differently-shaped higher-dimensional (3D) numpy arrays + # across metadata dictionaries + ( + [ + {"key1": np.arange(8).reshape(2, 2, 2)}, + {"key1": np.arange(8).reshape(4, 1, 2)}, + ], + None, + pytest.raises(ValueError), + ), + ], +) +def test_merge_metadata(metadata_list, expected_result, exception): + with exception: + result = merge_metadata(metadata_list) + if expected_result is None: + assert result is None, f"Expected an error, but got a result {result}" + for key, value in result.items(): + assert key in expected_result + if isinstance(value, np.ndarray): + np.testing.assert_array_equal(value, expected_result[key]) + else: + assert value == expected_result[key] From c43ca06609ac97330e361c0336287774a9375ba0 Mon Sep 17 00:00:00 2001 From: LinasKo Date: Fri, 8 Nov 2024 12:48:23 +0200 Subject: [PATCH 143/161] Expand Detections.merge tests with metadata --- test/detection/test_core.py | 207 +++++++++++++++++++++++++++++++++++- 1 file changed, 205 insertions(+), 2 deletions(-) diff --git a/test/detection/test_core.py b/test/detection/test_core.py index b857250e0..61796bef2 100644 --- 
a/test/detection/test_core.py +++ b/test/detection/test_core.py @@ -106,6 +106,26 @@ "never_seen_key": [9], }, ) +TEST_DET_WITH_METADATA = Detections( + xyxy=np.array([[10, 10, 20, 20]]), + class_id=np.array([1]), + metadata={"source": "camera1"}, +) + +TEST_DET_WITH_METADATA_2 = Detections( + xyxy=np.array([[30, 30, 40, 40]]), + class_id=np.array([2]), + metadata={"source": "camera1"}, +) +TEST_DET_NO_METADATA = Detections( + xyxy=np.array([[10, 10, 20, 20]]), + class_id=np.array([1]), +) +TEST_DET_DIFFERENT_METADATA = Detections( + xyxy=np.array([[50, 50, 60, 60]]), + class_id=np.array([3]), + metadata={"source": "camera2"}, +) @pytest.mark.parametrize( @@ -258,6 +278,11 @@ def test_getitem( TEST_DET_1, DoesNotRaise(), ), # Single detection and empty-array fields + ( + [TEST_DET_ZERO_LENGTH, TEST_DET_ZERO_LENGTH], + TEST_DET_ZERO_LENGTH, + DoesNotRaise(), + ), # Zero-length fields across all Detections ( [ TEST_DET_1, @@ -287,12 +312,190 @@ def test_getitem( Detections.empty(), ], mock_detections( - xyxy=[[10, 10, 20, 20]], + xyxy=np.array([[10, 10, 20, 20]]), class_id=[1], mask=[np.zeros((4, 4), dtype=bool)], ), DoesNotRaise(), ), # Segmentation + Empty + # Metadata + ( + [ + Detections( + xyxy=np.array([[10, 10, 20, 20]]), + class_id=np.array([1]), + metadata={"source": "camera1"}, + ), + Detections.empty(), + ], + Detections( + xyxy=np.array([[10, 10, 20, 20]]), + class_id=np.array([1]), + metadata={"source": "camera1"}, + ), + DoesNotRaise(), + ), # Metadata merge with empty detections + ( + [ + Detections( + xyxy=np.array([[10, 10, 20, 20]]), + class_id=np.array([1]), + metadata={"source": "camera1"}, + ), + Detections(xyxy=np.array([[30, 30, 40, 40]]), class_id=np.array([2])), + ], + None, + pytest.raises(ValueError), + ), # Empty and non-empty metadata + ( + [ + Detections( + xyxy=np.array([[10, 10, 20, 20]]), + class_id=np.array([1]), + metadata={"source": "camera1"}, + ) + ], + Detections( + xyxy=np.array([[10, 10, 20, 20]]), + class_id=np.array([1]), + 
metadata={"source": "camera1"}, + ), + DoesNotRaise(), + ), # Single detection with metadata + ( + [ + Detections( + xyxy=np.array([[10, 10, 20, 20]]), + class_id=np.array([1]), + metadata={"source": "camera1"}, + ), + Detections( + xyxy=np.array([[30, 30, 40, 40]]), + class_id=np.array([2]), + metadata={"source": "camera1"}, + ), + ], + Detections( + xyxy=np.array([[10, 10, 20, 20], [30, 30, 40, 40]]), + class_id=np.array([1, 2]), + metadata={"source": "camera1"}, + ), + DoesNotRaise(), + ), # Multiple metadata entries with identical values + ( + [ + Detections( + xyxy=np.array([[10, 10, 20, 20]]), + class_id=np.array([1]), + metadata={"source": "camera1"}, + ), + Detections( + xyxy=np.array([[50, 50, 60, 60]]), + class_id=np.array([3]), + metadata={"source": "camera2"}, + ), + ], + None, + pytest.raises(ValueError), + ), # Different metadata values + ( + [ + Detections( + xyxy=np.array([[10, 10, 20, 20]]), + metadata={"source": "camera1", "resolution": "1080p"}, + ), + Detections( + xyxy=np.array([[30, 30, 40, 40]]), + metadata={"source": "camera1", "resolution": "1080p"}, + ), + ], + Detections( + xyxy=np.array([[10, 10, 20, 20], [30, 30, 40, 40]]), + metadata={"source": "camera1", "resolution": "1080p"}, + ), + DoesNotRaise(), + ), # Large metadata with multiple identical entries + ( + [ + Detections( + xyxy=np.array([[10, 10, 20, 20]]), metadata={"source": "camera1"} + ), + Detections( + xyxy=np.array([[30, 30, 40, 40]]), metadata={"source": ["camera1"]} + ), + ], + None, + pytest.raises(ValueError), + ), # Inconsistent types in metadata values + ( + [ + Detections( + xyxy=np.array([[10, 10, 20, 20]]), metadata={"source": "camera1"} + ), + Detections( + xyxy=np.array([[30, 30, 40, 40]]), metadata={"location": "indoor"} + ), + ], + None, + pytest.raises(ValueError), + ), # Metadata key mismatch + ( + [ + Detections( + xyxy=np.array([[10, 10, 20, 20]]), + metadata={ + "source": "camera1", + "settings": {"resolution": "1080p", "fps": 30}, + }, + ), + Detections( 
+ xyxy=np.array([[30, 30, 40, 40]]), + metadata={ + "source": "camera1", + "settings": {"resolution": "1080p", "fps": 30}, + }, + ), + ], + Detections( + xyxy=np.array([[10, 10, 20, 20], [30, 30, 40, 40]]), + metadata={ + "source": "camera1", + "settings": {"resolution": "1080p", "fps": 30}, + }, + ), + DoesNotRaise(), + ), # multi-field metadata + ( + [ + Detections( + xyxy=np.array([[10, 10, 20, 20]]), + metadata={"calibration_matrix": np.array([[1, 0], [0, 1]])}, + ), + Detections( + xyxy=np.array([[30, 30, 40, 40]]), + metadata={"calibration_matrix": np.array([[1, 0], [0, 1]])}, + ), + ], + Detections( + xyxy=np.array([[10, 10, 20, 20], [30, 30, 40, 40]]), + metadata={"calibration_matrix": np.array([[1, 0], [0, 1]])}, + ), + DoesNotRaise(), + ), # Identical 2D numpy arrays in metadata + ( + [ + Detections( + xyxy=np.array([[10, 10, 20, 20]]), + metadata={"calibration_matrix": np.array([[1, 0], [0, 1]])}, + ), + Detections( + xyxy=np.array([[30, 30, 40, 40]]), + metadata={"calibration_matrix": np.array([[2, 0], [0, 2]])}, + ), + ], + None, + pytest.raises(ValueError), + ), # Mismatching 2D numpy arrays in metadata ], ) def test_merge( @@ -302,7 +505,7 @@ def test_merge( ) -> None: with exception: result = Detections.merge(detections_list=detections_list) - assert result == expected_result + assert result == expected_result, f"Expected: {expected_result}, Got: {result}" @pytest.mark.parametrize( From 5fedfe522087d55ee5bf081c59fc912e785b7a90 Mon Sep 17 00:00:00 2001 From: LinasKo Date: Fri, 8 Nov 2024 13:07:34 +0200 Subject: [PATCH 144/161] Rename "spread_out" arg to "smart_positions" --- supervision/annotators/core.py | 16 ++++++++-------- supervision/keypoint/annotators.py | 8 ++++---- 2 files changed, 12 insertions(+), 12 deletions(-) diff --git a/supervision/annotators/core.py b/supervision/annotators/core.py index bfc20503b..44b816457 100644 --- a/supervision/annotators/core.py +++ b/supervision/annotators/core.py @@ -1065,7 +1065,7 @@ def __init__( 
text_position: Position = Position.TOP_LEFT, color_lookup: ColorLookup = ColorLookup.CLASS, border_radius: int = 0, - spread_out: bool = False, + smart_positions: bool = False, ): """ Args: @@ -1082,7 +1082,7 @@ def __init__( Options are `INDEX`, `CLASS`, `TRACK`. border_radius (int): The radius to apply round edges. If the selected value is higher than the lower dimension, width or height, is clipped. - spread_out (bool): Spread out the labels to avoid overlapping. + smart_positions (bool): Spread out the labels to avoid overlapping. """ self.border_radius: int = border_radius self.color: Union[Color, ColorPalette] = color @@ -1092,7 +1092,7 @@ def __init__( self.text_padding: int = text_padding self.text_anchor: Position = text_position self.color_lookup: ColorLookup = color_lookup - self.spread_out = spread_out + self.smart_positions = smart_positions @ensure_cv2_image_for_annotation def annotate( @@ -1153,7 +1153,7 @@ def annotate( detections, text_properties, self.text_anchor ) - if self.spread_out: + if self.smart_positions: xyxy = spread_out_boxes(xyxy, step=2, max_iterations=len(xyxy) * 20) self._draw_labels( @@ -1362,7 +1362,7 @@ def __init__( text_position: Position = Position.TOP_LEFT, color_lookup: ColorLookup = ColorLookup.CLASS, border_radius: int = 0, - spread_out: bool = False, + smart_positions: bool = False, ): """ Args: @@ -1379,7 +1379,7 @@ def __init__( Options are `INDEX`, `CLASS`, `TRACK`. border_radius (int): The radius to apply round edges. If the selected value is higher than the lower dimension, width or height, is clipped. - spread_out (bool): Spread out the labels to avoid overlapping. + smart_positions (bool): Spread out the labels to avoid overlapping. 
""" self.color = color self.text_color = text_color @@ -1387,7 +1387,7 @@ def __init__( self.text_anchor = text_position self.color_lookup = color_lookup self.border_radius = border_radius - self.spread_out = spread_out + self.smart_positions = smart_positions self.font = self._load_font(font_size, font_path) @ensure_pil_image_for_annotation @@ -1448,7 +1448,7 @@ def annotate( detections, text_properties, self.text_anchor ) - if self.spread_out: + if self.smart_positions: xyxy = spread_out_boxes(xyxy, step=2, max_iterations=len(xyxy) * 20) self._draw_labels( diff --git a/supervision/keypoint/annotators.py b/supervision/keypoint/annotators.py index 4d3196b39..d968786c8 100644 --- a/supervision/keypoint/annotators.py +++ b/supervision/keypoint/annotators.py @@ -202,7 +202,7 @@ def __init__( text_thickness: int = 1, text_padding: int = 10, border_radius: int = 0, - spread_out: bool = False, + smart_positions: bool = False, ): """ Args: @@ -217,7 +217,7 @@ def __init__( text_padding (int): The padding around the text. border_radius (int): The radius of the rounded corners of the boxes. Set to a high value to produce circles. - spread_out (bool): Spread out the labels to avoid overlap. + smart_positions (bool): Spread out the labels to avoid overlap. 
""" self.border_radius: int = border_radius self.color: Union[Color, List[Color]] = color @@ -225,7 +225,7 @@ def __init__( self.text_scale: float = text_scale self.text_thickness: int = text_thickness self.text_padding: int = text_padding - self.spread_out = spread_out + self.smart_positions = smart_positions def annotate( self, @@ -362,7 +362,7 @@ def annotate( ) xyxy_padded = pad_boxes(xyxy=xyxy, px=self.text_padding) - if self.spread_out: + if self.smart_positions: xyxy_padded = spread_out_boxes( xyxy_padded, step=2, max_iterations=len(xyxy_padded) * 20 ) From 01c2912a64426d948a36f372e83f4662796e5f35 Mon Sep 17 00:00:00 2001 From: LinasKo Date: Fri, 8 Nov 2024 13:10:18 +0200 Subject: [PATCH 145/161] Move FONT to global scope --- supervision/annotators/core.py | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/supervision/annotators/core.py b/supervision/annotators/core.py index 44b816457..6071b0839 100644 --- a/supervision/annotators/core.py +++ b/supervision/annotators/core.py @@ -33,6 +33,8 @@ ) from supervision.utils.internal import deprecated +CV2_FONT = cv2.FONT_HERSHEY_SIMPLEX + class BoxAnnotator(BaseAnnotator): """ @@ -1053,8 +1055,6 @@ class _TextProperties: width_padded: int height_padded: int - _FONT = cv2.FONT_HERSHEY_SIMPLEX - def __init__( self, color: Union[Color, ColorPalette] = ColorPalette.DEFAULT, @@ -1181,7 +1181,7 @@ def _get_text_properties(self, labels: List[str]) -> List[_TextProperties]: for label in labels: (text_w, text_h) = cv2.getTextSize( text=label, - fontFace=self._FONT, + fontFace=CV2_FONT, fontScale=self.text_scale, thickness=self.text_thickness, )[0] @@ -1286,7 +1286,7 @@ def _draw_labels( img=scene, text=text_properties[idx].text, org=(text_x, text_y), - fontFace=self._FONT, + fontFace=CV2_FONT, fontScale=self.text_scale, color=text_color.as_bgr(), thickness=self.text_thickness, From 5e76f86ff938d018da49067981b182f6a3cfd5c4 Mon Sep 17 00:00:00 2001 From: LinasKo Date: Fri, 8 Nov 2024 15:47:37 +0200 
Subject: [PATCH 146/161] 3 changes to spread_put_boxes algo: * Vectorized * Using forces rather than discrete steps * Move along secondary axis more --- supervision/annotators/core.py | 4 +-- supervision/detection/utils.py | 53 +++++++++++++++++++----------- supervision/keypoint/annotators.py | 4 +-- 3 files changed, 37 insertions(+), 24 deletions(-) diff --git a/supervision/annotators/core.py b/supervision/annotators/core.py index 6071b0839..3200dc9c2 100644 --- a/supervision/annotators/core.py +++ b/supervision/annotators/core.py @@ -1154,7 +1154,7 @@ def annotate( ) if self.smart_positions: - xyxy = spread_out_boxes(xyxy, step=2, max_iterations=len(xyxy) * 20) + xyxy = spread_out_boxes(xyxy) self._draw_labels( scene=scene, @@ -1449,7 +1449,7 @@ def annotate( ) if self.smart_positions: - xyxy = spread_out_boxes(xyxy, step=2, max_iterations=len(xyxy) * 20) + xyxy = spread_out_boxes(xyxy) self._draw_labels( draw=draw, diff --git a/supervision/detection/utils.py b/supervision/detection/utils.py index ede1ed355..246d3c607 100644 --- a/supervision/detection/utils.py +++ b/supervision/detection/utils.py @@ -1062,37 +1062,52 @@ def get_unit_vector(xy_1: np.ndarray, xy_2: np.ndarray) -> np.ndarray: def spread_out_boxes( - xyxy: np.ndarray, step: int, max_iterations: int = 100 + xyxy: np.ndarray, + max_iterations: int = 100, + force_multiplier: float = 0.03, ) -> np.ndarray: + """ + Spread out boxes that overlap with each other. + + Args: + xyxy: Numpy array of shape (N, 4) where N is the number of boxes. + max_iterations: Maximum number of iterations to run the algorithm for. + force_multiplier: Multiplier to scale the force vectors by. Similar to + learning rate in gradient descent. 
+ """ if len(xyxy) == 0: return xyxy - xyxy_padded = pad_boxes(xyxy, px=step) + xyxy_padded = pad_boxes(xyxy, px=1) for _ in range(max_iterations): + # NxN iou = box_iou_batch(xyxy_padded, xyxy_padded) np.fill_diagonal(iou, 0) - if np.all(iou == 0): break - i, j = np.unravel_index(np.argmax(iou), iou.shape) + overlap_mask = iou > 0 + + # Nx2 + centers = (xyxy_padded[:, :2] + xyxy_padded[:, 2:]) / 2 + + # NxNx2 + delta_centers = centers[:, np.newaxis, :] - centers[np.newaxis, :, :] + delta_centers *= overlap_mask[:, :, np.newaxis] - xyxy_i, xyxy_j = xyxy_padded[i], xyxy_padded[j] - box_intersection = get_box_intersection(xyxy_i, xyxy_j) - assert ( - box_intersection is not None - ), "Since we checked IoU already, boxes should always intersect" + # Nx2 + force_vectors = np.sum(delta_centers, axis=1) + force_vectors *= force_multiplier + force_vectors[(force_vectors > 0) & (force_vectors < 1)] = 1 + force_vectors[(force_vectors < 0) & (force_vectors > -1)] = -1 - intersection_center = (box_intersection[:2] + box_intersection[2:]) / 2 - xyxy_i_center = (xyxy_i[:2] + xyxy_i[2:]) / 2 - xyxy_j_center = (xyxy_j[:2] + xyxy_j[2:]) / 2 + # Reduce motion along primary axis + primary_axis = np.argmax(np.abs(force_vectors), axis=1) + force_vectors[np.arange(len(force_vectors)), primary_axis] /= 2 - unit_vector_i = get_unit_vector(intersection_center, xyxy_i_center) - unit_vector_j = get_unit_vector(intersection_center, xyxy_j_center) + force_vectors = force_vectors.astype(int) - xyxy_padded[i, [0, 2]] += int(unit_vector_i[0] * step) - xyxy_padded[i, [1, 3]] += int(unit_vector_i[1] * step) - xyxy_padded[j, [0, 2]] += int(unit_vector_j[0] * step) - xyxy_padded[j, [1, 3]] += int(unit_vector_j[1] * step) + xyxy_padded[:, [0, 1]] += force_vectors + xyxy_padded[:, [2, 3]] += force_vectors - return pad_boxes(xyxy_padded, px=-step) + return pad_boxes(xyxy_padded, px=-1) diff --git a/supervision/keypoint/annotators.py b/supervision/keypoint/annotators.py index d968786c8..fc450a837 100644 
--- a/supervision/keypoint/annotators.py +++ b/supervision/keypoint/annotators.py @@ -363,9 +363,7 @@ def annotate( xyxy_padded = pad_boxes(xyxy=xyxy, px=self.text_padding) if self.smart_positions: - xyxy_padded = spread_out_boxes( - xyxy_padded, step=2, max_iterations=len(xyxy_padded) * 20 - ) + xyxy_padded = spread_out_boxes(xyxy_padded) xyxy = pad_boxes(xyxy=xyxy_padded, px=-self.text_padding) for text, color, text_color, box, box_padded in zip( From 38dbf5d05dd4a84cadc826c9a91e8575a4a6e57f Mon Sep 17 00:00:00 2001 From: LinasKo Date: Fri, 8 Nov 2024 16:42:04 +0200 Subject: [PATCH 147/161] Remove TextProperties class, merge auxiliary functions, use numpy arrays for passing data --- supervision/annotators/core.py | 198 ++++++++++++--------------------- 1 file changed, 73 insertions(+), 125 deletions(-) diff --git a/supervision/annotators/core.py b/supervision/annotators/core.py index 3200dc9c2..f8ab90827 100644 --- a/supervision/annotators/core.py +++ b/supervision/annotators/core.py @@ -1,4 +1,3 @@ -from dataclasses import dataclass from functools import lru_cache from math import sqrt from typing import List, Optional, Tuple, Union @@ -1047,14 +1046,6 @@ class LabelAnnotator(BaseAnnotator): A class for annotating labels on an image using provided detections. 
""" - @dataclass - class _TextProperties: - text: str - width: int - height: int - width_padded: int - height_padded: int - def __init__( self, color: Union[Color, ColorPalette] = ColorPalette.DEFAULT, @@ -1147,19 +1138,17 @@ def annotate( self._validate_labels(labels, detections) labels = self._get_labels_text(detections, labels) - text_properties = self._get_text_properties(labels) - - xyxy = self._calculate_label_positions( - detections, text_properties, self.text_anchor - ) + label_properties = self._get_label_properties(detections, labels) + xyxy = label_properties[:, :4] if self.smart_positions: xyxy = spread_out_boxes(xyxy) + label_properties[:, :4] = xyxy self._draw_labels( scene=scene, - xyxy=xyxy, - text_properties=text_properties, + labels=labels, + label_properties=label_properties, detections=detections, custom_color_lookup=custom_color_lookup, ) @@ -1174,11 +1163,17 @@ def _validate_labels(self, labels: Optional[List[str]], detections: Detections): f"should have exactly 1 label." 
) - def _get_text_properties(self, labels: List[str]) -> List[_TextProperties]: - """Gets text content and dimensions for all detections.""" - text_properties = [] + def _get_label_properties( + self, + detections: Detections, + labels: List[str], + ) -> np.ndarray: + label_properties = [] + anchors_coordinates = detections.get_anchors_coordinates( + anchor=self.text_anchor + ).astype(int) - for label in labels: + for label, center_coords in zip(labels, anchors_coordinates): (text_w, text_h) = cv2.getTextSize( text=label, fontFace=CV2_FONT, @@ -1186,17 +1181,23 @@ def _get_text_properties(self, labels: List[str]) -> List[_TextProperties]: thickness=self.text_thickness, )[0] - text_properties.append( - self._TextProperties( - text=label, - width=text_w, - height=text_h, - width_padded=text_w + 2 * self.text_padding, - height_padded=text_h + 2 * self.text_padding, - ) + width_padded = text_w + 2 * self.text_padding + height_padded = text_h + 2 * self.text_padding + + text_background_xyxy = resolve_text_background_xyxy( + center_coordinates=tuple(center_coords), + text_wh=(width_padded, height_padded), + position=self.text_anchor, ) - return text_properties + label_properties.append( + [ + *text_background_xyxy, + text_h, + ] + ) + + return np.array(label_properties).reshape(-1, 5) @staticmethod def _get_labels_text( @@ -1215,41 +1216,17 @@ def _get_labels_text( labels.append(str(idx)) return labels - def _calculate_label_positions( - self, - detections: Detections, - text_properties: List[_TextProperties], - text_anchor: Position, - ) -> np.ndarray: - anchors_coordinates = detections.get_anchors_coordinates( - anchor=text_anchor - ).astype(int) - - xyxy = [] - for idx, center_coords in enumerate(anchors_coordinates): - text_background_xyxy = resolve_text_background_xyxy( - center_coordinates=tuple(center_coords), - text_wh=( - text_properties[idx].width_padded, - text_properties[idx].height_padded, - ), - position=text_anchor, - ) - xyxy.append(text_background_xyxy) 
- - return np.array(xyxy) - def _draw_labels( self, scene: np.ndarray, - xyxy: np.ndarray, - text_properties: List[_TextProperties], + labels: List[str], + label_properties: np.ndarray, detections: Detections, custom_color_lookup: Optional[np.ndarray], ) -> None: - assert len(xyxy) == len(text_properties) == len(detections), ( - f"Number of text properties ({len(text_properties)}), " - f"xyxy ({len(xyxy)}) and detections ({len(detections)}) " + assert len(labels) == len(label_properties) == len(detections), ( + f"Number of label properties ({len(label_properties)}), " + f"labels ({len(labels)}) and detections ({len(detections)}) " "do not match." ) @@ -1259,7 +1236,7 @@ def _draw_labels( else self.color_lookup ) - for idx, box_xyxy in enumerate(xyxy): + for idx, label_property in enumerate(label_properties): background_color = resolve_color( color=self.color, detections=detections, @@ -1273,6 +1250,8 @@ def _draw_labels( color_lookup=color_lookup, ) + box_xyxy = label_property[:4] + text_height_padded = label_property[4] self.draw_rounded_rectangle( scene=scene, xyxy=box_xyxy, @@ -1281,10 +1260,10 @@ def _draw_labels( ) text_x = box_xyxy[0] + self.text_padding - text_y = box_xyxy[1] + self.text_padding + text_properties[idx].height + text_y = box_xyxy[1] + self.text_padding + text_height_padded cv2.putText( img=scene, - text=text_properties[idx].text, + text=labels[idx], org=(text_x, text_y), fontFace=CV2_FONT, fontScale=self.text_scale, @@ -1342,16 +1321,6 @@ class RichLabelAnnotator(BaseAnnotator): with support for Unicode characters by using a custom font. 
""" - @dataclass - class _TextProperties: - text: str - width: int - height: int - width_padded: int - height_padded: int - text_left: int - text_top: int - def __init__( self, color: Union[Color, ColorPalette] = ColorPalette.DEFAULT, @@ -1442,19 +1411,17 @@ def annotate( draw = ImageDraw.Draw(scene) labels = self._get_labels_text(detections, labels) - text_properties = self._get_text_properties(draw, labels) - - xyxy = self._calculate_label_positions( - detections, text_properties, self.text_anchor - ) + label_properties = self._get_label_properties(draw, detections, labels) + xyxy = label_properties[:, :4] if self.smart_positions: xyxy = spread_out_boxes(xyxy) + label_properties[:, :4] = xyxy self._draw_labels( draw=draw, - xyxy=xyxy, - text_properties=text_properties, + labels=labels, + label_properties=label_properties, detections=detections, custom_color_lookup=custom_color_lookup, ) @@ -1469,11 +1436,16 @@ def _validate_labels(self, labels: Optional[List[str]], detections: Detections): f"should have exactly 1 label." 
) - def _get_text_properties(self, draw, labels: List[str]) -> List[_TextProperties]: - """Gets text content and dimensions for all detections.""" - text_properties = [] + def _get_label_properties( + self, draw, detections: Detections, labels: List[str] + ) -> np.ndarray: + label_properties = [] - for label in labels: + anchor_coordinates = detections.get_anchors_coordinates( + anchor=self.text_anchor + ).astype(int) + + for label, center_coords in zip(labels, anchor_coordinates): text_left, text_top, text_right, text_bottom = draw.textbbox( (0, 0), label, font=self.font ) @@ -1482,43 +1454,15 @@ def _get_text_properties(self, draw, labels: List[str]) -> List[_TextProperties] width_padded = text_width + 2 * self.text_padding height_padded = text_height + 2 * self.text_padding - text_properties.append( - self._TextProperties( - text=label, - width=text_width, - height=text_height, - width_padded=width_padded, - height_padded=height_padded, - text_left=text_left, - text_top=text_top, - ) - ) - - return text_properties - - def _calculate_label_positions( - self, - detections: Detections, - text_properties: List[_TextProperties], - text_anchor: Position, - ) -> np.ndarray: - anchor_coordinates = detections.get_anchors_coordinates( - anchor=self.text_anchor - ).astype(int) - - xyxy = [] - for idx, center_coords in enumerate(anchor_coordinates): text_background_xyxy = resolve_text_background_xyxy( center_coordinates=tuple(center_coords), - text_wh=( - text_properties[idx].width_padded, - text_properties[idx].height_padded, - ), - position=text_anchor, + text_wh=(width_padded, height_padded), + position=self.text_anchor, ) - xyxy.append(text_background_xyxy) - return np.array(xyxy) + label_properties.append([*text_background_xyxy, text_left, text_top]) + + return np.array(label_properties).reshape(-1, 6) @staticmethod def _get_labels_text( @@ -1540,18 +1484,23 @@ def _get_labels_text( def _draw_labels( self, draw, - xyxy: np.ndarray, - text_properties: 
List[_TextProperties], + labels: List[str], + label_properties: np.ndarray, detections: Detections, custom_color_lookup: Optional[np.ndarray], ) -> None: + assert len(labels) == len(label_properties) == len(detections), ( + f"Number of label properties ({len(label_properties)}), " + f"labels ({len(labels)}) and detections ({len(detections)}) " + "do not match." + ) color_lookup = ( custom_color_lookup if custom_color_lookup is not None else self.color_lookup ) - for idx, box_xyxy in enumerate(xyxy): + for idx, label_property in enumerate(label_properties): background_color = resolve_color( color=self.color, detections=detections, @@ -1565,12 +1514,11 @@ def _draw_labels( color_lookup=color_lookup, ) - label_x_position = ( - box_xyxy[0] + self.text_padding - text_properties[idx].text_left - ) - label_y_position = ( - box_xyxy[1] + self.text_padding - text_properties[idx].text_top - ) + box_xyxy = label_property[:4] + text_left = label_property[4] + text_top = label_property[5] + label_x_position = box_xyxy[0] + self.text_padding - text_left + label_y_position = box_xyxy[1] + self.text_padding - text_top draw.rounded_rectangle( tuple(box_xyxy), @@ -1580,7 +1528,7 @@ def _draw_labels( ) draw.text( xy=(label_x_position, label_y_position), - text=text_properties[idx].text, + text=labels[idx], font=self.font, fill=text_color.as_rgb(), ) From 4326b87191f0d797be15d2fe90f59f69e8ce3cc0 Mon Sep 17 00:00:00 2001 From: LinasKo Date: Fri, 8 Nov 2024 16:51:21 +0200 Subject: [PATCH 148/161] Even better Discord shield (thanks Brad) --- README.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/README.md b/README.md index 464fa4a4d..2aa3cef0f 100644 --- a/README.md +++ b/README.md @@ -21,7 +21,7 @@ [![python-version](https://img.shields.io/pypi/pyversions/supervision)](https://badge.fury.io/py/supervision) [![colab](https://colab.research.google.com/assets/colab-badge.svg)](https://colab.research.google.com/github/roboflow/supervision/blob/main/demo.ipynb) 
[![gradio](https://img.shields.io/badge/%F0%9F%A4%97%20Hugging%20Face-Spaces-blue)](https://huggingface.co/spaces/Roboflow/Annotators) -[![discord](https://img.shields.io/discord/1159501506232451173?logo=discord&label=discord)](https://discord.gg/GbfgXGJ8Bk) +[![discord](https://img.shields.io/discord/1159501506232451173?logo=discord&label=discord&labelColor=fff&color=5865f2&link=https%3A%2F%2Fdiscord.gg%2FGbfgXGJ8Bk)](https://discord.gg/GbfgXGJ8Bk) [![built-with-material-for-mkdocs](https://img.shields.io/badge/Material_for_MkDocs-526CFE?logo=MaterialForMkDocs&logoColor=white)](https://squidfunk.github.io/mkdocs-material/)
From a1d8c69654b0f56bffdc91b65e8f091a5aa982b3 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 11 Nov 2024 01:03:27 +0000 Subject: [PATCH 149/161] :arrow_up: Bump wheel from 0.44.0 to 0.45.0 Bumps [wheel](https://github.com/pypa/wheel) from 0.44.0 to 0.45.0. - [Release notes](https://github.com/pypa/wheel/releases) - [Changelog](https://github.com/pypa/wheel/blob/main/docs/news.rst) - [Commits](https://github.com/pypa/wheel/compare/0.44.0...0.45.0) --- updated-dependencies: - dependency-name: wheel dependency-type: direct:development update-type: version-update:semver-minor ... Signed-off-by: dependabot[bot] --- poetry.lock | 8 ++++---- pyproject.toml | 2 +- 2 files changed, 5 insertions(+), 5 deletions(-) diff --git a/poetry.lock b/poetry.lock index e78ab20b6..a7a84da0b 100644 --- a/poetry.lock +++ b/poetry.lock @@ -4733,13 +4733,13 @@ test = ["websockets"] [[package]] name = "wheel" -version = "0.44.0" +version = "0.45.0" description = "A built-package format for Python" optional = false python-versions = ">=3.8" files = [ - {file = "wheel-0.44.0-py3-none-any.whl", hash = "sha256:2376a90c98cc337d18623527a97c31797bd02bad0033d41547043a1cbfbe448f"}, - {file = "wheel-0.44.0.tar.gz", hash = "sha256:a29c3f2817e95ab89aa4660681ad547c0e9547f20e75b0562fe7723c9a2a9d49"}, + {file = "wheel-0.45.0-py3-none-any.whl", hash = "sha256:52f0baa5e6522155090a09c6bd95718cc46956d1b51d537ea5454249edb671c7"}, + {file = "wheel-0.45.0.tar.gz", hash = "sha256:a57353941a3183b3d5365346b567a260a0602a0f8a635926a7dede41b94c674a"}, ] [package.extras] @@ -4782,4 +4782,4 @@ metrics = ["pandas", "pandas-stubs"] [metadata] lock-version = "2.0" python-versions = "^3.8" -content-hash = "51d7489c6d66912ca4946850539636cfedb1c08b1a71747384ff732c4519cd8e" +content-hash = "85f56a451ee3e0f2c00c8a39b1433d4fb54a239f14f4878e5ded30bc63729734" diff --git a/pyproject.toml b/pyproject.toml index 0367cbc8b..035627afc 100644 --- a/pyproject.toml +++ 
b/pyproject.toml @@ -84,7 +84,7 @@ metrics = ["pandas", "pandas-stubs"] [tool.poetry.group.dev.dependencies] twine = "^5.1.1" pytest = ">=7.2.2,<9.0.0" -wheel = ">=0.40,<0.45" +wheel = ">=0.40,<0.46" build = ">=0.10,<1.3" ruff = ">=0.1.0" mypy = "^1.4.1" From cc8be562b01019ad08c532f2b5ca84d9953ff4e8 Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 11 Nov 2024 01:06:12 +0000 Subject: [PATCH 150/161] :arrow_up: Bump ruff from 0.7.2 to 0.7.3 Bumps [ruff](https://github.com/astral-sh/ruff) from 0.7.2 to 0.7.3. - [Release notes](https://github.com/astral-sh/ruff/releases) - [Changelog](https://github.com/astral-sh/ruff/blob/main/CHANGELOG.md) - [Commits](https://github.com/astral-sh/ruff/compare/0.7.2...0.7.3) --- updated-dependencies: - dependency-name: ruff dependency-type: direct:development update-type: version-update:semver-patch ... Signed-off-by: dependabot[bot] --- poetry.lock | 38 +++++++++++++++++++------------------- 1 file changed, 19 insertions(+), 19 deletions(-) diff --git a/poetry.lock b/poetry.lock index e78ab20b6..200cf2bb6 100644 --- a/poetry.lock +++ b/poetry.lock @@ -4037,29 +4037,29 @@ files = [ [[package]] name = "ruff" -version = "0.7.2" +version = "0.7.3" description = "An extremely fast Python linter and code formatter, written in Rust." 
optional = false python-versions = ">=3.7" files = [ - {file = "ruff-0.7.2-py3-none-linux_armv6l.whl", hash = "sha256:b73f873b5f52092e63ed540adefc3c36f1f803790ecf2590e1df8bf0a9f72cb8"}, - {file = "ruff-0.7.2-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:5b813ef26db1015953daf476202585512afd6a6862a02cde63f3bafb53d0b2d4"}, - {file = "ruff-0.7.2-py3-none-macosx_11_0_arm64.whl", hash = "sha256:853277dbd9675810c6826dad7a428d52a11760744508340e66bf46f8be9701d9"}, - {file = "ruff-0.7.2-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:21aae53ab1490a52bf4e3bf520c10ce120987b047c494cacf4edad0ba0888da2"}, - {file = "ruff-0.7.2-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:ccc7e0fc6e0cb3168443eeadb6445285abaae75142ee22b2b72c27d790ab60ba"}, - {file = "ruff-0.7.2-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:fd77877a4e43b3a98e5ef4715ba3862105e299af0c48942cc6d51ba3d97dc859"}, - {file = "ruff-0.7.2-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:e00163fb897d35523c70d71a46fbaa43bf7bf9af0f4534c53ea5b96b2e03397b"}, - {file = "ruff-0.7.2-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f3c54b538633482dc342e9b634d91168fe8cc56b30a4b4f99287f4e339103e88"}, - {file = "ruff-0.7.2-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7b792468e9804a204be221b14257566669d1db5c00d6bb335996e5cd7004ba80"}, - {file = "ruff-0.7.2-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dba53ed84ac19ae4bfb4ea4bf0172550a2285fa27fbb13e3746f04c80f7fa088"}, - {file = "ruff-0.7.2-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:b19fafe261bf741bca2764c14cbb4ee1819b67adb63ebc2db6401dcd652e3748"}, - {file = "ruff-0.7.2-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:28bd8220f4d8f79d590db9e2f6a0674f75ddbc3847277dd44ac1f8d30684b828"}, - {file = "ruff-0.7.2-py3-none-musllinux_1_2_i686.whl", hash = 
"sha256:9fd67094e77efbea932e62b5d2483006154794040abb3a5072e659096415ae1e"}, - {file = "ruff-0.7.2-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:576305393998b7bd6c46018f8104ea3a9cb3fa7908c21d8580e3274a3b04b691"}, - {file = "ruff-0.7.2-py3-none-win32.whl", hash = "sha256:fa993cfc9f0ff11187e82de874dfc3611df80852540331bc85c75809c93253a8"}, - {file = "ruff-0.7.2-py3-none-win_amd64.whl", hash = "sha256:dd8800cbe0254e06b8fec585e97554047fb82c894973f7ff18558eee33d1cb88"}, - {file = "ruff-0.7.2-py3-none-win_arm64.whl", hash = "sha256:bb8368cd45bba3f57bb29cbb8d64b4a33f8415d0149d2655c5c8539452ce7760"}, - {file = "ruff-0.7.2.tar.gz", hash = "sha256:2b14e77293380e475b4e3a7a368e14549288ed2931fce259a6f99978669e844f"}, + {file = "ruff-0.7.3-py3-none-linux_armv6l.whl", hash = "sha256:34f2339dc22687ec7e7002792d1f50712bf84a13d5152e75712ac08be565d344"}, + {file = "ruff-0.7.3-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:fb397332a1879b9764a3455a0bb1087bda876c2db8aca3a3cbb67b3dbce8cda0"}, + {file = "ruff-0.7.3-py3-none-macosx_11_0_arm64.whl", hash = "sha256:37d0b619546103274e7f62643d14e1adcbccb242efda4e4bdb9544d7764782e9"}, + {file = "ruff-0.7.3-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5d59f0c3ee4d1a6787614e7135b72e21024875266101142a09a61439cb6e38a5"}, + {file = "ruff-0.7.3-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:44eb93c2499a169d49fafd07bc62ac89b1bc800b197e50ff4633aed212569299"}, + {file = "ruff-0.7.3-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6d0242ce53f3a576c35ee32d907475a8d569944c0407f91d207c8af5be5dae4e"}, + {file = "ruff-0.7.3-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:6b6224af8b5e09772c2ecb8dc9f3f344c1aa48201c7f07e7315367f6dd90ac29"}, + {file = "ruff-0.7.3-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c50f95a82b94421c964fae4c27c0242890a20fe67d203d127e84fbb8013855f5"}, + {file = 
"ruff-0.7.3-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7f3eff9961b5d2644bcf1616c606e93baa2d6b349e8aa8b035f654df252c8c67"}, + {file = "ruff-0.7.3-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b8963cab06d130c4df2fd52c84e9f10d297826d2e8169ae0c798b6221be1d1d2"}, + {file = "ruff-0.7.3-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:61b46049d6edc0e4317fb14b33bd693245281a3007288b68a3f5b74a22a0746d"}, + {file = "ruff-0.7.3-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:10ebce7696afe4644e8c1a23b3cf8c0f2193a310c18387c06e583ae9ef284de2"}, + {file = "ruff-0.7.3-py3-none-musllinux_1_2_i686.whl", hash = "sha256:3f36d56326b3aef8eeee150b700e519880d1aab92f471eefdef656fd57492aa2"}, + {file = "ruff-0.7.3-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:5d024301109a0007b78d57ab0ba190087b43dce852e552734ebf0b0b85e4fb16"}, + {file = "ruff-0.7.3-py3-none-win32.whl", hash = "sha256:4ba81a5f0c5478aa61674c5a2194de8b02652f17addf8dfc40c8937e6e7d79fc"}, + {file = "ruff-0.7.3-py3-none-win_amd64.whl", hash = "sha256:588a9ff2fecf01025ed065fe28809cd5a53b43505f48b69a1ac7707b1b7e4088"}, + {file = "ruff-0.7.3-py3-none-win_arm64.whl", hash = "sha256:1713e2c5545863cdbfe2cbce21f69ffaf37b813bfd1fb3b90dc9a6f1963f5a8c"}, + {file = "ruff-0.7.3.tar.gz", hash = "sha256:e1d1ba2e40b6e71a61b063354d04be669ab0d39c352461f3d789cac68b54a313"}, ] [[package]] From 79b45a4ea8afbf8456a3df58ce9dbbde1e885b6c Mon Sep 17 00:00:00 2001 From: LinasKo Date: Mon, 11 Nov 2024 15:03:52 +0200 Subject: [PATCH 151/161] Label, RichLabel VertexLabel annotators: rename 'smart_positions' arg to 'smart_position' --- supervision/annotators/core.py | 16 ++++++++-------- supervision/keypoint/annotators.py | 8 ++++---- 2 files changed, 12 insertions(+), 12 deletions(-) diff --git a/supervision/annotators/core.py b/supervision/annotators/core.py index f8ab90827..b47397143 100644 --- a/supervision/annotators/core.py +++ b/supervision/annotators/core.py @@ -1056,7 +1056,7 @@ def 
__init__( text_position: Position = Position.TOP_LEFT, color_lookup: ColorLookup = ColorLookup.CLASS, border_radius: int = 0, - smart_positions: bool = False, + smart_position: bool = False, ): """ Args: @@ -1073,7 +1073,7 @@ def __init__( Options are `INDEX`, `CLASS`, `TRACK`. border_radius (int): The radius to apply round edges. If the selected value is higher than the lower dimension, width or height, is clipped. - smart_positions (bool): Spread out the labels to avoid overlapping. + smart_position (bool): Spread out the labels to avoid overlapping. """ self.border_radius: int = border_radius self.color: Union[Color, ColorPalette] = color @@ -1083,7 +1083,7 @@ def __init__( self.text_padding: int = text_padding self.text_anchor: Position = text_position self.color_lookup: ColorLookup = color_lookup - self.smart_positions = smart_positions + self.smart_position = smart_position @ensure_cv2_image_for_annotation def annotate( @@ -1141,7 +1141,7 @@ def annotate( label_properties = self._get_label_properties(detections, labels) xyxy = label_properties[:, :4] - if self.smart_positions: + if self.smart_position: xyxy = spread_out_boxes(xyxy) label_properties[:, :4] = xyxy @@ -1331,7 +1331,7 @@ def __init__( text_position: Position = Position.TOP_LEFT, color_lookup: ColorLookup = ColorLookup.CLASS, border_radius: int = 0, - smart_positions: bool = False, + smart_position: bool = False, ): """ Args: @@ -1348,7 +1348,7 @@ def __init__( Options are `INDEX`, `CLASS`, `TRACK`. border_radius (int): The radius to apply round edges. If the selected value is higher than the lower dimension, width or height, is clipped. - smart_positions (bool): Spread out the labels to avoid overlapping. + smart_position (bool): Spread out the labels to avoid overlapping. 
""" self.color = color self.text_color = text_color @@ -1356,7 +1356,7 @@ def __init__( self.text_anchor = text_position self.color_lookup = color_lookup self.border_radius = border_radius - self.smart_positions = smart_positions + self.smart_position = smart_position self.font = self._load_font(font_size, font_path) @ensure_pil_image_for_annotation @@ -1414,7 +1414,7 @@ def annotate( label_properties = self._get_label_properties(draw, detections, labels) xyxy = label_properties[:, :4] - if self.smart_positions: + if self.smart_position: xyxy = spread_out_boxes(xyxy) label_properties[:, :4] = xyxy diff --git a/supervision/keypoint/annotators.py b/supervision/keypoint/annotators.py index fc450a837..7537b264a 100644 --- a/supervision/keypoint/annotators.py +++ b/supervision/keypoint/annotators.py @@ -202,7 +202,7 @@ def __init__( text_thickness: int = 1, text_padding: int = 10, border_radius: int = 0, - smart_positions: bool = False, + smart_position: bool = False, ): """ Args: @@ -217,7 +217,7 @@ def __init__( text_padding (int): The padding around the text. border_radius (int): The radius of the rounded corners of the boxes. Set to a high value to produce circles. - smart_positions (bool): Spread out the labels to avoid overlap. + smart_position (bool): Spread out the labels to avoid overlap. 
""" self.border_radius: int = border_radius self.color: Union[Color, List[Color]] = color @@ -225,7 +225,7 @@ def __init__( self.text_scale: float = text_scale self.text_thickness: int = text_thickness self.text_padding: int = text_padding - self.smart_positions = smart_positions + self.smart_position = smart_position def annotate( self, @@ -362,7 +362,7 @@ def annotate( ) xyxy_padded = pad_boxes(xyxy=xyxy, px=self.text_padding) - if self.smart_positions: + if self.smart_position: xyxy_padded = spread_out_boxes(xyxy_padded) xyxy = pad_boxes(xyxy=xyxy_padded, px=-self.text_padding) From 3532130b2da32f27b4ba945f290b126968267bd1 Mon Sep 17 00:00:00 2001 From: LinasKo Date: Mon, 11 Nov 2024 15:05:57 +0200 Subject: [PATCH 152/161] Label annotators: `xyxy = label_properties[:, :4]` moved inside `if self.smart_position:` --- supervision/annotators/core.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/supervision/annotators/core.py b/supervision/annotators/core.py index b47397143..cad8cafe8 100644 --- a/supervision/annotators/core.py +++ b/supervision/annotators/core.py @@ -1139,9 +1139,9 @@ def annotate( labels = self._get_labels_text(detections, labels) label_properties = self._get_label_properties(detections, labels) - xyxy = label_properties[:, :4] if self.smart_position: + xyxy = label_properties[:, :4] xyxy = spread_out_boxes(xyxy) label_properties[:, :4] = xyxy @@ -1412,9 +1412,9 @@ def annotate( draw = ImageDraw.Draw(scene) labels = self._get_labels_text(detections, labels) label_properties = self._get_label_properties(draw, detections, labels) - xyxy = label_properties[:, :4] if self.smart_position: + xyxy = label_properties[:, :4] xyxy = spread_out_boxes(xyxy) label_properties[:, :4] = xyxy From 10652efbc42d8ec525089f00024df8a5db6c1d7f Mon Sep 17 00:00:00 2001 From: LinasKo Date: Mon, 11 Nov 2024 15:07:22 +0200 Subject: [PATCH 153/161] Label annotators: remove unused functions `get_box_intersection`, `get_unit_vector` --- 
supervision/detection/utils.py | 20 -------------------- 1 file changed, 20 deletions(-) diff --git a/supervision/detection/utils.py b/supervision/detection/utils.py index 246d3c607..841879446 100644 --- a/supervision/detection/utils.py +++ b/supervision/detection/utils.py @@ -1041,26 +1041,6 @@ def cross_product(anchors: np.ndarray, vector: Vector) -> np.ndarray: return np.cross(vector_at_zero, anchors - vector_start) -def get_box_intersection( - xyxy_1: np.ndarray, xyxy_2: np.ndarray -) -> Optional[np.ndarray]: - overlap_xmin = max(xyxy_1[0], xyxy_2[0]) - overlap_ymin = max(xyxy_1[1], xyxy_2[1]) - overlap_xmax = min(xyxy_1[2], xyxy_2[2]) - overlap_ymax = min(xyxy_1[3], xyxy_2[3]) - - if overlap_xmin < overlap_xmax and overlap_ymin < overlap_ymax: - return np.array([overlap_xmin, overlap_ymin, overlap_xmax, overlap_ymax]) - else: - return None - - -def get_unit_vector(xy_1: np.ndarray, xy_2: np.ndarray) -> np.ndarray: - direction = xy_2 - xy_1 - magnitude = np.linalg.norm(direction) - return direction / magnitude if magnitude > 0 else np.zeros(2) - - def spread_out_boxes( xyxy: np.ndarray, max_iterations: int = 100, From c12c562e2958da01a20ab175acaf9fc6d978f31a Mon Sep 17 00:00:00 2001 From: LinasKo Date: Mon, 11 Nov 2024 15:15:30 +0200 Subject: [PATCH 154/161] Label annotators: add docstring to `_get_label_properties` --- supervision/annotators/core.py | 16 ++++++++++++++++ 1 file changed, 16 insertions(+) diff --git a/supervision/annotators/core.py b/supervision/annotators/core.py index cad8cafe8..02ab47d6f 100644 --- a/supervision/annotators/core.py +++ b/supervision/annotators/core.py @@ -1168,6 +1168,13 @@ def _get_label_properties( detections: Detections, labels: List[str], ) -> np.ndarray: + """ + Calculate the numerical properties required to draw the labels on the image. + + Returns: + (np.ndarray): An array of label properties, containing columns: + `min_x`, `min_y`, `max_x`, `max_y`, `padded_text_height`. 
+ """ label_properties = [] anchors_coordinates = detections.get_anchors_coordinates( anchor=self.text_anchor @@ -1439,6 +1446,15 @@ def _validate_labels(self, labels: Optional[List[str]], detections: Detections): def _get_label_properties( self, draw, detections: Detections, labels: List[str] ) -> np.ndarray: + """ + Calculate the numerical properties required to draw the labels on the image. + + Returns: + (np.ndarray): An array of label properties, containing columns: + `min_x`, `min_y`, `max_x`, `max_y`, `text_left_coordinate`, + `text_top_coordinate`. The first 4 values are already padded + with `text_padding`. + """ label_properties = [] anchor_coordinates = detections.get_anchors_coordinates( From 1d9f0b1994b4448de9b793dc4115a322fb9cced2 Mon Sep 17 00:00:00 2001 From: LinasKo Date: Mon, 11 Nov 2024 19:47:58 +0200 Subject: [PATCH 155/161] SmartLabels: Labels move proportianlly to IoU --- supervision/detection/utils.py | 25 +++++++++++++++++-------- 1 file changed, 17 insertions(+), 8 deletions(-) diff --git a/supervision/detection/utils.py b/supervision/detection/utils.py index 841879446..2e486e48c 100644 --- a/supervision/detection/utils.py +++ b/supervision/detection/utils.py @@ -1044,7 +1044,6 @@ def cross_product(anchors: np.ndarray, vector: Vector) -> np.ndarray: def spread_out_boxes( xyxy: np.ndarray, max_iterations: int = 100, - force_multiplier: float = 0.03, ) -> np.ndarray: """ Spread out boxes that overlap with each other. @@ -1052,8 +1051,6 @@ def spread_out_boxes( Args: xyxy: Numpy array of shape (N, 4) where N is the number of boxes. max_iterations: Maximum number of iterations to run the algorithm for. - force_multiplier: Multiplier to scale the force vectors by. Similar to - learning rate in gradient descent. 
""" if len(xyxy) == 0: return xyxy @@ -1076,14 +1073,26 @@ def spread_out_boxes( delta_centers *= overlap_mask[:, :, np.newaxis] # Nx2 - force_vectors = np.sum(delta_centers, axis=1) - force_vectors *= force_multiplier + delta_sum = np.sum(delta_centers, axis=1) + delta_magnitude = np.linalg.norm(delta_sum, axis=1, keepdims=True) + direction_vectors = np.divide( + delta_sum, + delta_magnitude, + out=np.zeros_like(delta_sum), + where=delta_magnitude != 0, + ) + + force_vectors = np.sum(iou, axis=1) + force_vectors = force_vectors[:, np.newaxis] * direction_vectors + + force_vectors *= 10 force_vectors[(force_vectors > 0) & (force_vectors < 1)] = 1 force_vectors[(force_vectors < 0) & (force_vectors > -1)] = -1 - # Reduce motion along primary axis - primary_axis = np.argmax(np.abs(force_vectors), axis=1) - force_vectors[np.arange(len(force_vectors)), primary_axis] /= 2 + # Move along main axis only. + force_vectors[ + np.arange(len(force_vectors)), np.argmin(force_vectors, axis=1) + ] = 0 force_vectors = force_vectors.astype(int) From 1af9c7d629021bf2638088cfef035f5517cbcc98 Mon Sep 17 00:00:00 2001 From: "pre-commit-ci[bot]" <66853113+pre-commit-ci[bot]@users.noreply.github.com> Date: Mon, 11 Nov 2024 17:51:11 +0000 Subject: [PATCH 156/161] =?UTF-8?q?chore(pre=5Fcommit):=20=E2=AC=86=20pre?= =?UTF-8?q?=5Fcommit=20autoupdate?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit updates: - [github.com/astral-sh/ruff-pre-commit: v0.7.2 → v0.7.3](https://github.com/astral-sh/ruff-pre-commit/compare/v0.7.2...v0.7.3) --- .pre-commit-config.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index de50d5e0b..6767146f4 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -32,7 +32,7 @@ repos: additional_dependencies: ["bandit[toml]"] - repo: https://github.com/astral-sh/ruff-pre-commit - rev: v0.7.2 + rev: v0.7.3 hooks: - id: ruff args: [--fix, 
--exit-non-zero-on-fix] From 3692fb65f9c998e0e8fb74858e191a9470f27556 Mon Sep 17 00:00:00 2001 From: LinasKo Date: Mon, 11 Nov 2024 20:07:51 +0200 Subject: [PATCH 157/161] Smart Labels: Increase minimum step size to 2 --- supervision/detection/utils.py | 9 ++------- 1 file changed, 2 insertions(+), 7 deletions(-) diff --git a/supervision/detection/utils.py b/supervision/detection/utils.py index 2e486e48c..c6c63286d 100644 --- a/supervision/detection/utils.py +++ b/supervision/detection/utils.py @@ -1086,13 +1086,8 @@ def spread_out_boxes( force_vectors = force_vectors[:, np.newaxis] * direction_vectors force_vectors *= 10 - force_vectors[(force_vectors > 0) & (force_vectors < 1)] = 1 - force_vectors[(force_vectors < 0) & (force_vectors > -1)] = -1 - - # Move along main axis only. - force_vectors[ - np.arange(len(force_vectors)), np.argmin(force_vectors, axis=1) - ] = 0 + force_vectors[(force_vectors > 0) & (force_vectors < 2)] = 2 + force_vectors[(force_vectors < 0) & (force_vectors > -2)] = -2 force_vectors = force_vectors.astype(int) From 90168644debd0d33ebb341389ef464698769b4f5 Mon Sep 17 00:00:00 2001 From: LinasKo Date: Tue, 12 Nov 2024 12:45:44 +0200 Subject: [PATCH 158/161] Update deprecated.md --- docs/deprecated.md | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/docs/deprecated.md b/docs/deprecated.md index d355bf962..e71407eb0 100644 --- a/docs/deprecated.md +++ b/docs/deprecated.md @@ -19,6 +19,10 @@ These features are phased out due to better alternatives or potential issues in # Removed +### 0.25.0 + +No removals in this version! + ### 0.24.0 - The `frame_resolution_wh ` parameter in [`sv.PolygonZone`](detection/tools/polygon_zone.md/#supervision.detection.tools.polygon_zone.PolygonZone) has been removed.
From 7b3d88602baec5e9c31ddf1fce0a76660f4684fc Mon Sep 17 00:00:00 2001 From: LinasKo Date: Tue, 12 Nov 2024 13:08:08 +0200 Subject: [PATCH 159/161] 'New' tags for docs of 0.25.0 --- docs/assets.md | 2 ++ docs/detection/tools/inference_slicer.md | 1 - docs/detection/tools/polygon_zone.md | 1 - docs/keypoint/annotators.md | 1 + docs/metrics/f1_score.md | 1 + docs/metrics/mean_average_precision.md | 1 + docs/trackers.md | 1 + 7 files changed, 6 insertions(+), 2 deletions(-) diff --git a/docs/assets.md b/docs/assets.md index 84b3dfd62..2e38ad472 100644 --- a/docs/assets.md +++ b/docs/assets.md @@ -1,5 +1,6 @@ --- comments: true +status: new --- # Assets @@ -13,6 +14,7 @@ To install the Supervision assets utility, you can use `pip`. This utility is av as an extra within the Supervision package. !!! example "pip install" + ```bash pip install "supervision[assets]" ``` diff --git a/docs/detection/tools/inference_slicer.md b/docs/detection/tools/inference_slicer.md index 7a5d3e573..5d5d08bc5 100644 --- a/docs/detection/tools/inference_slicer.md +++ b/docs/detection/tools/inference_slicer.md @@ -1,6 +1,5 @@ --- comments: true -status: new --- # InferenceSlicer diff --git a/docs/detection/tools/polygon_zone.md b/docs/detection/tools/polygon_zone.md index 1d445d9fc..cbe76c20f 100644 --- a/docs/detection/tools/polygon_zone.md +++ b/docs/detection/tools/polygon_zone.md @@ -1,6 +1,5 @@ --- comments: true -status: new ---
diff --git a/docs/keypoint/annotators.md b/docs/keypoint/annotators.md index 32f30626b..30a970ecd 100644 --- a/docs/keypoint/annotators.md +++ b/docs/keypoint/annotators.md @@ -1,5 +1,6 @@ --- comments: true +status: new --- # Annotators diff --git a/docs/metrics/f1_score.md b/docs/metrics/f1_score.md index 5cf0cd77f..e31a95577 100644 --- a/docs/metrics/f1_score.md +++ b/docs/metrics/f1_score.md @@ -1,5 +1,6 @@ --- comments: true +status: new --- # F1 Score diff --git a/docs/metrics/mean_average_precision.md b/docs/metrics/mean_average_precision.md index 9e23045e4..817591a19 100644 --- a/docs/metrics/mean_average_precision.md +++ b/docs/metrics/mean_average_precision.md @@ -1,5 +1,6 @@ --- comments: true +status: new --- # Mean Average Precision diff --git a/docs/trackers.md b/docs/trackers.md index cb44441f1..47f700619 100644 --- a/docs/trackers.md +++ b/docs/trackers.md @@ -1,5 +1,6 @@ --- comments: true +status: new --- # ByteTrack From 2d50beeaa9fcdc12933e27ab5ce95f81ea704b9d Mon Sep 17 00:00:00 2001 From: LinasKo Date: Tue, 12 Nov 2024 23:46:29 +0200 Subject: [PATCH 160/161] Add changelog --- docs/changelog.md | 144 ++++++++++++++++++++++++++++++++++++++++++++++ 1 file changed, 144 insertions(+) diff --git a/docs/changelog.md b/docs/changelog.md index 976a78d06..d845e6e9c 100644 --- a/docs/changelog.md +++ b/docs/changelog.md @@ -1,5 +1,149 @@ # CHANGELOG +### 0.25.0 Nov 12, 2024 + +- No removals or deprecations in this release! + +- Essential update to the [`LineZone`](https://supervision.roboflow.com/0.25.0/detection/tools/line_zone/): when computing line crossings, detections that jitter might be counted twice (or more). This can now be solved with the `minimum_crossing_threshold` argument. If you set it to `2` or more, extra frames will be used to confirm the crossing, improving the accuracy significantly. 
([#1540](https://github.com/roboflow/supervision/pull/1540)) + +- It is now possible to track objects detected as [`KeyPoints`](https://supervision.roboflow.com/0.25.0/keypoint/core/#supervision.keypoint.core.KeyPoints). See the complete step-by-step guide in the [Object Tracking Guide](https://supervision.roboflow.com/latest/how_to/track_objects/#keypoints). ([#1658](https://github.com/roboflow/supervision/pull/1658)) + +```python +import numpy as np +import supervision as sv +from ultralytics import YOLO + +model = YOLO("yolov8m-pose.pt") +tracker = sv.ByteTrack() +trace_annotator = sv.TraceAnnotator() + +def callback(frame: np.ndarray, _: int) -> np.ndarray: + results = model(frame)[0] + key_points = sv.KeyPoints.from_ultralytics(results) + + detections = key_points.as_detections() + detections = tracker.update_with_detections(detections) + + annotated_image = trace_annotator.annotate(frame.copy(), detections) + return annotated_image + +sv.process_video( + source_path="input_video.mp4", + target_path="output_video.mp4", + callback=callback +) +``` + +- Added `is_empty` method to [`KeyPoints`](https://supervision.roboflow.com/0.25.0/keypoint/core/#supervision.keypoint.core.KeyPoints) to check if there are any keypoints in the object. ([#1658](https://github.com/roboflow/supervision/pull/1658)) + +- Added `as_detections` method to [`KeyPoints`](https://supervision.roboflow.com/0.25.0/keypoint/core/#supervision.keypoint.core.KeyPoints) that converts `KeyPoints` to `Detections`. ([#1658](https://github.com/roboflow/supervision/pull/1658)) + +- Added a new video to `supervision[assets]`. ([#1657](https://github.com/roboflow/supervision/pull/1657)) + +```python +from supervision.assets import download_assets, VideoAssets + +path_to_video = download_assets(VideoAssets.SKIING) +``` + +- Supervision can now be used with [`Python 3.13`](https://docs.python.org/3/whatsnew/3.13.html). 
The most renowned update is the ability to run Python [without Global Interpreter Lock (GIL)](https://docs.python.org/3/whatsnew/3.13.html#whatsnew313-free-threaded-cpython). We expect support for this among our dependencies to be inconsistent, but if you do attempt it - let us know the results! ([#1595](https://github.com/roboflow/supervision/pull/1595)) + +- Added [`Mean Average Recall`](https://supervision.roboflow.com/latest/metrics/mean_average_recall/) mAR metric, which returns a recall score, averaged over IoU thresholds, detected object classes, and limits imposed on maximum considered detections. ([#1661](https://github.com/roboflow/supervision/pull/1661)) + +```python +import supervision as sv +from supervision.metrics import MeanAverageRecall + +predictions = sv.Detections(...) +targets = sv.Detections(...) + +map_metric = MeanAverageRecall() +map_result = map_metric.update(predictions, targets).compute() + +map_result.plot() +``` + +- Added [`Precision`](https://supervision.roboflow.com/latest/metrics/precision/) and [`Recall`](https://supervision.roboflow.com/latest/metrics/recall/) metrics, providing a baseline for comparing model outputs to ground truth or another model ([#1609](https://github.com/roboflow/supervision/pull/1609)) + +```python +import supervision as sv +from supervision.metrics import Recall + +predictions = sv.Detections(...) +targets = sv.Detections(...) + +recall_metric = Recall() +recall_result = recall_metric.update(predictions, targets).compute() + +recall_result.plot() +``` + +- All Metrics now support Oriented Bounding Boxes (OBB) ([#1593](https://github.com/roboflow/supervision/pull/1593)) + +```python +import supervision as sv +from supervision.metrics import F1_Score + +predictions = sv.Detections(...) +targets = sv.Detections(...) + +f1_metric = F1_Score(metric_target=sv.MetricTarget.ORIENTED_BOUNDING_BOXES) +f1_result = f1_metric.update(predictions, targets).compute() +``` + +- Introducing Smart Labels!
When `smart_position` is set for [`LabelAnnotator`](https://supervision.roboflow.com/0.25.0/detection/annotators/#supervision.annotators.core.LabelAnnotator), [`RichLabelAnnotator`](https://supervision.roboflow.com/0.25.0/detection/annotators/#supervision.annotators.core.RichLabelAnnotator) or [`VertexLabelAnnotator`](https://supervision.roboflow.com/0.25.0/detection/annotators/#supervision.annotators.core.RichLabelAnnotator), the labels will move around to avoid overlapping others. ([#1625](https://github.com/roboflow/supervision/pull/1625)) + +```python +import supervision as sv +from ultralytics import YOLO + +image = cv2.imread("image.jpg") + +label_annotator = sv.LabelAnnotator(smart_position=True) + +model = YOLO("yolo11m.pt") +results = model(image)[0] +detections = sv.Detections.from_ultralytics(results) + +annotated_frame = label_annotator.annotate(image.copy(), detections) +sv.plot_image(annotated_frame) +``` + +- Added the `metadata` variable to [`Detections`](https://supervision.roboflow.com/0.25.0/detection/core/#supervision.detection.core.Detections). It allows you to store custom data per-image, rather than per-detected-object as was possible with `data` variable. For example, `metadata` could be used to store the source video path, camera model or camera parameters. ([#1589](https://github.com/roboflow/supervision/pull/1589)) + +```python +import supervision as sv +from ultralytics import YOLO + +model = YOLO("yolov8m") + +result = model("image.png")[0] +detections = sv.Detections.from_ultralytics(result) + +# Items in `data` must match length of detections +object_ids = [num for num in range(len(detections))] +detections.data["object_number"] = object_ids + +# Items in `metadata` can be of any length. +detections.metadata["camera_model"] = "Luxonis OAK-D" +``` + +- Added a `py.typed` type hints metafile. It should provide a stronger signal to type annotators and IDEs that type support is available.
([#1586](https://github.com/roboflow/supervision/pull/1586)) + +- `ByteTrack` no longer requires `detections` to have a `class_id` ([#1637](https://github.com/roboflow/supervision/pull/1637)) +- `draw_line`, `draw_rectangle`, `draw_filled_rectangle`, `draw_polygon`, `draw_filled_polygon` and `PolygonZoneAnnotator` now come with a default color ([#1591](https://github.com/roboflow/supervision/pull/1591)) +- Dataset classes are treated as case-sensitive when merging multiple datasets. ([#1643](https://github.com/roboflow/supervision/pull/1643)) +- Expanded [metrics documentation](https://supervision.roboflow.com/0.25.0/metrics/f1_score/) with example plots and printed results ([#1660](https://github.com/roboflow/supervision/pull/1660)) +- Added usage example for polygon zone ([#1608](https://github.com/roboflow/supervision/pull/1608)) +- Small improvements to error handling in polygons: ([#1602](https://github.com/roboflow/supervision/pull/1602)) + +- Updated [`ByteTrack`](https://supervision.roboflow.com/0.25.0/trackers/#supervision.tracker.byte_tracker.core.ByteTrack), removing shared variables. Previously, multiple instances of `ByteTrack` would share some data, requiring liberal use of `tracker.reset()`. ([#1603](https://github.com/roboflow/supervision/pull/1603)), ([#1528](https://github.com/roboflow/supervision/pull/1528)) +- Fixed a bug where `class_agnostic` setting in `MeanAveragePrecision` would not work. ([#1577](https://github.com/roboflow/supervision/pull/1577)) +- Removed welcome workflow from our CI system. ([#1596](https://github.com/roboflow/supervision/pull/1596)) + +- Large refactor of `ByteTrack`: STrack moved to separate class, removed superfluous `BaseTrack` class, removed unused variables ([#1603](https://github.com/roboflow/supervision/pull/1603)) +- Large refactor of `RichLabelAnnotator`, matching its contents with `LabelAnnotator`.
([#1625](https://github.com/roboflow/supervision/pull/1625)) + ### 0.24.0 Oct 4, 2024 - Added [F1 score](https://supervision.roboflow.com/0.24.0/metrics/f1_score/#supervision.metrics.f1_score.F1Score) as a new metric for detection and segmentation. [#1521](https://github.com/roboflow/supervision/pull/1521) From 8d562c16a6b5b4d24625ec094a9a0f423e82d20b Mon Sep 17 00:00:00 2001 From: LinasKo Date: Tue, 12 Nov 2024 23:53:48 +0200 Subject: [PATCH 161/161] Bump supervision to 0.25.0 --- pyproject.toml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pyproject.toml b/pyproject.toml index 035627afc..d3084d52b 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,6 +1,6 @@ [tool.poetry] name = "supervision" -version = "0.25.0rc2" +version = "0.25.0" description = "A set of easy-to-use utils that will come in handy in any Computer Vision project" authors = ["Piotr Skalski "] maintainers = [