30 changes: 24 additions & 6 deletions .github/workflows/testing.yml
@@ -43,7 +43,7 @@ jobs:
df -h

- name: Checkout code
uses: actions/checkout@v4
uses: actions/checkout@v4 # uses: actions/checkout@v6

- name: Install uv
uses: astral-sh/setup-uv@v6
@@ -53,9 +53,9 @@ jobs:
python-version: ${{ matrix.python-version }}

- name: Install the project
run: uv sync --no-cache --all-extras --dev
run: uv sync --all-extras --dev
shell: bash

- name: Install ffmpeg
run: |
if [ "$RUNNER_OS" == "Linux" ]; then
@@ -67,9 +67,27 @@
choco install ffmpeg
fi
shell: bash
- name: Run DLC Live Tests

- name: Run Model Benchmark Test
run: uv run dlc-live-test --nodisplay

- name: Run Functional Benchmark Test
- name: Run DLC Live Unit Tests
run: uv run pytest
# - name: Run DLC Live Unit Tests
# run: uv run pytest --cov=dlclive --cov-report=xml --cov-report=term-missing

# - name: Coverage Report
# uses: codecov/codecov-action@v5
# with:
# files: ./coverage.xml
# flags: ${{ matrix.os }}-py${{ matrix.python-version }}
# name: codecov-${{ matrix.os }}-py${{ matrix.python-version }}
# - name: Add coverage to job summary
# if: always()
# shell: bash
# run: |
# uv run python -m coverage report -m > coverage.txt
# echo "## Coverage (dlclive)" >> "$GITHUB_STEP_SUMMARY"
# echo '```' >> "$GITHUB_STEP_SUMMARY"
# cat coverage.txt >> "$GITHUB_STEP_SUMMARY"
# echo '```' >> "$GITHUB_STEP_SUMMARY"
19 changes: 19 additions & 0 deletions .pre-commit-config.yaml
@@ -0,0 +1,19 @@
repos:
- repo: https://github.com/pre-commit/pre-commit-hooks
rev: v6.0.0
hooks:
- id: check-docstring-first
- id: end-of-file-fixer
- id: trailing-whitespace
- repo: https://github.com/asottile/setup-cfg-fmt
rev: v3.2.0
hooks:
- id: setup-cfg-fmt
- repo: https://github.com/astral-sh/ruff-pre-commit
rev: v0.14.10
hooks:
# Run the formatter.
- id: ruff-format
# Run the linter.
- id: ruff-check
args: [--fix,--unsafe-fixes]
57 changes: 36 additions & 21 deletions dlclive/core/inferenceutils.py
@@ -8,6 +8,10 @@
#
# Licensed under GNU Lesser General Public License v3.0
#


# NOTE - DUPLICATED @C-Achard 2026-01-26: Copied from the original DeepLabCut codebase
# from deeplabcut/core/inferenceutils.py
from __future__ import annotations

import heapq
@@ -17,9 +21,10 @@
import pickle
import warnings
from collections import defaultdict
from collections.abc import Iterable
from dataclasses import dataclass
from math import erf, sqrt
from typing import Any, Iterable, Tuple
from typing import Any

import networkx as nx
import numpy as np
@@ -41,7 +46,7 @@ def _conv_square_to_condensed_indices(ind_row, ind_col, n):
return n * ind_col - ind_col * (ind_col + 1) // 2 + ind_row - 1 - ind_col


Position = Tuple[float, float]
Position = tuple[float, float]


@dataclass(frozen=True)
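The `Tuple` → `tuple` swap above is the PEP 585 modernization ruff applies (rule UP006): since Python 3.9, builtin containers are subscriptable at runtime, so `typing.Tuple` is unnecessary even for runtime aliases. A minimal sketch (`midpoint` is a hypothetical helper for illustration):

```python
Position = tuple[float, float]  # works at runtime on Python >= 3.9

def midpoint(a: Position, b: Position) -> Position:
    return ((a[0] + b[0]) / 2, (a[1] + b[1]) / 2)

print(midpoint((0.0, 0.0), (2.0, 4.0)))  # (1.0, 2.0)
```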
@@ -155,7 +160,7 @@ def soft_identity(self):
unq, idx, cnt = np.unique(data[:, 3], return_inverse=True, return_counts=True)
avg = np.bincount(idx, weights=data[:, 2]) / cnt
soft = softmax(avg)
return dict(zip(unq.astype(int), soft))
return dict(zip(unq.astype(int), soft, strict=False))

@property
def affinity(self):
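Many `zip(...)` call sites in this file gain an explicit `strict=False` (ruff rule B905). A minimal sketch of the distinction, assuming Python >= 3.10:

```python
a = [1, 2, 3]
b = ["x", "y"]

# strict=False keeps the classic behavior: truncate to the shorter input.
print(list(zip(a, b, strict=False)))  # [(1, 'x'), (2, 'y')]

# strict=True raises instead, surfacing silent length mismatches.
try:
    list(zip(a, b, strict=True))
except ValueError as err:
    print(err)  # zip() argument 2 is shorter than argument 1
```

Writing `strict=False` explicitly documents that truncation is intended rather than an oversight.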
@@ -262,7 +267,8 @@ def __init__(
self._has_identity = "identity" in self[0]
if identity_only and not self._has_identity:
warnings.warn(
"The network was not trained with identity; setting `identity_only` to False."
"The network was not trained with identity; setting `identity_only` to False.",
stacklevel=2,
)
self.identity_only = identity_only & self._has_identity
self.nan_policy = nan_policy
@@ -344,15 +350,19 @@ def calibrate(self, train_data_file):
pass
n_bpts = len(df.columns.get_level_values("bodyparts").unique())
if n_bpts == 1:
warnings.warn("There is only one keypoint; skipping calibration...")
warnings.warn(
"There is only one keypoint; skipping calibration...", stacklevel=2
)
return

xy = df.to_numpy().reshape((-1, n_bpts, 2))
frac_valid = np.mean(~np.isnan(xy), axis=(1, 2))
# Only keeps skeletons that are more than 90% complete
xy = xy[frac_valid >= 0.9]
if not xy.size:
warnings.warn("No complete poses were found. Skipping calibration...")
warnings.warn(
"No complete poses were found. Skipping calibration...", stacklevel=2
)
return

# TODO Normalize dists by longest length?
@@ -369,7 +379,8 @@
except np.linalg.LinAlgError:
# Covariance matrix estimation fails due to numerical singularities
warnings.warn(
"The assembler could not be robustly calibrated. Continuing without it..."
"The assembler could not be robustly calibrated. Continuing without it...",
stacklevel=2,
)

def calc_assembly_mahalanobis_dist(
@@ -428,10 +439,12 @@ def _flatten_detections(data_dict):
ids = [np.ones(len(arr), dtype=int) * -1 for arr in confidence]
else:
ids = [arr.argmax(axis=1) for arr in ids]
for i, (coords, conf, id_) in enumerate(zip(coordinates, confidence, ids)):
for i, (coords, conf, id_) in enumerate(
zip(coordinates, confidence, ids, strict=False)
):
if not np.any(coords):
continue
for xy, p, g in zip(coords, conf, id_):
for xy, p, g in zip(coords, conf, id_, strict=False):
joint = Joint(tuple(xy), p.item(), i, ind, g)
ind += 1
yield joint
@@ -474,13 +487,13 @@ def extract_best_links(self, joints_dict, costs, trees=None):
(conf >= self.pcutoff * self.pcutoff) & (aff >= self.min_affinity)
)
candidates = sorted(
zip(rows, cols, aff[rows, cols], lengths[rows, cols]),
zip(rows, cols, aff[rows, cols], lengths[rows, cols], strict=False),
key=lambda x: x[2],
reverse=True,
)
i_seen = set()
j_seen = set()
for i, j, w, l in candidates:
for i, j, w, _l in candidates:
if i not in i_seen and j not in j_seen:
i_seen.add(i)
j_seen.add(j)
@@ -502,7 +515,7 @@
]
aff = aff[np.ix_(keep_s, keep_t)]
rows, cols = linear_sum_assignment(aff, maximize=True)
for row, col in zip(rows, cols):
for row, col in zip(rows, cols, strict=False):
w = aff[row, col]
if w >= self.min_affinity:
links.append(Link(dets_s[keep_s[row]], dets_t[keep_t[col]], w))
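For context on the matching step this hunk touches: `scipy.optimize.linear_sum_assignment` solves the rectangular assignment problem (Hungarian-style matching). A minimal sketch with a toy affinity matrix:

```python
import numpy as np
from scipy.optimize import linear_sum_assignment

# Pick at most one column per row so the total affinity is maximized.
aff = np.array([
    [0.9, 0.1],
    [0.4, 0.8],
])
rows, cols = linear_sum_assignment(aff, maximize=True)
print(list(zip(rows, cols, strict=False)))  # [(0, 0), (1, 1)]
```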
@@ -548,9 +561,9 @@ def push_to_stack(i):
d = self.calc_assembly_mahalanobis_dist(assembly, nan_policy=nan_policy)
if d < d_old:
push_to_stack(new_ind)
if tabu:
_, _, link = heapq.heappop(tabu)
heapq.heappush(stack, (-link.affinity, next(counter), link))
if tabu:
_, _, link = heapq.heappop(tabu)
heapq.heappush(stack, (-link.affinity, next(counter), link))
else:
heapq.heappush(tabu, (d - d_old, next(counter), best))
assembly.__dict__.update(assembly._dict)
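The `(-link.affinity, next(counter), link)` tuples above implement a max-priority queue on top of Python's min-heap, with the counter as a tie-breaker. A standalone sketch of the pattern:

```python
import heapq
from itertools import count

counter = count()  # unique tie-breaker so heapq never compares payloads
heap = []
for affinity, name in [(0.9, "a"), (0.5, "b"), (0.7, "c")]:
    heapq.heappush(heap, (-affinity, next(counter), name))

while heap:
    neg_aff, _, name = heapq.heappop(heap)
    print(name, -neg_aff)  # a 0.9, then c 0.7, then b 0.5
```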
@@ -665,7 +678,7 @@ def build_assemblies(self, links):
for idx in store[j]._idx:
store[idx] = store[i]
except KeyError:
# Some links may reference indices that were never added to `store`;
# Some links may reference indices that were never added to `store`;
# in that case we intentionally skip merging for this link
pass

@@ -791,7 +804,7 @@ def _assemble(self, data_dict, ind_frame):
]
else:
scores = [ass._affinity for ass in assemblies]
lst = list(zip(scores, assemblies))
lst = list(zip(scores, assemblies, strict=False))
assemblies = []
while lst:
temp = max(lst, key=lambda x: x[0])
@@ -1074,7 +1087,7 @@ def match_assemblies(
if ~np.isnan(oks):
mat[i, j] = oks
rows, cols = linear_sum_assignment(mat, maximize=True)
for row, col in zip(rows, cols):
for row, col in zip(rows, cols, strict=False):
matched[row].ground_truth = ground_truth[col]
matched[row].oks = mat[row, col]
_ = inds_true.remove(col)
@@ -1087,7 +1100,7 @@ def parse_ground_truth_data_file(h5_file):
try:
df.drop("single", axis=1, level="individuals", inplace=True)
except KeyError:
# Ignore if the "single" individual column is absent
# Ignore if the "single" individual column is absent
pass
# Cast columns of dtype 'object' to float to avoid TypeError
# further down in _parse_ground_truth_data.
@@ -1128,7 +1141,7 @@ def find_outlier_assemblies(dict_of_assemblies, criterion="area", qs=(5, 95)):
for frame_ind, assemblies in dict_of_assemblies.items():
for assembly in assemblies:
tuples.append((frame_ind, getattr(assembly, criterion)))
frame_inds, vals = zip(*tuples)
frame_inds, vals = zip(*tuples, strict=False)
vals = np.asarray(vals)
lo, up = np.percentile(vals, qs, interpolation="nearest")
inds = np.flatnonzero((vals < lo) | (vals > up)).tolist()
@@ -1246,12 +1259,14 @@ def evaluate_assembly(
ass_pred_dict,
ass_true_dict,
oks_sigma=0.072,
oks_thresholds=np.linspace(0.5, 0.95, 10),
oks_thresholds=None,
margin=0,
symmetric_kpts=None,
greedy_matching=False,
with_tqdm: bool = True,
):
if oks_thresholds is None:
oks_thresholds = np.linspace(0.5, 0.95, 10)
if greedy_matching:
return evaluate_assembly_greedy(
ass_true_dict,
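Replacing the `np.linspace(...)` default with a `None` sentinel avoids evaluating the array once at definition time (ruff rule B008). A minimal sketch of the pattern:

```python
import numpy as np

# Defaults are evaluated once, when `def` runs. A call in the signature
# would produce a single shared array; the sentinel defers it per call.
def evaluate(oks_thresholds=None):
    if oks_thresholds is None:
        oks_thresholds = np.linspace(0.5, 0.95, 10)
    return oks_thresholds

print(evaluate().shape)  # (10,)
```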
35 changes: 11 additions & 24 deletions dlclive/display.py
@@ -7,7 +7,9 @@

try:
from tkinter import Label, Tk

from PIL import ImageTk

_TKINTER_AVAILABLE = True
except ImportError:
_TKINTER_AVAILABLE = False
@@ -59,7 +61,9 @@ def set_display(self, im_size, bodyparts):
self.lab.pack()

all_colors = getattr(cc, self.cmap)
self.colors = all_colors[:: int(len(all_colors) / bodyparts)]
# Avoid 0 step
step = max(1, int(len(all_colors) / bodyparts))
self.colors = all_colors[::step]
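The `max(1, ...)` guard matters because a slice step of 0 raises `ValueError`. A small illustration with a hypothetical palette shorter than the bodypart count:

```python
colors = ["red", "green", "blue"]
bodyparts = 5

step = max(1, int(len(colors) / bodyparts))  # int(3 / 5) == 0 -> clamp to 1
print(colors[::step])  # ['red', 'green', 'blue'] instead of a ValueError
```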

def display_frame(self, frame, pose=None):
"""
@@ -75,10 +79,10 @@ def display_frame(self, frame, pose=None):
"""
if not _TKINTER_AVAILABLE:
raise ImportError("tkinter is not available. Cannot display frames.")

im_size = (frame.shape[1], frame.shape[0])
img = Image.fromarray(frame) # avoid undefined image if pose is None
if pose is not None:
img = Image.fromarray(frame)
draw = ImageDraw.Draw(img)

if len(pose.shape) == 2:
@@ -91,33 +95,16 @@
for j in range(pose.shape[1]):
if pose[i, j, 2] > self.pcutoff:
try:
x0 = (
pose[i, j, 0] - self.radius
if pose[i, j, 0] - self.radius > 0
else 0
)
x1 = (
pose[i, j, 0] + self.radius
if pose[i, j, 0] + self.radius < im_size[0]
else im_size[1]
)
y0 = (
pose[i, j, 1] - self.radius
if pose[i, j, 1] - self.radius > 0
else 0
)
y1 = (
pose[i, j, 1] + self.radius
if pose[i, j, 1] + self.radius < im_size[1]
else im_size[0]
)
x0 = max(0, pose[i, j, 0] - self.radius)
x1 = min(im_size[0], pose[i, j, 0] + self.radius)
y0 = max(0, pose[i, j, 1] - self.radius)
y1 = min(im_size[1], pose[i, j, 1] + self.radius)
coords = [x0, y0, x1, y1]
draw.ellipse(
coords, fill=self.colors[j], outline=self.colors[j]
)
except Exception as e:
print(e)

img_tk = ImageTk.PhotoImage(image=img, master=self.window)
self.lab.configure(image=img_tk)
self.window.update()
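Besides being shorter, the rewritten bounds fix an apparent axis mix-up visible in the old branches (the upper clamps fell back to `im_size[1]` for x and `im_size[0]` for y). The `max`/`min` pair is the standard clamp idiom, sketched here:

```python
def clamp(value, lo, hi):
    # Equivalent to the conditional-expression form, but symmetric and
    # harder to get wrong when lo/hi come from different axes.
    return min(hi, max(lo, value))

print(clamp(-5, 0, 640))   # 0
print(clamp(700, 0, 640))  # 640
```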
15 changes: 6 additions & 9 deletions dlclive/dlclive.py
@@ -4,6 +4,7 @@

Licensed under GNU Lesser General Public License v3.0
"""

from __future__ import annotations

from pathlib import Path
@@ -197,12 +198,12 @@ def __init__(
self.processor = processor
self.convert2rgb = convert2rgb

self.pose: np.ndarray | None = None

if isinstance(display, Display):
self.display = display
elif display:
self.display = Display(
pcutoff=pcutoff, radius=display_radius, cmap=display_cmap
)
self.display = Display(pcutoff=pcutoff, radius=display_radius, cmap=display_cmap)
else:
self.display = None

@@ -250,9 +251,7 @@ def process_frame(self, frame: np.ndarray) -> np.ndarray:
processed frame: convert type, crop, convert color
"""
if self.cropping:
frame = frame[
self.cropping[2] : self.cropping[3], self.cropping[0] : self.cropping[1]
]
frame = frame[self.cropping[2] : self.cropping[3], self.cropping[0] : self.cropping[1]]

if self.dynamic[0]:
if self.pose is not None:
@@ -263,9 +262,7 @@
elif len(self.pose) == 1:
pose = self.pose[0]
else:
raise ValueError(
"Cannot use Dynamic Cropping - more than 1 individual found"
)
raise ValueError("Cannot use Dynamic Cropping - more than 1 individual found")

else:
pose = self.pose
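As a reminder of the indexing convention in the compacted crop line: `cropping` is `[x0, x1, y0, y1]`, while NumPy slices rows (y) first. A minimal sketch with a dummy frame:

```python
import numpy as np

frame = np.zeros((480, 640, 3), dtype=np.uint8)  # (rows=y, cols=x, channels)
cropping = [100, 300, 50, 200]                   # [x0, x1, y0, y1]

cropped = frame[cropping[2]:cropping[3], cropping[0]:cropping[1]]
print(cropped.shape)  # (150, 200, 3)
```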