From cf6749574ab1e8c3e0e447ac4f80ad310561f075 Mon Sep 17 00:00:00 2001
From: edavalosanaya
Date: Thu, 30 Nov 2023 23:13:01 -0600
Subject: [PATCH] Added test for new fixation input.

---
 ettk/types.py               |   4 +-
 test/conftest.py            |  51 +++++++++
 test/test_planar_tracker.py | 199 +++++++++++++++++++++++++++++++++---
 3 files changed, 235 insertions(+), 19 deletions(-)

diff --git a/ettk/types.py b/ettk/types.py
index 3f2aa94..11fa55b 100644
--- a/ettk/types.py
+++ b/ettk/types.py
@@ -102,8 +102,8 @@ class SurfaceEntry:
 
 @dataclass
 class PlanarResult:
-    aruco: ArucoResult
-    surfaces: Dict[str, SurfaceEntry]
+    aruco: ArucoResult = field(default_factory=ArucoResult)
+    surfaces: Dict[str, SurfaceEntry] = field(default_factory=dict)
 
 
 @dataclass
diff --git a/test/conftest.py b/test/conftest.py
index 7fcfbc4..5725e53 100644
--- a/test/conftest.py
+++ b/test/conftest.py
@@ -1,7 +1,9 @@
 import logging
 import pathlib
 import os
+from typing import Optional
 
+import pandas as pd
 import pytest
 import cv2
 import numpy as np
@@ -79,6 +81,11 @@
     / 'tg3'
     / '20230418T181553Z'
 )
+FIXATION_TOBII_PATH = (
+    CWD
+    / 'data'
+    / '220245.csv'
+)
 
 # VIDEO_START_INDEX = 0
 VIDEO_START_INDEX = 36000 # paper
@@ -139,6 +146,10 @@ def pytest_configure():
 def rec_data():
     return get_rec_data(VIDEO_TOBII_REC_PATH)
 
+@pytest.fixture
+def rec_data_export():
+    return get_rec_data_export(VIDEO_TOBII_REC_PATH)
+
 
 def get_rec_data(path):
 
@@ -155,3 +166,43 @@ def get_rec_data(path):
     gaze = ettk.utils.tobii.load_gaze_data(path)
     # gaze = None
     return cap, gaze
+
+
+def get_rec_data_export(path):
+
+    # Load the video and get a single frame
+    video_path = path / "scenevideo.mp4"
+    assert video_path.exists()
+
+    cap = cv2.VideoCapture(str(video_path), 0)
+    length = int(cap.get(cv2.CAP_PROP_FRAME_COUNT))
+    assert length > VIDEO_START_INDEX + 1
+    cap.set(cv2.CAP_PROP_POS_FRAMES, VIDEO_START_INDEX)
+
+    # Load other eye-tracking information
+    gaze = pd.read_csv(FIXATION_TOBII_PATH)
+    gaze['timestamp'] = gaze['Recording timestamp'] / 1000000
+    gaze = gaze[gaze['Eye movement type'] == 'Fixation']
+    gaze = gaze[['timestamp', 'Fixation point X', 'Fixation point Y']]
+    gaze.rename(columns={'Fixation point X': 'x', 'Fixation point Y': 'y'}, inplace=True)
+
+    # Combine fixations to have start and end times
+    prev_row: Optional[pd.Series] = None
+    fixation_data = {'timestamp': [], 'end_timestamp': [], 'x': [], 'y': []}
+    for _, row in gaze.iterrows():
+
+        if isinstance(prev_row, pd.Series):
+            if (prev_row['x'], prev_row['y']) == (row['x'], row['y']):
+                fixation_data['end_timestamp'][-1] = row['timestamp']
+                prev_row = row
+                continue
+
+        fixation_data['timestamp'].append(row['timestamp'])
+        fixation_data['end_timestamp'].append(row['timestamp'])
+        fixation_data['x'].append(row['x'])
+        fixation_data['y'].append(row['y'])
+        prev_row = row
+
+    gaze = pd.DataFrame(fixation_data)
+
+    return cap, gaze
diff --git a/test/test_planar_tracker.py b/test/test_planar_tracker.py
index 9f11359..ab9a258 100644
--- a/test/test_planar_tracker.py
+++ b/test/test_planar_tracker.py
@@ -19,6 +19,7 @@
 from dataclasses import asdict
 
 import ettk
+from ettk.types import PlanarResult
 from scipy.spatial.transform import Rotation as R
 import pytest
 import cv2
@@ -267,7 +268,7 @@ def test_planar_tracking_step_by_gaze(rec_data):
 
             # Keep track of clocks
             delta = expected_frame_index - current_frame_index
-            # logger.debug(f"{expected_frame_index} - {current_frame_index} = {delta}")
+            logger.debug(f"{expected_frame_index} - {current_frame_index} = {delta}")
             current_frame_index += 1
             # import pdb; pdb.set_trace()
 
@@ -328,7 +329,6 @@ def test_planar_tracking_step_by_gaze(rec_data):
 
                 # Get information
                 surface_config = surface_configs[fix_result.surface_id]
-                # pt = fix_result.pt
 
                 if isinstance(surface_config.template, np.ndarray):
                     img = surface_config.template
@@ -339,22 +339,187 @@ def test_planar_tracking_step_by_gaze(rec_data):
                     s_w *= RATIO
                     img = np.zeros((int(s_h), int(s_w), 3))
 
-                # # Get drawing surface
-                # if isinstance(surface_config.template, np.ndarray):
-                #     img = surface_config.template
-                #     s_h, s_w = img.shape[:2]
-                #     pt *= 20
-                # else:
-                #     s_h, s_w = surface_config.height, surface_config.width
-                #     RATIO = 30
-                #     s_h *= RATIO
-                #     s_w *= RATIO
-                #     pt = pt * RATIO
-                #     img = np.zeros((int(s_h), int(s_w), 3))
-
-                # # Compute relative fix
-                # rel_fix = (pt[0] / s_w, pt[1] / s_h)
+                # Store data
+                df['timestamp'].append(timestamp)
+                df['surface_id'].append(fix_result.surface_id)
+                df['x'].append(fix_result.rel_pt[0])
+                df['y'].append(fix_result.rel_pt[1])
+                df['uncertainty'].append(fix_result.uncertainty)
+
+                # Reset background
+                draw[0:1080, 1920:] = 100
+
+                draw_surface = ettk.utils.vis.draw_fix((fix_result.pt[0], fix_result.pt[1]), img)
+                d_h, d_w = draw_surface.shape[:2]
+                y = d_h//2
+                x = 1920 + 1000//2 - d_w//2
+                draw[y:y+d_h, x:x+d_w] = draw_surface
+
+            else:
+                df['timestamp'].append(timestamp)
+                df['surface_id'].append(None)
+                df['x'].append(-1)
+                df['y'].append(-1)
+                df['uncertainty'].append(-1)
+
+    # Close everything
+    writer.release()
+    cv2.destroyAllWindows()
+
+    # Save the data
+    df = pd.DataFrame(df)
+    df = df.round(decimals=3)
+    df.to_csv(str(OUTPUT_DIR/'planar_tracking.csv'), index=False)
+
+
+def test_planar_tracking_step_by_gaze_fixation_exported(rec_data_export):
+
+    # Get original video
+    cap, gaze = rec_data_export
+    fps = cap.get(cv2.CAP_PROP_FPS)
+    current_frame_index = cap.get(cv2.CAP_PROP_POS_FRAMES)
+    current_time = current_frame_index / fps
+    gaze = gaze[gaze.timestamp >= current_time].reset_index(drop=True)
+    w, h = 1920, 1080
+
+    # Video Writer
+    fourcc = cv2.VideoWriter_fourcc(*'XVID')
+    writer = cv2.VideoWriter(str(OUTPUT_DIR/'planar_tracking.avi'), fourcc, fps, (1920+1000, 1080))
+
+    # Surface Configs
+    surface_configs = {
+        'unwrap1': unwrap1_config,
+        'unwrap2': unwrap2_config,
+        'unwrap3': unwrap3_config,
+        'suffrage1': suffrage1_config,
+        'suffrage2': suffrage2_config,
+        'suffrage3': suffrage3_config,
+        'mooca1': mooca1_config,
+        'mooca2': mooca2_config,
+        'mooca3': mooca3_config,
+        'mooca4': mooca4_config,
+        'mooca5': mooca5_config,
+        'mooca6': mooca6_config,
+        'mooca7': mooca7_config,
+        'mooca8': mooca8_config,
+        'mooca9': mooca9_config,
+        'mooca10': mooca10_config,
+        'monitor': monitor_config
+    }
+
+    # Tracker
+    aruco_tracker = ettk.ArucoTracker(aruco_omit=[5, 2, 1, 0, 4, 6, 36, 37])
+    planar_tracker = ettk.PlanarTracker(
+        surface_configs=list(surface_configs.values()),
+        aruco_tracker=aruco_tracker
+    )
+
+    # Keep track of the PlanarResults
+    planar_results = PlanarResult()
+    draw = np.ones((1080, 1920+1000, 3)).astype(np.uint8) * 100
+    exit_flag = False
+    df = {'timestamp': [], 'surface_id': [], 'x': [], 'y': [], 'uncertainty': []}
+
+    for i, row in gaze.iterrows():
+
+        # Get the fixation
+        # try:
+        #     raw_fix = row["gaze2d"]
+        # except IndexError:
+        #     raw_fix = [0, 0]
+
+        # if isinstance(raw_fix, str):
+        #     raw_fix = ast.literal_eval(raw_fix)
+
+        # fix = (int(raw_fix[0] * w), int(raw_fix[1] * h))
+        fix = (int(row['x']), int(row['y']))
+        # import pdb; pdb.set_trace()
+
+        # First, check if we need to update our planar results
+        timestamp = row.timestamp
+        expected_frame_index = int(timestamp * fps)
+
+        # If we expected a new frame, then perform image processing
+        while (current_frame_index <= expected_frame_index or current_frame_index == 0):
+
+            try:
+                ret, frame = cap.read()
+            except Exception as e:
+                break
+
+            # Keep track of clocks
+            delta = expected_frame_index - current_frame_index
+            # logger.debug(f"{expected_frame_index} - {current_frame_index} = {delta}")
+            current_frame_index += 1
+            # import pdb; pdb.set_trace()
+
+            if ret:
+
+                # Checking FPS
+                tic = time.perf_counter()
+
+                # Processing
+                # if delta <= 0:
+                planar_results = planar_tracker.step(frame)
+
+                # Draw
+                frame = ettk.utils.vis.draw_fix(fix, frame)
+                frame = ettk.utils.vis.draw_aruco_markers(frame, **asdict(planar_results.aruco), with_ids=True)
+                for surface in planar_results.surfaces.values():
+                    frame = ettk.utils.draw_axis(frame, surface.rvec, surface.tvec)
+
+                    # Debugging
+                    # for hypothesis in surface.hypotheses:
+                    #     draw = ettk.utils.draw_axis(draw, hypothesis.rvec, hypothesis.tvec)
+                    frame = ettk.utils.vis.draw_surface_corners(frame, surface.corners)
+
+                    # If homo, draw it
+                    # if surface.homography is not None:
+                    #     corners = surface.homography.corners
+                    #     draw = ettk.utils.vis.draw_surface_corners(draw, corners)
+
+                # Checking FPS
+                toc = time.perf_counter()
+                fps_performance = 1 / (toc - tic)
+
+                # Draw FPS
+                frame = cv2.putText(frame, f"FPS: {fps_performance:.2f}", (10, 30), cv2.FONT_HERSHEY_SIMPLEX, 1, (0, 0, 255), 2, cv2.LINE_AA)
+                draw[0:1080, 0:1920] = frame
+
+                # Testing
+                # frame = ettk.utils.vis.draw_lines(frame, surface.lines)
+
+                cv2.imshow('draw', draw)
+                key = cv2.waitKey(1)
+                writer.write(draw)
+
+                if key & 0xFF == ord("q"):
+                    exit_flag = True
+                    break
+            else:
+                break
+
+        if exit_flag:
+            break
+
+        # Obtain the XY of the fixation
+        if planar_results:
+            fix_result = ettk.utils.surface_map_points(planar_results, fix)
+            if fix_result:
+
+                # Get information
+                surface_config = surface_configs[fix_result.surface_id]
+
+                if isinstance(surface_config.template, np.ndarray):
+                    img = surface_config.template
+                else:
+                    s_h, s_w = surface_config.height, surface_config.width
+                    RATIO = 30
+                    s_h *= RATIO
+                    s_w *= RATIO
+                    img = np.zeros((int(s_h), int(s_w), 3))
+
                 # Store data
                 df['timestamp'].append(timestamp)
                 df['surface_id'].append(fix_result.surface_id)
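
Note on the ettk/types.py hunk: giving PlanarResult default factories is what lets the new test start from an empty result before any frame has been processed (planar_results = PlanarResult()). A minimal sketch of the resulting behavior; the field names come from the diff, everything else is illustrative:

    from ettk.types import PlanarResult

    empty = PlanarResult()          # constructible with no arguments after this patch
    assert empty.surfaces == {}     # no tracked surfaces yet (default_factory=dict)
    # empty.aruco is a fresh ArucoResult(), so downstream drawing/mapping code
    # can be called before the first detection without special-casing None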
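The new get_rec_data_export() helper in conftest.py collapses consecutive exported fixation samples that share the same (x, y) into single fixations with start/end timestamps. A self-contained sketch of just that merging step, run on hypothetical sample rows (the real input is the Tobii export CSV at FIXATION_TOBII_PATH, already filtered to 'Fixation' rows and renamed to x/y):

    import pandas as pd

    # Hypothetical fixation samples; values are made up for illustration
    samples = pd.DataFrame({
        'timestamp': [0.10, 0.12, 0.14, 0.30],
        'x': [500, 500, 500, 812],
        'y': [420, 420, 420, 105],
    })

    merged = {'timestamp': [], 'end_timestamp': [], 'x': [], 'y': []}
    prev = None
    for _, row in samples.iterrows():
        if prev is not None and (prev['x'], prev['y']) == (row['x'], row['y']):
            # Same fixation point as the previous sample: extend its end time
            merged['end_timestamp'][-1] = row['timestamp']
        else:
            # New fixation point: open a new [timestamp, end_timestamp] interval
            merged['timestamp'].append(row['timestamp'])
            merged['end_timestamp'].append(row['timestamp'])
            merged['x'].append(row['x'])
            merged['y'].append(row['y'])
        prev = row

    print(pd.DataFrame(merged))
    # -> two fixations: (500, 420) spanning 0.10-0.14 and (812, 105) at 0.30

The new test can then be run on its own with a standard pytest node id:
pytest test/test_planar_tracker.py::test_planar_tracking_step_by_gaze_fixation_exported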