diff --git a/CHANGELOG.txt b/CHANGELOG.txt
new file mode 100644
index 0000000..e69de29
diff --git a/README.md b/README.md
index 82df5fa..7368050 100644
--- a/README.md
+++ b/README.md
@@ -1,2 +1,26 @@
 # psychopy-eyetracker-tobii
-Extension for PsychoPy which adds support for Tobii eyetrackers (via ioHub)
+
+Extension for PsychoPy which adds support for [Tobii](https://www.tobii.com/)
+eyetrackers (via ioHub)
+
+## Supported Devices
+
+Installing this package alongside PsychoPy will enable support for the following
+devices:
+
+* Supported Tobii eye trackers
+
+## Installing
+
+Install this package with the following shell command:
+
+    pip install psychopy-eyetracker-tobii
+
+You may also use PsychoPy's built-in plugin/package manager to install this
+package.
+
+## Usage
+
+Once the package is installed, PsychoPy will automatically load it when started
+and the `psychopy.iohub.devices.eyetracker.hw.tobii` namespace will contain the
+loaded objects.
\ No newline at end of file
diff --git a/psychopy_eyetracker_tobii/__init__.py b/psychopy_eyetracker_tobii/__init__.py
new file mode 100644
index 0000000..6b3a5e7
--- /dev/null
+++ b/psychopy_eyetracker_tobii/__init__.py
@@ -0,0 +1,12 @@
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+
+# Originally part of the PsychoPy library
+# Copyright (C) 2002-2018 Jonathan Peirce (C) 2019-2023 Open Science Tools Ltd.
+# Distributed under the terms of the GNU General Public License (GPL).
+
+"""Extension package for PsychoPy which adds support for various hardware
+devices by Tobii.
+"""
+
+__version__ = "0.0.2"
\ No newline at end of file
diff --git a/psychopy_eyetracker_tobii/tobii/__init__.py b/psychopy_eyetracker_tobii/tobii/__init__.py
new file mode 100644
index 0000000..a83e1ba
--- /dev/null
+++ b/psychopy_eyetracker_tobii/tobii/__init__.py
@@ -0,0 +1,6 @@
+# -*- coding: utf-8 -*-
+# Part of the PsychoPy library
+# Copyright (C) 2012-2020 iSolver Software Solutions (C) 2021 Open Science Tools Ltd.
+# Distributed under the terms of the GNU General Public License (GPL).
+
+from .eyetracker import *
diff --git a/psychopy_eyetracker_tobii/tobii/calibration.py b/psychopy_eyetracker_tobii/tobii/calibration.py
new file mode 100644
index 0000000..e4e5446
--- /dev/null
+++ b/psychopy_eyetracker_tobii/tobii/calibration.py
@@ -0,0 +1,215 @@
+# -*- coding: utf-8 -*-
+# Part of the PsychoPy library
+# Copyright (C) 2012-2020 iSolver Software Solutions (C) 2021 Open Science Tools Ltd.
+# Distributed under the terms of the GNU General Public License (GPL).
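+
+"""Tobii implementation of the ioHub calibration procedure.
+
+TobiiCalibrationProcedure subclasses BaseCalibrationProcedure, adding a
+head-position ("track box") feedback display to the intro screen and
+forwarding each collected calibration point to the Tobii SDK's
+screen-based calibration (enter_calibration_mode / collect_data /
+compute_and_apply).
+"""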
+ +from psychopy.iohub.devices.eyetracker.calibration import BaseCalibrationProcedure +from collections import OrderedDict +from psychopy import visual +from psychopy.constants import EventConstants +import gevent + + +class TobiiCalibrationProcedure(BaseCalibrationProcedure): + def __init__(self, eyetrackerInterface, calibration_args): + self.feedback_resources = OrderedDict() + self.tobii_calibration = None + self.cal_result_dict = dict(status="Calibration Not Started") + BaseCalibrationProcedure.__init__(self, eyetrackerInterface, calibration_args, allow_escape_in_progress=True) + + def createGraphics(self): + """ + """ + BaseCalibrationProcedure.createGraphics(self) + + # create Tobii eye position feedback graphics + # + sw, sh = self.screenSize + self.hbox_bar_length = hbox_bar_length = sw / 4 + hbox_bar_height = 6 + marker_diameter = 7 + self.marker_heights = (-sh / 2.0 * .7, -sh / 2.0 * .75, -sh / + 2.0 * .8, -sh / 2.0 * .7, -sh / 2.0 * .75, -sh / 2.0 * .8) + + bar_vertices = ([-hbox_bar_length / 2, -hbox_bar_height / 2], [hbox_bar_length / 2, -hbox_bar_height / 2], + [hbox_bar_length / 2, hbox_bar_height / 2], [-hbox_bar_length / 2, hbox_bar_height / 2]) + + self.feedback_resources['hbox_bar_x'] = visual.ShapeStim( + win=self.window, + lineColor='White', + fillColor='Firebrick', + vertices=bar_vertices, + units='pix', + pos=( + 0, + self.marker_heights[0])) + self.feedback_resources['hbox_bar_y'] = visual.ShapeStim( + win=self.window, + lineColor='White', + fillColor='DarkSlateGray', + vertices=bar_vertices, + units='pix', + pos=( + 0, + self.marker_heights[1])) + self.feedback_resources['hbox_bar_z'] = visual.ShapeStim( + win=self.window, + lineColor='White', + fillColor='GoldenRod', + vertices=bar_vertices, + units='pix', + pos=( + 0, + self.marker_heights[2])) + + marker_vertices = [-marker_diameter, 0], [0, marker_diameter], [marker_diameter, 0], [0, -marker_diameter] + self.feedback_resources['left_hbox_marker_x'] = visual.ShapeStim( + win=self.window, + lineColor='White', + fillColor='Black', + vertices=marker_vertices, + units='pix', + pos=( + 0, + self.marker_heights[0])) + self.feedback_resources['left_hbox_marker_y'] = visual.ShapeStim( + win=self.window, + lineColor='White', + fillColor='Black', + units='pix', + vertices=marker_vertices, + pos=( + 0, + self.marker_heights[1])) + self.feedback_resources['left_hbox_marker_z'] = visual.ShapeStim( + win=self.window, + lineColor='White', + fillColor='Black', + units='pix', + vertices=marker_vertices, + pos=( + 0, + self.marker_heights[2])) + self.feedback_resources['right_hbox_marker_x'] = visual.ShapeStim( + win=self.window, + lineColor='White', + fillColor='DimGray', + units='pix', + vertices=marker_vertices, + pos=( + 0, + self.marker_heights[0])) + self.feedback_resources['right_hbox_marker_y'] = visual.ShapeStim( + win=self.window, + lineColor='White', + fillColor='DimGray', + units='pix', + vertices=marker_vertices, + pos=( + 0, + self.marker_heights[1])) + self.feedback_resources['right_hbox_marker_z'] = visual.ShapeStim( + win=self.window, + lineColor='White', + fillColor='DimGray', + units='pix', + vertices=marker_vertices, + pos=( + 0, + self.marker_heights[2])) + + def getHeadBoxPosition(self, events): + # KeyboardInputEvent.CLASS_ATTRIBUTE_NAMES.index('key_id') + left_eye_cam_x = None + left_eye_cam_y = None + left_eye_cam_z = None + right_eye_cam_x = None + right_eye_cam_y = None + right_eye_cam_z = None + + if len(events) == 0: + return (left_eye_cam_x, left_eye_cam_y, left_eye_cam_z), (right_eye_cam_x, 
right_eye_cam_y, right_eye_cam_z) + + event = events[-1] + if abs(event.left_eye_cam_x) != 1.0 and abs(event.left_eye_cam_y) != 1.0: + left_eye_cam_x = 1.0 - event.left_eye_cam_x + left_eye_cam_y = event.left_eye_cam_y + if event.left_eye_cam_z != 0.0: + left_eye_cam_z = event.left_eye_cam_z + if abs(event.right_eye_cam_x) != 1.0 and abs(event.right_eye_cam_y) != 1.0: + right_eye_cam_x = 1.0 - event.right_eye_cam_x + right_eye_cam_y = event.right_eye_cam_y + if event.right_eye_cam_z != 0.0: + right_eye_cam_z = event.right_eye_cam_z + return (left_eye_cam_x, left_eye_cam_y, left_eye_cam_z), (right_eye_cam_x, right_eye_cam_y, right_eye_cam_z) + + def showIntroScreen(self, text_msg='Press SPACE to Start Calibration; ESCAPE to Exit.'): + self.clearAllEventBuffers() + self._eyetracker.setRecordingState(True) + + while True: + self.textLineStim.setText(text_msg) + event_named_tuples = [] + for e in self._eyetracker.getEvents(EventConstants.BINOCULAR_EYE_SAMPLE): + event_named_tuples.append( + EventConstants.getClass(EventConstants.BINOCULAR_EYE_SAMPLE).createEventAsNamedTuple(e)) + leye_box_pos, reye_box_pos = self.getHeadBoxPosition(event_named_tuples) + lx, ly, lz = leye_box_pos + rx, ry, rz = reye_box_pos + eye_positions = (lx, ly, lz, rx, ry, rz) + marker_names = ( + 'left_hbox_marker_x', + 'left_hbox_marker_y', + 'left_hbox_marker_z', + 'right_hbox_marker_x', + 'right_hbox_marker_y', + 'right_hbox_marker_z') + marker_heights = self.marker_heights + hbox_bar_length = self.hbox_bar_length + + for i, p in enumerate(eye_positions): + if p is not None: + mpoint = hbox_bar_length * p - hbox_bar_length / 2.0, marker_heights[i] + self.feedback_resources[marker_names[i]].setPos(mpoint) + self.feedback_resources[marker_names[i]].setOpacity(1.0) + else: + self.feedback_resources[marker_names[i]].setOpacity(0.0) + + self.textLineStim.draw() + [r.draw() for r in self.feedback_resources.values()] + self.window.flip() + + msg = self.getNextMsg() + if msg == 'SPACE_KEY_ACTION': + self._eyetracker.setRecordingState(False) + self.clearAllEventBuffers() + return True + elif msg == 'QUIT': + self._eyetracker.setRecordingState(False) + self.clearAllEventBuffers() + return False + self.MsgPump() + gevent.sleep() + + def startCalibrationHook(self): + self.tobii_calibration = self._eyetracker._tobii.newScreenCalibration() + self.tobii_calibration.enter_calibration_mode() + + def registerCalibrationPointHook(self, pt): + self.tobii_calibration.collect_data(pt[0], pt[1]) + + def finishCalibrationHook(self, aborted=False): + cal_result_dict = dict(status="Calibration Aborted") + if not aborted: + calibration_result = self.tobii_calibration.compute_and_apply() + cal_result_dict = dict(status=calibration_result.status) + cal_result_dict['points'] = [] + for cp in calibration_result.calibration_points: + csamples = [] + for cs in cp.calibration_samples: + csamples.append((cs.left_eye.position_on_display_area, cs.left_eye.validity)) + cal_result_dict['points'].append((cp.position_on_display_area, csamples)) + + self.tobii_calibration.leave_calibration_mode() + self.tobii_calibration = None + self.cal_result_dict = cal_result_dict diff --git a/psychopy_eyetracker_tobii/tobii/default_eyetracker.yaml b/psychopy_eyetracker_tobii/tobii/default_eyetracker.yaml new file mode 100644 index 0000000..e2a8a97 --- /dev/null +++ b/psychopy_eyetracker_tobii/tobii/default_eyetracker.yaml @@ -0,0 +1,175 @@ +eyetracker.hw.tobii.EyeTracker: + # Indicates if the device should actually be loaded at experiment runtime. 
+ enable: True + + # The variable name of the device that will be used to access the ioHub Device class + # during experiment run-time, via the devices.[name] attribute of the ioHub + # connection or experiment runtime class. + name: tracker + + # Should eye tracker events be saved to the ioHub DataStore file when the device + # is recording data ? + save_events: True + + # Should eye tracker events be sent to the Experiment process when the device + # is recording data ? + stream_events: True + + # How many eye events (including samples) should be saved in the ioHub event buffer before + # old eye events start being replaced by new events. When the event buffer reaches + # the maximum event length of the buffer defined here, older events will start to be dropped. + event_buffer_length: 1024 + + # The Tobii implementation of the common eye tracker interface supports the + # BinocularEyeSampleEvent event type. + monitor_event_types: [BinocularEyeSampleEvent,] + + # The model name of the Tobii device that you wish to connect to can be specified here, + # and only Tobii systems matching that model name will be considered as possible candidates for connection. + # If you only have one Tobii system connected to the computer, this field can just be left empty. + model_name: + + # The serial number of the Tobii device that you wish to connect to can be specified here, + # and only the Tobii system matching that serial number will be connected to, if found. + # If you only have one Tobii system connected to the computer, this field can just be left empty, + # in which case the first Tobii device found will be connected to. + serial_number: + + calibration: + # The Tobii ioHub Common Eye Tracker Interface currently supports + # 3, 5, 9, and 13 point calibration modes. + # THREE_POINTS, FIVE_POINTS, NINE_POINTS,THIRTEEN_POINTS + # + type: NINE_POINTS + + # color_type: rgb, rgb255, named, hex, etc. Leave blank to use window's color space. + color_type: + + # unit_type: norm, pix, height, deg, etc. Leave blank to use window's unit type. + unit_type: + + # Should the target positions be randomized? + # + randomize: True + + # auto_pace can be True or False. If True, the eye tracker will + # automatically progress from one calibration point to the next. + # If False, a manual key or button press is needed to progress to + # the next point. + # + auto_pace: True + + # target_duration is the number of sec that a calibration point should + # be displayed before moving onto the next point. + # Target size expansion / contraction optionally occurs during this time period as well. + target_duration: 1.5 + + # target_delay specifies the time between target position presentations. + # Target position animation optionally occurs during this time period as well. + target_delay: 0.75 + + # **pacing_speed is deprecated. Please use 'target_delay' instead.** + pacing_speed: + + # screen_background_color specifies the r,g,b background color to + # set the calibration, validation, etc, screens to. Each element of the color + # should be a value between 0 and 255. 0 == black, 255 == white. + # + screen_background_color: [128,128,128] + + # text_color specifies the foreground color of any text used during calibration. + # If empty, text_color is calculated automatically based on screen_background_color. + text_color: + + # Target type defines what form of calibration graphic should be used + # during calibration, validation, etc. modes. 
+ # Currently the Tobii implementation supports the following + # target type: CIRCLE_TARGET. + # To do: Add support for other types, etc. + # + target_type: CIRCLE_TARGET + + # The associated target attribute properties can be supplied + # for the given target_type. + target_attributes: + # CIRCLE_TARGET is drawn using two PsychoPy + # Circle Stim. The _outer_ circle is drawn first, and should be + # be larger than the _inner_ circle, which is drawn on top of the + # outer circle. The target_attributes starting with 'outer_' define + # how the outer circle of the calibration targets should be drawn. + # The target_attributes starting with 'inner_' define + # how the inner circle of the calibration targets should be drawn. + # + # outer_diameter: The size of the outer circle of the calibration target + # + outer_diameter: 35.0 + # outer_stroke_width: The thickness of the outer circle edge. + # + outer_stroke_width: 2.0 + # outer_fill_color: color to use to fill the outer circle. + # + outer_fill_color: [128,128,128] + # outer_line_color: color to used for the outer circle edge. + # + outer_line_color: [255,255,255] + # inner_diameter: The size of the inner circle calibration target + # + inner_diameter: 7.0 + # inner_stroke_width: The thickness of the inner circle edge. + # + inner_stroke_width: 1.0 + # inner_fill_color: color to use to fill the inner circle. + # + inner_fill_color: [0,0,0] + # inner_line_color: color to used for the inner circle edge. + # + inner_line_color: [0,0,0] + # The Tobii Calibration routine supports using moving target graphics. + # The following parameters control target movement (if any). + # + animate: + # enable: True if the calibration target should be animated. + # False specifies that the calibration targets could just jump + # from one calibration position to another. + # + enable: True + # expansion_ratio: The outer circle of the calibration target + # can expand (and contract) when displayed at each position. + # expansion_ratio gives the largest size of the outer circle + # as a ratio of the outer_diameter length. For example, + # if outer_diameter = 30, and expansion_ratio = 2.0, then + # the outer circle of each calibration point will expand out + # to 60 pixels. Set expansion_ratio to 1.0 for no expansion. + # + expansion_ratio: 3.0 + # contract_only: If the calibration target should expand from + # the outer circle initial diameter to the larger diameter + # and then contract back to the original diameter, set + # contract_only to False. To only have the outer circle target + # go from an expanded state to the smaller size, set this to True. + # + contract_only: True + # ** movement_velocity: No longer supported, please use target_delay instead. ** + # + #movement_velocity: + # ** expansion_speed: No longer supported, target_duration is now used. ** + # + #expansion_speed: + + runtime_settings: + # The supported sampling rates for Tobii are model dependent. + # If the sampling rate specified here is not supported by the model being used, + # the Tobii device will continue to use it's current sampling rate. + sampling_rate: 60 + + # Tobii supports BINOCULAR tracking mode only. + track_eyes: BINOCULAR + + # manufacturer_name: Used by Builder as the displayed name in the eye tracker selection dropdown. + # + manufacturer_name: Tobii Technology + + # Do not change this value. 
+ auto_report_events: False + + device_number: 0 diff --git a/psychopy_eyetracker_tobii/tobii/eyetracker.py b/psychopy_eyetracker_tobii/tobii/eyetracker.py new file mode 100644 index 0000000..9859a05 --- /dev/null +++ b/psychopy_eyetracker_tobii/tobii/eyetracker.py @@ -0,0 +1,530 @@ +# -*- coding: utf-8 -*- +# Part of the PsychoPy library +# Copyright (C) 2012-2020 iSolver Software Solutions (C) 2021 Open Science Tools Ltd. +# Distributed under the terms of the GNU General Public License (GPL). + +import math +from psychopy.iohub.constants import EventConstants, EyeTrackerConstants +from psychopy.iohub.devices import Computer, Device +from psychopy.iohub.devices.eyetracker import EyeTrackerDevice +from .calibration import TobiiCalibrationProcedure +from psychopy.iohub.devices.eyetracker.eye_events import * +from psychopy.iohub.errors import print2err, printExceptionDetailsToStdErr +try: + from .tobiiwrapper import TobiiTracker +except Exception: + print2err('Error importing tobiiwrapper.TobiiTracker') + printExceptionDetailsToStdErr() + + +class EyeTracker(EyeTrackerDevice): + """ + To start iohub with a Tobii eye tracker device, add the Tobii + device to the dictionary passed to launchHubServer or the + experiment's iohub_config.yaml:: + + eyetracker.hw.tobii.EyeTracker + + Examples: + A. Start ioHub with a Tobii device and run tracker calibration:: + + from psychopy.iohub import launchHubServer + from psychopy.core import getTime, wait + + iohub_config = {'eyetracker.hw.tobii.EyeTracker': + {'name': 'tracker', 'runtime_settings': {'sampling_rate': 120}}} + + io = launchHubServer(**iohub_config) + + # Get the eye tracker device. + tracker = io.devices.tracker + + # run eyetracker calibration + r = tracker.runSetupProcedure() + + B. Print all eye tracker events received for 2 seconds:: + + # Check for and print any eye tracker events received... + tracker.setRecordingState(True) + + stime = getTime() + while getTime()-stime < 2.0: + for e in tracker.getEvents(): + print(e) + + C. Print current eye position for 5 seconds:: + + # Check for and print current eye position every 100 msec. + stime = getTime() + while getTime()-stime < 5.0: + print(tracker.getPosition()) + wait(0.1) + + tracker.setRecordingState(False) + + # Stop the ioHub Server + io.quit() + """ + _tobii = None + + DEVICE_TIMEBASE_TO_SEC = 0.000001 + EVENT_CLASS_NAMES = [ + 'MonocularEyeSampleEvent', + 'BinocularEyeSampleEvent', + 'FixationStartEvent', + 'FixationEndEvent', + 'SaccadeStartEvent', + 'SaccadeEndEvent', + 'BlinkStartEvent', + 'BlinkEndEvent'] + __slots__ = [] + + def __init__(self, *args, **kwargs): + EyeTrackerDevice.__init__(self, *args, **kwargs) + + if self.model_name: + self.model_name = self.model_name.strip() + if len(self.model_name) == 0: + self.model_name = None + model_name = self.model_name + serial_num = self.getConfiguration().get('serial_number') + + EyeTracker._tobii = None + try: + EyeTracker._tobii = TobiiTracker(serial_num, model_name) + except Exception: + print2err('Error creating Tobii Device class') + printExceptionDetailsToStdErr() + + # Apply license file if needed + try: + license_file = self.getConfiguration().get('license_file', "") + if license_file != "": + with open(license_file, "rb") as f: + license = f.read() + res = self._tobii._eyetracker.apply_licenses(license) + if len(res) == 0: + print2err("Successfully applied Tobii license from: {}".format(license_file)) + else: + print2err("Error: Failed to apply Tobii license from single key. " + "Validation result: %s." 
% (res[0].validation_result)) + else: + print2err("No Tobii license_file in config. Skipping.") + except Exception: + print2err("Error calling Tobii.apply_licenses with file {}.".format(license_file)) + printExceptionDetailsToStdErr() + + srate = self._runtime_settings['sampling_rate'] + if srate and srate in self._tobii.getAvailableSamplingRates(): + self._tobii.setSamplingRate(srate) + + self._latest_sample = None + self._latest_gaze_position = None + + def trackerTime(self): + """Current eye tracker time in the eye tracker's native time base. The + Tobii system uses a usec timebase. + + Args: + None + + Returns: + float: current native eye tracker time. (in usec for the Tobii) + + """ + if self._tobii: + return self._tobii.getCurrentEyeTrackerTime() + return EyeTrackerConstants.EYETRACKER_ERROR + + def trackerSec(self): + """Current eye tracker time, normalized to sec.msec format. + + Args: + None + + Returns: + float: current native eye tracker time in sec.msec-usec format. + + """ + if self._tobii: + return self._tobii.getCurrentEyeTrackerTime() * self.DEVICE_TIMEBASE_TO_SEC + return EyeTrackerConstants.EYETRACKER_ERROR + + def setConnectionState(self, enable): + """ + setConnectionState is a no-op when using the Tobii system, as the + connection is established when the Tobii EyeTracker classes are created, + and remains active until the program ends, or a error occurs resulting + in the loss of the tracker connection. + + Args: + enable (bool): True = enable the connection, False = disable the connection. + + Return: + bool: indicates the current connection state to the eye tracking hardware. + """ + if self._tobii: + return True + return False + + def isConnected(self): + """isConnected returns whether the Tobii is connected to the experiment + PC and if the tracker state is valid. Returns True if the tracker can + be put into Record mode, etc and False if there is an error with the + tracker or tracker connection with the experiment PC. + + Args: + None + + Return: + bool: True = the eye tracking hardware is connected. False otherwise. + + """ + if self._tobii: + return True + return False + + def sendMessage(self, message_contents, time_offset=None): + """The sendMessage method is not supported by the Tobii implementation + of the Common Eye Tracker Interface, as the Tobii SDK does not support + saving eye data to a native data file during recording.""" + return EyeTrackerConstants.EYETRACKER_INTERFACE_METHOD_NOT_SUPPORTED + + def sendCommand(self, key, value=None): + """The sendCommand method is not supported by the Tobii Common Eye + Tracker Interface.""" + return EyeTrackerConstants.EYETRACKER_INTERFACE_METHOD_NOT_SUPPORTED + + def runSetupProcedure(self, calibration_args={}): + """runSetupProcedure performs a calibration routine for the Tobii eye + tracking system. + """ + try: + genv = TobiiCalibrationProcedure(self, calibration_args) + + genv.runCalibration() + calibration_result = genv.cal_result_dict + + # On some graphics cards, we have to minimize before closing or the calibration window will stay visible + # after close is called. 
+ genv.window.winHandle.set_visible(False) + genv.window.winHandle.minimize() + + genv.window.close() + + genv._unregisterEventMonitors() + genv.clearAllEventBuffers() + + return calibration_result + + except Exception: + print2err('Error during runSetupProcedure') + printExceptionDetailsToStdErr() + return EyeTrackerConstants.EYETRACKER_ERROR + + def enableEventReporting(self, enabled=True): + """enableEventReporting is functionally identical to the eye tracker + device specific enableEventReporting method.""" + + try: + enabled = EyeTrackerDevice.enableEventReporting(self, enabled) + self.setRecordingState(enabled) + return enabled + except Exception as e: + print2err('Error during enableEventReporting') + printExceptionDetailsToStdErr() + return EyeTrackerConstants.EYETRACKER_ERROR + + def setRecordingState(self, recording): + """setRecordingState is used to start or stop the recording of data + from the eye tracking device. + + args: + recording (bool): if True, the eye tracker will start recordng available + eye data and sending it to the experiment program if data streaming + was enabled for the device. If recording == False, then the eye + tracker stops recording eye data and streaming it to the experiment. + + If the eye tracker is already recording, and setRecordingState(True) is + called, the eye tracker will simple continue recording and the method call + is a no-op. Likewise if the system has already stopped recording and + setRecordingState(False) is called again. + + Args: + recording (bool): if True, the eye tracker will start recordng data.; false = stop recording data. + + Return: + bool: the current recording state of the eye tracking device + + """ + if self._tobii and recording is True and not self.isRecordingEnabled(): + #ioHub.print2err("Starting Tracking... ") + self._tobii.startTracking(self._handleNativeEvent) + return EyeTrackerDevice.enableEventReporting(self, True) + + elif self._tobii and recording is False and self.isRecordingEnabled(): + self._tobii.stopTracking() + #ioHub.print2err("Stopping Tracking... ") + self._latest_sample = None + self._latest_gaze_position = None + return EyeTrackerDevice.enableEventReporting(self, False) + + return self.isRecordingEnabled() + + def isRecordingEnabled(self): + """isRecordingEnabled returns the recording state from the eye tracking + device. + + Args: + None + + Return: + bool: True == the device is recording data; False == Recording is not occurring + + """ + if self._tobii: + return self._tobii._isRecording + return False + + def getLastSample(self): + """Returns the latest sample retrieved from the Tobii device. The Tobii + system always using the BinocularSample Event type. + + Args: + None + + Returns: + None: If the eye tracker is not currently recording data. + + EyeSample: If the eye tracker is recording in a monocular tracking mode, the latest sample event of this event type is returned. + + BinocularEyeSample: If the eye tracker is recording in a binocular tracking mode, the latest sample event of this event type is returned. + + """ + return self._latest_sample + + def getLastGazePosition(self): + """Returns the latest 2D eye gaze position retrieved from the Tobii + device. This represents where the eye tracker is reporting each eye + gaze vector is intersecting the calibrated surface. + + In general, the y or vertical component of each eyes gaze position should + be the same value, since in typical user populations the two eyes are + yoked vertically when they move. 
Therefore any difference between the + two eyes in the y dimension is likely due to eye tracker error. + + Differences between the x, or horizontal component of the gaze position, + indicate that the participant is being reported as looking behind or + in front of the calibrated plane. When a user is looking at the + calibration surface , the x component of the two eyes gaze position should be the same. + Differences between the x value for each eye either indicates that the + user is not focussing at the calibrated depth, or that there is error in the eye data. + + The above remarks are true for any eye tracker in general. + + The getLastGazePosition method returns the most recent eye gaze position + retrieved from the eye tracker device. This is the position on the + calibrated 2D surface that the eye tracker is reporting as the current + eye position. The units are in the units in use by the Display device. + + If binocular recording is being performed, the average position of both + eyes is returned. + + If no samples have been received from the eye tracker, or the + eye tracker is not currently recording data, None is returned. + + Args: + None + + Returns: + None: If the eye tracker is not currently recording data or no eye samples have been received. + + tuple: Latest (gaze_x,gaze_y) position of the eye(s) + + """ + return self._latest_gaze_position + + def _setSamplingRate(self, sampling_rate): + return self._tobii.setSamplingRate(sampling_rate) + + def _poll(self): + """The Tobii system uses a callback approach to providing new eye data + as it becomes available, so polling (and therefore this method) are not + used.""" + pass + + def _handleNativeEvent(self, *args, **kwargs): + """This method is called every time there is new eye data available + from the Tobii system, which will be roughly equal to the sampling rate + eye data is being recorded at. + + The callback needs to return as quickly as possible so there is + no chance of overlapping calls being made to the callback. + Therefore this method simply puts the event data received from + the eye tracker device, and the local ioHub time the callback + was called, into a buffer for processing by the ioHub event + system. + """ + if self.isReportingEvents(): + try: + logged_time = Computer.getTime() + tobii_logged_time = self._tobii.getCurrentLocalTobiiTime() * self.DEVICE_TIMEBASE_TO_SEC + + eye_data_event = args[0] + + data_delay = tobii_logged_time - (eye_data_event['system_time_stamp'] * self.DEVICE_TIMEBASE_TO_SEC) + + device_event_time = eye_data_event['device_time_stamp'] + iohub_event_time = (logged_time - data_delay) + self._addNativeEventToBuffer( + (logged_time, + device_event_time, + iohub_event_time, + data_delay, + eye_data_event)) + return True + except Exception: + print2err('ERROR IN _handleNativeEvent') + printExceptionDetailsToStdErr() + else: + print2err( + 'self._handleNativeEvent called but isReportingEvents == false') + + + def _getIOHubEventObject(self, native_event_data): + """The _getIOHubEventObject method is called by the ioHub Server to + convert new native device event objects that have been received to the + appropriate ioHub Event type representation. + + The Tobii ioHub eye tracker implementation uses a callback method + to register new native device events with the ioHub Server. + Therefore this method converts the native Tobii event data into + an appropriate ioHub Event representation. 
+ + Args: + native_event_data: object or tuple of (callback_time, native_event_object) + + Returns: + tuple: The appropriate ioHub Event type in list form. + + """ + try: + logged_time, device_event_time, iohub_event_time, data_delay, eye_data_event = native_event_data + + event_type = EventConstants.BINOCULAR_EYE_SAMPLE + + left_gaze_x, left_gaze_y = eye_data_event['left_gaze_point_on_display_area'] + right_gaze_x, right_gaze_y = eye_data_event['right_gaze_point_on_display_area'] + + status = 0 + + if eye_data_event['left_gaze_point_validity'] > 0: + left_gaze_x, left_gaze_y = self._eyeTrackerToDisplayCoords( + (left_gaze_x, left_gaze_y)) + else: + status += 20 + + if eye_data_event['right_gaze_point_validity'] > 0: + right_gaze_x, right_gaze_y = self._eyeTrackerToDisplayCoords( + (right_gaze_x, right_gaze_y)) + else: + status += 2 + + right_gx, right_gy, right_gz = eye_data_event['right_gaze_origin_in_trackbox_coordinate_system'] + left_gx, left_gy, left_gz = eye_data_event['left_gaze_origin_in_trackbox_coordinate_system'] + + confidenceInterval = 0.0 + binocSample = [ + 0, + 0, + 0, # device id (not currently used) + Device._getNextEventID(), + event_type, + device_event_time, + logged_time, + iohub_event_time, + confidenceInterval, + data_delay, + 0, # filtered id (always 0 right now) + left_gaze_x, + left_gaze_y, + EyeTrackerConstants.UNDEFINED, + left_gx, + left_gy, + left_gz, + EyeTrackerConstants.UNDEFINED, # Left Eye Angle x + EyeTrackerConstants.UNDEFINED, # Left Eye Angle y + EyeTrackerConstants.UNDEFINED, # Left Camera Sensor position x + EyeTrackerConstants.UNDEFINED, # Left Camera Sensor position y + eye_data_event['left_pupil_diameter'], + EyeTrackerConstants.PUPIL_DIAMETER_MM, + EyeTrackerConstants.UNDEFINED, # Left pupil size measure 2 + EyeTrackerConstants.UNDEFINED, # Left pupil size measure 2 type + EyeTrackerConstants.UNDEFINED, # Left PPD x + EyeTrackerConstants.UNDEFINED, # Left PPD y + EyeTrackerConstants.UNDEFINED, # Left velocity x + EyeTrackerConstants.UNDEFINED, # Left velocity y + EyeTrackerConstants.UNDEFINED, # Left velocity xy + right_gaze_x, + right_gaze_y, + EyeTrackerConstants.UNDEFINED, # Right Eye Angle z + right_gx, + right_gy, + right_gz, + EyeTrackerConstants.UNDEFINED, # Right Eye Angle x + EyeTrackerConstants.UNDEFINED, # Right Eye Angle y + EyeTrackerConstants.UNDEFINED, # Right Camera Sensor position x + EyeTrackerConstants.UNDEFINED, # Right Camera Sensor position y + eye_data_event['right_pupil_diameter'], + EyeTrackerConstants.PUPIL_DIAMETER_MM, + EyeTrackerConstants.UNDEFINED, # Right pupil size measure 2 + EyeTrackerConstants.UNDEFINED, # Right pupil size measure 2 type + EyeTrackerConstants.UNDEFINED, # Right PPD x + EyeTrackerConstants.UNDEFINED, # Right PPD y + EyeTrackerConstants.UNDEFINED, # right velocity x + EyeTrackerConstants.UNDEFINED, # right velocity y + EyeTrackerConstants.UNDEFINED, # right velocity xy + status + ] + + self._latest_sample = binocSample + + if eye_data_event['left_gaze_point_validity'] == eye_data_event['right_gaze_point_validity'] == 0: + self._latest_gaze_position = None + elif eye_data_event['left_gaze_point_validity'] == eye_data_event['right_gaze_point_validity'] == 1: + self._latest_gaze_position = [(right_gaze_x + left_gaze_x) / 2.0, + (right_gaze_y + left_gaze_y) / 2.0] + elif eye_data_event['left_gaze_point_validity'] == 1: + self._latest_gaze_position = [left_gaze_x, left_gaze_y] + elif eye_data_event['right_gaze_point_validity'] == 1: + self._latest_gaze_position = [right_gaze_x, right_gaze_y] + + 
self._last_callback_time = logged_time + + return binocSample + except Exception: + printExceptionDetailsToStdErr() + return None + + def _eyeTrackerToDisplayCoords(self, eyetracker_point): + """Converts Tobii gaze positions to the Display device coordinate + space.""" + gaze_x, gaze_y = eyetracker_point + left, top, right, bottom = self._display_device.getCoordBounds() + w, h = right - left, top - bottom + x, y = left + w * gaze_x, bottom + h * (1.0 - gaze_y) + return x, y + + def _displayToEyeTrackerCoords(self, display_x, display_y): + """Converts a Display device point to Tobii gaze position coordinate + space.""" + left, top, right, bottom = self._display_device.getCoordBounds() + w, h = right - left, top - bottom + + return (left - display_x) / w, (top - display_y) / h + + def _close(self): + if EyeTracker._tobii: + EyeTracker._tobii.disconnect() + EyeTracker._tobii = None + EyeTrackerDevice._close(self) diff --git a/psychopy_eyetracker_tobii/tobii/supported_config_settings.yaml b/psychopy_eyetracker_tobii/tobii/supported_config_settings.yaml new file mode 100644 index 0000000..908ecc8 --- /dev/null +++ b/psychopy_eyetracker_tobii/tobii/supported_config_settings.yaml @@ -0,0 +1,115 @@ +eyetracker.hw.tobii.EyeTracker: + name: tracker + enable: IOHUB_BOOL + model_name: + IOHUB_STRING: + min_length: 0 + max_length: 64 + serial_number: + IOHUB_STRING: + min_length: 0 + max_length: 32 + license_file: + IOHUB_STRING: + min_length: 0 + max_length: 1024 + save_events: IOHUB_BOOL + stream_events: IOHUB_BOOL + auto_report_events: False + event_buffer_length: + IOHUB_INT: + min: 1 + max: 8192 + monitor_event_types: [BinocularEyeSampleEvent,] + runtime_settings: + sampling_rate: + IOHUB_INT: + min: 30 + max: 1200 + track_eyes: [BINOCULAR,] + calibration: + # The Tobii ioHub Common Eye Tracker Interface currently support + # a 3, 5 and 9 point calibration mode. + # THREE_POINTS,FIVE_POINTS,NINE_POINTS + type: + IOHUB_LIST: + valid_values: [THREE_POINTS, FIVE_POINTS, NINE_POINTS, THIRTEEN_POINTS] + min_length: 1 + max_length: 1 + unit_type: + IOHUB_LIST: + valid_values: [norm, pix, cm, height, deg, degFlatPos, degFlat] + min_length: 0 + max_length: 1 + color_type: + IOHUB_LIST: + valid_values: [rgb, dkl, lms, hsv, hex, named, rgb255] + min_length: 0 + max_length: 1 + randomize: IOHUB_BOOL + target_positions: [] + auto_pace: IOHUB_BOOL + target_duration: + IOHUB_FLOAT: + min: 0.5 + max: 2.5 + target_delay: + IOHUB_FLOAT: + min: 0.5 + max: 2.5 + # pacing_speed is deprecated. Please use 'target_delay' instead. + pacing_speed: + IOHUB_FLOAT: + min: 0.5 + max: 2.5 + screen_background_color: IOHUB_COLOR + text_color: IOHUB_COLOR + target_type: + IOHUB_LIST: + valid_values: [ CIRCLE_TARGET, CUSTOM ] + min_length: 1 + max_length: 1 + target_attributes: + custom: + module_name: + class_name: + class_kwargs: + outer_diameter: + IOHUB_FLOAT: + min: 0.01 + max: 1000.0 + outer_stroke_width: + IOHUB_FLOAT: + min: 0.01 + max: 1000.0 + outer_fill_color: IOHUB_COLOR + outer_line_color: IOHUB_COLOR + inner_diameter: + IOHUB_FLOAT: + min: 0.01 + max: 1000.0 + inner_stroke_width: + IOHUB_FLOAT: + min: 0.01 + max: 1000.0 + inner_fill_color: IOHUB_COLOR + inner_line_color: IOHUB_COLOR + animate: + enable: IOHUB_BOOL +# movement_velocity is no longer supported, use target_delay instead. 
+# movement_velocity: +# IOHUB_FLOAT: +# min: -1.0 +# max: 1000.0 + expansion_ratio: # expands to 3 x the starting size + IOHUB_FLOAT: + min: 1.0 + max: 100.0 +# expansion_speed is no longer supported, use target_duration instead +# expansion_speed: +# IOHUB_FLOAT: +# min: -1.0 +# max: 100.0 + contract_only: IOHUB_BOOL + device_number: 0 + manufacturer_name: Tobii Technology \ No newline at end of file diff --git a/psychopy_eyetracker_tobii/tobii/tobiiwrapper.py b/psychopy_eyetracker_tobii/tobii/tobiiwrapper.py new file mode 100644 index 0000000..dd1bc2c --- /dev/null +++ b/psychopy_eyetracker_tobii/tobii/tobiiwrapper.py @@ -0,0 +1,158 @@ +# -*- coding: utf-8 -*- +# Part of the PsychoPy library +# Copyright (C) 2012-2020 iSolver Software Solutions (C) 2021 Open Science Tools Ltd. +# Distributed under the terms of the GNU General Public License (GPL). + +import numpy as np +from psychopy.iohub.devices import Computer +from psychopy.iohub.errors import print2err, printExceptionDetailsToStdErr + +getTime = Computer.getTime + +try: + import tobii_research +except Exception: + # This can happen when it is Sphinx auto-doc loading the file + printExceptionDetailsToStdErr() + +# Tobii Eye Tracker +class TobiiTracker(): + try: + CALIBRATION_STATUS_SUCCESS = tobii_research.CALIBRATION_STATUS_SUCCESS + except: + CALIBRATION_STATUS_SUCCESS = 1 + + def __init__(self, serial_number=None, model=None): + """ + """ + self._eyetracker = None + retry_count = 10 + trackers = [] + while len(trackers)==0 or retry_count > 0: + trackers = tobii_research.find_all_eyetrackers() + retry_count = retry_count - 1 + + if len(trackers)==0: + raise RuntimeError('Could detect any Tobii devices.') + + if serial_number or model: + for et in trackers: + if serial_number == et.serial_number: + self._eyetracker = et + break + if model == et.model: + self._eyetracker = et + break + else: + self._eyetracker = trackers[0] + + if self._eyetracker is None: + raise RuntimeError('Could not connect to Tobii.') + + self._last_eye_data = None + self._isRecording = False + + def on_eyetracker_data(self, *args, **kwargs): + """ + Default (standalone test use only) event handler. + """ + eye_data = args[0] + print2err('on_eyetracker_data:') + print2err(eye_data) + print2err() + self._last_eye_data = eye_data + + def getCurrentEyeTrackerTime(self): + ''' + Using tobii_research.get_system_time_stamp() as current tracker time. + TODO: Find out how to accurately get current device time without + having an event time. 
+ ''' + return tobii_research.get_system_time_stamp() + + def getCurrentLocalTobiiTime(self): + return tobii_research.get_system_time_stamp() + + + def newScreenCalibration(self): + if self._eyetracker: + return tobii_research.ScreenBasedCalibration(self._eyetracker) + + def startTracking(self, et_data_rx_callback=None): + if et_data_rx_callback: + self.on_eyetracker_data = et_data_rx_callback + self._last_eye_data = None + self._eyetracker.subscribe_to(tobii_research.EYETRACKER_GAZE_DATA, + self.on_eyetracker_data, + as_dictionary=True) + self._isRecording = True + return True + + def stopTracking(self): + self._eyetracker.unsubscribe_from(tobii_research.EYETRACKER_GAZE_DATA, + self.on_eyetracker_data) + self._isRecording = False + + def getName(self): + return self._eyetracker.name + + def setName(self, name): + try: + self._eyetracker.set_device_name(name) + except tobii_research.EyeTrackerFeatureNotSupportedError: + print2err("This eye tracker doesn't support changing the device name.") + except tobii_research.EyeTrackerLicenseError: + print2err("You need a higher level license to change the device name.") + + def setSamplingRate(self, rate): + if rate in self.getAvailableSamplingRates(): + self._eyetracker.set_gaze_output_frequency(rate) + return self.getSamplingRate() + + def getAvailableSamplingRates(self): + return self._eyetracker.get_all_gaze_output_frequencies() + + def getSamplingRate(self): + return self._eyetracker.get_gaze_output_frequency() + + def getMode(self): + return self._eyetracker.get_eye_tracking_mode() + + def getAvailableModes(self): + return self._eyetracker.get_all_eye_tracking_modes() + + def setMode(self, imode): + cmode = self.getMode() + try: + self._eyetracker.set_eye_tracking_mode(imode) + except: + self._eyetracker.set_eye_tracking_mode(cmode) + + def getHeadBox(self): + hb = self._eyetracker.get_track_box() + if hb: + bll = hb.back_lower_left + blr = hb.back_lower_right + bup = hb.back_upper_left + bur = hb.back_upper_right + fll = hb.front_lower_left + flr = hb.front_lower_right + ful = hb.front_upper_left + fur = hb.front_upper_right + + return np.asarray([ + bll, + blr, + bup, + bur, + fll, + flr, + ful, + fur + ]) + return None + + def disconnect(self): + if self._isRecording: + self.stopTracking() + self._eyetracker = None diff --git a/pyproject.toml b/pyproject.toml new file mode 100644 index 0000000..87d9f86 --- /dev/null +++ b/pyproject.toml @@ -0,0 +1,33 @@ +[build-system] +requires = ["setuptools>=40.8.0", "wheel"] +build-backend = "setuptools.build_meta" + +[project] +name = "psychopy-eyetracker-tobii" +version = "0.0.21" +description = "Extension package for PsychoPy which adds support for Tobii eyetrackers." 
+readme = "README.md" +requires-python = ">= 3.7" +license = {text = "GNU General Public License v3 (GPLv3)"} +authors = [ + { name = "Jon Peirce", email = "jon@opensceincetools.org" }, + { name = "Matthew Cutone", email = "mcutone@opensceincetools.org" }, +] +classifiers = [ + "License :: OSI Approved :: GNU General Public License v3 (GPLv3)", + "Programming Language :: Python :: 3", + "Programming Language :: Python :: 3 :: Only", + "Programming Language :: Python :: 3.7", + "Programming Language :: Python :: 3.8", + "Programming Language :: Python :: 3.9", + "Programming Language :: Python :: 3.10", + "Programming Language :: Python :: 3.11", + "Programming Language :: Python :: Implementation :: CPython", +] +urls.homepage = "https://github.com/psychopy/psychopy-eyetracker-tobii" +urls.changelog = "https://github.com/psychopy/psychopy-eyetracker-tobii/blob/main/CHANGELOG.txt" +urls.documentation = "https://pages.github.com/psychopy/psychopy-eyetracker-tobii" +urls.repository = "https://github.com/psychopy/psychopy-eyetracker-tobii" +dependencies = [ + "tobiiresearch" +] \ No newline at end of file