diff --git a/README.rst b/README.rst index 83574372..eac00692 100644 --- a/README.rst +++ b/README.rst @@ -19,6 +19,8 @@ A modular package to control stimulation and track behavior in zebrafish experim :target: http://www.portugueslab.com/stytra/ +If you are using Stytra for your own research, please `cite us `_! + Stytra is divided into independent modules which can be assembled depending on the experimental requirements. @@ -88,4 +90,4 @@ In the second case, you might want to have a look at the camera APIs section bel from their `github repository `_. The problem will be resolved once the next pyqtgraph version is released. -For further detais please consult the `documentation `_ +For further details please consult the `documentation `_ diff --git a/docs/figures/framerates.svg b/docs/figures/framerates.svg new file mode 100644 index 00000000..a6de9d74 --- /dev/null +++ b/docs/figures/framerates.svg @@ -0,0 +1,768 @@ + [768 lines of SVG markup omitted: vector figure of stimulus-display frame intervals (the "framerates" inset); only the image/svg+xml media-type string survived extraction] diff --git a/docs/figures/freely.png b/docs/figures/freely.png new file mode 100644 index 00000000..db7b720c Binary files /dev/null and b/docs/figures/freely.png differ diff --git a/docs/figures/imaging.png b/docs/figures/imaging.png deleted file mode 100644 index 1976e1fd..00000000 Binary files a/docs/figures/imaging.png and /dev/null differ diff --git a/docs/figures/low_cost.png b/docs/figures/low_cost.png new file mode 100644 index 00000000..cf68e69c Binary files /dev/null and b/docs/figures/low_cost.png differ diff --git a/docs/figures/parts_full.png b/docs/figures/parts_full.png new file mode 100644 index 00000000..a9b92449 Binary files /dev/null and b/docs/figures/parts_full.png differ diff --git a/docs/source/devdocs/module_desc.rst b/docs/source/devdocs/0_module_desc.rst similarity index 98% rename from docs/source/devdocs/module_desc.rst rename to docs/source/devdocs/0_module_desc.rst index 2a64e632..53e7567c 100644 --- a/docs/source/devdocs/module_desc.rst +++ b/docs/source/devdocs/0_module_desc.rst @@ -2,8 +2,6 @@ Module description ================== -Modules ------- .. glossary:: :py:mod:`stytra` The root module, contains the Stytra class for running the experiment diff --git a/docs/source/devdocs/pipelines.rst b/docs/source/devdocs/1_pipelines.rst similarity index 96% rename from docs/source/devdocs/pipelines.rst rename to docs/source/devdocs/1_pipelines.rst index cb792964..dbe6d0aa 100644 --- a/docs/source/devdocs/pipelines.rst +++ b/docs/source/devdocs/1_pipelines.rst @@ -33,7 +33,7 @@ Nodes must have: - A _process method which contains optional parameters as keyword arguments, annotated with Params for everything that can be changed from the user interface. -The _process function **has to** output a :class:`NodeOutput ` named tuple (from :module:`stytra.tracking.pipelines`) which contains a list of diagnostic messages (can be empty), and either an image if the node is a :class:`ImageToImageNode ` or a NamedTuple if the node is a :class:`ImageToDataNode ` +The _process function **has to** output a :class:`NodeOutput ` named tuple (from :py:mod:`stytra.tracking.pipelines`) which contains a list of diagnostic messages (can be empty), and either an image if the node is a :class:`ImageToImageNode ` or a NamedTuple if the node is a :class:`ImageToDataNode ` Optionally, if the processing function is stateful (depends on previous inputs), you can define a reset function which resets the state.
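For orientation, a minimal node obeying this contract could look as follows. This is a sketch, not Stytra's own code: it assumes that ``ImageToImageNode`` and ``NodeOutput`` can be imported from :py:mod:`stytra.tracking.pipelines`, that parameters are declared with ``Param`` from the lightparam package used by Stytra, and that the base-class constructor accepts a ``name`` keyword; check these names against the installed version::

    import cv2
    from lightparam import Param
    from stytra.tracking.pipelines import ImageToImageNode, NodeOutput


    class SmoothingNode(ImageToImageNode):
        """Trivial image-to-image node: blurs each incoming frame."""

        def __init__(self, *args, **kwargs):
            super().__init__(*args, name="smoothing", **kwargs)

        def _process(self, im, kernel_size: Param(5, (1, 21))):
            # First field of NodeOutput: list of diagnostic messages (empty here);
            # second field: the processed image passed downstream.
            out = cv2.blur(im, (kernel_size, kernel_size))
            return NodeOutput([], out)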
diff --git a/docs/source/devdocs/triggering_intro.rst b/docs/source/devdocs/2_triggering_intro.rst similarity index 100% rename from docs/source/devdocs/triggering_intro.rst rename to docs/source/devdocs/2_triggering_intro.rst diff --git a/docs/source/devdocs/data_saving.rst b/docs/source/devdocs/3_data_saving.rst similarity index 100% rename from docs/source/devdocs/data_saving.rst rename to docs/source/devdocs/3_data_saving.rst diff --git a/docs/source/devdocs/parameters_stytra.rst b/docs/source/devdocs/4_parameters_stytra.rst similarity index 100% rename from docs/source/devdocs/parameters_stytra.rst rename to docs/source/devdocs/4_parameters_stytra.rst diff --git a/docs/source/devdocs/modules/modules.rst b/docs/source/devdocs/modules/modules.rst index 75a769f9..6ed45f63 100644 --- a/docs/source/devdocs/modules/modules.rst +++ b/docs/source/devdocs/modules/modules.rst @@ -1,7 +1,7 @@ -stytra -====== +modules +======= .. toctree:: :maxdepth: 4 - stytra + stytra.* diff --git a/docs/source/devdocs/modules/stytra.hardware.video.cameras.rst b/docs/source/devdocs/modules/stytra.hardware.video.cameras.rst index 407f976b..cf20d4e4 100644 --- a/docs/source/devdocs/modules/stytra.hardware.video.cameras.rst +++ b/docs/source/devdocs/modules/stytra.hardware.video.cameras.rst @@ -4,18 +4,18 @@ stytra.hardware.video.cameras package Submodules ---------- -stytra.hardware.video.cameras.abstract\_class module ----------------------------------------------------- +stytra.hardware.video.cameras.avt module +---------------------------------------- -.. automodule:: stytra.hardware.video.cameras.abstract_class +.. automodule:: stytra.hardware.video.cameras.avt :members: :undoc-members: :show-inheritance: -stytra.hardware.video.cameras.avt module ----------------------------------------- +stytra.hardware.video.cameras.interface module ---------------------------------------------- -.. automodule:: stytra.hardware.video.cameras.avt +.. automodule:: stytra.hardware.video.cameras.interface :members: :undoc-members: :show-inheritance: diff --git a/docs/source/index.rst b/docs/source/index.rst index 4e89ccf8..ef0e5f29 100644 --- a/docs/source/index.rst +++ b/docs/source/index.rst @@ -8,7 +8,7 @@ Stytra: an open-source, integrated system for stimulation, tracking and closed-l Vilim Štih\ :sup:`#`\ , Luigi Petrucco\ :sup:`#`\ , Andreas M. Kist* and Ruben Portugues -Research Group of Sensorimotor Control, Max Planck Institute of Neurobiology, +`Research Group of Sensorimotor Control `_, `Max Planck Institute of Neurobiology `_, Martinsried, Germany \ :sup:`#`\ These authors contributed equally to this work.
@@ -52,6 +52,8 @@ how Stytra can serve as a platform to design behavioral experiments involving tr or visual stimulation with other animals and provide an `example integration `_ with the DeepLabCut neural network-based tracking method. +If you are using Stytra for your own research, please `cite us `_! + If you encounter any issues, please report them `here `_. diff --git a/docs/source/overview/2_stimulation_intro.rst b/docs/source/overview/2_stimulation_intro.rst index 7fdd12bc..f82cd271 100644 --- a/docs/source/overview/2_stimulation_intro.rst +++ b/docs/source/overview/2_stimulation_intro.rst @@ -3,14 +3,21 @@ Stimulation =========== -Experimental protocols in Stytra are defined as sequences of timed stimuli presented to the animal through a projector or external actuators. A sequence of stimuli, defined as a Python list of :class:`~stytra.stimulation.stimuli.generic_stimuli.Stimulus` objects, is defined in a :class:`~stytra.stimulation.Protocol` object. This structure enables straightforward design of new experimental protocols, requiring very little knowledge of the general structure of the library and only basic Python syntax. A dedicated class coordinates the timed execution of the protocol relying on a ``QTimer`` from the PyQt5 library, ensuring a temporal resolution in the order of 15-20 ms (around the response time of a normal monitor, see inset. Drawing very complex stimuli consisting of many polygons or requiring online computation of large arrays can decrease the stimulus display performance. The stimulus display framerate can be monitored online from the user interface when the protocol is running (see the lower left corner of the window in :ref:`interface`. Milli- or microsecond precision, which might be required for optogenetic experiments, for example, is currently not supported. Each :class:`~stytra.stimulation.stimuli.generic_stimuli.Stimulus` has methods which are called at starting time or at every subsequent time step while it is set. In this way one can generate dynamically changing stimuli, or trigger external devices. New :class:`~stytra.stimulation.stimuli.generic_stimuli.Stimulus` types can be easily added to the library just by subclassing :class:`~stytra.stimulation.stimuli.generic_stimuli.Stimulus` and re-defining the :meth:`~stytra.stimulation.stimuli.generic_stimuli.Stimulus.start` and :meth:`~stytra.stimulation.stimuli.generic_stimuli.Stimulus.update` methods. -:sidebar:`Framerates` +Experimental protocols in Stytra are defined as sequences of timed stimuli presented to the animal through a projector or external actuators. A sequence of stimuli, defined as a Python list of :class:`~stytra.stimulation.stimuli.generic_stimuli.Stimulus` objects, is defined in a :class:`~stytra.stimulation.Protocol` object. This structure enables straightforward design of new experimental protocols, requiring very little knowledge of the general structure of the library and only basic Python syntax. A dedicated class coordinates the timed execution of the protocol relying on a ``QTimer`` from the PyQt5 library, ensuring a temporal resolution in the order of 15-20 ms (around the response time of a normal monitor, see inset). Drawing very complex stimuli consisting of many polygons or requiring online computation of large arrays can decrease the stimulus display performance. -.. autoclass::`~stytra.stimulation.ProtocolRunner` +.. figure:: ../../figures/framerates.svg + :align: right + :figwidth: 200px -A large number of stimuli is included in the package. 
In particular, a library of visual stimuli has been implemented as :class:`~stytra.stimulation.stimuli.visual.VisualStimulus` objects using the `QPainter `_ object, a part of the Qt GUI library, enabling efficient drawing with OpenGL. Relying on a set of high-level drawing primitives makes the code very readable and maintainable. Stytra already includes common stimuli used in visual neuroscience, such as moving bars, dots, whole-field translation or rotations of patterns on a screen, and additional features such as movie playback and the presentation of images from a file (which can be generated by packages such as Imagen :cite:`imagen`). The classes describing visual stimuli can be combined, and new stimuli where these patterns are moved or masked can be quickly defined by combining the appropriate :class:`~stytra.stimulation.stimuli.generic_stimuli.Stimulus` types. Finally, new stimuli can be easily created by redefining the :meth:`~stytra.stimulation.stimuli.visual.VisualStimulus.paint` method in a new :class:`~stytra.stimulation.stimuli.visual.VisualStimulus` object. Multiple stimuli can be presented simultaneously using :class:`~stytra.stimulation.stimuli.visual.StimulusCombiner`. Presenting different stimuli depending on animal behavior or external signals can be achieved using the :class:`~stytra.stimulation.stimuli.conditional.ConditionalWrapper` container, or with similarly designed custom objects. Visual stimuli are usually displayed on a secondary screen, therefore Stytra provides a convenient interface for positioning and calibrating the stimulation window (visible in :ref:`interface` on the right-hand side). Although in our experiments we are using a single stimulation monitor, displaying stimuli on multiple screens can be achieved with virtual desktop technology or screen-splitting hardware boards. Importantly, all stimulus parameters are specified in physical units and are therefore independent of the display hardware. Finally, the timed execution of code inside :class:`~stytra.stimulation.stimuli.generic_stimuli.Stimulus` objects can be used to control hardware via I/O boards or serial communication with micro-controllers such as `Arduino `_ or `MicroPython PyBoard `_. For example, in this way one may deliver odors or temperature stimuli or optogenetic stimulation. Examples for a few different kinds of stimuli are provided below. -For a description of how to synchronize the stimulus with an external data-acquisition device such a s a microscope, see :ref:`_trig-desc`. + Interval duration when flickering a white stimulus on every update of the display loop. The screen was recorded at 2 kHz. + +The stimulus display framerate can be monitored online from the user interface when the protocol is running (see the lower left corner of the window in :ref:`interface `). Milli- or microsecond precision, which might be required for optogenetic experiments, for example, is currently not supported. Each :class:`~stytra.stimulation.stimuli.generic_stimuli.Stimulus` has methods which are called at starting time or at every subsequent time step while it is set. In this way one can generate dynamically changing stimuli, or trigger external devices.
New :class:`~stytra.stimulation.stimuli.generic_stimuli.Stimulus` types can be easily added to the library just by subclassing :class:`~stytra.stimulation.stimuli.generic_stimuli.Stimulus` and re-defining the :meth:`~stytra.stimulation.stimuli.generic_stimuli.Stimulus.start` and :meth:`~stytra.stimulation.stimuli.generic_stimuli.Stimulus.update` methods. + + + +A large number of stimuli is included in the package. In particular, a library of visual stimuli has been implemented as :class:`~stytra.stimulation.stimuli.visual.VisualStimulus` objects using the `QPainter `_ object, a part of the Qt GUI library, enabling efficient drawing with OpenGL. Relying on a set of high-level drawing primitives makes the code very readable and maintainable. Stytra already includes common stimuli used in visual neuroscience, such as moving bars, dots, whole-field translation or rotations of patterns on a screen, and additional features such as movie playback and the presentation of images from a file (which can be generated by packages such as Imagen :cite:`imagen`). The classes describing visual stimuli can be combined, and new stimuli where these patterns are moved or masked can be quickly defined by combining the appropriate :class:`~stytra.stimulation.stimuli.generic_stimuli.Stimulus` types. Finally, new stimuli can be easily created by redefining the :meth:`~stytra.stimulation.stimuli.visual.VisualStimulus.paint` method in a new :class:`~stytra.stimulation.stimuli.visual.VisualStimulus` object. Multiple stimuli can be presented simultaneously using :class:`~stytra.stimulation.stimuli.visual.StimulusCombiner`. Presenting different stimuli depending on animal behavior or external signals can be achieved using the :class:`~stytra.stimulation.stimuli.conditional.ConditionalWrapper` container, or with similarly designed custom objects. Visual stimuli are usually displayed on a secondary screen, therefore Stytra provides a convenient interface for positioning and calibrating the stimulation window (visible in :ref:`interface ` on the right-hand side). Although in our experiments we are using a single stimulation monitor, displaying stimuli on multiple screens can be achieved with virtual desktop technology or screen-splitting hardware boards. Importantly, all stimulus parameters are specified in physical units and are therefore independent of the display hardware. Finally, the timed execution of code inside :class:`~stytra.stimulation.stimuli.generic_stimuli.Stimulus` objects can be used to control hardware via I/O boards or serial communication with micro-controllers such as `Arduino `_ or `MicroPython PyBoard `_. For example, in this way one may deliver odors or temperature stimuli or optogenetic stimulation. Examples for a few different kinds of stimuli are provided below. +For a description of how to synchronize the stimulus with an external data-acquisition device such as a microscope, see the :ref:`triggering ` section of the developer documentation. @@ -88,8 +95,4 @@ Note that this code would require an installed NI board and the nidaqmx library def get_stim_sequence(): return([SetVoltageStimulus(duration=10, dev="Dev1", chan="ao0", voltage=0), - SetVoltageStimulus(duration=1, dev="Dev1", chan="ao0", voltage=3.5)]) - - - -.. bibliography:: biblio.bib \ No newline at end of file + SetVoltageStimulus(duration=1, dev="Dev1", chan="ao0", voltage=3.5)]) \ No newline at end of file
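As the page above notes, a new visual stimulus only needs a redefined ``paint`` method. A minimal sketch follows; the ``paint(p, w, h)`` signature matches the conditional-stimulus code later in this changeset, while ``self._elapsed`` and ``self.duration`` are assumed base-class attributes to be verified against the installed version::

    from PyQt5.QtCore import QPoint
    from PyQt5.QtGui import QBrush, QColor

    from stytra.stimulation.stimuli.visual import VisualStimulus


    class GrowingCircleStimulus(VisualStimulus):
        """White disk whose radius grows linearly over the stimulus duration."""

        def __init__(self, *args, max_radius=100, **kwargs):
            super().__init__(*args, **kwargs)
            self.max_radius = max_radius
            self.name = "growing_circle"

        def paint(self, p, w, h):
            # p is the QPainter of the display window; w and h its size in pixels.
            r = int(self.max_radius * self._elapsed / self.duration)
            p.setBrush(QBrush(QColor(255, 255, 255)))
            p.drawEllipse(QPoint(w // 2, h // 2), r, r)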
diff --git a/docs/source/overview/3_tracking.rst b/docs/source/overview/3_tracking.rst index afe2946e..60997855 100644 --- a/docs/source/overview/3_tracking.rst +++ b/docs/source/overview/3_tracking.rst @@ -35,7 +35,7 @@ Tail tracking Zebrafish larvae swim in discrete units called bouts, and different types of swim bouts, from startle responses to forward swimming are caused by different tail motion patterns :cite:`Budick2565`. The tail of the larvae can be easily skeletonized and described as a curve discretized into 7-10 segments :cite:`portugues2014whole` . The tail tracking functions work by finding the angle of a tail segment given the position and the orientation of the previous one. The starting position of the tail, as well as a rough tail orientation and length need to be specified beforehand using start and end points, movable over the camera image displayed in the user interface (as can be seen below). -To find the tail segments, two different functions are implemented. The first one looks at pixels along an arc to find their maximum (or minimum, if the image is inverted) where the current segment would end (as already described in e.g. :cite:`portugues2014whole`). The second method, introduced here, is based on centers of mass of sampling windows (see figure below), and provides a more reliable and smoother estimate over a wider range of resolutions and illumination methods. The image contrast and tail segment numbers have to be adjusted for each setup, which can be easily accomplished through the live view of the filtering and tracking results. In the documentation we provide :ref:`guidelines ` on choosing these parameters. To compare results across different setups which might have different camera resolutions, the resulting tail shape can be interpolated to a fixed number of segments regardless of the number of traced points. +To find the tail segments, two different functions are implemented. The first one looks at pixels along an arc to find their maximum (or minimum, if the image is inverted) where the current segment would end (as already described in e.g. :cite:`portugues2014whole`). The second method, introduced here, is based on centers of mass of sampling windows (see figure below), and provides a more reliable and smoother estimate over a wider range of resolutions and illumination methods. The image contrast and tail segment numbers have to be adjusted for each setup, which can be easily accomplished through the live view of the filtering and tracking results. In the documentation we provide :ref:`guidelines ` on choosing these parameters. To compare results across different setups which might have different camera resolutions, the resulting tail shape can be interpolated to a fixed number of segments regardless of the number of traced points. .. image:: ../../figures/tracking_img.png :height: 320px
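The center-of-mass segment search described above can be conveyed in a few lines of NumPy. The toy function below is illustrative only (it assumes a bright tail on a dark background and sampling windows that stay inside the image); the filtering, window shapes and subpixel handling of the actual stytra implementation differ::

    import numpy as np


    def trace_tail(im, start_yx, angle, seg_len, n_segments, win=7):
        """Toy center-of-mass tail tracing: step along the current direction,
        then pull the direction toward the local brightness center of mass."""
        y, x = start_yx
        angles = []
        for _ in range(n_segments):
            # Candidate endpoint one segment ahead along the current direction
            yc, xc = y + seg_len * np.sin(angle), x + seg_len * np.cos(angle)
            y0, x0 = int(yc) - win, int(xc) - win
            patch = im[y0:y0 + 2 * win, x0:x0 + 2 * win].astype(float)
            total = patch.sum()
            if total == 0:  # lost the tail: no bright pixels in the window
                break
            iy, ix = np.indices(patch.shape)
            cy = y0 + (iy * patch).sum() / total
            cx = x0 + (ix * patch).sum() / total
            # New segment direction: from the previous point to the center of mass
            angle = np.arctan2(cy - y, cx - x)
            y, x = y + seg_len * np.sin(angle), x + seg_len * np.cos(angle)
            angles.append(angle)
        return np.array(angles)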
diff --git a/docs/source/overview/5_external_synch.rst b/docs/source/overview/5_external_sync.rst similarity index 95% rename from docs/source/overview/5_external_synch.rst rename to docs/source/overview/5_external_sync.rst index f4f7a27b..18c7fef9 100644 --- a/docs/source/overview/5_external_synch.rst +++ b/docs/source/overview/5_external_sync.rst @@ -1,30 +1,30 @@ -.. raw:: html - - - -.. _imaging-example: - -Synchronize stimulation with data acquisition ============================================= - -Here, we demonstrate the communication with a custom-built two-photon microscope. We performed two-photon calcium imaging in a seven days post fertilization, head-restrained fish larva pan-neuronally expressing the calcium indicator GCaMP6f (Tg(*elavl3*:GCaMP6f), :cite:`wolf2017sensorimotor`). For a complete description of the calcium imaging protocol see :cite:`kist2017whole`. These and following experiments were performed in accordance with approved protocols set by the Max Planck Society and the Regierung von Oberbayern. - - -We designed a simple protocol in Stytra consisting of either open- or closed-loop forward-moving gratings, similar to the optomotor assay described in the closed-loop section, with the gain set to either 0 or 1. At the beginning of the experiment, the microscope sends a ZeroMQ message to Stytra, as described in the previous section. This triggers the beginning of the visual stimulation protocol, as well as the online tracking of the fish tail, with a 10-20 ms delay. - -The figure belows shows the trace obtained from the live tracking of the tail during the experiment together with the vigor, the gain, and the grating velocities before and after calculating the closed loop velocity. Light shades represent open-loop trials and dark shades closed loop trials, and the triggering time is maked by an arrow: - -.. raw:: html - :file: ../../figures/imaging_behav.html - -To analyse the obtained imaging data, we used the behavioural data saved by Stytra to build regressors for grating speed and tail motion (for a description of regressor-based analysis of calcium signals, see :cite:`portugues2014whole`). Then, we computed pixel-wise correlation coefficients of calcium activity and the two regressors. The figure below reports the results obtained by imaging a large area of the fish brain, covering all regions from the rhombencephalon to the optic tectum. As expected, calcium signals in the region of the optic tectum are highly correlated with motion in the visual field, while events in more caudal regions of the reticular formation are highly correlated with swimming bouts: - -.. raw:: html - :file: ../../figures/imaging_cmaps.html - -To look at actual fluorescence traces, we investigated the activity of the pixels around the maximum of the correlation maps, highlighted by the square on the fish anatomies. Below, the plot shows the average activity in these regions together with the regressor traces for the vigor and the grating velocities. - -.. raw:: html - :file: ../../figures/imaging_traces.html - +.. raw:: html + + + +.. _imaging-example: + +Synchronizing stimulation with data acquisition +=============================================== + +Here, we demonstrate the communication with a custom-built two-photon microscope. We performed two-photon calcium imaging in a seven days post fertilization, head-restrained fish larva pan-neuronally expressing the calcium indicator GCaMP6f (Tg(*elavl3*:GCaMP6f), :cite:`wolf2017sensorimotor`). For a complete description of the calcium imaging protocol see :cite:`kist2017whole`. These and following experiments were performed in accordance with approved protocols set by the Max Planck Society and the Regierung von Oberbayern.
+ + +We designed a simple protocol in Stytra consisting of either open- or closed-loop forward-moving gratings, similar to the optomotor assay described in the closed-loop section, with the gain set to either 0 or 1. At the beginning of the experiment, the microscope sends a ZeroMQ message to Stytra, as described in the previous section. This triggers the beginning of the visual stimulation protocol, as well as the online tracking of the fish tail, with a 10-20 ms delay. + +The figure below shows the trace obtained from the live tracking of the tail during the experiment together with the vigor, the gain, and the grating velocities before and after calculating the closed loop velocity. Light shades represent open-loop trials and dark shades closed loop trials, and the triggering time is marked by an arrow: + +.. raw:: html + :file: ../../figures/imaging_behav.html + +To analyse the obtained imaging data, we used the behavioural data saved by Stytra to build regressors for grating speed and tail motion (for a description of regressor-based analysis of calcium signals, see :cite:`portugues2014whole`). Then, we computed pixel-wise correlation coefficients of calcium activity and the two regressors. The figure below reports the results obtained by imaging a large area of the fish brain, covering all regions from the rhombencephalon to the optic tectum. As expected, calcium signals in the region of the optic tectum are highly correlated with motion in the visual field, while events in more caudal regions of the reticular formation are highly correlated with swimming bouts: + +.. raw:: html + :file: ../../figures/imaging_cmaps.html + +To look at actual fluorescence traces, we investigated the activity of the pixels around the maximum of the correlation maps, highlighted by the square on the fish anatomies. Below, the plot shows the average activity in these regions together with the regressor traces for the vigor and the grating velocities. + +.. raw:: html + :file: ../../figures/imaging_traces.html + The Stytra script used for this experiment is available in the examples, and the analysis in a separate `github repository `_. \ No newline at end of file
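On the acquisition side, the ZeroMQ message that starts the protocol can be sent with a few lines of pyzmq. The sketch below assumes a REQ/REP socket pair and example address and payload; both must match the trigger object configured in the Stytra experiment script::

    import zmq

    # Microscope-computer side: ask Stytra to start the protocol.
    context = zmq.Context()
    socket = context.socket(zmq.REQ)
    socket.connect("tcp://192.168.0.1:5555")  # address/port of the Stytra machine (example values)
    socket.send_json({"frame_rate_hz": 2.0})  # optional scope metadata, saved with the experiment
    socket.recv()  # Stytra acknowledges; stimulation and tracking begin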
diff --git a/docs/source/overview/7_comparison.rst b/docs/source/overview/7_comparison.rst index 977378a0..6967970a 100644 --- a/docs/source/overview/7_comparison.rst +++ b/docs/source/overview/7_comparison.rst @@ -83,4 +83,4 @@ performance is mainly limited by the camera frame rate. Limitations ----------- -Support for different hardware would require some extensions in the architecture. Simultaneous use of multiple cameras is currently not supported, but this requires a minor rewriting of the frame dispatching module. We will both continue to extend Stytra's capabilities and support any contributions that expand the library to cover a wider range of experimental conditions. Finally, it is important to note that the choice of Python as a language would make it difficult to obtain millisecond-level or higher temporal precision (e.g. for closed-loop electrophysiology). To this aim, existing solutions based on compiled languages should be employed, such as :cite`ciliberti2017falcon` (a system for closed-loop electrophysiology in C++). Another possibility would be to combine Open Ephys and Bonsai, as in :cite:`buccino2018open`. +Support for different hardware would require some extensions in the architecture. Simultaneous use of multiple cameras is currently not supported, but this requires a minor rewriting of the frame dispatching module. We will both continue to extend Stytra's capabilities and support any contributions that expand the library to cover a wider range of experimental conditions. Finally, it is important to note that the choice of Python as a language would make it difficult to obtain millisecond-level or higher temporal precision (e.g. for closed-loop electrophysiology). To this aim, existing solutions based on compiled languages should be employed, such as :cite:`ciliberti2017falcon` (a system for closed-loop electrophysiology in C++). Another possibility would be to combine Open Ephys and Bonsai, as in :cite:`buccino2018open`. diff --git a/docs/source/overview/7_references.rst b/docs/source/overview/7_references.rst deleted file mode 100644 index 09511020..00000000 --- a/docs/source/overview/7_references.rst +++ /dev/null @@ -1,4 +0,0 @@ -References -========== - -.. bibliography:: biblio.bib \ No newline at end of file diff --git a/docs/source/overview/8_replication.rst b/docs/source/overview/8_replication.rst index 2313b577..3b7afb19 100644 --- a/docs/source/overview/8_replication.rst +++ b/docs/source/overview/8_replication.rst @@ -1,3 +1,8 @@ +.. raw:: html + + + + Experiment replication ====================== diff --git a/docs/source/overview/9_references.rst b/docs/source/overview/9_references.rst new file mode 100644 index 00000000..54af7e51 --- /dev/null +++ b/docs/source/overview/9_references.rst @@ -0,0 +1,5 @@ +References +========== + +.. bibliography:: biblio.bib + :all: \ No newline at end of file diff --git a/docs/source/userguide/0_install_guide.rst b/docs/source/userguide/0_install_guide.rst index 3c85202d..8ea64386 100644 --- a/docs/source/userguide/0_install_guide.rst +++ b/docs/source/userguide/0_install_guide.rst @@ -65,7 +65,7 @@ install using the -e argument:: -Now you can have a look at the stytra :ref:`Examples gallery`, or you can start +Now you can have a look at the stytra :ref:`Examples gallery `, or you can start :ref:`Configuring a computer for Stytra experiments`. In the second case, you might want to have a look at the camera APIs section below. @@ -99,7 +99,7 @@ and download and install the Vimba SDK. Then install the python wrapper pip install git+https://github.com/morefigs/pymba.git or, if using 64bit windows, you can grab the installation file from `here `_. -open the terminal in the folder where you downloaded it and install:: +Open the terminal in the folder where you downloaded it and install:: pip install pymba-0.1-py3-none-any.whl @@ -109,16 +109,18 @@ spinnaker: Point Grey / FLIR Go the the `FLIR support website `_, download the SDK and the Python API. -1. Install the SDK, by chosing the camera and OS, and then downloading - e.g. Spinnaker 1.15.0.63 Full SDK - Windows (64-bit) — 07/27/2018 - 517.392MB - or the equivalent for your operating system +1. Install the SDK, by choosing the camera and OS, and then downloading e.g. Spinnaker 1.15.0.63 Full SDK - Windows (64-bit) — 07/27/2018 - 517.392MB or the equivalent for your operating system + +2. Install the python module:: -2. Install the python module pip install "path_to_extracted_zip/spinnaker_python-1.15.0.63-cp36-cp36m-win_amd64.whl" (with the file with the appropriate OS and Python versions) +.. note:: + The FLIR/Spinnaker Python API currently does not support Python 3.7, so you might need to install a Python 3.6 conda environment to use it. + National Instruments framegrabber with Mikrotron camera .......................................................
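Whichever camera API is installed, the camera must also be selected in the experiment script. The sketch below follows the pattern of the examples shipped with stytra, where a type string is passed in the protocol's ``stytra_config`` dictionary; the keys and the ``"spinnaker"`` string are examples to be checked against the examples folder of the installed version::

    from stytra import Stytra, Protocol
    from stytra.stimulation.stimuli import Pause


    class CameraTestProtocol(Protocol):
        name = "camera_test"
        # Assumed configuration keys; "avt", "ximea", etc. would follow the same pattern
        stytra_config = dict(camera=dict(type="spinnaker"))

        def get_stim_sequence(self):
            return [Pause(duration=10.0)]


    if __name__ == "__main__":
        Stytra(protocol=CameraTestProtocol())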
diff --git a/docs/source/userguide/1_examples_gallery.rst b/docs/source/userguide/1_examples_gallery.rst index 85cd8ba4..affec15f 100644 --- a/docs/source/userguide/1_examples_gallery.rst +++ b/docs/source/userguide/1_examples_gallery.rst @@ -1,3 +1,5 @@ +.. _examples-gallery: + Designing and running experiments ================================= @@ -43,7 +45,7 @@ the second is the screen used to display the visual stimuli. In a real experimen to make sure this second window is presented to the animal. For details on positioning and calibration, please refer to :ref:`calibration` -For an introduction to the functionality of the user interface, see :ref:`Stytra user interface`. +For an introduction to the functionality of the user interface, see :ref:`Stytra user interface`. To start the experiment, just press the play button: a flash will appear on the screen after 4 seconds.
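The flash protocol that this walkthrough runs is itself only a dozen lines. The sketch below is patterned on the flash example in stytra's examples folder; class and argument names follow that pattern and may differ between versions::

    from stytra import Stytra, Protocol
    from stytra.stimulation.stimuli import FullFieldVisualStimulus, Pause


    class FlashProtocol(Protocol):
        name = "flash_protocol"

        def get_stim_sequence(self):
            # 4 s of darkness, then a 1 s full-field white flash
            return [
                Pause(duration=4.0),
                FullFieldVisualStimulus(duration=1.0, color=(255, 255, 255)),
            ]


    if __name__ == "__main__":
        Stytra(protocol=FlashProtocol())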
diff --git a/docs/source/userguide/3_tracking.rst b/docs/source/userguide/3_tracking.rst index c36c7b89..2364355e 100644 --- a/docs/source/userguide/3_tracking.rst +++ b/docs/source/userguide/3_tracking.rst @@ -1,9 +1,13 @@ +Tracking configuration +====================== + + .. _fishtracking: -Configuring tracking of freely-swimming fish -------------------------------------------- +Freely-swimming fish +-------------------- -.. image:: ../screenshots/freeswim_tracking.png +.. image:: ../../screenshots/freeswim_tracking.png :scale: 30% :alt: freely-swimming tracking screenshot :align: center @@ -33,8 +37,8 @@ .. _tailtracking: -Configuring tracking of embedded fish ------------------------------------- +Embedded fish +------------- 1) Ensure that the exposure time is not longer than 1.5 miliseconds, otherwise the tracking will not be correct for fast tail movements diff --git a/docs/source/userguide/calibration.rst b/docs/source/userguide/4_calibration.rst similarity index 97% rename from docs/source/userguide/calibration.rst rename to docs/source/userguide/4_calibration.rst index 1d288b72..3a98b03d 100644 --- a/docs/source/userguide/calibration.rst +++ b/docs/source/userguide/4_calibration.rst @@ -28,7 +28,7 @@ Calibration for freely-swimming zebrafish experiments To calibrate the camera image to the displayed image, the Circle Calibrator is used (it is enabled automatically for freely-swimming experiments). -.. image:: ../screenshots/calibration.png +.. image:: ../../screenshots/calibration.png :scale: 30% :alt: freely-swimming tracking screenshot :align: center diff --git a/docs/source/userguide/configuring_computer.rst b/docs/source/userguide/5_configuring_computer.rst similarity index 100% rename from docs/source/userguide/configuring_computer.rst rename to docs/source/userguide/5_configuring_computer.rst diff --git a/docs/source/userguide/coordinate_systems.rst b/docs/source/userguide/6_coordinate_systems.rst similarity index 100% rename from docs/source/userguide/coordinate_systems.rst rename to docs/source/userguide/6_coordinate_systems.rst diff --git a/stytra/stimulation/stimuli/conditional.py b/stytra/stimulation/stimuli/conditional.py index 3b335539..09e9b53d 100644 --- a/stytra/stimulation/stimuli/conditional.py +++ b/stytra/stimulation/stimuli/conditional.py @@ -237,6 +237,25 @@ def chceck_condition_off(self): class CenteringWrapper(SingleConditionalWrapper): + """ A wrapper which shows the centering stimulus (radial gratings) + when the fish exits a given radius from the display center + + Parameters + ---------- + stimulus: Stimulus + the stimulus to be displayed when not centering + + centering_stimulus: Stimulus, optional + by default radial gratings + + margin: float + the centering activating radius in mm + + + **kwargs + other arguments supplied to :class:`ConditionalStimulus` + + """ def __init__(self, stimulus, *args, centering_stimulus=None, margin=45, **kwargs): super().__init__(*args, stim_on=stimulus, @@ -259,8 +278,30 @@ def paint(self, p, w, h): class TwoRadiusCenteringWrapper(ConditionalWrapper): + """ An extension of the :class:`CenteringWrapper` that takes two radii: + a smaller one to stop the centering stimulus, and a bigger one to start + it again + + Parameters + ---------- + stimulus: Stimulus + the stimulus to be displayed when not centering + + centering_stimulus: Stimulus, optional + by default radial gratings + + r_out: float + the centering activating radius in mm + + r_in: float + the centering deactivating radius in mm + + **kwargs + other arguments supplied to :class:`ConditionalStimulus` + + """ def __init__(self, stimulus, *args, centering_stimulus=None, r_out=45, - r_in=25, + r_in=20, **kwargs): super().__init__(*args, stim_on=stimulus, stim_off=(centering_stimulus or RadialSineStimulus(