From 5593877737c6392d9fe539a4b0e599a99bcc2bfc Mon Sep 17 00:00:00 2001
From: Bryn <31349775+8ryn@users.noreply.github.com>
Date: Thu, 17 Aug 2023 17:17:59 +0100
Subject: [PATCH 1/3] Creation and setup of example Jupyter notebook #12

---
 docker-compose.yml            |   2 +-
 docs/user/how-to/run-scan.rst |  19 ++
 sim/build.sh                  |   2 +-
 sim/jupyter/Dockerfile        |   7 +
 sim/jupyter/build.sh          |   5 +
 src/tomoscan/setup.ipynb      | 360 ++++++++++++++++++++++++++++++++++
 6 files changed, 393 insertions(+), 2 deletions(-)
 create mode 100644 sim/jupyter/Dockerfile
 create mode 100755 sim/jupyter/build.sh
 create mode 100644 src/tomoscan/setup.ipynb

diff --git a/docker-compose.yml b/docker-compose.yml
index ab81e34..c413561 100644
--- a/docker-compose.yml
+++ b/docker-compose.yml
@@ -97,7 +97,7 @@ services:
       - --pv-prefix
       - "EPAC-DEV:PULSE:"
       - --period
-      - "5"
+      - "2"
       - --epics-time-offset
       - "-0.05"
       - --enable-gate
diff --git a/docs/user/how-to/run-scan.rst b/docs/user/how-to/run-scan.rst
index 7a97fa9..3336195 100644
--- a/docs/user/how-to/run-scan.rst
+++ b/docs/user/how-to/run-scan.rst
@@ -1,5 +1,9 @@
 Running a scan
 ==================
+Scans can either be run within an IPython terminal or a Jupyter notebook.
+
+IPython terminal
+----------------
 * Start the interactive bluesky environment. It is important to mount the output directory and run the container within the same network as the docker-compose environment. This can be achieved by running:
 
@@ -11,6 +15,21 @@ Running a scan
 There are two scan modes which are explained below.
 Outputs from the scan are saved to the data directory.
 
+Jupyter notebook
+-----------------
+
+* Start the Jupyter notebook container, mounting the output directory and the directory containing the example Jupyter notebook. The container must also be run within the same network as the docker-compose environment. Do this by running:
+
+.. code-block:: bash
+
+    docker run -p 8888:8888 -v `pwd`/src/tomoscan:/home/jovyan/work -v `pwd`/data:/home/jovyan/data --net tomoscan_default tomoscan_jupyter
+
+* Copy and paste the URL starting :code:`http://127.0.0.1:8888` that appears in the terminal into a browser to launch the Jupyter server.
+* Launch the setup notebook, which is found in the work folder.
+* Start the Phoebus screen to monitor the scan's progress. Navigate to the display folder and run :code:`./startOverview.sh`.
+* Follow the steps in the Jupyter notebook.
+
+
 Synced scan
 -------------
 In the default setup the pulse generator triggers the laser IOC and then when running the synced scan the motor moves to its next position, waits for the laser PV to
diff --git a/sim/build.sh b/sim/build.sh
index 30d51bd..91faf41 100755
--- a/sim/build.sh
+++ b/sim/build.sh
@@ -2,7 +2,7 @@
 
 set -e
 
-BUILD_DIRECTORIES="areaDetectorDock motorDock pmac pulsedLaser"
+BUILD_DIRECTORIES="areaDetectorDock motorDock pmac pulsedLaser jupyter"
 
 for DIR in $BUILD_DIRECTORIES; do
     echo "+--------------------------"
diff --git a/sim/jupyter/Dockerfile b/sim/jupyter/Dockerfile
new file mode 100644
index 0000000..e41d9a2
--- /dev/null
+++ b/sim/jupyter/Dockerfile
@@ -0,0 +1,7 @@
+#Due to copying a top level file this must be run from the top level directory
+FROM jupyter/base-notebook
+
+#Long term this list should somehow be synced with the bluesky pip install list
+RUN set -ex && pip install h5py bluesky ophyd ipython matplotlib databroker pyepics area-detector-handlers
+RUN mkdir -p ~/.config/databroker
+COPY ./mongo.yml /home/jovyan/.config/databroker/mongo.yml
\ No newline at end of file
diff --git a/sim/jupyter/build.sh b/sim/jupyter/build.sh
new file mode 100755
index 0000000..f101981
--- /dev/null
+++ b/sim/jupyter/build.sh
@@ -0,0 +1,5 @@
+#!/bin/bash
+DIR=$(pwd)
+cd ../..
+docker build -t tomoscan_jupyter -f sim/jupyter/Dockerfile .
+cd $DIR
\ No newline at end of file
diff --git a/src/tomoscan/setup.ipynb b/src/tomoscan/setup.ipynb
new file mode 100644
index 0000000..6204ae9
--- /dev/null
+++ b/src/tomoscan/setup.ipynb
@@ -0,0 +1,360 @@
+{
+ "cells": [
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "import time as ttime\n",
+    "\n",
+    "from bluesky.plans import count, scan\n",
+    "from ophyd import EpicsSignal"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "Here the ophyd areadetector classes are set up:"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "from ophyd.areadetector.filestore_mixins import FileStoreHDF5IterativeWrite\n",
+    "from ophyd.areadetector.plugins import HDF5Plugin_V34\n",
+    "from ophyd import SingleTrigger, AreaDetector, ADComponent\n",
+    "from ophyd.areadetector import cam\n",
+    "\n",
+    "class MyHDF5Plugin(FileStoreHDF5IterativeWrite, HDF5Plugin_V34):\n",
+    "    ...\n",
+    "\n",
+    "\n",
+    "class MyDetector(SingleTrigger, AreaDetector):\n",
+    "    cam = ADComponent(cam.AreaDetectorCam, \"CAM:\")\n",
+    "    hdf1 = ADComponent(\n",
+    "        MyHDF5Plugin,\n",
+    "        \"HDF1:\",\n",
+    "        write_path_template=\"/out/%Y/%m/%d/\",\n",
+    "        #read_path_template=\"/home/bar/Projects/tomoscan/data/%Y/%m/%d/\",\n",
+    "        read_path_template=\"/home/jovyan/data/%Y/%m/%d/\"\n",
+    "        \n",
+    "    )"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "from ophyd import Device, Component, EpicsSignalRO\n",
+    "\n",
+    "class MyLaser(Device):\n",
+    "    power = Component(EpicsSignalRO, \"laser:power\")\n",
+    "    pulse_id = Component(EpicsSignalRO, \"EPAC-DEV:PULSE:PULSE_ID\", name=\"pulse_id\")"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "The next code block defines a function used to poll PVs until they take a particular value.\n",
+    "This function should be a temporary measure; a more streamlined approach, in which the PV monitoring is properly integrated with Bluesky/Ophyd, is hoped to be used in the future."
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "def wait_for_value(signal: EpicsSignal, value, poll_time=0.01, timeout=10):\n",
+    "    expiration_time = ttime.time() + timeout\n",
+    "    current_value = signal.get()\n",
+    "    while current_value != value:\n",
+    "        # ttime.sleep(poll_time)\n",
+    "        yield from bps.sleep(poll_time)\n",
+    "        if ttime.time() > expiration_time:\n",
+    "            raise TimeoutError(\n",
+    "                \"Timed out waiting for %r to take value %r after %r seconds\"\n",
+    "                % (signal, value, timeout)\n",
+    "            )\n",
+    "        current_value = signal.get()"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "This custom plan moves the motor and then waits for the laser pulse before taking the next reading."
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "from bluesky.plan_stubs import mv\n",
+    "import bluesky.plan_stubs as bps\n",
+    "\n",
+    "def pulse_sync(detectors, motor, laser, start, stop, steps):\n",
+    "    step_size = (stop - start) / (steps - 1)\n",
+    "\n",
+    "    for det in detectors:\n",
+    "        yield from bps.stage(det)\n",
+    "\n",
+    "    yield from bps.open_run()\n",
+    "    for i in range(steps):\n",
+    "        yield from bps.checkpoint() # allows pausing/rewinding\n",
+    "        yield from mv(motor, start + i * step_size)\n",
+    "        yield from wait_for_value(\n",
+    "            laser.power, 0, poll_time=0.01, timeout=10\n",
+    "        ) # Want to be at 0 initially such that image taken on pulse\n",
+    "        yield from wait_for_value(laser.power, 1, poll_time=0.001, timeout=10)\n",
+    "        yield from bps.trigger_and_read(list(detectors) + [motor] + [laser])\n",
+    "    yield from bps.close_run()\n",
+    "\n",
+    "    for det in detectors:\n",
+    "        yield from bps.unstage(det)"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "This custom plan moves the motor based on the detector status. It is designed to be used when the detector is being directly triggered outside of bluesky."
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "def passive_scan(detectors, motor, start, stop, steps, adStatus, pulse_ID):\n",
+    "    step_size = (stop - start) / (steps - 1)\n",
+    "\n",
+    "    yield from mv(motor, start) # Move motor to starting position since may take time\n",
+    "\n",
+    "    yield from bps.open_run()\n",
+    "\n",
+    "    for det in detectors:\n",
+    "        yield from bps.stage(det)\n",
+    "\n",
+    "    for i in range(steps):\n",
+    "        yield from mv(motor, start + i * step_size)\n",
+    "        yield from bps.checkpoint()\n",
+    "        yield from wait_for_value(adStatus, 2, poll_time=0.001, timeout=10)\n",
+    "        yield from bps.trigger_and_read([motor] + [pulse_ID])\n",
+    "        yield from wait_for_value(adStatus, 0, poll_time=0.001, timeout=10)\n",
+    "\n",
+    "    for det in detectors:\n",
+    "        yield from bps.unstage(det)\n",
+    "\n",
+    "    yield from bps.close_run()"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "Here we initialise the detector object. \n",
+    "This will give an error saying that caRepeater couldn't be located; this is not an issue. A second related error message may later appear, which can also be ignored."
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "prefix = \"ADT:USER1:\"\n",
+    "det = MyDetector(prefix, name=\"det\")"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "The detector's HDF plugin is then set to create the necessary output directory if it does not exist and the detector is primed.\n",
+    "The plugin's \"kind\" is required to be set to 3 such that the resulting HDF files are accessible via the databroker."
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "det.hdf1.create_directory.put(-5)\n",
+    "det.hdf1.warmup()\n",
+    "det.hdf1.kind = 3 # config | normal, required to include images in run documents"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "The camera's stage signatures are area detector configurations which are set whenever the detector is staged within Bluesky."
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "det.cam.stage_sigs[\"image_mode\"] = \"Multiple\"\n",
+    "det.cam.stage_sigs[\"acquire_time\"] = 0.05\n",
+    "det.cam.stage_sigs[\"num_images\"] = 1"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "The motor and laser objects are created. In the case of the laser we wait for the PVs to connect successfully."
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "from ophyd import EpicsMotor\n",
+    "\n",
+    "motor1 = EpicsMotor(\"motorS:axis1\", name=\"motor1\")\n",
+    "laser1 = MyLaser(\"\", name=\"laser1\")\n",
+    "laser1.wait_for_connection()"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "We load the Bluesky run engine and subscribe the best effort callback. The best effort callback aims to print and plot useful information as scans are performed."
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "from bluesky import RunEngine\n",
+    "from bluesky.callbacks.best_effort import BestEffortCallback\n",
+    "\n",
+    "RE = RunEngine()\n",
+    "bec = BestEffortCallback()\n",
+    "RE.subscribe(bec)"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "The databroker is linked to the running mongoDB database and the run engine is set to insert all data captured to the databroker."
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "import databroker\n",
+    "\n",
+    "catalog = databroker.catalog[\"mongo\"]\n",
+    "RE.subscribe(catalog.v1.insert)"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "As a first example, run this synced scan, which takes 11 readings at intervals between motor positions of -10 and +10.\n",
+    "This will also generate a table and a plot of the motor position. In this case the plot does not show much information."
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "uids = RE(pulse_sync([det], motor1, laser1, -10, 10, 11))"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "To access the data stored in the databroker catalog run the following:"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "run = catalog[uids[0]] # Accesses the run based on its uid; the most recent run can also be accessed as catalog[-1]\n",
+    "data = run.primary.read()\n",
+    "data"
+   ]
+  },
+  {
+   "cell_type": "markdown",
+   "metadata": {},
+   "source": [
+    "The image data is accessed as shown below."
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {},
+   "outputs": [],
+   "source": [
+    "image = data[\"det_image\"]\n",
+    "frame = image[0][0] # The first index refers to the time of the image and the second index refers to the frame number\n",
+    "frame.plot.pcolormesh()"
+   ]
+  },
+  {
+   "cell_type": "code",
+   "execution_count": null,
+   "metadata": {},
+   "outputs": [],
+   "source": []
+  }
+ ],
+ "metadata": {
+  "kernelspec": {
+   "display_name": "Python 3 (ipykernel)",
+   "language": "python",
+   "name": "python3"
+  },
+  "language_info": {
+   "codemirror_mode": {
+    "name": "ipython",
+    "version": 3
+   },
+   "file_extension": ".py",
+   "mimetype": "text/x-python",
+   "name": "python",
+   "nbconvert_exporter": "python",
+   "pygments_lexer": "ipython3",
+   "version": "3.10.12"
+  }
+ },
+ "nbformat": 4,
+ "nbformat_minor": 4
+}

From 5e49d82184a7e00ebc8935a28932be6a387c1c5d Mon Sep 17 00:00:00 2001
From: Bryn <31349775+8ryn@users.noreply.github.com>
Date: Fri, 18 Aug 2023 10:53:10 +0100
Subject: [PATCH 2/3] gitignore addition

---
 .gitignore | 1 +
 1 file changed, 1 insertion(+)

diff --git a/.gitignore b/.gitignore
index d36cd46..896218d 100644
--- a/.gitignore
+++ b/.gitignore
@@ -81,3 +81,4 @@ lib/
 # Output files
 data/*
 !data/.placeholder
+src/tomoscan/.ipynb_checkpoints/setup-checkpoint.ipynb

From 4498e840e8502ee7f81d21ae18fb3cd7ce7ba84f Mon Sep 17 00:00:00 2001
From: Bryn <31349775+8ryn@users.noreply.github.com>
Date: Wed, 23 Aug 2023 13:51:19 +0100
Subject: [PATCH 3/3] Modify jupyter notebook to import from main script

---
 src/tomoscan/setup.ipynb | 276 +--------------------------------------
 1 file changed, 6 insertions(+), 270 deletions(-)

diff --git a/src/tomoscan/setup.ipynb b/src/tomoscan/setup.ipynb
index 6204ae9..dc7785d 100644
--- a/src/tomoscan/setup.ipynb
+++ b/src/tomoscan/setup.ipynb
@@ -1,225 +1,12 @@
 {
  "cells": [
-  {
-   "cell_type": "code",
-   "execution_count": null,
-   "metadata": {},
-   "outputs": [],
-   "source": [
-    "import time as ttime\n",
-    "\n",
-    "from bluesky.plans import count, scan\n",
-    "from ophyd import EpicsSignal"
-   ]
-  },
-  {
-   "cell_type": "markdown",
-   "metadata": {},
-   "source": [
-    "Here the ophyd areadetector classes are set up:"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": null,
-   "metadata": {},
-   "outputs": [],
-   "source": [
-    "from ophyd.areadetector.filestore_mixins import FileStoreHDF5IterativeWrite\n",
-    "from ophyd.areadetector.plugins import HDF5Plugin_V34\n",
-    "from ophyd import SingleTrigger, AreaDetector, ADComponent\n",
-    "from ophyd.areadetector import cam\n",
-    "\n",
-    "class MyHDF5Plugin(FileStoreHDF5IterativeWrite, HDF5Plugin_V34):\n",
-    "    ...\n",
-    "\n",
-    "\n",
-    "class MyDetector(SingleTrigger, AreaDetector):\n",
-    "    cam = ADComponent(cam.AreaDetectorCam, \"CAM:\")\n",
-    "    hdf1 = ADComponent(\n",
-    "        MyHDF5Plugin,\n",
-    "        \"HDF1:\",\n",
-    "        write_path_template=\"/out/%Y/%m/%d/\",\n",
-    "        #read_path_template=\"/home/bar/Projects/tomoscan/data/%Y/%m/%d/\",\n",
-    "        read_path_template=\"/home/jovyan/data/%Y/%m/%d/\"\n",
-    "        \n",
-    "    )"
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": null,
-   "metadata": {},
-   "outputs": [],
-   "source": [
-    "from ophyd import Device, Component, EpicsSignalRO\n",
-    "\n",
-    "class MyLaser(Device):\n",
-    "    power = Component(EpicsSignalRO, \"laser:power\")\n",
-    "    pulse_id = Component(EpicsSignalRO, \"EPAC-DEV:PULSE:PULSE_ID\", name=\"pulse_id\")"
-   ]
-  },
-  {
-   "cell_type": "markdown",
-   "metadata": {},
-   "source": [
-    "The next code block defines a function used to poll PVs until they take a particular value.\n",
-    "This function should be a temporary measure; a more streamlined approach, in which the PV monitoring is properly integrated with Bluesky/Ophyd, is hoped to be used in the future."
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": null,
-   "metadata": {},
-   "outputs": [],
-   "source": [
-    "def wait_for_value(signal: EpicsSignal, value, poll_time=0.01, timeout=10):\n",
-    "    expiration_time = ttime.time() + timeout\n",
-    "    current_value = signal.get()\n",
-    "    while current_value != value:\n",
-    "        # ttime.sleep(poll_time)\n",
-    "        yield from bps.sleep(poll_time)\n",
-    "        if ttime.time() > expiration_time:\n",
-    "            raise TimeoutError(\n",
-    "                \"Timed out waiting for %r to take value %r after %r seconds\"\n",
-    "                % (signal, value, timeout)\n",
-    "            )\n",
-    "        current_value = signal.get()"
-   ]
-  },
-  {
-   "cell_type": "markdown",
-   "metadata": {},
-   "source": [
-    "This custom plan moves the motor and then waits for the laser pulse before taking the next reading."
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": null,
-   "metadata": {},
-   "outputs": [],
-   "source": [
-    "from bluesky.plan_stubs import mv\n",
-    "import bluesky.plan_stubs as bps\n",
-    "\n",
-    "def pulse_sync(detectors, motor, laser, start, stop, steps):\n",
-    "    step_size = (stop - start) / (steps - 1)\n",
-    "\n",
-    "    for det in detectors:\n",
-    "        yield from bps.stage(det)\n",
-    "\n",
-    "    yield from bps.open_run()\n",
-    "    for i in range(steps):\n",
-    "        yield from bps.checkpoint() # allows pausing/rewinding\n",
-    "        yield from mv(motor, start + i * step_size)\n",
-    "        yield from wait_for_value(\n",
-    "            laser.power, 0, poll_time=0.01, timeout=10\n",
-    "        ) # Want to be at 0 initially such that image taken on pulse\n",
-    "        yield from wait_for_value(laser.power, 1, poll_time=0.001, timeout=10)\n",
-    "        yield from bps.trigger_and_read(list(detectors) + [motor] + [laser])\n",
-    "    yield from bps.close_run()\n",
-    "\n",
-    "    for det in detectors:\n",
-    "        yield from bps.unstage(det)"
-   ]
-  },
-  {
-   "cell_type": "markdown",
-   "metadata": {},
-   "source": [
-    "This custom plan moves the motor based on the detector status. It is designed to be used when the detector is being directly triggered outside of bluesky."
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": null,
-   "metadata": {},
-   "outputs": [],
-   "source": [
-    "def passive_scan(detectors, motor, start, stop, steps, adStatus, pulse_ID):\n",
-    "    step_size = (stop - start) / (steps - 1)\n",
-    "\n",
-    "    yield from mv(motor, start) # Move motor to starting position since may take time\n",
-    "\n",
-    "    yield from bps.open_run()\n",
-    "\n",
-    "    for det in detectors:\n",
-    "        yield from bps.stage(det)\n",
-    "\n",
-    "    for i in range(steps):\n",
-    "        yield from mv(motor, start + i * step_size)\n",
-    "        yield from bps.checkpoint()\n",
-    "        yield from wait_for_value(adStatus, 2, poll_time=0.001, timeout=10)\n",
-    "        yield from bps.trigger_and_read([motor] + [pulse_ID])\n",
-    "        yield from wait_for_value(adStatus, 0, poll_time=0.001, timeout=10)\n",
-    "\n",
-    "    for det in detectors:\n",
-    "        yield from bps.unstage(det)\n",
-    "\n",
-    "    yield from bps.close_run()"
-   ]
-  },
-  {
-   "cell_type": "markdown",
-   "metadata": {},
-   "source": [
-    "Here we initialise the detector object. \n",
-    "This will give an error saying that caRepeater couldn't be located; this is not an issue. A second related error message may later appear, which can also be ignored."
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": null,
-   "metadata": {},
-   "outputs": [],
-   "source": [
-    "prefix = \"ADT:USER1:\"\n",
-    "det = MyDetector(prefix, name=\"det\")"
-   ]
-  },
-  {
-   "cell_type": "markdown",
-   "metadata": {},
-   "source": [
-    "The detector's HDF plugin is then set to create the necessary output directory if it does not exist and the detector is primed.\n",
-    "The plugin's \"kind\" is required to be set to 3 such that the resulting HDF files are accessible via the databroker."
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": null,
-   "metadata": {},
-   "outputs": [],
-   "source": [
-    "det.hdf1.create_directory.put(-5)\n",
-    "det.hdf1.warmup()\n",
-    "det.hdf1.kind = 3 # config | normal, required to include images in run documents"
-   ]
-  },
-  {
-   "cell_type": "markdown",
-   "metadata": {},
-   "source": [
-    "The camera's stage signatures are area detector configurations which are set whenever the detector is staged within Bluesky."
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": null,
-   "metadata": {},
-   "outputs": [],
-   "source": [
-    "det.cam.stage_sigs[\"image_mode\"] = \"Multiple\"\n",
-    "det.cam.stage_sigs[\"acquire_time\"] = 0.05\n",
-    "det.cam.stage_sigs[\"num_images\"] = 1"
-   ]
-  },
   {
    "cell_type": "markdown",
    "metadata": {},
    "source": [
-    "The motor and laser objects are created. In the case of the laser we wait for the PVs to connect successfully."
+    "First the detector, laser and motor are set up, along with the bluesky environment, from the ophyd_inter_setup file.\n",
+    "Due to the different file structure of the Jupyter notebook, the read path must be updated.\n",
+    "An error related to caRepeater will likely appear; this is not a concern."
    ]
   },
   {
    "cell_type": "code",
    "execution_count": null,
    "metadata": {},
    "outputs": [],
    "source": [
-    "from ophyd import EpicsMotor\n",
-    "\n",
-    "motor1 = EpicsMotor(\"motorS:axis1\", name=\"motor1\")\n",
-    "laser1 = MyLaser(\"\", name=\"laser1\")\n",
-    "laser1.wait_for_connection()"
+    "from ophyd_inter_setup import *\n",
+    "det.hdf1.read_path_template = \"/home/jovyan/data/%Y/%m/%d/\" "
    ]
   },
   {
    "cell_type": "markdown",
    "metadata": {},
    "source": [
-    "We load the Bluesky run engine and subscribe the best effort callback. The best effort callback aims to print and plot useful information as scans are performed."
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": null,
-   "metadata": {},
-   "outputs": [],
-   "source": [
-    "from bluesky import RunEngine\n",
-    "from bluesky.callbacks.best_effort import BestEffortCallback\n",
-    "\n",
-    "RE = RunEngine()\n",
-    "bec = BestEffortCallback()\n",
-    "RE.subscribe(bec)"
-   ]
-  },
-  {
-   "cell_type": "markdown",
-   "metadata": {},
-   "source": [
-    "The databroker is linked to the running mongoDB database and the run engine is set to insert all data captured to the databroker."
-   ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": null,
-   "metadata": {},
-   "outputs": [],
-   "source": [
-    "import databroker\n",
-    "\n",
-    "catalog = databroker.catalog[\"mongo\"]\n",
-    "RE.subscribe(catalog.v1.insert)"
-   ]
-  },
-  {
-   "cell_type": "markdown",
-   "metadata": {},
-   "source": [
-    "As a first example, run this synced scan, which takes 11 readings at intervals between motor positions of -10 and +10.\n",
-    "This will also generate a table and a plot of the motor position. In this case the plot does not show much information."
+    "The synced scan can then be run. This example scans in 11 steps between -10 and +10."
    ]
   },
   {
    "cell_type": "code",
    "execution_count": null,
    "metadata": {},
    "outputs": [],
    "source": [
     "uids = RE(pulse_sync([det], motor1, laser1, -10, 10, 11))"
    ]
   },
   {
    "cell_type": "markdown",
    "metadata": {},
    "source": [
     "To access the data stored in the databroker catalog run the following:"
    ]
   },
   {
    "cell_type": "code",
    "execution_count": null,
    "metadata": {},
    "outputs": [],
    "source": [
     "run = catalog[uids[0]] # Accesses the run based on its uid; the most recent run can also be accessed as catalog[-1]\n",
     "data = run.primary.read()\n",
     "data"
    ]
   },
   {
    "cell_type": "markdown",
    "metadata": {},
    "source": [
     "The image data is accessed as shown below."
    ]
   },
   {
    "cell_type": "code",
    "execution_count": null,
    "metadata": {},
    "outputs": [],
    "source": [
     "image = data[\"det_image\"]\n",
     "frame = image[0][0] # The first index refers to the time of the image and the second index refers to the frame number\n",
     "frame.plot.pcolormesh()"
    ]
-  },
-  {
-   "cell_type": "code",
-   "execution_count": null,
-   "metadata": {},
-   "outputs": [],
-   "source": []
   }
  ],
  "metadata": {