From ef8872eb6adb93a2520f1bb4b772d3d9fb5d7d8a Mon Sep 17 00:00:00 2001 From: Ivan Ivanov Date: Thu, 25 Jan 2024 13:23:27 -0800 Subject: [PATCH 01/57] Create measure_psf.py --- mantis/acquisition/scripts/measure_psf.py | 71 +++++++++++++++++++++++ 1 file changed, 71 insertions(+) create mode 100644 mantis/acquisition/scripts/measure_psf.py diff --git a/mantis/acquisition/scripts/measure_psf.py b/mantis/acquisition/scripts/measure_psf.py new file mode 100644 index 00000000..2fae52d5 --- /dev/null +++ b/mantis/acquisition/scripts/measure_psf.py @@ -0,0 +1,71 @@ +# %% +import napari +import numpy as np +import pandas as pd +import tifffile + +from napari_psf_analysis.psf_analysis.extract.BeadExtractor import BeadExtractor +from napari_psf_analysis.psf_analysis.image import Calibrated3DImage +from napari_psf_analysis.psf_analysis.psf import PSF +from scipy.ndimage import uniform_filter +from skimage.feature import peak_local_max + +# %% +data_path = r'Z:\2022_12_22_LS_after_SL2\epi_beads_100nm_fl_mount_after_SL2_1\LS_beads_100nm_fl_mount_after_SL2_1_MMStack_Pos0.ome.tif' +zyx_data = tifffile.imread(data_path) +spacing = (0.250, 0.069, 0.069) # in um + +# %% +viewer = napari.Viewer() +viewer.add_image(zyx_data) + +# %% + +# runs in about 10 seconds, sensitive to parameters +# finds ~310 peaks +points = peak_local_max( + uniform_filter(zyx_data, size=3), # helps remove hot pixels, adds ~3s + min_distance=25, + threshold_abs=200, + num_peaks=1000, # limit to top 1000 peaks + exclude_border=(3, 10, 10), # in zyx +) + +viewer.add_points(points, name='peaks local max', size=12, symbol='ring', edge_color='yellow') + +# %% + +patch_size = (spacing[0] * 10, spacing[1] * 15, spacing[2] * 15) +# round to nearest 0.5 um +patch_size = np.round(np.asarray(patch_size) / 0.5) * 0.5 + +# extract bead patches +bead_extractor = BeadExtractor( + image=Calibrated3DImage(data=zyx_data.astype(np.float64), spacing=spacing), + patch_size=patch_size, +) +beads = bead_extractor.extract_beads(points=points) +bead_offsets = np.asarray([bead.offset for bead in beads]) + + +def analyze_psf(bead: Calibrated3DImage): + psf = PSF(image=bead) + try: + psf.analyze() + return psf.get_summary_dict() + except RuntimeError: + # skip over beads where psf analysis failed + return {} + + +# analyze bead patches +results = [analyze_psf(bead) for bead in beads] +num_failed = sum([result == {} for result in results]) + +df = pd.DataFrame.from_records(results) +df['z_mu'] += bead_offsets[:, 0] * spacing[0] +df['y_mu'] += bead_offsets[:, 1] * spacing[1] +df['x_mu'] += bead_offsets[:, 2] * spacing[2] +df = df.dropna() + +# %% From 6b944e4487720eef8c473048a6caab63d7125a50 Mon Sep 17 00:00:00 2001 From: Ivan Ivanov Date: Fri, 26 Jan 2024 17:12:26 -0800 Subject: [PATCH 02/57] partial psf analysis report --- mantis/acquisition/scripts/measure_psf.py | 159 ++++++++++++++++++++-- 1 file changed, 148 insertions(+), 11 deletions(-) diff --git a/mantis/acquisition/scripts/measure_psf.py b/mantis/acquisition/scripts/measure_psf.py index 2fae52d5..7559df88 100644 --- a/mantis/acquisition/scripts/measure_psf.py +++ b/mantis/acquisition/scripts/measure_psf.py @@ -1,4 +1,10 @@ # %% +import webbrowser + +from pathlib import Path + +import markdown +import matplotlib.pyplot as plt import napari import numpy as np import pandas as pd @@ -7,17 +13,21 @@ from napari_psf_analysis.psf_analysis.extract.BeadExtractor import BeadExtractor from napari_psf_analysis.psf_analysis.image import Calibrated3DImage from napari_psf_analysis.psf_analysis.psf import PSF +from 
numpy.typing import ArrayLike from scipy.ndimage import uniform_filter from skimage.feature import peak_local_max # %% + +data_dir = Path(r'Z:\2022_12_22_LS_after_SL2') +dataset = 'epi_beads_100nm_fl_mount_after_SL2_1' + +# data_path = data_dir / dataset / (dataset+'_MMStack_Pos0.ome.tif') data_path = r'Z:\2022_12_22_LS_after_SL2\epi_beads_100nm_fl_mount_after_SL2_1\LS_beads_100nm_fl_mount_after_SL2_1_MMStack_Pos0.ome.tif' zyx_data = tifffile.imread(data_path) -spacing = (0.250, 0.069, 0.069) # in um -# %% -viewer = napari.Viewer() -viewer.add_image(zyx_data) +scale = (0.250, 0.069, 0.069) # in um +axis_labels = ("Z", "Y", "X") # %% @@ -31,17 +41,21 @@ exclude_border=(3, 10, 10), # in zyx ) +# %% +viewer = napari.Viewer() +viewer.add_image(zyx_data) + viewer.add_points(points, name='peaks local max', size=12, symbol='ring', edge_color='yellow') # %% -patch_size = (spacing[0] * 10, spacing[1] * 15, spacing[2] * 15) +patch_size = (scale[0] * 11, scale[1] * 15, scale[2] * 15) # round to nearest 0.5 um -patch_size = np.round(np.asarray(patch_size) / 0.5) * 0.5 +# patch_size = np.round(np.asarray(patch_size) / 0.5) * 0.5 # extract bead patches bead_extractor = BeadExtractor( - image=Calibrated3DImage(data=zyx_data.astype(np.float64), spacing=spacing), + image=Calibrated3DImage(data=zyx_data.astype(np.float64), spacing=scale), patch_size=patch_size, ) beads = bead_extractor.extract_beads(points=points) @@ -53,7 +67,7 @@ def analyze_psf(bead: Calibrated3DImage): try: psf.analyze() return psf.get_summary_dict() - except RuntimeError: + except Exception: # skip over beads where psf analysis failed return {} @@ -63,9 +77,132 @@ def analyze_psf(bead: Calibrated3DImage): num_failed = sum([result == {} for result in results]) df = pd.DataFrame.from_records(results) -df['z_mu'] += bead_offsets[:, 0] * spacing[0] -df['y_mu'] += bead_offsets[:, 1] * spacing[1] -df['x_mu'] += bead_offsets[:, 2] * spacing[2] +df['z_mu'] += bead_offsets[:, 0] * scale[0] +df['y_mu'] += bead_offsets[:, 1] * scale[1] +df['x_mu'] += bead_offsets[:, 2] * scale[2] df = df.dropna() # %% + + +def visualize_psf(out_dir: str, zyx_data: ArrayLike, zyx_scale: tuple, axis_labels: tuple): + scale_Z, scale_Y, scale_X = zyx_scale + shape_Z, shape_Y, shape_X = zyx_data.shape + cmap = 'viridis' + fig, ax = plt.subplots(1, 3) + + ax[0].imshow( + zyx_data[shape_Z // 2, :, :], cmap=cmap, origin='lower', aspect=scale_Y / scale_X + ) + ax[0].set_xlabel(axis_labels[-1]) + ax[0].set_ylabel(axis_labels[-2]) + + ax[1].imshow( + zyx_data[:, shape_Y // 2, :], cmap=cmap, origin='lower', aspect=scale_Z / scale_X + ) + ax[1].set_xlabel(axis_labels[-1]) + ax[1].set_ylabel(axis_labels[-3]) + + ax[2].imshow( + zyx_data[:, :, shape_X // 2], cmap=cmap, origin='lower', aspect=scale_Z / scale_Y + ) + ax[2].set_xlabel(axis_labels[-2]) + ax[2].set_ylabel(axis_labels[-3]) + + for _ax in ax.flatten(): + _ax.set_xticks([]) + _ax.set_yticks([]) + + plt.tight_layout() + plt.subplots_adjust(left=0.12, wspace=0.5) + fig_size = fig.get_size_inches() + fig_size_scaling = 3 / fig_size[0] # set width to 3 inches + fig.set_figwidth(fig_size[0] * fig_size_scaling) + fig.set_figheight(fig_size[1] * fig_size_scaling) + fig.savefig(out_dir) + + +def plot_fwhm_vs_z(out_dir: str, z, fwhm_x, fwhm_y, fwhm_z): + fig, ax = plt.subplots(1, 1) + + ax.plot(z, fwhm_x, 'o', z, fwhm_y, 'o') + ax.set_ylabel('X and Y FWHM (um)') + ax.set_xlabel('Z position (um)') + + ax2 = ax.twinx() + ax2.plot(z, fwhm_z, 'o', color='green') + ax2.set_ylabel('Z FWHM (um)', color='green') + 
ax2.tick_params(axis='y', labelcolor='green') + + fig.savefig(out_dir) + + +# %% generate plots + +plots_dir = data_dir / 'plots' +plots_dir.mkdir(parents=True, exist_ok=True) + +random_bead_number = np.random.choice(len(df), 3) +bead_psf_paths = [''] * 3 +for i, path, bead_number in zip(range(3), bead_psf_paths, random_bead_number): + path = plots_dir / f'bead_psf_{i}.png' + bead_psf_paths[i] = path + visualize_psf(path, beads[bead_number].data, scale, axis_labels) + +fwhm_vs_z_3d_path = plots_dir / 'fwhm_vs_z_3d.png' +plot_fwhm_vs_z( + fwhm_vs_z_3d_path, + df['z_mu'].values, + *[df[col].values for col in ('zyx_x_fwhm', 'zyx_y_fwhm', 'zyx_z_fwhm')], +) + +fwhm_vs_z_1d_path = plots_dir / 'fwhm_vs_z_1d.png' +plot_fwhm_vs_z( + fwhm_vs_z_1d_path, + df['z_mu'].values, + *[df[col].values for col in ('x_fwhm', 'y_fwhm', 'z_fwhm')], +) + +mean_1d_fwhm = [df[col].mean() for col in ('x_fwhm', 'y_fwhm', 'z_fwhm')] +mean_3d_fwhm = [df[col].mean() for col in ('zyx_x_fwhm', 'zyx_y_fwhm', 'zyx_z_fwhm')] +mean_pc_fwhm = [df[col].mean() for col in ('zyx_pc3_fwhm', 'zyx_pc2_fwhm', 'zyx_pc1_fwhm')] + +# %% + +# generate html report +html = markdown.markdown( + f''' +# PSF Analysis + +## Overview +Dataset name: {dataset} + +Scale: {scale} + +Mean FWHM: + +* 1D: {'({:.3f}, {:.3f}, {:.3f})'.format(*mean_1d_fwhm)} +* 3D: {'({:.3f}, {:.3f}, {:.3f})'.format(*mean_3d_fwhm)} +* PC: {'({:.3f}, {:.3f}, {:.3f})'.format(*mean_pc_fwhm)} + +## Representative bead PSF images +![bead psf 1]({bead_psf_paths[0]} "bead psf {random_bead_number[0]}") +![bead psf 2]({bead_psf_paths[1]} "bead psf {random_bead_number[1]}") +![bead psf 3]({bead_psf_paths[2]} "bead psf {random_bead_number[2]}") + +## XYZ FWHM versus Z position (3D) +![fwhm vs z]({fwhm_vs_z_3d_path} "fwhm vs z") + +''' +) + +# %% + +# save html file and show in browser +html_file_path = Path(r'C:\Users\labelfree\Documents\temphtml.html') +with open(html_file_path, 'w') as file: + file.write(html) + +webbrowser.open('file://' + str(html_file_path)) + +# %% From 62b75671718f9c49dd302b424be13ab3dfcef033 Mon Sep 17 00:00:00 2001 From: Ivan Ivanov Date: Mon, 29 Jan 2024 13:31:40 -0800 Subject: [PATCH 03/57] update psf analysis report --- .../acquisition/scripts/github-markdown.css | 1195 +++++++++++++++++ mantis/acquisition/scripts/measure_psf.py | 394 ++++-- 2 files changed, 1464 insertions(+), 125 deletions(-) create mode 100644 mantis/acquisition/scripts/github-markdown.css diff --git a/mantis/acquisition/scripts/github-markdown.css b/mantis/acquisition/scripts/github-markdown.css new file mode 100644 index 00000000..d32a3146 --- /dev/null +++ b/mantis/acquisition/scripts/github-markdown.css @@ -0,0 +1,1195 @@ +@media (prefers-color-scheme: dark) { + .markdown-body, + [data-theme="dark"] { + /*dark*/ + color-scheme: dark; + --color-prettylights-syntax-comment: #8b949e; + --color-prettylights-syntax-constant: #79c0ff; + --color-prettylights-syntax-entity: #d2a8ff; + --color-prettylights-syntax-storage-modifier-import: #c9d1d9; + --color-prettylights-syntax-entity-tag: #7ee787; + --color-prettylights-syntax-keyword: #ff7b72; + --color-prettylights-syntax-string: #a5d6ff; + --color-prettylights-syntax-variable: #ffa657; + --color-prettylights-syntax-brackethighlighter-unmatched: #f85149; + --color-prettylights-syntax-invalid-illegal-text: #f0f6fc; + --color-prettylights-syntax-invalid-illegal-bg: #8e1519; + --color-prettylights-syntax-carriage-return-text: #f0f6fc; + --color-prettylights-syntax-carriage-return-bg: #b62324; + --color-prettylights-syntax-string-regexp: 
#7ee787; + --color-prettylights-syntax-markup-list: #f2cc60; + --color-prettylights-syntax-markup-heading: #1f6feb; + --color-prettylights-syntax-markup-italic: #c9d1d9; + --color-prettylights-syntax-markup-bold: #c9d1d9; + --color-prettylights-syntax-markup-deleted-text: #ffdcd7; + --color-prettylights-syntax-markup-deleted-bg: #67060c; + --color-prettylights-syntax-markup-inserted-text: #aff5b4; + --color-prettylights-syntax-markup-inserted-bg: #033a16; + --color-prettylights-syntax-markup-changed-text: #ffdfb6; + --color-prettylights-syntax-markup-changed-bg: #5a1e02; + --color-prettylights-syntax-markup-ignored-text: #c9d1d9; + --color-prettylights-syntax-markup-ignored-bg: #1158c7; + --color-prettylights-syntax-meta-diff-range: #d2a8ff; + --color-prettylights-syntax-brackethighlighter-angle: #8b949e; + --color-prettylights-syntax-sublimelinter-gutter-mark: #484f58; + --color-prettylights-syntax-constant-other-reference-link: #a5d6ff; + --color-fg-default: #e6edf3; + --color-fg-muted: #848d97; + --color-fg-subtle: #6e7681; + --color-canvas-default: #0d1117; + --color-canvas-subtle: #161b22; + --color-border-default: #30363d; + --color-border-muted: #21262d; + --color-neutral-muted: rgba(110,118,129,0.4); + --color-accent-fg: #2f81f7; + --color-accent-emphasis: #1f6feb; + --color-success-fg: #3fb950; + --color-success-emphasis: #238636; + --color-attention-fg: #d29922; + --color-attention-emphasis: #9e6a03; + --color-attention-subtle: rgba(187,128,9,0.15); + --color-danger-fg: #f85149; + --color-danger-emphasis: #da3633; + --color-done-fg: #a371f7; + --color-done-emphasis: #8957e5; + } +} + +@media (prefers-color-scheme: light) { + .markdown-body, + [data-theme="light"] { + /*light*/ + color-scheme: light; + --color-prettylights-syntax-comment: #57606a; + --color-prettylights-syntax-constant: #0550ae; + --color-prettylights-syntax-entity: #6639ba; + --color-prettylights-syntax-storage-modifier-import: #24292f; + --color-prettylights-syntax-entity-tag: #116329; + --color-prettylights-syntax-keyword: #cf222e; + --color-prettylights-syntax-string: #0a3069; + --color-prettylights-syntax-variable: #953800; + --color-prettylights-syntax-brackethighlighter-unmatched: #82071e; + --color-prettylights-syntax-invalid-illegal-text: #f6f8fa; + --color-prettylights-syntax-invalid-illegal-bg: #82071e; + --color-prettylights-syntax-carriage-return-text: #f6f8fa; + --color-prettylights-syntax-carriage-return-bg: #cf222e; + --color-prettylights-syntax-string-regexp: #116329; + --color-prettylights-syntax-markup-list: #3b2300; + --color-prettylights-syntax-markup-heading: #0550ae; + --color-prettylights-syntax-markup-italic: #24292f; + --color-prettylights-syntax-markup-bold: #24292f; + --color-prettylights-syntax-markup-deleted-text: #82071e; + --color-prettylights-syntax-markup-deleted-bg: #ffebe9; + --color-prettylights-syntax-markup-inserted-text: #116329; + --color-prettylights-syntax-markup-inserted-bg: #dafbe1; + --color-prettylights-syntax-markup-changed-text: #953800; + --color-prettylights-syntax-markup-changed-bg: #ffd8b5; + --color-prettylights-syntax-markup-ignored-text: #eaeef2; + --color-prettylights-syntax-markup-ignored-bg: #0550ae; + --color-prettylights-syntax-meta-diff-range: #8250df; + --color-prettylights-syntax-brackethighlighter-angle: #57606a; + --color-prettylights-syntax-sublimelinter-gutter-mark: #8c959f; + --color-prettylights-syntax-constant-other-reference-link: #0a3069; + --color-fg-default: #1F2328; + --color-fg-muted: #656d76; + --color-fg-subtle: #6e7781; + 
--color-canvas-default: #ffffff; + --color-canvas-subtle: #f6f8fa; + --color-border-default: #d0d7de; + --color-border-muted: hsla(210,18%,87%,1); + --color-neutral-muted: rgba(175,184,193,0.2); + --color-accent-fg: #0969da; + --color-accent-emphasis: #0969da; + --color-success-fg: #1a7f37; + --color-success-emphasis: #1f883d; + --color-attention-fg: #9a6700; + --color-attention-emphasis: #9a6700; + --color-attention-subtle: #fff8c5; + --color-danger-fg: #d1242f; + --color-danger-emphasis: #cf222e; + --color-done-fg: #8250df; + --color-done-emphasis: #8250df; + } +} + +.markdown-body { + -ms-text-size-adjust: 100%; + -webkit-text-size-adjust: 100%; + margin: 0; + color: var(--color-fg-default); + background-color: var(--color-canvas-default); + font-family: -apple-system,BlinkMacSystemFont,"Segoe UI","Noto Sans",Helvetica,Arial,sans-serif,"Apple Color Emoji","Segoe UI Emoji"; + font-size: 16px; + line-height: 1.5; + word-wrap: break-word; +} + +.markdown-body .octicon { + display: inline-block; + fill: currentColor; + vertical-align: text-bottom; +} + +.markdown-body h1:hover .anchor .octicon-link:before, +.markdown-body h2:hover .anchor .octicon-link:before, +.markdown-body h3:hover .anchor .octicon-link:before, +.markdown-body h4:hover .anchor .octicon-link:before, +.markdown-body h5:hover .anchor .octicon-link:before, +.markdown-body h6:hover .anchor .octicon-link:before { + width: 16px; + height: 16px; + content: ' '; + display: inline-block; + background-color: currentColor; + -webkit-mask-image: url("data:image/svg+xml,"); + mask-image: url("data:image/svg+xml,"); +} + +.markdown-body details, +.markdown-body figcaption, +.markdown-body figure { + display: block; +} + +.markdown-body summary { + display: list-item; +} + +.markdown-body [hidden] { + display: none !important; +} + +.markdown-body a { + background-color: transparent; + color: var(--color-accent-fg); + text-decoration: none; +} + +.markdown-body abbr[title] { + border-bottom: none; + -webkit-text-decoration: underline dotted; + text-decoration: underline dotted; +} + +.markdown-body b, +.markdown-body strong { + font-weight: var(--base-text-weight-semibold, 600); +} + +.markdown-body dfn { + font-style: italic; +} + +.markdown-body h1 { + margin: .67em 0; + font-weight: var(--base-text-weight-semibold, 600); + padding-bottom: .3em; + font-size: 2em; + border-bottom: 1px solid var(--color-border-muted); +} + +.markdown-body mark { + background-color: var(--color-attention-subtle); + color: var(--color-fg-default); +} + +.markdown-body small { + font-size: 90%; +} + +.markdown-body sub, +.markdown-body sup { + font-size: 75%; + line-height: 0; + position: relative; + vertical-align: baseline; +} + +.markdown-body sub { + bottom: -0.25em; +} + +.markdown-body sup { + top: -0.5em; +} + +.markdown-body img { + border-style: none; + max-width: 100%; + box-sizing: content-box; + background-color: var(--color-canvas-default); +} + +.markdown-body code, +.markdown-body kbd, +.markdown-body pre, +.markdown-body samp { + font-family: monospace; + font-size: 1em; +} + +.markdown-body figure { + margin: 1em 40px; +} + +.markdown-body hr { + box-sizing: content-box; + overflow: hidden; + background: transparent; + border-bottom: 1px solid var(--color-border-muted); + height: .25em; + padding: 0; + margin: 24px 0; + background-color: var(--color-border-default); + border: 0; +} + +.markdown-body input { + font: inherit; + margin: 0; + overflow: visible; + font-family: inherit; + font-size: inherit; + line-height: inherit; +} + 
+.markdown-body [type=button], +.markdown-body [type=reset], +.markdown-body [type=submit] { + -webkit-appearance: button; + appearance: button; +} + +.markdown-body [type=checkbox], +.markdown-body [type=radio] { + box-sizing: border-box; + padding: 0; +} + +.markdown-body [type=number]::-webkit-inner-spin-button, +.markdown-body [type=number]::-webkit-outer-spin-button { + height: auto; +} + +.markdown-body [type=search]::-webkit-search-cancel-button, +.markdown-body [type=search]::-webkit-search-decoration { + -webkit-appearance: none; + appearance: none; +} + +.markdown-body ::-webkit-input-placeholder { + color: inherit; + opacity: .54; +} + +.markdown-body ::-webkit-file-upload-button { + -webkit-appearance: button; + appearance: button; + font: inherit; +} + +.markdown-body a:hover { + text-decoration: underline; +} + +.markdown-body ::placeholder { + color: var(--color-fg-subtle); + opacity: 1; +} + +.markdown-body hr::before { + display: table; + content: ""; +} + +.markdown-body hr::after { + display: table; + clear: both; + content: ""; +} + +.markdown-body table { + border-spacing: 0; + border-collapse: collapse; + display: block; + width: max-content; + max-width: 100%; + overflow: auto; +} + +.markdown-body td, +.markdown-body th { + padding: 0; +} + +.markdown-body details summary { + cursor: pointer; +} + +.markdown-body details:not([open])>*:not(summary) { + display: none !important; +} + +.markdown-body a:focus, +.markdown-body [role=button]:focus, +.markdown-body input[type=radio]:focus, +.markdown-body input[type=checkbox]:focus { + outline: 2px solid var(--color-accent-fg); + outline-offset: -2px; + box-shadow: none; +} + +.markdown-body a:focus:not(:focus-visible), +.markdown-body [role=button]:focus:not(:focus-visible), +.markdown-body input[type=radio]:focus:not(:focus-visible), +.markdown-body input[type=checkbox]:focus:not(:focus-visible) { + outline: solid 1px transparent; +} + +.markdown-body a:focus-visible, +.markdown-body [role=button]:focus-visible, +.markdown-body input[type=radio]:focus-visible, +.markdown-body input[type=checkbox]:focus-visible { + outline: 2px solid var(--color-accent-fg); + outline-offset: -2px; + box-shadow: none; +} + +.markdown-body a:not([class]):focus, +.markdown-body a:not([class]):focus-visible, +.markdown-body input[type=radio]:focus, +.markdown-body input[type=radio]:focus-visible, +.markdown-body input[type=checkbox]:focus, +.markdown-body input[type=checkbox]:focus-visible { + outline-offset: 0; +} + +.markdown-body kbd { + display: inline-block; + padding: 3px 5px; + font: 11px ui-monospace,SFMono-Regular,SF Mono,Menlo,Consolas,Liberation Mono,monospace; + line-height: 10px; + color: var(--color-fg-default); + vertical-align: middle; + background-color: var(--color-canvas-subtle); + border: solid 1px var(--color-neutral-muted); + border-bottom-color: var(--color-neutral-muted); + border-radius: 6px; + box-shadow: inset 0 -1px 0 var(--color-neutral-muted); +} + +.markdown-body h1, +.markdown-body h2, +.markdown-body h3, +.markdown-body h4, +.markdown-body h5, +.markdown-body h6 { + margin-top: 24px; + margin-bottom: 16px; + font-weight: var(--base-text-weight-semibold, 600); + line-height: 1.25; +} + +.markdown-body h2 { + font-weight: var(--base-text-weight-semibold, 600); + padding-bottom: .3em; + font-size: 1.5em; + border-bottom: 1px solid var(--color-border-muted); +} + +.markdown-body h3 { + font-weight: var(--base-text-weight-semibold, 600); + font-size: 1.25em; +} + +.markdown-body h4 { + font-weight: 
var(--base-text-weight-semibold, 600); + font-size: 1em; +} + +.markdown-body h5 { + font-weight: var(--base-text-weight-semibold, 600); + font-size: .875em; +} + +.markdown-body h6 { + font-weight: var(--base-text-weight-semibold, 600); + font-size: .85em; + color: var(--color-fg-muted); +} + +.markdown-body p { + margin-top: 0; + margin-bottom: 10px; +} + +.markdown-body blockquote { + margin: 0; + padding: 0 1em; + color: var(--color-fg-muted); + border-left: .25em solid var(--color-border-default); +} + +.markdown-body ul, +.markdown-body ol { + margin-top: 0; + margin-bottom: 0; + padding-left: 2em; +} + +.markdown-body ol ol, +.markdown-body ul ol { + list-style-type: lower-roman; +} + +.markdown-body ul ul ol, +.markdown-body ul ol ol, +.markdown-body ol ul ol, +.markdown-body ol ol ol { + list-style-type: lower-alpha; +} + +.markdown-body dd { + margin-left: 0; +} + +.markdown-body tt, +.markdown-body code, +.markdown-body samp { + font-family: ui-monospace,SFMono-Regular,SF Mono,Menlo,Consolas,Liberation Mono,monospace; + font-size: 12px; +} + +.markdown-body pre { + margin-top: 0; + margin-bottom: 0; + font-family: ui-monospace,SFMono-Regular,SF Mono,Menlo,Consolas,Liberation Mono,monospace; + font-size: 12px; + word-wrap: normal; +} + +.markdown-body .octicon { + display: inline-block; + overflow: visible !important; + vertical-align: text-bottom; + fill: currentColor; +} + +.markdown-body input::-webkit-outer-spin-button, +.markdown-body input::-webkit-inner-spin-button { + margin: 0; + -webkit-appearance: none; + appearance: none; +} + +.markdown-body .mr-2 { + margin-right: var(--base-size-8, 8px) !important; +} + +.markdown-body::before { + display: table; + content: ""; +} + +.markdown-body::after { + display: table; + clear: both; + content: ""; +} + +.markdown-body>*:first-child { + margin-top: 0 !important; +} + +.markdown-body>*:last-child { + margin-bottom: 0 !important; +} + +.markdown-body a:not([href]) { + color: inherit; + text-decoration: none; +} + +.markdown-body .absent { + color: var(--color-danger-fg); +} + +.markdown-body .anchor { + float: left; + padding-right: 4px; + margin-left: -20px; + line-height: 1; +} + +.markdown-body .anchor:focus { + outline: none; +} + +.markdown-body p, +.markdown-body blockquote, +.markdown-body ul, +.markdown-body ol, +.markdown-body dl, +.markdown-body table, +.markdown-body pre, +.markdown-body details { + margin-top: 0; + margin-bottom: 16px; +} + +.markdown-body blockquote>:first-child { + margin-top: 0; +} + +.markdown-body blockquote>:last-child { + margin-bottom: 0; +} + +.markdown-body h1 .octicon-link, +.markdown-body h2 .octicon-link, +.markdown-body h3 .octicon-link, +.markdown-body h4 .octicon-link, +.markdown-body h5 .octicon-link, +.markdown-body h6 .octicon-link { + color: var(--color-fg-default); + vertical-align: middle; + visibility: hidden; +} + +.markdown-body h1:hover .anchor, +.markdown-body h2:hover .anchor, +.markdown-body h3:hover .anchor, +.markdown-body h4:hover .anchor, +.markdown-body h5:hover .anchor, +.markdown-body h6:hover .anchor { + text-decoration: none; +} + +.markdown-body h1:hover .anchor .octicon-link, +.markdown-body h2:hover .anchor .octicon-link, +.markdown-body h3:hover .anchor .octicon-link, +.markdown-body h4:hover .anchor .octicon-link, +.markdown-body h5:hover .anchor .octicon-link, +.markdown-body h6:hover .anchor .octicon-link { + visibility: visible; +} + +.markdown-body h1 tt, +.markdown-body h1 code, +.markdown-body h2 tt, +.markdown-body h2 code, +.markdown-body h3 tt, 
+.markdown-body h3 code, +.markdown-body h4 tt, +.markdown-body h4 code, +.markdown-body h5 tt, +.markdown-body h5 code, +.markdown-body h6 tt, +.markdown-body h6 code { + padding: 0 .2em; + font-size: inherit; +} + +.markdown-body summary h1, +.markdown-body summary h2, +.markdown-body summary h3, +.markdown-body summary h4, +.markdown-body summary h5, +.markdown-body summary h6 { + display: inline-block; +} + +.markdown-body summary h1 .anchor, +.markdown-body summary h2 .anchor, +.markdown-body summary h3 .anchor, +.markdown-body summary h4 .anchor, +.markdown-body summary h5 .anchor, +.markdown-body summary h6 .anchor { + margin-left: -40px; +} + +.markdown-body summary h1, +.markdown-body summary h2 { + padding-bottom: 0; + border-bottom: 0; +} + +.markdown-body ul.no-list, +.markdown-body ol.no-list { + padding: 0; + list-style-type: none; +} + +.markdown-body ol[type="a s"] { + list-style-type: lower-alpha; +} + +.markdown-body ol[type="A s"] { + list-style-type: upper-alpha; +} + +.markdown-body ol[type="i s"] { + list-style-type: lower-roman; +} + +.markdown-body ol[type="I s"] { + list-style-type: upper-roman; +} + +.markdown-body ol[type="1"] { + list-style-type: decimal; +} + +.markdown-body div>ol:not([type]) { + list-style-type: decimal; +} + +.markdown-body ul ul, +.markdown-body ul ol, +.markdown-body ol ol, +.markdown-body ol ul { + margin-top: 0; + margin-bottom: 0; +} + +.markdown-body li>p { + margin-top: 16px; +} + +.markdown-body li+li { + margin-top: .25em; +} + +.markdown-body dl { + padding: 0; +} + +.markdown-body dl dt { + padding: 0; + margin-top: 16px; + font-size: 1em; + font-style: italic; + font-weight: var(--base-text-weight-semibold, 600); +} + +.markdown-body dl dd { + padding: 0 16px; + margin-bottom: 16px; +} + +.markdown-body table th { + font-weight: var(--base-text-weight-semibold, 600); +} + +.markdown-body table th, +.markdown-body table td { + padding: 6px 13px; + border: 1px solid var(--color-border-default); +} + +.markdown-body table td>:last-child { + margin-bottom: 0; +} + +.markdown-body table tr { + background-color: var(--color-canvas-default); + border-top: 1px solid var(--color-border-muted); +} + +.markdown-body table tr:nth-child(2n) { + background-color: var(--color-canvas-subtle); +} + +.markdown-body table img { + background-color: transparent; +} + +.markdown-body img[align=right] { + padding-left: 20px; +} + +.markdown-body img[align=left] { + padding-right: 20px; +} + +.markdown-body .emoji { + max-width: none; + vertical-align: text-top; + background-color: transparent; +} + +.markdown-body span.frame { + display: block; + overflow: hidden; +} + +.markdown-body span.frame>span { + display: block; + float: left; + width: auto; + padding: 7px; + margin: 13px 0 0; + overflow: hidden; + border: 1px solid var(--color-border-default); +} + +.markdown-body span.frame span img { + display: block; + float: left; +} + +.markdown-body span.frame span span { + display: block; + padding: 5px 0 0; + clear: both; + color: var(--color-fg-default); +} + +.markdown-body span.align-center { + display: block; + overflow: hidden; + clear: both; +} + +.markdown-body span.align-center>span { + display: block; + margin: 13px auto 0; + overflow: hidden; + text-align: center; +} + +.markdown-body span.align-center span img { + margin: 0 auto; + text-align: center; +} + +.markdown-body span.align-right { + display: block; + overflow: hidden; + clear: both; +} + +.markdown-body span.align-right>span { + display: block; + margin: 13px 0 0; + overflow: 
hidden; + text-align: right; +} + +.markdown-body span.align-right span img { + margin: 0; + text-align: right; +} + +.markdown-body span.float-left { + display: block; + float: left; + margin-right: 13px; + overflow: hidden; +} + +.markdown-body span.float-left span { + margin: 13px 0 0; +} + +.markdown-body span.float-right { + display: block; + float: right; + margin-left: 13px; + overflow: hidden; +} + +.markdown-body span.float-right>span { + display: block; + margin: 13px auto 0; + overflow: hidden; + text-align: right; +} + +.markdown-body code, +.markdown-body tt { + padding: .2em .4em; + margin: 0; + font-size: 85%; + white-space: break-spaces; + background-color: var(--color-neutral-muted); + border-radius: 6px; +} + +.markdown-body code br, +.markdown-body tt br { + display: none; +} + +.markdown-body del code { + text-decoration: inherit; +} + +.markdown-body samp { + font-size: 85%; +} + +.markdown-body pre code { + font-size: 100%; +} + +.markdown-body pre>code { + padding: 0; + margin: 0; + word-break: normal; + white-space: pre; + background: transparent; + border: 0; +} + +.markdown-body .highlight { + margin-bottom: 16px; +} + +.markdown-body .highlight pre { + margin-bottom: 0; + word-break: normal; +} + +.markdown-body .highlight pre, +.markdown-body pre { + padding: 16px; + overflow: auto; + font-size: 85%; + line-height: 1.45; + color: var(--color-fg-default); + background-color: var(--color-canvas-subtle); + border-radius: 6px; +} + +.markdown-body pre code, +.markdown-body pre tt { + display: inline; + max-width: auto; + padding: 0; + margin: 0; + overflow: visible; + line-height: inherit; + word-wrap: normal; + background-color: transparent; + border: 0; +} + +.markdown-body .csv-data td, +.markdown-body .csv-data th { + padding: 5px; + overflow: hidden; + font-size: 12px; + line-height: 1; + text-align: left; + white-space: nowrap; +} + +.markdown-body .csv-data .blob-num { + padding: 10px 8px 9px; + text-align: right; + background: var(--color-canvas-default); + border: 0; +} + +.markdown-body .csv-data tr { + border-top: 0; +} + +.markdown-body .csv-data th { + font-weight: var(--base-text-weight-semibold, 600); + background: var(--color-canvas-subtle); + border-top: 0; +} + +.markdown-body [data-footnote-ref]::before { + content: "["; +} + +.markdown-body [data-footnote-ref]::after { + content: "]"; +} + +.markdown-body .footnotes { + font-size: 12px; + color: var(--color-fg-muted); + border-top: 1px solid var(--color-border-default); +} + +.markdown-body .footnotes ol { + padding-left: 16px; +} + +.markdown-body .footnotes ol ul { + display: inline-block; + padding-left: 16px; + margin-top: 16px; +} + +.markdown-body .footnotes li { + position: relative; +} + +.markdown-body .footnotes li:target::before { + position: absolute; + top: -8px; + right: -8px; + bottom: -8px; + left: -24px; + pointer-events: none; + content: ""; + border: 2px solid var(--color-accent-emphasis); + border-radius: 6px; +} + +.markdown-body .footnotes li:target { + color: var(--color-fg-default); +} + +.markdown-body .footnotes .data-footnote-backref g-emoji { + font-family: monospace; +} + +.markdown-body .pl-c { + color: var(--color-prettylights-syntax-comment); +} + +.markdown-body .pl-c1, +.markdown-body .pl-s .pl-v { + color: var(--color-prettylights-syntax-constant); +} + +.markdown-body .pl-e, +.markdown-body .pl-en { + color: var(--color-prettylights-syntax-entity); +} + +.markdown-body .pl-smi, +.markdown-body .pl-s .pl-s1 { + color: 
var(--color-prettylights-syntax-storage-modifier-import); +} + +.markdown-body .pl-ent { + color: var(--color-prettylights-syntax-entity-tag); +} + +.markdown-body .pl-k { + color: var(--color-prettylights-syntax-keyword); +} + +.markdown-body .pl-s, +.markdown-body .pl-pds, +.markdown-body .pl-s .pl-pse .pl-s1, +.markdown-body .pl-sr, +.markdown-body .pl-sr .pl-cce, +.markdown-body .pl-sr .pl-sre, +.markdown-body .pl-sr .pl-sra { + color: var(--color-prettylights-syntax-string); +} + +.markdown-body .pl-v, +.markdown-body .pl-smw { + color: var(--color-prettylights-syntax-variable); +} + +.markdown-body .pl-bu { + color: var(--color-prettylights-syntax-brackethighlighter-unmatched); +} + +.markdown-body .pl-ii { + color: var(--color-prettylights-syntax-invalid-illegal-text); + background-color: var(--color-prettylights-syntax-invalid-illegal-bg); +} + +.markdown-body .pl-c2 { + color: var(--color-prettylights-syntax-carriage-return-text); + background-color: var(--color-prettylights-syntax-carriage-return-bg); +} + +.markdown-body .pl-sr .pl-cce { + font-weight: bold; + color: var(--color-prettylights-syntax-string-regexp); +} + +.markdown-body .pl-ml { + color: var(--color-prettylights-syntax-markup-list); +} + +.markdown-body .pl-mh, +.markdown-body .pl-mh .pl-en, +.markdown-body .pl-ms { + font-weight: bold; + color: var(--color-prettylights-syntax-markup-heading); +} + +.markdown-body .pl-mi { + font-style: italic; + color: var(--color-prettylights-syntax-markup-italic); +} + +.markdown-body .pl-mb { + font-weight: bold; + color: var(--color-prettylights-syntax-markup-bold); +} + +.markdown-body .pl-md { + color: var(--color-prettylights-syntax-markup-deleted-text); + background-color: var(--color-prettylights-syntax-markup-deleted-bg); +} + +.markdown-body .pl-mi1 { + color: var(--color-prettylights-syntax-markup-inserted-text); + background-color: var(--color-prettylights-syntax-markup-inserted-bg); +} + +.markdown-body .pl-mc { + color: var(--color-prettylights-syntax-markup-changed-text); + background-color: var(--color-prettylights-syntax-markup-changed-bg); +} + +.markdown-body .pl-mi2 { + color: var(--color-prettylights-syntax-markup-ignored-text); + background-color: var(--color-prettylights-syntax-markup-ignored-bg); +} + +.markdown-body .pl-mdr { + font-weight: bold; + color: var(--color-prettylights-syntax-meta-diff-range); +} + +.markdown-body .pl-ba { + color: var(--color-prettylights-syntax-brackethighlighter-angle); +} + +.markdown-body .pl-sg { + color: var(--color-prettylights-syntax-sublimelinter-gutter-mark); +} + +.markdown-body .pl-corl { + text-decoration: underline; + color: var(--color-prettylights-syntax-constant-other-reference-link); +} + +.markdown-body g-emoji { + display: inline-block; + min-width: 1ch; + font-family: "Apple Color Emoji","Segoe UI Emoji","Segoe UI Symbol"; + font-size: 1em; + font-style: normal !important; + font-weight: var(--base-text-weight-normal, 400); + line-height: 1; + vertical-align: -0.075em; +} + +.markdown-body g-emoji img { + width: 1em; + height: 1em; +} + +.markdown-body .task-list-item { + list-style-type: none; +} + +.markdown-body .task-list-item label { + font-weight: var(--base-text-weight-normal, 400); +} + +.markdown-body .task-list-item.enabled label { + cursor: pointer; +} + +.markdown-body .task-list-item+.task-list-item { + margin-top: 4px; +} + +.markdown-body .task-list-item .handle { + display: none; +} + +.markdown-body .task-list-item-checkbox { + margin: 0 .2em .25em -1.4em; + vertical-align: middle; +} + 
+.markdown-body .contains-task-list:dir(rtl) .task-list-item-checkbox { + margin: 0 -1.6em .25em .2em; +} + +.markdown-body .contains-task-list { + position: relative; +} + +.markdown-body .contains-task-list:hover .task-list-item-convert-container, +.markdown-body .contains-task-list:focus-within .task-list-item-convert-container { + display: block; + width: auto; + height: 24px; + overflow: visible; + clip: auto; +} + +.markdown-body ::-webkit-calendar-picker-indicator { + filter: invert(50%); +} + +.markdown-body .markdown-alert { + padding: var(--base-size-8) var(--base-size-16); + margin-bottom: 16px; + color: inherit; + border-left: .25em solid var(--color-border-default); +} + +.markdown-body .markdown-alert>:first-child { + margin-top: 0; +} + +.markdown-body .markdown-alert>:last-child { + margin-bottom: 0; +} + +.markdown-body .markdown-alert .markdown-alert-title { + display: flex; + font-weight: var(--base-text-weight-medium, 500); + align-items: center; + line-height: 1; +} + +.markdown-body .markdown-alert.markdown-alert-note { + border-left-color: var(--color-accent-emphasis); +} + +.markdown-body .markdown-alert.markdown-alert-note .markdown-alert-title { + color: var(--color-accent-fg); +} + +.markdown-body .markdown-alert.markdown-alert-important { + border-left-color: var(--color-done-emphasis); +} + +.markdown-body .markdown-alert.markdown-alert-important .markdown-alert-title { + color: var(--color-done-fg); +} + +.markdown-body .markdown-alert.markdown-alert-warning { + border-left-color: var(--color-attention-emphasis); +} + +.markdown-body .markdown-alert.markdown-alert-warning .markdown-alert-title { + color: var(--color-attention-fg); +} + +.markdown-body .markdown-alert.markdown-alert-tip { + border-left-color: var(--color-success-emphasis); +} + +.markdown-body .markdown-alert.markdown-alert-tip .markdown-alert-title { + color: var(--color-success-fg); +} + +.markdown-body .markdown-alert.markdown-alert-caution { + border-left-color: var(--color-danger-emphasis); +} + +.markdown-body .markdown-alert.markdown-alert-caution .markdown-alert-title { + color: var(--color-danger-fg); +} diff --git a/mantis/acquisition/scripts/measure_psf.py b/mantis/acquisition/scripts/measure_psf.py index 7559df88..d3f9b3b5 100644 --- a/mantis/acquisition/scripts/measure_psf.py +++ b/mantis/acquisition/scripts/measure_psf.py @@ -1,4 +1,7 @@ # %% +import datetime +import pickle +import shutil import webbrowser from pathlib import Path @@ -13,23 +16,235 @@ from napari_psf_analysis.psf_analysis.extract.BeadExtractor import BeadExtractor from napari_psf_analysis.psf_analysis.image import Calibrated3DImage from napari_psf_analysis.psf_analysis.psf import PSF -from numpy.typing import ArrayLike from scipy.ndimage import uniform_filter from skimage.feature import peak_local_max -# %% -data_dir = Path(r'Z:\2022_12_22_LS_after_SL2') +def analyze_psf(bead: Calibrated3DImage): + psf = PSF(image=bead) + try: + psf.analyze() + return psf.get_summary_dict() + except Exception: + # skip over beads where psf analysis failed + return {} + + +def adjust_fig(fig, ax): + for _ax in ax.flatten(): + _ax.set_xticks([]) + _ax.set_yticks([]) + + plt.tight_layout() + plt.subplots_adjust(wspace=0.5) + fig_size = fig.get_size_inches() + fig_size_scaling = 5 / fig_size[0] # set width to 5 inches + fig.set_figwidth(fig_size[0] * fig_size_scaling) + fig.set_figheight(fig_size[1] * fig_size_scaling) + + +def plot_psf_slices( + plots_dir: str, beads: list, zyx_scale: tuple, axis_labels: tuple, bead_numbers: list +): 
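+    # Save XY, XZ, and YZ midplane slices through the given bead patches as
+    # PNGs under plots_dir; returns the three figure paths for the HTML report.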
+ num_beads = len(beads) + scale_Z, scale_Y, scale_X = zyx_scale + shape_Z, shape_Y, shape_X = beads[0].data.shape + cmap = 'viridis' + + bead_xy_psf_path = plots_dir / 'beads_xy_psf.png' + fig, ax = plt.subplots(1, num_beads) + for _ax, bead, bead_number in zip(ax, beads, bead_numbers): + _ax.imshow( + bead.data[shape_Z // 2, :, :], cmap=cmap, origin='lower', aspect=scale_Y / scale_X + ) + _ax.set_xlabel(axis_labels[-1]) + _ax.set_ylabel(axis_labels[-2]) + _ax.set_title(f'Bead: {bead_number}') + adjust_fig(fig, ax) + fig.set_figheight(2) + fig.savefig(bead_xy_psf_path) + + bead_xz_psf_path = plots_dir / 'beads_xz_psf.png' + fig, ax = plt.subplots(1, num_beads) + for _ax, bead in zip(ax, beads): + _ax.imshow( + bead.data[:, shape_Y // 2, :], cmap=cmap, origin='lower', aspect=scale_Z / scale_X + ) + _ax.set_xlabel(axis_labels[-1]) + _ax.set_ylabel(axis_labels[-3]) + adjust_fig(fig, ax) + fig.savefig(bead_xz_psf_path) + + bead_yz_psf_path = plots_dir / 'beads_yz_psf.png' + fig, ax = plt.subplots(1, num_beads) + for _ax, bead in zip(ax, beads): + _ax.imshow( + bead.data[:, :, shape_X // 2], cmap=cmap, origin='lower', aspect=scale_Z / scale_Y + ) + _ax.set_xlabel(axis_labels[-2]) + _ax.set_ylabel(axis_labels[-3]) + adjust_fig(fig, ax) + fig.savefig(bead_yz_psf_path) + + return bead_xy_psf_path, bead_xz_psf_path, bead_yz_psf_path + + +def plot_fwhm_vs_acq_axes(plots_dir: str, x, y, z, fwhm_x, fwhm_y, fwhm_z, axis_labels: tuple): + def plot_fwhm_vs_acq_axis(out_dir: str, x, fwhm_x, fwhm_y, fwhm_z, x_axis_label: str): + fig, ax = plt.subplots(1, 1) + ax.plot(x, fwhm_x, 'o', x, fwhm_y, 'o') + ax.set_ylabel('{} and {} FWHM (um)'.format(*axis_labels[1:][::-1])) + ax.set_xlabel('{} position (um)'.format(x_axis_label)) + + ax2 = ax.twinx() + ax2.plot(x, fwhm_z, 'o', color='green') + ax2.set_ylabel('{} FWHM (um)'.format(axis_labels[0]), color='green') + ax2.tick_params(axis='y', labelcolor='green') + fig.savefig(out_dir) + + out_dirs = [plots_dir / f'fwhm_vs_{axis}.png' for axis in axis_labels] + for our_dir, x_axis, x_axis_label in zip(out_dirs, (z, y, x), axis_labels): + plot_fwhm_vs_acq_axis(our_dir, x_axis, fwhm_x, fwhm_y, fwhm_z, x_axis_label) + + return out_dirs + + +def plot_psf_amp(plots_dir: str, x, y, z, amp, axis_labels: tuple): + psf_amp_xy_path = plots_dir / 'psf_amp_xy.png' + fig, ax = plt.subplots(1, 1) + + sc = ax.scatter( + x, + y, + c=amp, + vmin=np.quantile(amp, 0.01), + vmax=np.quantile(amp, 0.99), + cmap='summer', + ) + ax.set_aspect('equal') + ax.set_xlabel(f'{axis_labels[-1]} (um)') + ax.set_ylabel(f'{axis_labels[-2]} (um)') + plt.colorbar(sc, label='Amplitude (a.u.)') + fig.savefig(psf_amp_xy_path) + + psf_amp_z_path = plots_dir / 'psf_amp_z.png' + fig, ax = plt.subplots(1, 1) + ax.scatter(z, amp) + ax.set_xlabel(f'{axis_labels[-3]} (um)') + ax.set_ylabel('Amplitude (a.u.)') + fig.savefig(psf_amp_z_path) + + return psf_amp_xy_path, psf_amp_z_path + + +def generate_html_report( + dataset_name: str, + data_path: str, + dataset_scale: tuple, + num_beads_total_good_bad: tuple, + fwhm_3d_mean: tuple, + fwhm_3d_std: tuple, + fwhm_pc_mean: tuple, + bead_psf_slices_paths: tuple, + fwhm_vs_acq_axes_paths: tuple, + psf_amp_paths: tuple, + axis_labels: tuple, +): + + # string indents need to be like that, otherwise this turns into a code block + report_str = f''' +# PSF Analysis + +## Overview + +### Dataset + +* Name: `{dataset_name}` +* Path: `{data_path}` +* Scale: {dataset_scale[::-1]} um +* Date analyzed: {datetime.datetime.now().strftime("%Y-%m-%d %H:%M:%S")} + +### Number of beads + 
+* Detected: {num_beads_total_good_bad[0]}
+* Analyzed: {num_beads_total_good_bad[1]}
+* Skipped: {num_beads_total_good_bad[2]}
+
+### FWHM
+
+* {axis_labels[-1]}: {fwhm_3d_mean[-1]:.3f} ± {fwhm_3d_std[-1]:.3f} um
+* {axis_labels[-2]}: {fwhm_3d_mean[-2]:.3f} ± {fwhm_3d_std[-2]:.3f} um
+* {axis_labels[-3]}: {fwhm_3d_mean[-3]:.3f} ± {fwhm_3d_std[-3]:.3f} um
+* PC: {'{:.3f} um, {:.3f} um, {:.3f} um'.format(*fwhm_pc_mean)}
+
+## Representative bead PSF images
+![beads xy psf]({bead_psf_slices_paths[0]})
+![beads xz psf]({bead_psf_slices_paths[1]})
+![beads yz psf]({bead_psf_slices_paths[2]})
+
+## FWHM versus {axis_labels[0]} position
+![fwhm vs z]({fwhm_vs_acq_axes_paths[0]} "fwhm vs z")
+
+## FWHM versus {axis_labels[1]} position
+![fwhm vs y]({fwhm_vs_acq_axes_paths[1]} "fwhm vs y")
+
+## FWHM versus {axis_labels[2]} position
+![fwhm vs x]({fwhm_vs_acq_axes_paths[2]} "fwhm vs x")
+
+## PSF amplitude versus {axis_labels[-1]}-{axis_labels[-2]} position
+![psf amp xy]({psf_amp_paths[0]} "psf amp xy")
+
+## PSF amplitude versus {axis_labels[-3]} position
+![psf amp z]({psf_amp_paths[1]} "psf amp z")
+'''
+
+    css_style = '''
+<link rel="stylesheet" type="text/css" href="github-markdown.css">
+<style>
+    .markdown-body { box-sizing: border-box; min-width: 200px; max-width: 980px; margin: 0 auto; padding: 45px; }
+</style>
+'''
+
+    html = markdown.markdown(report_str)
+    formatted_html = f'''
+{css_style}
+<article class="markdown-body">
+{html}
+</article>
+'''.strip() + + return formatted_html + + +# %% Load data - swap with data acquisition block + +data_dir = Path('/Users/ivan.ivanov/Documents/images_local/') dataset = 'epi_beads_100nm_fl_mount_after_SL2_1' # data_path = data_dir / dataset / (dataset+'_MMStack_Pos0.ome.tif') -data_path = r'Z:\2022_12_22_LS_after_SL2\epi_beads_100nm_fl_mount_after_SL2_1\LS_beads_100nm_fl_mount_after_SL2_1_MMStack_Pos0.ome.tif' +psf_analysis_path = data_dir / dataset / 'psf_analysis' +data_path = data_dir / dataset / 'LS_beads_100nm_fl_mount_after_SL2_1_MMStack_Pos0.ome.tif' zyx_data = tifffile.imread(data_path) scale = (0.250, 0.069, 0.069) # in um axis_labels = ("Z", "Y", "X") -# %% +psf_analysis_path.mkdir(exist_ok=True) + +# %% Find peaks # runs in about 10 seconds, sensitive to parameters # finds ~310 peaks @@ -41,17 +256,16 @@ exclude_border=(3, 10, 10), # in zyx ) -# %% +# %% Visualize in napari + viewer = napari.Viewer() viewer.add_image(zyx_data) viewer.add_points(points, name='peaks local max', size=12, symbol='ring', edge_color='yellow') -# %% +# %% Extract and analyze bead patches patch_size = (scale[0] * 11, scale[1] * 15, scale[2] * 15) -# round to nearest 0.5 um -# patch_size = np.round(np.asarray(patch_size) / 0.5) * 0.5 # extract bead patches bead_extractor = BeadExtractor( @@ -61,20 +275,11 @@ beads = bead_extractor.extract_beads(points=points) bead_offsets = np.asarray([bead.offset for bead in beads]) - -def analyze_psf(bead: Calibrated3DImage): - psf = PSF(image=bead) - try: - psf.analyze() - return psf.get_summary_dict() - except Exception: - # skip over beads where psf analysis failed - return {} - - # analyze bead patches +num_beads = len(beads) results = [analyze_psf(bead) for bead in beads] num_failed = sum([result == {} for result in results]) +num_successful = num_beads - num_failed df = pd.DataFrame.from_records(results) df['z_mu'] += bead_offsets[:, 0] * scale[0] @@ -82,126 +287,65 @@ def analyze_psf(bead: Calibrated3DImage): df['x_mu'] += bead_offsets[:, 2] * scale[2] df = df.dropna() -# %% - - -def visualize_psf(out_dir: str, zyx_data: ArrayLike, zyx_scale: tuple, axis_labels: tuple): - scale_Z, scale_Y, scale_X = zyx_scale - shape_Z, shape_Y, shape_X = zyx_data.shape - cmap = 'viridis' - fig, ax = plt.subplots(1, 3) - - ax[0].imshow( - zyx_data[shape_Z // 2, :, :], cmap=cmap, origin='lower', aspect=scale_Y / scale_X - ) - ax[0].set_xlabel(axis_labels[-1]) - ax[0].set_ylabel(axis_labels[-2]) - - ax[1].imshow( - zyx_data[:, shape_Y // 2, :], cmap=cmap, origin='lower', aspect=scale_Z / scale_X - ) - ax[1].set_xlabel(axis_labels[-1]) - ax[1].set_ylabel(axis_labels[-3]) - - ax[2].imshow( - zyx_data[:, :, shape_X // 2], cmap=cmap, origin='lower', aspect=scale_Z / scale_Y - ) - ax[2].set_xlabel(axis_labels[-2]) - ax[2].set_ylabel(axis_labels[-3]) - - for _ax in ax.flatten(): - _ax.set_xticks([]) - _ax.set_yticks([]) - - plt.tight_layout() - plt.subplots_adjust(left=0.12, wspace=0.5) - fig_size = fig.get_size_inches() - fig_size_scaling = 3 / fig_size[0] # set width to 3 inches - fig.set_figwidth(fig_size[0] * fig_size_scaling) - fig.set_figheight(fig_size[1] * fig_size_scaling) - fig.savefig(out_dir) - - -def plot_fwhm_vs_z(out_dir: str, z, fwhm_x, fwhm_y, fwhm_z): - fig, ax = plt.subplots(1, 1) - - ax.plot(z, fwhm_x, 'o', z, fwhm_y, 'o') - ax.set_ylabel('X and Y FWHM (um)') - ax.set_xlabel('Z position (um)') - - ax2 = ax.twinx() - ax2.plot(z, fwhm_z, 'o', color='green') - ax2.set_ylabel('Z FWHM (um)', color='green') - ax2.tick_params(axis='y', labelcolor='green') - - 
fig.savefig(out_dir) +# %% Generate plots - -# %% generate plots - -plots_dir = data_dir / 'plots' +plots_dir = psf_analysis_path / 'plots' plots_dir.mkdir(parents=True, exist_ok=True) +random_bead_number = sorted(np.random.choice(len(df), 3)) -random_bead_number = np.random.choice(len(df), 3) -bead_psf_paths = [''] * 3 -for i, path, bead_number in zip(range(3), bead_psf_paths, random_bead_number): - path = plots_dir / f'bead_psf_{i}.png' - bead_psf_paths[i] = path - visualize_psf(path, beads[bead_number].data, scale, axis_labels) +bead_psf_slices_paths = plot_psf_slices( + plots_dir, [beads[i] for i in random_bead_number], scale, axis_labels, random_bead_number +) -fwhm_vs_z_3d_path = plots_dir / 'fwhm_vs_z_3d.png' -plot_fwhm_vs_z( - fwhm_vs_z_3d_path, +fwhm_vs_acq_axes_paths = plot_fwhm_vs_acq_axes( + plots_dir, + df['x_mu'].values, + df['y_mu'].values, df['z_mu'].values, *[df[col].values for col in ('zyx_x_fwhm', 'zyx_y_fwhm', 'zyx_z_fwhm')], + axis_labels, ) -fwhm_vs_z_1d_path = plots_dir / 'fwhm_vs_z_1d.png' -plot_fwhm_vs_z( - fwhm_vs_z_1d_path, +psf_amp_paths = plot_psf_amp( + plots_dir, + df['x_mu'].values, + df['y_mu'].values, df['z_mu'].values, - *[df[col].values for col in ('x_fwhm', 'y_fwhm', 'z_fwhm')], + df['zyx_amp'].values, + axis_labels, ) -mean_1d_fwhm = [df[col].mean() for col in ('x_fwhm', 'y_fwhm', 'z_fwhm')] -mean_3d_fwhm = [df[col].mean() for col in ('zyx_x_fwhm', 'zyx_y_fwhm', 'zyx_z_fwhm')] -mean_pc_fwhm = [df[col].mean() for col in ('zyx_pc3_fwhm', 'zyx_pc2_fwhm', 'zyx_pc1_fwhm')] - -# %% - -# generate html report -html = markdown.markdown( - f''' -# PSF Analysis - -## Overview -Dataset name: {dataset} - -Scale: {scale} - -Mean FWHM: - -* 1D: {'({:.3f}, {:.3f}, {:.3f})'.format(*mean_1d_fwhm)} -* 3D: {'({:.3f}, {:.3f}, {:.3f})'.format(*mean_3d_fwhm)} -* PC: {'({:.3f}, {:.3f}, {:.3f})'.format(*mean_pc_fwhm)} - -## Representative bead PSF images -![bead psf 1]({bead_psf_paths[0]} "bead psf {random_bead_number[0]}") -![bead psf 2]({bead_psf_paths[1]} "bead psf {random_bead_number[1]}") -![bead psf 3]({bead_psf_paths[2]} "bead psf {random_bead_number[2]}") - -## XYZ FWHM versus Z position (3D) -![fwhm vs z]({fwhm_vs_z_3d_path} "fwhm vs z") - -''' +# mean_1d_fwhm = [df[col].mean() for col in ('x_fwhm', 'y_fwhm', 'z_fwhm')] +fwhm_3d_mean = [df[col].mean() for col in ('zyx_z_fwhm', 'zyx_y_fwhm', 'zyx_x_fwhm')] +fwhm_3d_std = [df[col].std() for col in ('zyx_z_fwhm', 'zyx_y_fwhm', 'zyx_x_fwhm')] +fwhm_pc_mean = [df[col].mean() for col in ('zyx_pc3_fwhm', 'zyx_pc2_fwhm', 'zyx_pc1_fwhm')] + +# %% Generate HTML report + +html_report = generate_html_report( + dataset, + data_path.parent, + scale, + (num_beads, num_successful, num_failed), + fwhm_3d_mean, + fwhm_3d_std, + fwhm_pc_mean, + bead_psf_slices_paths, + fwhm_vs_acq_axes_paths, + psf_amp_paths, + axis_labels, ) -# %% - # save html file and show in browser -html_file_path = Path(r'C:\Users\labelfree\Documents\temphtml.html') +with open(psf_analysis_path / 'peaks.pkl', 'wb') as file: + pickle.dump(points, file) + +df.to_csv(psf_analysis_path / 'psf_analysis.csv', index=False) + +shutil.copy('github-markdown.css', psf_analysis_path) +html_file_path = psf_analysis_path / ('psf_analysis_report.html') with open(html_file_path, 'w') as file: - file.write(html) + file.write(html_report) webbrowser.open('file://' + str(html_file_path)) From 9be0c1ae10d667becbd83054ccc14163d84c6270 Mon Sep 17 00:00:00 2001 From: Ivan Ivanov Date: Tue, 30 Jan 2024 12:37:45 -0800 Subject: [PATCH 04/57] refactor psf analysis --- 
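This patch moves peak detection, PSF fitting, plotting, and report generation
out of the acquisition script into mantis/analysis/analyze_psf.py. As a rough
sketch of the refactored calling convention, inferred only from the imports
and call sites in the diff below (the new module's source is not shown in this
excerpt; 'path/to/beads_dataset' is a placeholder):

    from iohub.reader import read_micromanager
    from mantis.analysis.analyze_psf import analyze_psf, detect_peaks

    ds = read_micromanager('path/to/beads_dataset')  # raw Micro-Manager dataset
    zyx_data = ds.get_array(0)[0, 0]                 # first position, T = C = 0
    peaks = detect_peaks(zyx_data, raw=True)         # ZYX peak coordinates
    beads, df_gaussian_fit, df_1d_peak_width = analyze_psf(
        zyx_data=zyx_data, points=peaks, scale=(0.1565, 0.116, 0.116)  # um
    )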
mantis/acquisition/scripts/measure_psf.py | 351 +++++----------------- mantis/analysis/analyze_psf.py | 317 +++++++++++++++++++ 2 files changed, 396 insertions(+), 272 deletions(-) create mode 100644 mantis/analysis/analyze_psf.py diff --git a/mantis/acquisition/scripts/measure_psf.py b/mantis/acquisition/scripts/measure_psf.py index d3f9b3b5..432cc707 100644 --- a/mantis/acquisition/scripts/measure_psf.py +++ b/mantis/acquisition/scripts/measure_psf.py @@ -1,324 +1,128 @@ # %% -import datetime import pickle import shutil import webbrowser from pathlib import Path -import markdown -import matplotlib.pyplot as plt import napari import numpy as np -import pandas as pd -import tifffile - -from napari_psf_analysis.psf_analysis.extract.BeadExtractor import BeadExtractor -from napari_psf_analysis.psf_analysis.image import Calibrated3DImage -from napari_psf_analysis.psf_analysis.psf import PSF -from scipy.ndimage import uniform_filter -from skimage.feature import peak_local_max - - -def analyze_psf(bead: Calibrated3DImage): - psf = PSF(image=bead) - try: - psf.analyze() - return psf.get_summary_dict() - except Exception: - # skip over beads where psf analysis failed - return {} - - -def adjust_fig(fig, ax): - for _ax in ax.flatten(): - _ax.set_xticks([]) - _ax.set_yticks([]) - - plt.tight_layout() - plt.subplots_adjust(wspace=0.5) - fig_size = fig.get_size_inches() - fig_size_scaling = 5 / fig_size[0] # set width to 5 inches - fig.set_figwidth(fig_size[0] * fig_size_scaling) - fig.set_figheight(fig_size[1] * fig_size_scaling) - - -def plot_psf_slices( - plots_dir: str, beads: list, zyx_scale: tuple, axis_labels: tuple, bead_numbers: list -): - num_beads = len(beads) - scale_Z, scale_Y, scale_X = zyx_scale - shape_Z, shape_Y, shape_X = beads[0].data.shape - cmap = 'viridis' - - bead_xy_psf_path = plots_dir / 'beads_xy_psf.png' - fig, ax = plt.subplots(1, num_beads) - for _ax, bead, bead_number in zip(ax, beads, bead_numbers): - _ax.imshow( - bead.data[shape_Z // 2, :, :], cmap=cmap, origin='lower', aspect=scale_Y / scale_X - ) - _ax.set_xlabel(axis_labels[-1]) - _ax.set_ylabel(axis_labels[-2]) - _ax.set_title(f'Bead: {bead_number}') - adjust_fig(fig, ax) - fig.set_figheight(2) - fig.savefig(bead_xy_psf_path) - - bead_xz_psf_path = plots_dir / 'beads_xz_psf.png' - fig, ax = plt.subplots(1, num_beads) - for _ax, bead in zip(ax, beads): - _ax.imshow( - bead.data[:, shape_Y // 2, :], cmap=cmap, origin='lower', aspect=scale_Z / scale_X - ) - _ax.set_xlabel(axis_labels[-1]) - _ax.set_ylabel(axis_labels[-3]) - adjust_fig(fig, ax) - fig.savefig(bead_xz_psf_path) - - bead_yz_psf_path = plots_dir / 'beads_yz_psf.png' - fig, ax = plt.subplots(1, num_beads) - for _ax, bead in zip(ax, beads): - _ax.imshow( - bead.data[:, :, shape_X // 2], cmap=cmap, origin='lower', aspect=scale_Z / scale_Y - ) - _ax.set_xlabel(axis_labels[-2]) - _ax.set_ylabel(axis_labels[-3]) - adjust_fig(fig, ax) - fig.savefig(bead_yz_psf_path) - - return bead_xy_psf_path, bead_xz_psf_path, bead_yz_psf_path - - -def plot_fwhm_vs_acq_axes(plots_dir: str, x, y, z, fwhm_x, fwhm_y, fwhm_z, axis_labels: tuple): - def plot_fwhm_vs_acq_axis(out_dir: str, x, fwhm_x, fwhm_y, fwhm_z, x_axis_label: str): - fig, ax = plt.subplots(1, 1) - ax.plot(x, fwhm_x, 'o', x, fwhm_y, 'o') - ax.set_ylabel('{} and {} FWHM (um)'.format(*axis_labels[1:][::-1])) - ax.set_xlabel('{} position (um)'.format(x_axis_label)) - - ax2 = ax.twinx() - ax2.plot(x, fwhm_z, 'o', color='green') - ax2.set_ylabel('{} FWHM (um)'.format(axis_labels[0]), color='green') - 
ax2.tick_params(axis='y', labelcolor='green')
-        fig.savefig(out_dir)
-
-    out_dirs = [plots_dir / f'fwhm_vs_{axis}.png' for axis in axis_labels]
-    for our_dir, x_axis, x_axis_label in zip(out_dirs, (z, y, x), axis_labels):
-        plot_fwhm_vs_acq_axis(our_dir, x_axis, fwhm_x, fwhm_y, fwhm_z, x_axis_label)
-
-    return out_dirs
-
-
-def plot_psf_amp(plots_dir: str, x, y, z, amp, axis_labels: tuple):
-    psf_amp_xy_path = plots_dir / 'psf_amp_xy.png'
-    fig, ax = plt.subplots(1, 1)
-
-    sc = ax.scatter(
-        x,
-        y,
-        c=amp,
-        vmin=np.quantile(amp, 0.01),
-        vmax=np.quantile(amp, 0.99),
-        cmap='summer',
-    )
-    ax.set_aspect('equal')
-    ax.set_xlabel(f'{axis_labels[-1]} (um)')
-    ax.set_ylabel(f'{axis_labels[-2]} (um)')
-    plt.colorbar(sc, label='Amplitude (a.u.)')
-    fig.savefig(psf_amp_xy_path)
-
-    psf_amp_z_path = plots_dir / 'psf_amp_z.png'
-    fig, ax = plt.subplots(1, 1)
-    ax.scatter(z, amp)
-    ax.set_xlabel(f'{axis_labels[-3]} (um)')
-    ax.set_ylabel('Amplitude (a.u.)')
-    fig.savefig(psf_amp_z_path)
-
-    return psf_amp_xy_path, psf_amp_z_path
-
-
-def generate_html_report(
-    dataset_name: str,
-    data_path: str,
-    dataset_scale: tuple,
-    num_beads_total_good_bad: tuple,
-    fwhm_3d_mean: tuple,
-    fwhm_3d_std: tuple,
-    fwhm_pc_mean: tuple,
-    bead_psf_slices_paths: tuple,
-    fwhm_vs_acq_axes_paths: tuple,
-    psf_amp_paths: tuple,
-    axis_labels: tuple,
-):
-
-    # string indents need to be like that, otherwise this turns into a code block
-    report_str = f'''
-# PSF Analysis
-
-## Overview
-
-### Dataset
-
-* Name: `{dataset_name}`
-* Path: `{data_path}`
-* Scale: {dataset_scale[::-1]} um
-* Date analyzed: {datetime.datetime.now().strftime("%Y-%m-%d %H:%M:%S")}
-
-### Number of beads
-
-* Detected: {num_beads_total_good_bad[0]}
-* Analyzed: {num_beads_total_good_bad[1]}
-* Skipped: {num_beads_total_good_bad[2]}
-
-### FWHM
-
-* {axis_labels[-1]}: {fwhm_3d_mean[-1]:.3f} ± {fwhm_3d_std[-1]:.3f} um
-* {axis_labels[-2]}: {fwhm_3d_mean[-2]:.3f} ± {fwhm_3d_std[-2]:.3f} um
-* {axis_labels[-3]}: {fwhm_3d_mean[-3]:.3f} ± {fwhm_3d_std[-3]:.3f} um
-* PC: {'{:.3f} um, {:.3f} um, {:.3f} um'.format(*fwhm_pc_mean)}
-
-## Representative bead PSF images
-![beads xy psf]({bead_psf_slices_paths[0]})
-![beads xz psf]({bead_psf_slices_paths[1]})
-![beads yz psf]({bead_psf_slices_paths[2]})
-
-## FWHM versus {axis_labels[0]} position
-![fwhm vs z]({fwhm_vs_acq_axes_paths[0]} "fwhm vs z")
-
-## FWHM versus {axis_labels[1]} position
-![fwhm vs y]({fwhm_vs_acq_axes_paths[1]} "fwhm vs y")
-
-## FWHM versus {axis_labels[2]} position
-![fwhm vs x]({fwhm_vs_acq_axes_paths[2]} "fwhm vs x")
-
-## PSF amplitude versus {axis_labels[-1]}-{axis_labels[-2]} position
-![psf amp xy]({psf_amp_paths[0]} "psf amp xy")
-
-## PSF amplitude versus {axis_labels[-3]} position
-![psf amp z]({psf_amp_paths[1]} "psf amp z")
-'''
-
-    css_style = '''
-<link rel="stylesheet" type="text/css" href="github-markdown.css">
-<style>
-    .markdown-body { box-sizing: border-box; min-width: 200px; max-width: 980px; margin: 0 auto; padding: 45px; }
-</style>
-'''
-
-    html = markdown.markdown(report_str)
-    formatted_html = f'''
-{css_style}
-<article class="markdown-body">
-{html}
-</article>
-'''.strip() - - return formatted_html +from iohub.reader import open_ome_zarr, read_micromanager + +from mantis.analysis.analyze_psf import ( + analyze_psf, + detect_peaks, + generate_html_report, + plot_fwhm_vs_acq_axes, + plot_psf_amp, + plot_psf_slices, +) # %% Load data - swap with data acquisition block -data_dir = Path('/Users/ivan.ivanov/Documents/images_local/') -dataset = 'epi_beads_100nm_fl_mount_after_SL2_1' +data_dir = Path(r'Z:\2023_03_30_beads') +dataset = 'beads_ip_0.74_1' -# data_path = data_dir / dataset / (dataset+'_MMStack_Pos0.ome.tif') -psf_analysis_path = data_dir / dataset / 'psf_analysis' -data_path = data_dir / dataset / 'LS_beads_100nm_fl_mount_after_SL2_1_MMStack_Pos0.ome.tif' -zyx_data = tifffile.imread(data_path) +# data_dir = Path(r'Z:\2022_12_22_LS_after_SL2') +# dataset = 'epi_beads_100nm_fl_mount_after_SL2_1' +# data_path = data_dir / dataset / 'LS_beads_100nm_fl_mount_after_SL2_1_MMStack_Pos0.ome.tif' +# zyx_data = tifffile.imread(data_path) -scale = (0.250, 0.069, 0.069) # in um -axis_labels = ("Z", "Y", "X") +data_path = data_dir / dataset -psf_analysis_path.mkdir(exist_ok=True) +if str(data_path).endswith('.zarr'): + ds = open_ome_zarr(data_path / '0/0/0') + zyx_data = ds.data[0, 0] +else: + ds = read_micromanager(str(data_path)) + zyx_data = ds.get_array(0)[0, 0] -# %% Find peaks +scale = (0.1565, 0.116, 0.116) # in um +# axis_labels = ("Z", "Y", "X") +axis_labels = ("SCAN", "TILT", "COVERSLIP") -# runs in about 10 seconds, sensitive to parameters -# finds ~310 peaks -points = peak_local_max( - uniform_filter(zyx_data, size=3), # helps remove hot pixels, adds ~3s - min_distance=25, - threshold_abs=200, - num_peaks=1000, # limit to top 1000 peaks - exclude_border=(3, 10, 10), # in zyx -) +# %% Detect peaks + +raw = False +if axis_labels == ("SCAN", "TILT", "COVERSLIP"): + raw = True + +peaks = detect_peaks(zyx_data, raw=raw) +print(f'Number of peaks detected: {len(peaks)}') # %% Visualize in napari viewer = napari.Viewer() viewer.add_image(zyx_data) -viewer.add_points(points, name='peaks local max', size=12, symbol='ring', edge_color='yellow') +viewer.add_points(peaks, name='peaks local max', size=12, symbol='ring', edge_color='yellow') # %% Extract and analyze bead patches -patch_size = (scale[0] * 11, scale[1] * 15, scale[2] * 15) - -# extract bead patches -bead_extractor = BeadExtractor( - image=Calibrated3DImage(data=zyx_data.astype(np.float64), spacing=scale), - patch_size=patch_size, +beads, df_gaussian_fit, df_1d_peak_width = analyze_psf( + zyx_data=zyx_data, + points=peaks, + scale=scale, ) -beads = bead_extractor.extract_beads(points=points) -bead_offsets = np.asarray([bead.offset for bead in beads]) # analyze bead patches num_beads = len(beads) -results = [analyze_psf(bead) for bead in beads] -num_failed = sum([result == {} for result in results]) -num_successful = num_beads - num_failed - -df = pd.DataFrame.from_records(results) -df['z_mu'] += bead_offsets[:, 0] * scale[0] -df['y_mu'] += bead_offsets[:, 1] * scale[1] -df['x_mu'] += bead_offsets[:, 2] * scale[2] -df = df.dropna() +num_successful = len(df_gaussian_fit) +num_failed = num_beads - num_successful # %% Generate plots +psf_analysis_path = data_dir / dataset / 'psf_analysis' +psf_analysis_path.mkdir(exist_ok=True) + plots_dir = psf_analysis_path / 'plots' plots_dir.mkdir(parents=True, exist_ok=True) -random_bead_number = sorted(np.random.choice(len(df), 3)) +random_bead_number = sorted(np.random.choice(num_successful, 3)) bead_psf_slices_paths = plot_psf_slices( plots_dir, [beads[i] 
for i in random_bead_number], scale, axis_labels, random_bead_number ) + +if raw: + plot_data_x = [df_1d_peak_width[col].values for col in ('x_mu', 'y_mu', 'z_mu')] + plot_data_y = [ + df_1d_peak_width[col].values for col in ('1d_x_fwhm', '1d_y_fwhm', '1d_z_fwhm') + ] +else: + plot_data_x = [df_gaussian_fit[col].values for col in ('x_mu', 'y_mu', 'z_mu')] + plot_data_y = [ + df_gaussian_fit[col].values for col in ('zyx_x_fwhm', 'zyx_y_fwhm', 'zyx_z_fwhm') + ] + fwhm_vs_acq_axes_paths = plot_fwhm_vs_acq_axes( plots_dir, - df['x_mu'].values, - df['y_mu'].values, - df['z_mu'].values, - *[df[col].values for col in ('zyx_x_fwhm', 'zyx_y_fwhm', 'zyx_z_fwhm')], + *plot_data_x, + *plot_data_y, axis_labels, ) psf_amp_paths = plot_psf_amp( plots_dir, - df['x_mu'].values, - df['y_mu'].values, - df['z_mu'].values, - df['zyx_amp'].values, + df_gaussian_fit['x_mu'].values, + df_gaussian_fit['y_mu'].values, + df_gaussian_fit['z_mu'].values, + df_gaussian_fit['zyx_amp'].values, axis_labels, ) -# mean_1d_fwhm = [df[col].mean() for col in ('x_fwhm', 'y_fwhm', 'z_fwhm')] -fwhm_3d_mean = [df[col].mean() for col in ('zyx_z_fwhm', 'zyx_y_fwhm', 'zyx_x_fwhm')] -fwhm_3d_std = [df[col].std() for col in ('zyx_z_fwhm', 'zyx_y_fwhm', 'zyx_x_fwhm')] -fwhm_pc_mean = [df[col].mean() for col in ('zyx_pc3_fwhm', 'zyx_pc2_fwhm', 'zyx_pc1_fwhm')] +fwhm_3d_mean = [ + df_gaussian_fit[col].mean() for col in ('zyx_z_fwhm', 'zyx_y_fwhm', 'zyx_x_fwhm') +] +fwhm_3d_std = [ + df_gaussian_fit[col].std() for col in ('zyx_z_fwhm', 'zyx_y_fwhm', 'zyx_x_fwhm') +] +fwhm_pc_mean = [ + df_gaussian_fit[col].mean() for col in ('zyx_pc3_fwhm', 'zyx_pc2_fwhm', 'zyx_pc1_fwhm') +] +fwhm_1d_mean = df_1d_peak_width.mean() +fwhm_1d_std = df_1d_peak_width.std() # %% Generate HTML report @@ -327,6 +131,8 @@ def generate_html_report( data_path.parent, scale, (num_beads, num_successful, num_failed), + fwhm_1d_mean, + fwhm_1d_std, fwhm_3d_mean, fwhm_3d_std, fwhm_pc_mean, @@ -338,9 +144,10 @@ def generate_html_report( # save html file and show in browser with open(psf_analysis_path / 'peaks.pkl', 'wb') as file: - pickle.dump(points, file) + pickle.dump(peaks, file) -df.to_csv(psf_analysis_path / 'psf_analysis.csv', index=False) +df_gaussian_fit.to_csv(psf_analysis_path / 'psf_gaussian_fit.csv', index=False) +df_1d_peak_width.to_csv(psf_analysis_path / 'psf_1d_peak_width.csv', index=False) shutil.copy('github-markdown.css', psf_analysis_path) html_file_path = psf_analysis_path / ('psf_analysis_report.html') diff --git a/mantis/analysis/analyze_psf.py b/mantis/analysis/analyze_psf.py new file mode 100644 index 00000000..84a04f29 --- /dev/null +++ b/mantis/analysis/analyze_psf.py @@ -0,0 +1,317 @@ +import datetime + +from typing import List + +import markdown +import matplotlib.pyplot as plt +import numpy as np +import pandas as pd + +from napari_psf_analysis.psf_analysis.extract.BeadExtractor import BeadExtractor +from napari_psf_analysis.psf_analysis.image import Calibrated3DImage +from napari_psf_analysis.psf_analysis.psf import PSF +from numpy.typing import ArrayLike +from scipy.ndimage import uniform_filter +from scipy.signal import peak_widths +from skimage.feature import peak_local_max + + +def analyze_psf(zyx_data: ArrayLike, points: ArrayLike, scale: tuple): + patch_size = (scale[0] * 15, scale[1] * 18, scale[2] * 18) + + # extract bead patches + bead_extractor = BeadExtractor( + image=Calibrated3DImage(data=zyx_data.astype(np.uint16), spacing=scale), + patch_size=patch_size, + ) + beads = bead_extractor.extract_beads(points=points) + beads_data = 
[bead.data for bead in beads] + + # analyze bead patches + results = [] + for bead in beads: + psf = PSF(image=bead) + try: + psf.analyze() + summary_dict = psf.get_summary_dict() + except Exception: + summary_dict = {} + results.append(summary_dict) + + df_gaussian_fit = pd.DataFrame.from_records(results) + + bead_offsets = np.asarray([bead.offset for bead in beads]) + df_gaussian_fit['z_mu'] += bead_offsets[:, 0] * scale[0] + df_gaussian_fit['y_mu'] += bead_offsets[:, 1] * scale[1] + df_gaussian_fit['x_mu'] += bead_offsets[:, 2] * scale[2] + + df_1d_peak_width = pd.DataFrame( + [calculate_peak_widths(bead, scale) for bead in beads_data], + columns=(f'1d_{i}_fwhm' for i in ('z', 'y', 'x')), + ) + df_1d_peak_width = pd.concat( + (df_gaussian_fit[['z_mu', 'y_mu', 'x_mu']], df_1d_peak_width), axis=1 + ) + + # clean up dataframes + df_gaussian_fit = df_gaussian_fit.dropna() + df_1d_peak_width = df_1d_peak_width.loc[ + ~(df_1d_peak_width[['1d_z_fwhm', '1d_y_fwhm', '1d_x_fwhm']] == 0).any(axis=1) + ] + + return beads_data, df_gaussian_fit, df_1d_peak_width + + +def calculate_peak_widths(zyx_data: ArrayLike, zyx_scale: tuple): + scale_Z, scale_Y, scale_X = zyx_scale + shape_Z, shape_Y, shape_X = zyx_data.shape + + try: + z_fwhm = peak_widths(zyx_data[:, shape_Y // 2, shape_X // 2], [shape_Z // 2])[0][0] + y_fwhm = peak_widths(zyx_data[shape_Z // 2, :, shape_X // 2], [shape_Y // 2])[0][0] + x_fwhm = peak_widths(zyx_data[shape_Z // 2, shape_Y // 2, :], [shape_X // 2])[0][0] + except Exception: + z_fwhm, y_fwhm, x_fwhm = (0.0, 0.0, 0.0) + + return z_fwhm * scale_Z, y_fwhm * scale_Y, x_fwhm * scale_X + + +def _adjust_fig(fig, ax): + for _ax in ax.flatten(): + _ax.set_xticks([]) + _ax.set_yticks([]) + + plt.tight_layout() + plt.subplots_adjust(wspace=0.5) + fig_size = fig.get_size_inches() + fig_size_scaling = 5 / fig_size[0] # set width to 5 inches + fig.set_figwidth(fig_size[0] * fig_size_scaling) + fig.set_figheight(fig_size[1] * fig_size_scaling) + + +def plot_psf_slices( + plots_dir: str, + beads: List[ArrayLike], + zyx_scale: tuple, + axis_labels: tuple, + bead_numbers: list, +): + num_beads = len(beads) + scale_Z, scale_Y, scale_X = zyx_scale + shape_Z, shape_Y, shape_X = beads[0].shape + cmap = 'viridis' + + bead_xy_psf_path = plots_dir / 'beads_xy_psf.png' + fig, ax = plt.subplots(1, num_beads) + for _ax, bead, bead_number in zip(ax, beads, bead_numbers): + _ax.imshow( + bead[shape_Z // 2, :, :], cmap=cmap, origin='lower', aspect=scale_Y / scale_X + ) + _ax.set_xlabel(axis_labels[-1]) + _ax.set_ylabel(axis_labels[-2]) + _ax.set_title(f'Bead: {bead_number}') + _adjust_fig(fig, ax) + fig.set_figheight(2) + fig.savefig(bead_xy_psf_path) + + bead_xz_psf_path = plots_dir / 'beads_xz_psf.png' + fig, ax = plt.subplots(1, num_beads) + for _ax, bead in zip(ax, beads): + _ax.imshow( + bead[:, shape_Y // 2, :], cmap=cmap, origin='lower', aspect=scale_Z / scale_X + ) + _ax.set_xlabel(axis_labels[-1]) + _ax.set_ylabel(axis_labels[-3]) + _adjust_fig(fig, ax) + fig.savefig(bead_xz_psf_path) + + bead_yz_psf_path = plots_dir / 'beads_yz_psf.png' + fig, ax = plt.subplots(1, num_beads) + for _ax, bead in zip(ax, beads): + _ax.imshow( + bead[:, :, shape_X // 2], cmap=cmap, origin='lower', aspect=scale_Z / scale_Y + ) + _ax.set_xlabel(axis_labels[-2]) + _ax.set_ylabel(axis_labels[-3]) + _adjust_fig(fig, ax) + fig.savefig(bead_yz_psf_path) + + return bead_xy_psf_path, bead_xz_psf_path, bead_yz_psf_path + + +def plot_fwhm_vs_acq_axes(plots_dir: str, x, y, z, fwhm_x, fwhm_y, fwhm_z, axis_labels: tuple): + def 
plot_fwhm_vs_acq_axis(out_dir: str, x, fwhm_x, fwhm_y, fwhm_z, x_axis_label: str): + fig, ax = plt.subplots(1, 1) + artist1 = ax.plot(x, fwhm_x, 'o', x, fwhm_y, 'o') + ax.set_ylabel('{} and {} FWHM (um)'.format(*axis_labels[1:][::-1])) + ax.set_xlabel('{} position (um)'.format(x_axis_label)) + + ax2 = ax.twinx() + artist2 = ax2.plot(x, fwhm_z, 'o', color='green') + ax2.set_ylabel('{} FWHM (um)'.format(axis_labels[0]), color='green') + ax2.tick_params(axis='y', labelcolor='green') + plt.legend(artist1 + artist2, axis_labels[::-1]) + fig.savefig(out_dir) + + out_dirs = [plots_dir / f'fwhm_vs_{axis}.png' for axis in axis_labels] + for our_dir, x_axis, x_axis_label in zip(out_dirs, (z, y, x), axis_labels): + plot_fwhm_vs_acq_axis(our_dir, x_axis, fwhm_x, fwhm_y, fwhm_z, x_axis_label) + + return out_dirs + + +def plot_psf_amp(plots_dir: str, x, y, z, amp, axis_labels: tuple): + psf_amp_xy_path = plots_dir / 'psf_amp_xy.png' + fig, ax = plt.subplots(1, 1) + + sc = ax.scatter( + x, + y, + c=amp, + vmin=np.quantile(amp, 0.01), + vmax=np.quantile(amp, 0.99), + cmap='summer', + ) + ax.set_aspect('equal') + ax.set_xlabel(f'{axis_labels[-1]} (um)') + ax.set_ylabel(f'{axis_labels[-2]} (um)') + plt.colorbar(sc, label='Amplitude (a.u.)') + fig.savefig(psf_amp_xy_path) + + psf_amp_z_path = plots_dir / 'psf_amp_z.png' + fig, ax = plt.subplots(1, 1) + ax.scatter(z, amp) + ax.set_xlabel(f'{axis_labels[-3]} (um)') + ax.set_ylabel('Amplitude (a.u.)') + fig.savefig(psf_amp_z_path) + + return psf_amp_xy_path, psf_amp_z_path + + +def generate_html_report( + dataset_name: str, + data_path: str, + dataset_scale: tuple, + num_beads_total_good_bad: tuple, + fwhm_1d_mean: tuple, + fwhm_1d_std: tuple, + fwhm_3d_mean: tuple, + fwhm_3d_std: tuple, + fwhm_pc_mean: tuple, + bead_psf_slices_paths: tuple, + fwhm_vs_acq_axes_paths: tuple, + psf_amp_paths: tuple, + axis_labels: tuple, +): + + # string indents need to be like that, otherwise this turns into a code block + report_str = f''' +# PSF Analysis + +## Overview + +### Dataset + +* Name: `{dataset_name}` +* Path: `{data_path}` +* Scale: {dataset_scale[::-1]} um +* Date analyzed: {datetime.datetime.now().strftime("%Y-%m-%d %H:%M:%S")} + +### Number of beads + +* Defected: {num_beads_total_good_bad[0]} +* Analyzed: {num_beads_total_good_bad[1]} +* Skipped: {num_beads_total_good_bad[2]} + +### FWHM + +* **3D Gaussian fit** + - {axis_labels[-1]}: {fwhm_3d_mean[-1]:.3f} ± {fwhm_3d_std[0]:.3f} um + - {axis_labels[-2]}: {fwhm_3d_mean[-2]:.3f} ± {fwhm_3d_std[1]:.3f} um + - {axis_labels[-3]}: {fwhm_3d_mean[-3]:.3f} ± {fwhm_3d_std[2]:.3f} um +* 1D profile + - {axis_labels[-1]}: {fwhm_1d_mean[-1]:.3f} ± {fwhm_1d_std[0]:.3f} um + - {axis_labels[-2]}: {fwhm_1d_mean[-2]:.3f} ± {fwhm_1d_std[1]:.3f} um + - {axis_labels[-3]}: {fwhm_1d_mean[-3]:.3f} ± {fwhm_1d_std[2]:.3f} um +* 3D principal components + - {'{:.3f} um, {:.3f} um, {:.3f} um'.format(*fwhm_pc_mean)} + +## Representative bead PSF images +![beads xy psf]({bead_psf_slices_paths[0]}) +![beads xz psf]({bead_psf_slices_paths[1]}) +![beads yz psf]({bead_psf_slices_paths[2]}) + +## FWHM versus {axis_labels[0]} position +![fwhm vs z]({fwhm_vs_acq_axes_paths[0]} "fwhm vs z") + +## FWHM versus {axis_labels[1]} position +![fwhm vs z]({fwhm_vs_acq_axes_paths[1]} "fwhm vs y") + +## FWHM versus {axis_labels[2]} position +![fwhm vs z]({fwhm_vs_acq_axes_paths[2]} "fwhm vs x") + +## PSF amplitude versus {axis_labels[-1]}-{axis_labels[-2]} position +![psf amp xy]({psf_amp_paths[0]} "psf amp xy") + +## PSF amplitude versus {axis_labels[-3]} 
position +![psf amp z]({psf_amp_paths[1]} "psf amp z") +''' + + css_style = ''' + + + +''' + + html = markdown.markdown(report_str) + formatted_html = f''' +{css_style} +
+{html}
+</article>
+'''.strip() + + return formatted_html + + +def detect_peaks( + zyx_data, + raw=False, + min_distance=25, + threshold_abs=200, + num_peaks=1000, + exclude_border=(3, 10, 10), +): + # helps speed up peak detection + if raw: + zyx_data = np.swapaxes(zyx_data, 0, 1) + + # runs in about 10 seconds, sensitive to parameters + # finds ~310 peaks + peaks = peak_local_max( + uniform_filter(zyx_data, size=3), # helps remove hot pixels, adds ~3s + min_distance=min_distance, + threshold_abs=threshold_abs, + num_peaks=num_peaks, # limit to top 1000 peaks + exclude_border=exclude_border, # in zyx + ) + + if raw: + zyx_data = np.swapaxes(zyx_data, 0, 1) + peaks = peaks[:, (1, 0, 2)] + + return peaks From e4453745f556f68de90accd7a414c2bb029aa6ce Mon Sep 17 00:00:00 2001 From: Ivan Ivanov Date: Wed, 31 Jan 2024 14:39:52 -0800 Subject: [PATCH 05/57] refactor psf analysis report --- mantis/acquisition/scripts/measure_psf.py | 239 +++++++++++++--------- mantis/analysis/analyze_psf.py | 144 ++++++++++++- mantis/analysis/deskew.py | 62 ++++-- 3 files changed, 325 insertions(+), 120 deletions(-) diff --git a/mantis/acquisition/scripts/measure_psf.py b/mantis/acquisition/scripts/measure_psf.py index 432cc707..b0b791fb 100644 --- a/mantis/acquisition/scripts/measure_psf.py +++ b/mantis/acquisition/scripts/measure_psf.py @@ -1,47 +1,53 @@ # %% -import pickle -import shutil -import webbrowser - from pathlib import Path +import cupy as cp import napari import numpy as np +from cupyx.scipy.ndimage import affine_transform + +# from iohub.ngff_meta import TransformationMeta from iohub.reader import open_ome_zarr, read_micromanager +from mantis.analysis.AnalysisSettings import DeskewSettings from mantis.analysis.analyze_psf import ( analyze_psf, detect_peaks, - generate_html_report, - plot_fwhm_vs_acq_axes, - plot_psf_amp, - plot_psf_slices, + extract_beads, + generate_report, +) +from mantis.analysis.deskew import ( # _average_n_slices, + _get_transform_matrix, + get_deskewed_data_shape, ) # %% Load data - swap with data acquisition block data_dir = Path(r'Z:\2023_03_30_beads') dataset = 'beads_ip_0.74_1' +data_path = data_dir / dataset # data_dir = Path(r'Z:\2022_12_22_LS_after_SL2') # dataset = 'epi_beads_100nm_fl_mount_after_SL2_1' -# data_path = data_dir / dataset / 'LS_beads_100nm_fl_mount_after_SL2_1_MMStack_Pos0.ome.tif' -# zyx_data = tifffile.imread(data_path) - -data_path = data_dir / dataset +# zyx_data = tifffile.imread(data_dir / dataset / 'LS_beads_100nm_fl_mount_after_SL2_1_MMStack_Pos0.ome.tif') +# scale = (0.250, 0.069, 0.069) # in um +# axis_labels = ("Z", "Y", "X") if str(data_path).endswith('.zarr'): ds = open_ome_zarr(data_path / '0/0/0') zyx_data = ds.data[0, 0] + # channel_names = ds.channel_names else: ds = read_micromanager(str(data_path)) zyx_data = ds.get_array(0)[0, 0] + # channel_names = ds.channel_names scale = (0.1565, 0.116, 0.116) # in um -# axis_labels = ("Z", "Y", "X") axis_labels = ("SCAN", "TILT", "COVERSLIP") +deskew = True + # %% Detect peaks raw = False @@ -60,100 +66,143 @@ # %% Extract and analyze bead patches -beads, df_gaussian_fit, df_1d_peak_width = analyze_psf( +beads, offsets = extract_beads( zyx_data=zyx_data, points=peaks, scale=scale, ) -# analyze bead patches -num_beads = len(beads) -num_successful = len(df_gaussian_fit) -num_failed = num_beads - num_successful - -# %% Generate plots - -psf_analysis_path = data_dir / dataset / 'psf_analysis' -psf_analysis_path.mkdir(exist_ok=True) - -plots_dir = psf_analysis_path / 'plots' -plots_dir.mkdir(parents=True, exist_ok=True) 
-random_bead_number = sorted(np.random.choice(num_successful, 3)) - -bead_psf_slices_paths = plot_psf_slices( - plots_dir, [beads[i] for i in random_bead_number], scale, axis_labels, random_bead_number -) - - -if raw: - plot_data_x = [df_1d_peak_width[col].values for col in ('x_mu', 'y_mu', 'z_mu')] - plot_data_y = [ - df_1d_peak_width[col].values for col in ('1d_x_fwhm', '1d_y_fwhm', '1d_z_fwhm') - ] -else: - plot_data_x = [df_gaussian_fit[col].values for col in ('x_mu', 'y_mu', 'z_mu')] - plot_data_y = [ - df_gaussian_fit[col].values for col in ('zyx_x_fwhm', 'zyx_y_fwhm', 'zyx_z_fwhm') - ] - -fwhm_vs_acq_axes_paths = plot_fwhm_vs_acq_axes( - plots_dir, - *plot_data_x, - *plot_data_y, - axis_labels, -) - -psf_amp_paths = plot_psf_amp( - plots_dir, - df_gaussian_fit['x_mu'].values, - df_gaussian_fit['y_mu'].values, - df_gaussian_fit['z_mu'].values, - df_gaussian_fit['zyx_amp'].values, - axis_labels, +df_gaussian_fit, df_1d_peak_width = analyze_psf( + zyx_patches=beads, + bead_offsets=offsets, + scale=scale, ) -fwhm_3d_mean = [ - df_gaussian_fit[col].mean() for col in ('zyx_z_fwhm', 'zyx_y_fwhm', 'zyx_x_fwhm') -] -fwhm_3d_std = [ - df_gaussian_fit[col].std() for col in ('zyx_z_fwhm', 'zyx_y_fwhm', 'zyx_x_fwhm') -] -fwhm_pc_mean = [ - df_gaussian_fit[col].mean() for col in ('zyx_pc3_fwhm', 'zyx_pc2_fwhm', 'zyx_pc1_fwhm') -] -fwhm_1d_mean = df_1d_peak_width.mean() -fwhm_1d_std = df_1d_peak_width.std() - # %% Generate HTML report -html_report = generate_html_report( +psf_analysis_path = data_dir / dataset / 'psf_analysis' +generate_report( + psf_analysis_path, + data_dir, dataset, - data_path.parent, + beads, + peaks, + df_gaussian_fit, + df_1d_peak_width, scale, - (num_beads, num_successful, num_failed), - fwhm_1d_mean, - fwhm_1d_std, - fwhm_3d_mean, - fwhm_3d_std, - fwhm_pc_mean, - bead_psf_slices_paths, - fwhm_vs_acq_axes_paths, - psf_amp_paths, axis_labels, ) -# save html file and show in browser -with open(psf_analysis_path / 'peaks.pkl', 'wb') as file: - pickle.dump(peaks, file) - -df_gaussian_fit.to_csv(psf_analysis_path / 'psf_gaussian_fit.csv', index=False) -df_1d_peak_width.to_csv(psf_analysis_path / 'psf_1d_peak_width.csv', index=False) - -shutil.copy('github-markdown.css', psf_analysis_path) -html_file_path = psf_analysis_path / ('psf_analysis_report.html') -with open(html_file_path, 'w') as file: - file.write(html_report) - -webbrowser.open('file://' + str(html_file_path)) - +# %% Deskew data + +if raw and deskew: + num_chunks = 2 + chunked_data = np.split(zyx_data, num_chunks, axis=-1) + chunk_shape = chunked_data[0].shape + + settings = DeskewSettings( + pixel_size_um=scale[-1], + ls_angle_deg=30, + scan_step_um=scale[-3], + keep_overhang=True, + average_n_slices=3, + ) + # T, C, Z, Y, X = (1, 1) + chunk_shape + + deskewed_shape, voxel_size = get_deskewed_data_shape( + chunk_shape, + settings.ls_angle_deg, + settings.px_to_scan_ratio, + settings.keep_overhang, + settings.average_n_slices, + settings.pixel_size_um, + ) + + matrix = _get_transform_matrix( + chunk_shape, + settings.ls_angle_deg, + settings.px_to_scan_ratio, + settings.keep_overhang, + ) + + matrix_gpu = cp.asarray(matrix) + deskewed_chunks = [] + for chunk in chunked_data: + deskewed_data_gpu = affine_transform( + cp.asarray(chunk), + matrix_gpu, + output_shape=deskewed_shape, + order=1, + cval=80, + ) + deskewed_chunks.append(cp.asnumpy(deskewed_data_gpu)) + del deskewed_data_gpu + + # concatenate arrays in reverse order + # identical to cpu deskew using ndi.affine_transform + deskewed_data = 
np.concatenate(deskewed_chunks[::-1], axis=-2) + + # TODO: average_n_slices + + # TODO: save deskewed data to zarr + + # df_deskew_gaussian_fit, df_deskew_1d_peak_width = analyze_psf( + # zyx_patches=deskewed_beads, + # bead_offsets=offsets, + # scale=voxel_size, + # ) + + # psf_analysis_path = data_dir / dataset / 'psf_analysis_deskewed' + # generate_report( + # psf_analysis_path, + # data_dir, + # dataset, + # deskewed_beads, + # peaks, + # df_deskew_gaussian_fit, + # df_deskew_1d_peak_width, + # voxel_size, + # axis_labels=("Z", "Y", "X"), + # ) + + # ct = np.cos(settings.ls_angle_deg * np.pi / 180) + # Z_shift = 0 + # if not settings.keep_overhang: + # Z_shift = int(np.floor(Y * ct * settings.px_to_scan_ratio)) + # matrix = np.array( + # [ + # [ + # -settings.px_to_scan_ratio * ct, + # 0, + # settings.px_to_scan_ratio, + # Z_shift, + # ], + # [-1, 0, 0, Y - 1], + # [0, -1, 0, X - 1], + # ] + # ) + + # deskewed_data = deskew_data( + # zyx_data, + # settings.ls_angle_deg, + # settings.px_to_scan_ratio, + # settings.keep_overhang, + # settings.average_n_slices, + # ) + + # # Create a zarr store + # transform = TransformationMeta( + # type="scale", + # scale=2 * (1,) + voxel_size, + # ) + # output_path = data_dir / (dataset + '_deskewed.zarr') + + # with open_ome_zarr(output_path, layout="hcs", mode="w", channel_names=channel_names) as output_dataset: + # pos = dataset.create_position('0', '0', '0') + # pos.create_image( + # name="0", + # data=deskewed_data, + # chunks=(1, 1) + deskewed_shape, # may be bigger than 500 MB + # transform=[transform], + # ) # %% diff --git a/mantis/analysis/analyze_psf.py b/mantis/analysis/analyze_psf.py index 84a04f29..f918e6c1 100644 --- a/mantis/analysis/analyze_psf.py +++ b/mantis/analysis/analyze_psf.py @@ -1,5 +1,9 @@ import datetime +import pickle +import shutil +import webbrowser +from pathlib import Path from typing import List import markdown @@ -16,7 +20,126 @@ from skimage.feature import peak_local_max -def analyze_psf(zyx_data: ArrayLike, points: ArrayLike, scale: tuple): +def _make_plots( + output_path: Path, + beads: List[ArrayLike], + df_gaussian_fit: pd.DataFrame, + df_1d_peak_width: pd.DataFrame, + scale: tuple, + axis_labels: tuple, + raw: bool = False, +): + plots_dir = output_path / 'plots' + plots_dir.mkdir(parents=True, exist_ok=True) + random_bead_number = sorted(np.random.choice(len(beads), 3)) + + bead_psf_slices_paths = plot_psf_slices( + plots_dir, + [beads[i] for i in random_bead_number], + scale, + axis_labels, + random_bead_number, + ) + + if raw: + plot_data_x = [df_1d_peak_width[col].values for col in ('x_mu', 'y_mu', 'z_mu')] + plot_data_y = [ + df_1d_peak_width[col].values for col in ('1d_x_fwhm', '1d_y_fwhm', '1d_z_fwhm') + ] + else: + plot_data_x = [df_gaussian_fit[col].values for col in ('x_mu', 'y_mu', 'z_mu')] + plot_data_y = [ + df_gaussian_fit[col].values for col in ('zyx_x_fwhm', 'zyx_y_fwhm', 'zyx_z_fwhm') + ] + + fwhm_vs_acq_axes_paths = plot_fwhm_vs_acq_axes( + plots_dir, + *plot_data_x, + *plot_data_y, + axis_labels, + ) + + psf_amp_paths = plot_psf_amp( + plots_dir, + df_gaussian_fit['x_mu'].values, + df_gaussian_fit['y_mu'].values, + df_gaussian_fit['z_mu'].values, + df_gaussian_fit['zyx_amp'].values, + axis_labels, + ) + + return (bead_psf_slices_paths, fwhm_vs_acq_axes_paths, psf_amp_paths) + + +def generate_report( + output_path: Path, + data_dir: Path, + dataset: str, + beads: List[ArrayLike], + peaks: ArrayLike, + df_gaussian_fit: pd.DataFrame, + df_1d_peak_width: pd.DataFrame, + scale: tuple, + 
axis_labels: tuple, +): + output_path.mkdir(exist_ok=True) + + num_beads = len(beads) + num_successful = len(df_gaussian_fit) + num_failed = num_beads - num_successful + + # make plots + (bead_psf_slices_paths, fwhm_vs_acq_axes_paths, psf_amp_paths) = _make_plots( + output_path, beads, df_gaussian_fit, df_1d_peak_width, scale, axis_labels, raw=False + ) + + # calculate statistics + fwhm_3d_mean = [ + df_gaussian_fit[col].mean() for col in ('zyx_z_fwhm', 'zyx_y_fwhm', 'zyx_x_fwhm') + ] + fwhm_3d_std = [ + df_gaussian_fit[col].std() for col in ('zyx_z_fwhm', 'zyx_y_fwhm', 'zyx_x_fwhm') + ] + fwhm_pc_mean = [ + df_gaussian_fit[col].mean() for col in ('zyx_pc3_fwhm', 'zyx_pc2_fwhm', 'zyx_pc1_fwhm') + ] + fwhm_1d_mean = df_1d_peak_width.mean() + fwhm_1d_std = df_1d_peak_width.std() + + # generate html report + html_report = _generate_html( + dataset, + data_dir, + scale, + (num_beads, num_successful, num_failed), + fwhm_1d_mean, + fwhm_1d_std, + fwhm_3d_mean, + fwhm_3d_std, + fwhm_pc_mean, + bead_psf_slices_paths, + fwhm_vs_acq_axes_paths, + psf_amp_paths, + axis_labels, + ) + + # save html report and other results + with open(output_path / 'peaks.pkl', 'wb') as file: + pickle.dump(peaks, file) + + df_gaussian_fit.to_csv(output_path / 'psf_gaussian_fit.csv', index=False) + df_1d_peak_width.to_csv(output_path / 'psf_1d_peak_width.csv', index=False) + + shutil.copy('github-markdown.css', output_path) + html_file_path = output_path / ('psf_analysis_report.html') + with open(html_file_path, 'w') as file: + file.write(html_report) + + # display html report + webbrowser.open('file://' + str(html_file_path)) + + +def extract_beads(zyx_data: ArrayLike, points: ArrayLike, scale: tuple): patch_size = (scale[0] * 15, scale[1] * 18, scale[2] * 18) # extract bead patches @@ -25,11 +148,18 @@ def analyze_psf(zyx_data: ArrayLike, points: ArrayLike, scale: tuple): patch_size=patch_size, ) beads = bead_extractor.extract_beads(points=points) + # remove bad beads + beads = [bead for bead in beads if bead.data.size > 0] beads_data = [bead.data for bead in beads] + bead_offset = [bead.offset for bead in beads] + + return beads_data, bead_offset + - # analyze bead patches +def analyze_psf(zyx_patches: List[ArrayLike], bead_offsets: List[tuple], scale: tuple): results = [] - for bead in beads: + for patch, offset in zip(zyx_patches, bead_offsets): + bead = Calibrated3DImage(data=patch.astype(np.uint16), spacing=scale, offset=offset) psf = PSF(image=bead) try: psf.analyze() @@ -39,14 +169,14 @@ def analyze_psf(zyx_data: ArrayLike, points: ArrayLike, scale: tuple): results.append(summary_dict) df_gaussian_fit = pd.DataFrame.from_records(results) + bead_offsets = np.asarray(bead_offsets) - bead_offsets = np.asarray([bead.offset for bead in beads]) df_gaussian_fit['z_mu'] += bead_offsets[:, 0] * scale[0] df_gaussian_fit['y_mu'] += bead_offsets[:, 1] * scale[1] df_gaussian_fit['x_mu'] += bead_offsets[:, 2] * scale[2] df_1d_peak_width = pd.DataFrame( - [calculate_peak_widths(bead, scale) for bead in beads_data], + [calculate_peak_widths(zyx_patch, scale) for zyx_patch in zyx_patches], columns=(f'1d_{i}_fwhm' for i in ('z', 'y', 'x')), ) df_1d_peak_width = pd.concat( @@ -59,7 +189,7 @@ def analyze_psf(zyx_data: ArrayLike, points: ArrayLike, scale: tuple): ~(df_1d_peak_width[['1d_z_fwhm', '1d_y_fwhm', '1d_x_fwhm']] == 0).any(axis=1) ] - return beads_data, df_gaussian_fit, df_1d_peak_width + return df_gaussian_fit, df_1d_peak_width def calculate_peak_widths(zyx_data: ArrayLike, zyx_scale: tuple): @@ -188,7 +318,7 @@ def 
plot_psf_amp(plots_dir: str, x, y, z, amp, axis_labels: tuple): return psf_amp_xy_path, psf_amp_z_path -def generate_html_report( +def _generate_html( dataset_name: str, data_path: str, dataset_scale: tuple, diff --git a/mantis/analysis/deskew.py b/mantis/analysis/deskew.py index ec42c57f..ae780014 100644 --- a/mantis/analysis/deskew.py +++ b/mantis/analysis/deskew.py @@ -53,6 +53,47 @@ def _get_averaged_shape(deskewed_data_shape: tuple, average_window_width: int) - return averaged_shape +def _get_transform_matrix( + data_shape: tuple, ls_angle_deg: float, px_to_scan_ratio: float, keep_overhang: bool +): + """ + Compute affine transformation matrix used to deskew data. + + Parameters + ---------- + data_shape : tuple + ls_angle_deg : float + px_to_scan_ratio : float + keep_overhang : bool + + Returns + ------- + matrix : np.array + Affine transformation matrix. + """ + Z, Y, X = data_shape + + ct = np.cos(ls_angle_deg * np.pi / 180) + Z_shift = 0 + if not keep_overhang: + Z_shift = int(np.floor(Y * ct * px_to_scan_ratio)) + + matrix = np.array( + [ + [ + -px_to_scan_ratio * ct, + 0, + px_to_scan_ratio, + Z_shift, + ], + [-1, 0, 0, Y - 1], + [0, -1, 0, X - 1], + ] + ) + + return matrix + + def get_deskewed_data_shape( raw_data_shape: tuple, ls_angle_deg: float, @@ -155,25 +196,10 @@ def deskew_data( cval = np.min(np.ravel(raw_data)) # Prepare transforms - Z, Y, X = raw_data.shape - - ct = np.cos(ls_angle_deg * np.pi / 180) - Z_shift = 0 - if not keep_overhang: - Z_shift = int(np.floor(Y * ct * px_to_scan_ratio)) - - matrix = np.array( - [ - [ - -px_to_scan_ratio * ct, - 0, - px_to_scan_ratio, - Z_shift, - ], - [-1, 0, 0, Y - 1], - [0, -1, 0, X - 1], - ] + matrix = _get_transform_matrix( + raw_data.shape, ls_angle_deg, px_to_scan_ratio, keep_overhang ) + output_shape, _ = get_deskewed_data_shape( raw_data.shape, ls_angle_deg, px_to_scan_ratio, keep_overhang ) From 2f271b0754ac0bae291c87fd84a10e703d0dd212 Mon Sep 17 00:00:00 2001 From: Ivan Ivanov Date: Fri, 2 Feb 2024 16:24:45 -0800 Subject: [PATCH 06/57] add report on deskewed data --- mantis/acquisition/scripts/measure_psf.py | 149 +++++++++++----------- mantis/analysis/analyze_psf.py | 6 +- 2 files changed, 77 insertions(+), 78 deletions(-) diff --git a/mantis/acquisition/scripts/measure_psf.py b/mantis/acquisition/scripts/measure_psf.py index b0b791fb..c932b637 100644 --- a/mantis/acquisition/scripts/measure_psf.py +++ b/mantis/acquisition/scripts/measure_psf.py @@ -6,8 +6,7 @@ import numpy as np from cupyx.scipy.ndimage import affine_transform - -# from iohub.ngff_meta import TransformationMeta +from iohub.ngff_meta import TransformationMeta from iohub.reader import open_ome_zarr, read_micromanager from mantis.analysis.AnalysisSettings import DeskewSettings @@ -17,7 +16,8 @@ extract_beads, generate_report, ) -from mantis.analysis.deskew import ( # _average_n_slices, +from mantis.analysis.deskew import ( + _average_n_slices, _get_transform_matrix, get_deskewed_data_shape, ) @@ -37,22 +37,21 @@ if str(data_path).endswith('.zarr'): ds = open_ome_zarr(data_path / '0/0/0') zyx_data = ds.data[0, 0] - # channel_names = ds.channel_names + channel_names = ds.channel_names else: ds = read_micromanager(str(data_path)) zyx_data = ds.get_array(0)[0, 0] - # channel_names = ds.channel_names + channel_names = ds.channel_names scale = (0.1565, 0.116, 0.116) # in um axis_labels = ("SCAN", "TILT", "COVERSLIP") -deskew = True - -# %% Detect peaks - raw = False if axis_labels == ("SCAN", "TILT", "COVERSLIP"): raw = True +deskew = True + +# %% Detect peaks 
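# A minimal sketch of how the `raw` flag behaves in detect_peaks() as
# defined in analyze_psf.py above: a raw light-sheet stack has its SCAN
# and TILT axes swapped before peak_local_max runs, which helps speed up
# peak detection, and the returned coordinates are swapped back, so
# `peaks` is always in the (z, y, x) index order of the input `zyx_data`:
#
#     if raw:
#         zyx_data = np.swapaxes(zyx_data, 0, 1)
#     peaks = peak_local_max(uniform_filter(zyx_data, size=3), ...)
#     if raw:
#         peaks = peaks[:, (1, 0, 2)]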
peaks = detect_peaks(zyx_data, raw=raw) print(f'Number of peaks detected: {len(peaks)}') @@ -80,7 +79,7 @@ # %% Generate HTML report -psf_analysis_path = data_dir / dataset / 'psf_analysis' +psf_analysis_path = data_dir / (dataset + '_psf_analysis') generate_report( psf_analysis_path, data_dir, @@ -93,9 +92,10 @@ axis_labels, ) -# %% Deskew data +# %% Deskew data and analyze if raw and deskew: + # deskew num_chunks = 2 chunked_data = np.split(zyx_data, num_chunks, axis=-1) chunk_shape = chunked_data[0].shape @@ -109,13 +109,11 @@ ) # T, C, Z, Y, X = (1, 1) + chunk_shape - deskewed_shape, voxel_size = get_deskewed_data_shape( + deskewed_shape, _ = get_deskewed_data_shape( chunk_shape, settings.ls_angle_deg, settings.px_to_scan_ratio, settings.keep_overhang, - settings.average_n_slices, - settings.pixel_size_um, ) matrix = _get_transform_matrix( @@ -142,67 +140,64 @@ # identical to cpu deskew using ndi.affine_transform deskewed_data = np.concatenate(deskewed_chunks[::-1], axis=-2) - # TODO: average_n_slices - - # TODO: save deskewed data to zarr - - # df_deskew_gaussian_fit, df_deskew_1d_peak_width = analyze_psf( - # zyx_patches=deskewed_beads, - # bead_offsets=offsets, - # scale=voxel_size, - # ) - - # psf_analysis_path = data_dir / dataset / 'psf_analysis_deskewed' - # generate_report( - # psf_analysis_path, - # data_dir, - # dataset, - # deskewed_beads, - # peaks, - # df_deskew_gaussian_fit, - # df_deskew_1d_peak_width, - # voxel_size, - # axis_labels=("Z", "Y", "X"), - # ) - - # ct = np.cos(settings.ls_angle_deg * np.pi / 180) - # Z_shift = 0 - # if not settings.keep_overhang: - # Z_shift = int(np.floor(Y * ct * settings.px_to_scan_ratio)) - # matrix = np.array( - # [ - # [ - # -settings.px_to_scan_ratio * ct, - # 0, - # settings.px_to_scan_ratio, - # Z_shift, - # ], - # [-1, 0, 0, Y - 1], - # [0, -1, 0, X - 1], - # ] - # ) - - # deskewed_data = deskew_data( - # zyx_data, - # settings.ls_angle_deg, - # settings.px_to_scan_ratio, - # settings.keep_overhang, - # settings.average_n_slices, - # ) - - # # Create a zarr store - # transform = TransformationMeta( - # type="scale", - # scale=2 * (1,) + voxel_size, - # ) - # output_path = data_dir / (dataset + '_deskewed.zarr') - - # with open_ome_zarr(output_path, layout="hcs", mode="w", channel_names=channel_names) as output_dataset: - # pos = dataset.create_position('0', '0', '0') - # pos.create_image( - # name="0", - # data=deskewed_data, - # chunks=(1, 1) + deskewed_shape, # may be bigger than 500 MB - # transform=[transform], - # ) + averaged_deskewed_data = _average_n_slices( + deskewed_data, average_window_width=settings.average_n_slices + ) + + deskewed_shape, voxel_size = get_deskewed_data_shape( + zyx_data.shape, + settings.ls_angle_deg, + settings.px_to_scan_ratio, + settings.keep_overhang, + settings.average_n_slices, + settings.pixel_size_um, + ) + + # detect peaks again :( + deskewed_peaks = detect_peaks(averaged_deskewed_data, raw=False) + print(f'Number of peaks detected: {len(peaks)}') + + deskewed_beads, deskewed_offsets = extract_beads( + zyx_data=averaged_deskewed_data, + points=deskewed_peaks, + scale=scale, + ) + + df_deskewed_gaussian_fit, df_deskewed_1d_peak_width = analyze_psf( + zyx_patches=deskewed_beads, + bead_offsets=deskewed_offsets, + scale=voxel_size, + ) + + output_zarr_path = data_dir / (dataset + '_deskewed.zarr') + report_path = data_dir / (dataset + '_deskewed_psf_analysis') + generate_report( + report_path, + output_zarr_path, + dataset, + deskewed_beads, + deskewed_peaks, + df_deskewed_gaussian_fit, + 
df_deskewed_1d_peak_width, + voxel_size, + ('Z', 'Y', 'X'), + ) + + # Save to zarr store + transform = TransformationMeta( + type="scale", + scale=2 * (1,) + voxel_size, + ) + + with open_ome_zarr( + output_zarr_path, layout="hcs", mode="w", channel_names=channel_names + ) as output_dataset: + pos = output_dataset.create_position('0', '0', '0') + pos.create_image( + name="0", + data=averaged_deskewed_data[None, None, ...], + chunks=(1, 1, 50) + deskewed_shape[1:], # may be bigger than 500 MB + transform=[transform], + ) + # %% diff --git a/mantis/analysis/analyze_psf.py b/mantis/analysis/analyze_psf.py index f918e6c1..5da64a98 100644 --- a/mantis/analysis/analyze_psf.py +++ b/mantis/analysis/analyze_psf.py @@ -88,9 +88,13 @@ def generate_report( num_successful = len(df_gaussian_fit) num_failed = num_beads - num_successful + raw = False + if axis_labels == ("SCAN", "TILT", "COVERSLIP"): + raw = True + # make plots (bead_psf_slices_paths, fwhm_vs_acq_axes_paths, psf_amp_paths) = _make_plots( - output_path, beads, df_gaussian_fit, df_1d_peak_width, scale, axis_labels, raw=False + output_path, beads, df_gaussian_fit, df_1d_peak_width, scale, axis_labels, raw=raw ) # calculate statistics From 03230470a31cfdbc92947bd06ba9c322a25a1f81 Mon Sep 17 00:00:00 2001 From: Talon Chandler Date: Fri, 2 Feb 2024 17:58:34 -0800 Subject: [PATCH 07/57] initial psf simulations --- mantis/analysis/deskew.py | 43 +++++--- mantis/analysis/scripts/simulate_psf.py | 138 ++++++++++++++++++++++++ 2 files changed, 169 insertions(+), 12 deletions(-) create mode 100644 mantis/analysis/scripts/simulate_psf.py diff --git a/mantis/analysis/deskew.py b/mantis/analysis/deskew.py index ec42c57f..f5e71bab 100644 --- a/mantis/analysis/deskew.py +++ b/mantis/analysis/deskew.py @@ -2,6 +2,33 @@ import scipy +def _deskew_matrix(px_to_scan_ratio, ct): + """3x3 deskew matrix, relating sampling coordinates to deskewed coordinates + + Parameters + ---------- + px_to_scan_ratio : float + Ratio of the pixel size to light sheet scan step + ct : float + cos(theta), where theta is the light-sheet tilt angle + + Returns + ------- + 3x3 array + """ + return np.array( + [ + [ + -px_to_scan_ratio * ct, + 0, + px_to_scan_ratio, + ], + [-1, 0, 0], + [0, -1, 0], + ] + ) + + def _average_n_slices(data, average_window_width=1): """Average an array over its first axis @@ -162,18 +189,10 @@ def deskew_data( if not keep_overhang: Z_shift = int(np.floor(Y * ct * px_to_scan_ratio)) - matrix = np.array( - [ - [ - -px_to_scan_ratio * ct, - 0, - px_to_scan_ratio, - Z_shift, - ], - [-1, 0, 0, Y - 1], - [0, -1, 0, X - 1], - ] - ) + deskew_matrix = _deskew_matrix(px_to_scan_ratio, ct) + translation = np.array([[Z_shift], [Y - 1], [X - 1]]) + matrix = np.concatenate((deskew_matrix, translation), axis=1) + output_shape, _ = get_deskewed_data_shape( raw_data.shape, ls_angle_deg, px_to_scan_ratio, keep_overhang ) diff --git a/mantis/analysis/scripts/simulate_psf.py b/mantis/analysis/scripts/simulate_psf.py new file mode 100644 index 00000000..0656e49a --- /dev/null +++ b/mantis/analysis/scripts/simulate_psf.py @@ -0,0 +1,138 @@ +# Variable abbreviations +# stc = scan, tilt, coverslip --- raw data coordinates +# otf = optical transfer function +# psf = point spread function + +from waveorder.models.isotropic_fluorescent_thick_3d import calculate_transfer_function +from mantis.analysis.deskew import _deskew_matrix +import scipy +import numpy as np +import torch +import napari + + +def _apply_centered_affine(zyx_array, M): + """Applies a translation-free affine 
transformation to a 3D array while + maintaining the center coordinate at (zyx_array.shape // 2) + + For mantis - useful for moving PSFs between skewed and deskewed spaces. + + Parameters + ---------- + zyx_array : NDArray with ndim == 3 + 3D input array + M : NDArry with shape = (3, 3) + 3x3 transformation matrix, the translation-free part of an affine matrix + Can model reflection, scaling, rotation, and shear. + + Returns + ------- + NDArray with ndim == 3 + transformed matrix with shape matched to input + """ + + # keep (zyx_array.shape // 2) centered + offset = np.dot(np.eye(3) - M, np.array(zyx_array.shape) // 2) + + return scipy.ndimage.affine_transform( + zyx_array, + M, + offset=offset, + output_shape=zyx_array.shape, + order=1, + cval=0, + ) + + +v = napari.Viewer() + +# ---- psf input parameters (to be refactored) + +# sampling parameters +psf_stc_shape = 3 * (30,) +psf_stc_scale = 3 * (0.116,) # um +supersample_factor = 5 + +# illumination and detection parameters +ls_angle_deg = 30 + +# illumination parameters +ls_scan_waist_fwhm = 1.0 # um + +# detection parameters +wavelength_emission = 0.550 # um +numerical_aperture_detection = 1.35 +index_of_refraction_media = 1.404 + +# ---- + +# internal simulation parameters +px_to_scan_ratio = psf_stc_scale[1] / psf_stc_scale[0] +ct = np.cos(ls_angle_deg * np.pi / 180) +st = np.sin(ls_angle_deg * np.pi / 180) +deskew_matrix = _deskew_matrix(px_to_scan_ratio, ct) +skew_matrix = np.linalg.inv(deskew_matrix) + +psf_stc_ss_shape = np.array(psf_stc_shape) * supersample_factor +psf_stc_ss_scale = np.array(psf_stc_scale) / supersample_factor +psf_zyx_ss_scale = np.array( + [st * psf_stc_ss_scale[0], psf_stc_ss_scale[1], psf_stc_ss_scale[2]] +) + +# calculate illumination psf +ls_scan_waist_std = ls_scan_waist_fwhm / (2 * np.sqrt(2 * np.log(2))) +scan_positions = psf_stc_ss_scale[0] * ( + np.arange(psf_stc_ss_shape[0]) - (psf_stc_ss_shape[0] / 2) +) +illumination_psf_scan = np.exp(-(scan_positions**2) / (2 * ls_scan_waist_std**2)) + +# calculate detection psf in zyx coordinates using waveorder +detection_otf_zyx = calculate_transfer_function( + psf_stc_ss_shape, + psf_zyx_ss_scale[1], + psf_zyx_ss_scale[0], + wavelength_emission, + 0, + index_of_refraction_media, + numerical_aperture_detection, +) + +detection_psf_zyx = np.array( + torch.real(torch.fft.ifftshift(torch.fft.ifftn(detection_otf_zyx, dim=(0, 1, 2)))) +) + +detection_psf_stc = _apply_centered_affine(detection_psf_zyx, skew_matrix) +psf_stc = np.einsum('i,ijk->ijk', illumination_psf_scan, detection_psf_stc) + +# this dense illumination_psf is not necessary, but it's useful for debugging +illumination_psf_stc = np.einsum( + 'i,ijk->ijk', illumination_psf_scan, np.ones_like(detection_psf_stc) +) + +# prepare viewer +v.scale_bar.visible = True +v.scale_bar.unit = "um" + +v.add_image(illumination_psf_stc, name="raw illumination", scale=psf_stc_ss_scale) +v.add_image(detection_psf_stc, name="raw detection", scale=psf_stc_ss_scale) +v.add_image(psf_stc, name="raw total", scale=psf_stc_ss_scale) + +v.add_image( + _apply_centered_affine(illumination_psf_stc, deskew_matrix), + name="deskewed illumination", + scale=psf_stc_ss_scale, +) +v.add_image( + _apply_centered_affine(detection_psf_stc, deskew_matrix), + name="deskewed detection", + scale=psf_stc_ss_scale, +) +v.add_image( + _apply_centered_affine(psf_stc, deskew_matrix), + name="deskewed total", + scale=psf_stc_ss_scale, +) + +import pdb + +pdb.set_trace() From 5f274cf88290429fe0bb8e2c0b372463e4a62b01 Mon Sep 17 00:00:00 2001 From: 
Ziwen Liu <67518483+ziw-liu@users.noreply.github.com> Date: Wed, 7 Feb 2024 16:21:42 -0800 Subject: [PATCH 08/57] GPU peak detection with torch (#129) * GPU peak detection with torch * update bead detector * update psf measurement script * style --------- Co-authored-by: Ivan Ivanov --- mantis/acquisition/scripts/measure_psf.py | 106 +++++++++++-- mantis/analysis/analyze_psf.py | 179 ++++++++++++++++++---- 2 files changed, 240 insertions(+), 45 deletions(-) diff --git a/mantis/acquisition/scripts/measure_psf.py b/mantis/acquisition/scripts/measure_psf.py index c932b637..9732984c 100644 --- a/mantis/acquisition/scripts/measure_psf.py +++ b/mantis/acquisition/scripts/measure_psf.py @@ -1,9 +1,13 @@ # %% +import gc +import time + from pathlib import Path import cupy as cp import napari import numpy as np +import torch from cupyx.scipy.ndimage import affine_transform from iohub.ngff_meta import TransformationMeta @@ -22,14 +26,54 @@ get_deskewed_data_shape, ) +epi_bead_detection_settings = { + "block_size": (8, 8, 8), + "blur_kernel_size": 3, + "min_distance": 20, + "threshold_abs": 200.0, + "max_num_peaks": 500, + "exclude_border": (5, 5, 5), + "device": "cuda" if torch.cuda.is_available() else "cpu", +} + +ls_bead_detection_settings = { + "block_size": (64, 64, 32), + "blur_kernel_size": 3, + "nms_distance": 32, + "min_distance": 50, + "threshold_abs": 250.0, + "max_num_peaks": 2000, + "exclude_border": (5, 10, 5), + "device": "cuda" if torch.cuda.is_available() else "cpu", +} + +deskew_bead_detection_settings = { + "block_size": (64, 32, 16), + "blur_kernel_size": 3, + "nms_distance": 10, + "min_distance": 50, + "threshold_abs": 200.0, + "max_num_peaks": 500, + "exclude_border": (5, 5, 5), + "device": "cuda" if torch.cuda.is_available() else "cpu", +} + # %% Load data - swap with data acquisition block -data_dir = Path(r'Z:\2023_03_30_beads') +deskew = True +view = False + +data_dir = Path(r'E:\temp_2023_03_30_beads') dataset = 'beads_ip_0.74_1' -data_path = data_dir / dataset -# data_dir = Path(r'Z:\2022_12_22_LS_after_SL2') +scale = (0.1565, 0.116, 0.116) # in um +axis_labels = ("SCAN", "TILT", "COVERSLIP") + +# data_dir = Path(r'E:\temp_2022_12_22_LS_after_SL2') # dataset = 'epi_beads_100nm_fl_mount_after_SL2_1' + +data_path = data_dir / dataset + # zyx_data = tifffile.imread(data_dir / dataset / 'LS_beads_100nm_fl_mount_after_SL2_1_MMStack_Pos0.ome.tif') # scale = (0.250, 0.069, 0.069) # in um # axis_labels = ("Z", "Y", "X") @@ -43,28 +87,35 @@ zyx_data = ds.get_array(0)[0, 0] channel_names = ds.channel_names -scale = (0.1565, 0.116, 0.116) # in um -axis_labels = ("SCAN", "TILT", "COVERSLIP") - raw = False if axis_labels == ("SCAN", "TILT", "COVERSLIP"): raw = True -deskew = True # %% Detect peaks -peaks = detect_peaks(zyx_data, raw=raw) -print(f'Number of peaks detected: {len(peaks)}') +t1 = time.time() +peaks = detect_peaks( + zyx_data, + **ls_bead_detection_settings, + verbose=True, +) +gc.collect() +torch.cuda.empty_cache() +t2 = time.time() +print(f'Time to detect peaks: {t2-t1}') # %% Visualize in napari -viewer = napari.Viewer() -viewer.add_image(zyx_data) - -viewer.add_points(peaks, name='peaks local max', size=12, symbol='ring', edge_color='yellow') +if view: + viewer = napari.Viewer() + viewer.add_image(zyx_data) + viewer.add_points( + peaks, name='peaks local max', size=12, symbol='ring', edge_color='yellow' + ) # %% Extract and analyze bead patches +t1 = time.time() beads, offsets = extract_beads( zyx_data=zyx_data, points=peaks, @@ -76,6 +127,8 @@ bead_offsets=offsets, 
scale=scale, ) +t2 = time.time() +print(f'Time to analyze PSFs: {t2-t1}') # %% Generate HTML report @@ -107,8 +160,8 @@ keep_overhang=True, average_n_slices=3, ) - # T, C, Z, Y, X = (1, 1) + chunk_shape + t1 = time.time() deskewed_shape, _ = get_deskewed_data_shape( chunk_shape, settings.ls_angle_deg, @@ -135,6 +188,7 @@ ) deskewed_chunks.append(cp.asnumpy(deskewed_data_gpu)) del deskewed_data_gpu + cp._default_memory_pool.free_all_blocks() # concatenate arrays in reverse order # identical to cpu deskew using ndi.affine_transform @@ -152,10 +206,27 @@ settings.average_n_slices, settings.pixel_size_um, ) + t2 = time.time() + print(f'Time to deskew: {t2-t1: .2f} seconds') # detect peaks again :( - deskewed_peaks = detect_peaks(averaged_deskewed_data, raw=False) - print(f'Number of peaks detected: {len(peaks)}') + t1 = time.time() + deskewed_peaks = detect_peaks( + averaged_deskewed_data, + **deskew_bead_detection_settings, + verbose=True, + ) + gc.collect() + torch.cuda.empty_cache() + t2 = time.time() + print(f'Time to detect deskewed peaks: {t2-t1: .2f} seconds') + + if view: + viewer2 = napari.Viewer() + viewer2.add_image(averaged_deskewed_data) + viewer2.add_points( + deskewed_peaks, name='peaks local max', size=12, symbol='ring', edge_color='yellow' + ) deskewed_beads, deskewed_offsets = extract_beads( zyx_data=averaged_deskewed_data, @@ -163,11 +234,14 @@ scale=scale, ) + t1 = time.time() df_deskewed_gaussian_fit, df_deskewed_1d_peak_width = analyze_psf( zyx_patches=deskewed_beads, bead_offsets=deskewed_offsets, scale=voxel_size, ) + t2 = time.time() + print(f'Time to analyze deskewed PSFs: {t2-t1: .2f} seconds') output_zarr_path = data_dir / (dataset + '_deskewed.zarr') report_path = data_dir / (dataset + '_deskewed_psf_analysis') diff --git a/mantis/analysis/analyze_psf.py b/mantis/analysis/analyze_psf.py index 5da64a98..b11f93b6 100644 --- a/mantis/analysis/analyze_psf.py +++ b/mantis/analysis/analyze_psf.py @@ -10,14 +10,14 @@ import matplotlib.pyplot as plt import numpy as np import pandas as pd +import torch +import torch.nn.functional as F from napari_psf_analysis.psf_analysis.extract.BeadExtractor import BeadExtractor from napari_psf_analysis.psf_analysis.image import Calibrated3DImage from napari_psf_analysis.psf_analysis.psf import PSF from numpy.typing import ArrayLike -from scipy.ndimage import uniform_filter from scipy.signal import peak_widths -from skimage.feature import peak_local_max def _make_plots( @@ -107,8 +107,8 @@ def generate_report( fwhm_pc_mean = [ df_gaussian_fit[col].mean() for col in ('zyx_pc3_fwhm', 'zyx_pc2_fwhm', 'zyx_pc1_fwhm') ] - fwhm_1d_mean = df_1d_peak_width.mean() - fwhm_1d_std = df_1d_peak_width.std() + fwhm_1d_mean = [df_1d_peak_width[col].mean() for col in ('1d_z_fwhm', '1d_y_fwhm', '1d_x_fwhm')] + fwhm_1d_std = [df_1d_peak_width[col].std() for col in ('1d_z_fwhm', '1d_y_fwhm', '1d_x_fwhm')] # generate html report html_report = _generate_html( @@ -353,7 +353,7 @@ def _generate_html( ### Number of beads -* Defected: {num_beads_total_good_bad[0]} +* Detected: {num_beads_total_good_bad[0]} * Analyzed: {num_beads_total_good_bad[1]} * Skipped: {num_beads_total_good_bad[2]} @@ -423,29 +423,150 @@ def _generate_html( def detect_peaks( - zyx_data, - raw=False, - min_distance=25, - threshold_abs=200, - num_peaks=1000, - exclude_border=(3, 10, 10), + zyx_data: np.ndarray, + block_size: int | tuple[int, int, int] = (8, 8, 8), + nms_distance: int = 3, + min_distance: int = 40, + threshold_abs: float = 200.0, + max_num_peaks: int = 500, + exclude_border: 
tuple[int, int, int] | None = None, + blur_kernel_size: int = 3, + device: str = "cpu", + verbose: bool = False, ): - # helps speed up peak detection - if raw: - zyx_data = np.swapaxes(zyx_data, 0, 1) - - # runs in about 10 seconds, sensitive to parameters - # finds ~310 peaks - peaks = peak_local_max( - uniform_filter(zyx_data, size=3), # helps remove hot pixels, adds ~3s - min_distance=min_distance, - threshold_abs=threshold_abs, - num_peaks=num_peaks, # limit to top 1000 peaks - exclude_border=exclude_border, # in zyx - ) - - if raw: - zyx_data = np.swapaxes(zyx_data, 0, 1) - peaks = peaks[:, (1, 0, 2)] + """Detect peaks with local maxima. + This is an approximate torch implementation of `skimage.feature.peak_local_max`. + The algorithm works well with small kernel size, by default (8, 8, 8) which + generates a large number of peak candidates, and strict peak rejection criteria + - e.g. max_num_peaks=500, which selects top 500 brightest peaks and + threshold_abs=200.0, which selects peaks with intensity of at least 200 counts. + + Parameters + ---------- + zyx_data : np.ndarray + 3D image data + block_size : int | tuple[int, int, int], optional + block size to find approximate local maxima, by default (8, 8, 8) + nms_distance : int, optional + non-maximum suppression distance, by default 3 + distance is calculated assuming a Cartesian coordinate system + min_distance : int, optional + minimum distance between detections, + distance needs to be smaller than block size for efficiency, + by default 40 + threshold_abs : float, optional + lower bound of detected peak intensity, by default 200.0 + max_num_peaks : int, optional + max number of candidate detections to consider, by default 500 + exclude_border : tuple[int, int, int] | None, optional + width of borders to exclude, by default None + blur_kernel_size : int, optional + uniform kernel size to blur the image before detection + to avoid hot pixels, by default 3 + device : str, optional + compute device string for torch, + e.g. 
"cpu" (slow), "cuda" (single GPU) or "cuda:0" (0th GPU among multiple), + by default "cpu" + verbose : bool, optional + print number of peaks detected and rejected, by default False + + Returns + ------- + np.ndarray + 3D coordinates of detected peaks (N, 3) + + """ + zyx_shape = zyx_data.shape[-3:] + zyx_image = torch.from_numpy(zyx_data.astype(np.float16)[None, None]) + + if device != "cpu": + zyx_image = zyx_image.to(device) + + if blur_kernel_size: + if blur_kernel_size % 2 != 1: + raise ValueError(f"kernel_size={blur_kernel_size} must be an odd number") + # smooth image + # input and output variables need to be different for proper memory clearance + smooth_image = F.avg_pool3d( + input=zyx_image, + kernel_size=blur_kernel_size, + stride=1, + padding=blur_kernel_size // 2, + count_include_pad=False, + ) - return peaks + # detect peaks as local maxima + peak_value, peak_idx = ( + p.flatten().clone() + for p in F.max_pool3d( + smooth_image, + kernel_size=block_size, + stride=block_size, + padding=(block_size[0] // 2, block_size[1] // 2, block_size[2] // 2), + return_indices=True, + ) + ) + num_peaks = len(peak_idx) + + # select only top max_num_peaks brightest peaks + # peak_value (and peak_idx) are now sorted by brightness + peak_value, sort_mask = peak_value.topk(min(max_num_peaks, peak_value.nelement())) + peak_idx = peak_idx[sort_mask] + num_rejected_max_num_peaks = num_peaks - len(sort_mask) + + # select only peaks above intensity threshold + num_rejected_threshold_abs = 0 + if threshold_abs: + abs_mask = peak_value > threshold_abs + peak_value = peak_value[abs_mask] + peak_idx = peak_idx[abs_mask] + num_rejected_threshold_abs = sum(~abs_mask) + + # remove artifacts of multiple peaks detected at block boundaries + # requires torch>=2.2 + coords = torch.stack(torch.unravel_index(peak_idx, zyx_shape), -1) + fcoords = coords.float() + dist = torch.cdist(fcoords, fcoords) + dist_mask = torch.ones(len(coords), dtype=bool, device=device) + + nearby_peaks = torch.nonzero(torch.triu(dist < nms_distance, diagonal=1)) + dist_mask[nearby_peaks[:, 1]] = False # peak in second column is dimmer + num_rejected_nms_distance = sum(~dist_mask) + + # remove peaks withing min_distance of each other + num_rejected_min_distance = 0 + if min_distance: + _dist_mask = dist < min_distance + # exclude distances from nearby peaks rejected above + _dist_mask[nearby_peaks[:, 0], nearby_peaks[:, 1]] = False + dist_mask &= _dist_mask.sum(1) < 2 # Ziwen magic + num_rejected_min_distance = sum(~dist_mask) - num_rejected_nms_distance + coords = coords[dist_mask] + + # remove peaks near the border + num_rejected_exclude_border = 0 + match exclude_border: + case None: + pass + case (int(), int(), int()): + for dim, size in enumerate(exclude_border): + border_mask = (size < coords[:, dim]) & ( + coords[:, dim] < zyx_shape[dim] - size + ) + coords = coords[border_mask] + num_rejected_exclude_border += sum(~border_mask) + case _: + raise ValueError(f"invalid argument exclude_border={exclude_border}") + + num_peaks_returned = len(coords) + if verbose: + print(f'Number of peaks detected: {num_peaks}') + print(f'Number of peaks rejected by max_num_peaks: {num_rejected_max_num_peaks}') + print(f'Number of peaks rejected by threshold_abs: {num_rejected_threshold_abs}') + print(f'Number of peaks rejected by nms_distance: {num_rejected_nms_distance}') + print(f'Number of peaks rejected by min_distance: {num_rejected_min_distance}') + print(f'Number of peaks rejected by exclude_border: {num_rejected_exclude_border}') + 
print(f'Number of peaks returned: {num_peaks_returned}') + + del zyx_image, smooth_image + return coords.cpu().numpy() From 05673e745fc00660ad4fcc07a4f2660d85913e2f Mon Sep 17 00:00:00 2001 From: Ivan Ivanov Date: Wed, 7 Feb 2024 16:32:21 -0800 Subject: [PATCH 09/57] ignore psf analysis warnings --- mantis/acquisition/scripts/measure_psf.py | 25 ++++++++++++++--------- 1 file changed, 15 insertions(+), 10 deletions(-) diff --git a/mantis/acquisition/scripts/measure_psf.py b/mantis/acquisition/scripts/measure_psf.py index 9732984c..8fd605fe 100644 --- a/mantis/acquisition/scripts/measure_psf.py +++ b/mantis/acquisition/scripts/measure_psf.py @@ -8,6 +8,7 @@ import napari import numpy as np import torch +import warnings from cupyx.scipy.ndimage import affine_transform from iohub.ngff_meta import TransformationMeta @@ -122,11 +123,13 @@ scale=scale, ) -df_gaussian_fit, df_1d_peak_width = analyze_psf( - zyx_patches=beads, - bead_offsets=offsets, - scale=scale, -) +with warnings.catch_warnings(): + warnings.simplefilter("ignore") + df_gaussian_fit, df_1d_peak_width = analyze_psf( + zyx_patches=beads, + bead_offsets=offsets, + scale=scale, + ) t2 = time.time() print(f'Time to analyze PSFs: {t2-t1}') @@ -235,11 +238,13 @@ ) t1 = time.time() - df_deskewed_gaussian_fit, df_deskewed_1d_peak_width = analyze_psf( - zyx_patches=deskewed_beads, - bead_offsets=deskewed_offsets, - scale=voxel_size, - ) + with warnings.catch_warnings(): + warnings.simplefilter("ignore") + df_deskewed_gaussian_fit, df_deskewed_1d_peak_width = analyze_psf( + zyx_patches=deskewed_beads, + bead_offsets=deskewed_offsets, + scale=voxel_size, + ) t2 = time.time() print(f'Time to analyze deskewed PSFs: {t2-t1: .2f} seconds') From dcb977abc4d31f2de18014c732f2f1ddb4353558 Mon Sep 17 00:00:00 2001 From: Ivan Ivanov Date: Tue, 13 Feb 2024 17:12:34 -0800 Subject: [PATCH 10/57] update script with acquisition settings --- mantis/acquisition/scripts/measure_psf.py | 79 +++++++++++++++++------ 1 file changed, 58 insertions(+), 21 deletions(-) diff --git a/mantis/acquisition/scripts/measure_psf.py b/mantis/acquisition/scripts/measure_psf.py index 8fd605fe..050cde3c 100644 --- a/mantis/acquisition/scripts/measure_psf.py +++ b/mantis/acquisition/scripts/measure_psf.py @@ -13,6 +13,7 @@ from cupyx.scipy.ndimage import affine_transform from iohub.ngff_meta import TransformationMeta from iohub.reader import open_ome_zarr, read_micromanager +from pycromanager import Acquisition, multi_d_acquisition_events, Core from mantis.analysis.AnalysisSettings import DeskewSettings from mantis.analysis.analyze_psf import ( @@ -42,7 +43,7 @@ "blur_kernel_size": 3, "nms_distance": 32, "min_distance": 50, - "threshold_abs": 250.0, + "threshold_abs": 200.0, "max_num_peaks": 2000, "exclude_border": (5, 10, 5), "device": "cuda" if torch.cuda.is_available() else "cpu", @@ -59,34 +60,70 @@ "device": "cuda" if torch.cuda.is_available() else "cpu", } -# %% Load data - swap with data acquisition block - -deskew = True -view = False +mmc = Core() -data_dir = Path(r'E:\temp_2023_03_30_beads') -dataset = 'beads_ip_0.74_1' +# %% +data_dir = Path(r'D:\2024_02_08_mantis_alignment') +dataset = '2024_02_13_LS_0.169_96wp_IP_Galvo_0.72' + +# epi settings +# z_stage = 'PiezoStage:Q:35' +# z_step = 0.2 # in um +# z_range = (-2, 50) # in um +# pixel_size = 0.069 # in um +# axis_labels = ("Z", "Y", "X") -scale = (0.1565, 0.116, 0.116) # in um +# ls_settings +z_stage = 'AP Galvo' +z_step = 0.205 # in um +z_range = (-100, 85) # in um +pixel_size = 0.116 # in um axis_labels = ("SCAN", 
"TILT", "COVERSLIP") -# data_dir = Path(r'E:\temp_2022_12_22_LS_after_SL2') -# dataset = 'epi_beads_100nm_fl_mount_after_SL2_1' +# epi illumination ls detection settings +# z_stage = 'PiezoStage:Q:35' +# z_step = 0.2 # in um +# z_range = (-2, 50) # in um +# pixel_size = 0.116 # in um +# axis_labels = ("Z", "Y", "X") +deskew = True +view = True +scale = (z_step, pixel_size, pixel_size) data_path = data_dir / dataset -# zyx_data = tifffile.imread(data_dir / dataset / 'LS_beads_100nm_fl_mount_after_SL2_1_MMStack_Pos0.ome.tif') -# scale = (0.250, 0.069, 0.069) # in um -# axis_labels = ("Z", "Y", "X") +mmc.set_property('Core', 'Focus', z_stage) +z_pos = mmc.get_position(z_stage) +events = multi_d_acquisition_events( + z_start=z_pos + z_range[0], + z_end=z_pos + z_range[1], + z_step=z_step, +) -if str(data_path).endswith('.zarr'): - ds = open_ome_zarr(data_path / '0/0/0') - zyx_data = ds.data[0, 0] - channel_names = ds.channel_names -else: - ds = read_micromanager(str(data_path)) - zyx_data = ds.get_array(0)[0, 0] - channel_names = ds.channel_names +camera = mmc.get_camera_device() +if camera == 'Prime BSI Express': + mmc.set_property('Prime BSI Express', 'ExposeOutMode', 'Rolling Shutter') + mmc.set_property('TS2_TTL1-8', 'Blanking', 'On') + mmc.set_property('TS2_DAC03', 'Sequence', 'On') + +mmc.set_auto_shutter(False) +mmc.set_shutter_open(True) +with Acquisition( + directory=str(data_dir), + name=dataset, + show_display=False, +) as acq: + acq.acquire(events) +mmc.set_shutter_open(False) +mmc.set_auto_shutter(True) +mmc.set_position(z_stage, z_pos) + +if camera == 'Prime BSI Express': + mmc.set_property('TS2_TTL1-8', 'Blanking', 'Off') + +ds = acq.get_dataset() +zyx_data = np.asarray(ds.as_array()) +channel_names = ['GFP'] raw = False if axis_labels == ("SCAN", "TILT", "COVERSLIP"): From a212579aeac63a817b47f79f6ad04b5a07feb66e Mon Sep 17 00:00:00 2001 From: Ivan Ivanov Date: Tue, 13 Feb 2024 18:04:09 -0800 Subject: [PATCH 11/57] better paths formatting --- mantis/analysis/analyze_psf.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/mantis/analysis/analyze_psf.py b/mantis/analysis/analyze_psf.py index b11f93b6..b0ab8241 100644 --- a/mantis/analysis/analyze_psf.py +++ b/mantis/analysis/analyze_psf.py @@ -121,9 +121,9 @@ def generate_report( fwhm_3d_mean, fwhm_3d_std, fwhm_pc_mean, - bead_psf_slices_paths, - fwhm_vs_acq_axes_paths, - psf_amp_paths, + [str(_path.relative_to(output_path).as_posix()) for _path in bead_psf_slices_paths], + [str(_path.relative_to(output_path).as_posix()) for _path in fwhm_vs_acq_axes_paths], + [str(_path.relative_to(output_path).as_posix()) for _path in psf_amp_paths], axis_labels, ) From 77149b3eaea5aaaa6dd00d8ba124808b80089c12 Mon Sep 17 00:00:00 2001 From: Ivan Ivanov Date: Tue, 13 Feb 2024 18:04:41 -0800 Subject: [PATCH 12/57] better paths formatting --- mantis/analysis/analyze_psf.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/mantis/analysis/analyze_psf.py b/mantis/analysis/analyze_psf.py index b0ab8241..d7014953 100644 --- a/mantis/analysis/analyze_psf.py +++ b/mantis/analysis/analyze_psf.py @@ -332,9 +332,9 @@ def _generate_html( fwhm_3d_mean: tuple, fwhm_3d_std: tuple, fwhm_pc_mean: tuple, - bead_psf_slices_paths: tuple, - fwhm_vs_acq_axes_paths: tuple, - psf_amp_paths: tuple, + bead_psf_slices_paths: list, + fwhm_vs_acq_axes_paths: list, + psf_amp_paths: list, axis_labels: tuple, ): From 8c36deb37280601404c2e7796484e7a38d7e6584 Mon Sep 17 00:00:00 2001 From: Ivan Ivanov Date: Thu, 15 Feb 2024 16:17:09 
-0800
Subject: [PATCH 13/57] update measurement script

---
 mantis/acquisition/scripts/measure_psf.py | 21 +++++++++++----------
 1 file changed, 11 insertions(+), 10 deletions(-)

diff --git a/mantis/acquisition/scripts/measure_psf.py b/mantis/acquisition/scripts/measure_psf.py
index 050cde3c..b51bd362 100644
--- a/mantis/acquisition/scripts/measure_psf.py
+++ b/mantis/acquisition/scripts/measure_psf.py
@@ -63,8 +63,8 @@
 mmc = Core()

 # %%
-data_dir = Path(r'D:\2024_02_08_mantis_alignment')
-dataset = '2024_02_13_LS_0.169_96wp_IP_Galvo_0.72'
+data_dir = Path(r'D:\2024_02_15_mantis_alignment')
+dataset = '2024_02_15_LS_0.17_96wp_redo_epi_illum'

 # epi settings
 # z_stage = 'PiezoStage:Q:35'
 # z_step = 0.2 # in um
 # z_range = (-2, 50) # in um
 # pixel_size = 0.069 # in um
 # axis_labels = ("Z", "Y", "X")

 # ls_settings
+# z_stage = 'AP Galvo'
+# z_step = 0.205 # in um
+# z_range = (-100, 85) # in um
+# pixel_size = 0.116 # in um
+# axis_labels = ("SCAN", "TILT", "COVERSLIP")
+
+# epi illumination ls detection settings
 z_stage = 'AP Galvo'
 z_step = 0.205 # in um
 z_range = (-100, 85) # in um
 pixel_size = 0.116 # in um
 axis_labels = ("SCAN", "TILT", "COVERSLIP")

-# epi illumination ls detection settings
-# z_stage = 'PiezoStage:Q:35'
-# z_step = 0.2 # in um
-# z_range = (-2, 50) # in um
-# pixel_size = 0.116 # in um
-# axis_labels = ("Z", "Y", "X")
-
 deskew = True
-view = True
+view = False
 scale = (z_step, pixel_size, pixel_size)
 data_path = data_dir / dataset

@@ -124,6 +124,7 @@
 ds = acq.get_dataset()
 zyx_data = np.asarray(ds.as_array())
 channel_names = ['GFP']
+dataset = Path(ds.path).name

 raw = False
 if axis_labels == ("SCAN", "TILT", "COVERSLIP"):

From 822428c0263f045a4961d350ae6e5817725755b5 Mon Sep 17 00:00:00 2001
From: Ivan Ivanov
Date: Wed, 28 Feb 2024 14:03:57 -0800
Subject: [PATCH 14/57] add html report title

---
 mantis/analysis/analyze_psf.py | 7 +++++++
 1 file changed, 7 insertions(+)

diff --git a/mantis/analysis/analyze_psf.py b/mantis/analysis/analyze_psf.py
index d7014953..730b692b 100644
--- a/mantis/analysis/analyze_psf.py
+++ b/mantis/analysis/analyze_psf.py
@@ -411,9 +411,16 @@ def _generate_html(
     '''

+    head = f'''
+    <head>
+        <title>PSF Analysis: {dataset_name}</title>
+    </head>
+    '''
+
     html = markdown.markdown(report_str)
     formatted_html = f'''
 {css_style}
+{head}
 <article class="markdown-body">
 {html}
 </article>
 '''
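For reference, the report page assembled in _generate_html is plain Markdown
converted by the `markdown` package and wrapped in the github-markdown-css
article container, with the title block added by this patch. A minimal
standalone sketch of the same pattern, using demo values and omitting the
css_style block that the real report pulls from github-markdown.css:

    import webbrowser
    from pathlib import Path

    import markdown

    # stands in for the report_str assembled by _generate_html
    report_md = "# PSF Analysis\n\n* Detected beads: 42"

    body = markdown.markdown(report_md)  # Markdown -> HTML fragment
    page = f'''
    <head>
        <title>PSF Analysis: demo</title>
    </head>
    <article class="markdown-body">
    {body}
    </article>
    '''

    out = Path("demo_report.html")
    out.write_text(page)
    webbrowser.open(out.absolute().as_uri())  # display it, as generate_report does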
From 52c6f3b4abe626b4071ba5bb2df34f6290e5151f Mon Sep 17 00:00:00 2001 From: Ivan Ivanov Date: Tue, 19 Mar 2024 17:54:45 -0700 Subject: [PATCH 15/57] update psf analysis scripts --- mantis/acquisition/scripts/measure_psf.py | 144 +++++++++++++++------- mantis/analysis/analyze_psf.py | 2 +- 2 files changed, 100 insertions(+), 46 deletions(-) diff --git a/mantis/acquisition/scripts/measure_psf.py b/mantis/acquisition/scripts/measure_psf.py index b51bd362..a88387a7 100644 --- a/mantis/acquisition/scripts/measure_psf.py +++ b/mantis/acquisition/scripts/measure_psf.py @@ -15,6 +15,11 @@ from iohub.reader import open_ome_zarr, read_micromanager from pycromanager import Acquisition, multi_d_acquisition_events, Core +from mantis.acquisition.microscope_operations import ( + acquire_defocus_stack, + setup_kim101_stage +) + from mantis.analysis.AnalysisSettings import DeskewSettings from mantis.analysis.analyze_psf import ( analyze_psf, @@ -60,71 +65,120 @@ "device": "cuda" if torch.cuda.is_available() else "cpu", } +def check_acquisition_directory(root_dir: Path, acq_name: str, suffix='', idx=1) -> Path: + acq_dir = root_dir / f'{acq_name}_{idx}{suffix}' + if acq_dir.exists(): + return check_acquisition_directory(root_dir, acq_name, suffix, idx + 1) + return acq_dir + mmc = Core() # %% -data_dir = Path(r'D:\2024_02_15_mantis_alignment') -dataset = '2024_02_15_LS_0.17_96wp_redo_epi_illum' +data_dir = Path(r'D:\2024_03_18_mantis_alignment') +dataset = '2024_03_18_RR_Straight_O3_scan_Blackfly_smaller_z_step' +# dataset = '2024_03_18_epi_O1_benchmark' # epi settings # z_stage = 'PiezoStage:Q:35' # z_step = 0.2 # in um # z_range = (-2, 50) # in um -# pixel_size = 0.069 # in um +# pixel_size = 2 * 3.45 / 100 # in um +# # pixel_size = 3.45 / 55.7 # in um # axis_labels = ("Z", "Y", "X") -# ls_settings +# ls settings +# z_stage = 'AP Galvo' +# z_step = 0.205 # in um +# z_range = (-100, 85) # in um +# pixel_size = 0.116 # in um +# axis_labels = ("SCAN", "TILT", "COVERSLIP") + +# epi illumination rr detection settings # z_stage = 'AP Galvo' # z_step = 0.205 # in um # z_range = (-100, 85) # in um # pixel_size = 0.116 # in um # axis_labels = ("SCAN", "TILT", "COVERSLIP") -# epi illumination ls detection settings -z_stage = 'AP Galvo' -z_step = 0.205 # in um -z_range = (-100, 85) # in um -pixel_size = 0.116 # in um -axis_labels = ("SCAN", "TILT", "COVERSLIP") +# ls straight settings +z_stage = setup_kim101_stage('74000291') +step_per_um = 35 # matches ~30 nm per step quoted in PIA13 specs +z_start = 0 / step_per_um # in um +z_end = 1000 / step_per_um +z_step = 5 / step_per_um +z_range = np.arange(z_start, z_end + z_step, z_step) # in um +z_step /= 1.4 # count in 1.4x remote volume magnification +pixel_size = 3.45 / 40 / 1.4 # in um, counting the 1.4x remote volume magnification +axis_labels = ("Z", "Y", "X") + deskew = True -view = False +view = True scale = (z_step, pixel_size, pixel_size) data_path = data_dir / dataset -mmc.set_property('Core', 'Focus', z_stage) -z_pos = mmc.get_position(z_stage) -events = multi_d_acquisition_events( - z_start=z_pos + z_range[0], - z_end=z_pos + z_range[1], - z_step=z_step, -) +if isinstance(z_stage, str): + mmc.set_property('Core', 'Focus', z_stage) + z_pos = mmc.get_position(z_stage) + events = multi_d_acquisition_events( + z_start=z_pos + z_range[0], + z_end=z_pos + z_range[-1], + z_step=z_step, + ) -camera = mmc.get_camera_device() -if camera == 'Prime BSI Express': - mmc.set_property('Prime BSI Express', 'ExposeOutMode', 'Rolling Shutter') - mmc.set_property('TS2_TTL1-8', 
'Blanking', 'On') - mmc.set_property('TS2_DAC03', 'Sequence', 'On') - -mmc.set_auto_shutter(False) -mmc.set_shutter_open(True) -with Acquisition( - directory=str(data_dir), - name=dataset, - show_display=False, -) as acq: - acq.acquire(events) -mmc.set_shutter_open(False) -mmc.set_auto_shutter(True) -mmc.set_position(z_stage, z_pos) - -if camera == 'Prime BSI Express': - mmc.set_property('TS2_TTL1-8', 'Blanking', 'Off') - -ds = acq.get_dataset() -zyx_data = np.asarray(ds.as_array()) -channel_names = ['GFP'] -dataset = Path(ds.path).name + camera = mmc.get_camera_device() + if camera == 'Prime BSI Express' and z_stage == 'AP Galvo': + mmc.set_property('Prime BSI Express', 'ExposeOutMode', 'Rolling Shutter') + mmc.set_property('TS2_TTL1-8', 'Blanking', 'On') + mmc.set_property('TS2_DAC03', 'Sequence', 'On') + + mmc.set_auto_shutter(False) + mmc.set_shutter_open(True) + with Acquisition( + directory=str(data_dir), + name=dataset, + show_display=False, + ) as acq: + acq.acquire(events) + mmc.set_shutter_open(False) + mmc.set_auto_shutter(True) + mmc.set_position(z_stage, z_pos) + + if camera == 'Prime BSI Express' and z_stage == 'AP Galvo': + mmc.set_property('TS2_TTL1-8', 'Blanking', 'Off') + + ds = acq.get_dataset() + zyx_data = np.asarray(ds.as_array()) + channel_names = ['GFP'] + dataset = Path(ds.path).name + ds.close() + +else: + acq_dir = check_acquisition_directory(data_dir, dataset, suffix='.zarr') + dataset = acq_dir.stem + + mmc.set_auto_shutter(False) + mmc.set_shutter_open(True) + z_range_microsteps = (z_range * step_per_um).astype(int) + zyx_data = acquire_defocus_stack(mmc, z_stage, z_range_microsteps) + mmc.set_shutter_open(False) + mmc.set_auto_shutter(True) + + # save to zarr store + channel_names = ['GFP'] + with open_ome_zarr( + data_dir / (dataset + '.zarr'), + layout="hcs", + mode="w", + channel_names=channel_names, + ) as output_dataset: + pos = output_dataset.create_position('0', '0', '0') + pos.create_image( + name="0", + data=zyx_data[None, None, ...], + chunks=(1, 1, 50) + zyx_data.shape[1:], # may be bigger than 500 MB + ) + z_stage.close() raw = False if axis_labels == ("SCAN", "TILT", "COVERSLIP"): @@ -135,7 +189,7 @@ t1 = time.time() peaks = detect_peaks( zyx_data, - **ls_bead_detection_settings, + **epi_bead_detection_settings, verbose=True, ) gc.collect() @@ -171,7 +225,7 @@ t2 = time.time() print(f'Time to analyze PSFs: {t2-t1}') -# %% Generate HTML report +# Generate HTML report psf_analysis_path = data_dir / (dataset + '_psf_analysis') generate_report( diff --git a/mantis/analysis/analyze_psf.py b/mantis/analysis/analyze_psf.py index 730b692b..367fa99f 100644 --- a/mantis/analysis/analyze_psf.py +++ b/mantis/analysis/analyze_psf.py @@ -348,7 +348,7 @@ def _generate_html( * Name: `{dataset_name}` * Path: `{data_path}` -* Scale: {dataset_scale[::-1]} um +* Scale: {tuple(np.round(dataset_scale[::-1], 3))} um * Date analyzed: {datetime.datetime.now().strftime("%Y-%m-%d %H:%M:%S")} ### Number of beads From 4a8161cb97e5f870efdc20bfbebc14b60f2c3844 Mon Sep 17 00:00:00 2001 From: Talon Chandler Date: Sun, 5 May 2024 18:36:28 -0700 Subject: [PATCH 16/57] typo --- mantis/analysis/scripts/simulate_psf.py | 6 +----- 1 file changed, 1 insertion(+), 5 deletions(-) diff --git a/mantis/analysis/scripts/simulate_psf.py b/mantis/analysis/scripts/simulate_psf.py index 0656e49a..489cfad1 100644 --- a/mantis/analysis/scripts/simulate_psf.py +++ b/mantis/analysis/scripts/simulate_psf.py @@ -21,7 +21,7 @@ def _apply_centered_affine(zyx_array, M): ---------- zyx_array : NDArray 
with ndim == 3 3D input array - M : NDArry with shape = (3, 3) + M : NDArray with shape = (3, 3) 3x3 transformation matrix, the translation-free part of an affine matrix Can model reflection, scaling, rotation, and shear. @@ -132,7 +132,3 @@ def _apply_centered_affine(zyx_array, M): name="deskewed total", scale=psf_stc_ss_scale, ) - -import pdb - -pdb.set_trace() From 5ebe58c5c616419aef6c69ef5a9c3a760177bf92 Mon Sep 17 00:00:00 2001 From: Talon Chandler Date: Sun, 5 May 2024 18:37:01 -0700 Subject: [PATCH 17/57] update dependencies --- pyproject.toml | 3 +++ 1 file changed, 3 insertions(+) diff --git a/pyproject.toml b/pyproject.toml index 2672f689..d95b2bf2 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -29,13 +29,16 @@ dependencies = [ "natsort", "ndtiff>=2.0", "nidaqmx", + "napari-psf-analyis", "numpy", + "markdown", "pycromanager==0.28.1", "pydantic", "pylablib==1.4.1", "scipy", "slurmkit @ git+https://github.com/royerlab/slurmkit", "tifffile", + "torch>=2.3", "waveorder @ git+https://github.com/mehta-lab/waveorder", "largestinteriorrectangle", "antspyx", From 5fe793feb8f209f6735a0b321cf441e1f63b616d Mon Sep 17 00:00:00 2001 From: Talon Chandler Date: Sun, 5 May 2024 18:37:23 -0700 Subject: [PATCH 18/57] larger PSF area --- mantis/analysis/analyze_psf.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/mantis/analysis/analyze_psf.py b/mantis/analysis/analyze_psf.py index 367fa99f..0b28a289 100644 --- a/mantis/analysis/analyze_psf.py +++ b/mantis/analysis/analyze_psf.py @@ -144,7 +144,8 @@ def generate_report( def extract_beads(zyx_data: ArrayLike, points: ArrayLike, scale: tuple): - patch_size = (scale[0] * 15, scale[1] * 18, scale[2] * 18) + patch_size = (scale[0] * 90, scale[1] * 90, scale[2] * 90) + print(patch_size) # extract bead patches bead_extractor = BeadExtractor( From b8623fdd56af27f8571d0c4521eab9fbe3a72934 Mon Sep 17 00:00:00 2001 From: Talon Chandler Date: Sun, 5 May 2024 18:37:32 -0700 Subject: [PATCH 19/57] main fitting script --- mantis/analysis/scripts/fit_psf_to_beads.py | 189 ++++++++++++++++++++ 1 file changed, 189 insertions(+) create mode 100644 mantis/analysis/scripts/fit_psf_to_beads.py diff --git a/mantis/analysis/scripts/fit_psf_to_beads.py b/mantis/analysis/scripts/fit_psf_to_beads.py new file mode 100644 index 00000000..5c2f5a28 --- /dev/null +++ b/mantis/analysis/scripts/fit_psf_to_beads.py @@ -0,0 +1,189 @@ +# %% +import gc +import napari +import numpy as np +import time +import torch + +from mantis.analysis.analyze_psf import detect_peaks, extract_beads +from mantis.analysis.deskew import _deskew_matrix +from mantis.analysis.scripts.simulate_psf import _apply_centered_affine +from iohub import read_micromanager +from waveorder import optics + +# %% Load beads (from ndtiff for now) +data_dir = ( + "/hpc/instruments/cm.mantis/2024_04_23_mantis_alignment/2024_05_05_LS_Oryx_LS_illum_8/" +) +input_dataset = read_micromanager(data_dir, data_type="ndtiff") +stc_data = input_dataset.get_array(position="0")[0, 0] + +# manual...pull from zarr later +s_step = 5 / 35 / 1.4 +tc_size = 3.45 / 40 / 1.4 +stc_scale = (s_step, tc_size, tc_size) + + +# %% Detect peaks and find an "average PSF" +ls_bead_detection_settings = { + "block_size": (64, 64, 32), + "blur_kernel_size": 3, + "nms_distance": 32, + "min_distance": 50, + "threshold_abs": 200.0, + "max_num_peaks": 2000, + "exclude_border": (5, 10, 5), + "device": "cuda" if torch.cuda.is_available() else "cpu", +} + +t1 = time.time() +peaks = detect_peaks( + stc_data, + 
**ls_bead_detection_settings, + verbose=True, +) +gc.collect() +torch.cuda.empty_cache() +t2 = time.time() +print(f'Time to detect peaks: {t2-t1}') + +# %% Extract beads +beads, offsets = extract_beads( + zyx_data=stc_data, + points=peaks, + scale=stc_scale, +) +stc_shape = beads[0].shape + +# Filter PSFs with different shapes +filtered_beads = [x for x in beads if x.shape == stc_shape] +bzyx_data = np.stack(filtered_beads) +normalized_bzyx_data = bzyx_data / np.max(bzyx_data, axis=(-3, -2, -1))[:, None, None, None] +average_psf = np.mean(normalized_bzyx_data, axis=0) + +# %% View PSFs +import napari + +v = napari.Viewer() +v.add_image(normalized_bzyx_data) +v.add_image(average_psf) + + +# %% Generate simulated PSF library +def calculate_transfer_function( + zyx_shape, + yx_pixel_size, + z_pixel_size, + wavelength_emission, + z_padding, + index_of_refraction_media, + numerical_aperture_detection, + coma_strength, +): + # Modified from waveorder + fy = torch.fft.fftfreq(zyx_shape[1], yx_pixel_size) + fx = torch.fft.fftfreq(zyx_shape[2], yx_pixel_size) + fyy, fxx = torch.meshgrid(fy, fx, indexing="ij") + radial_frequencies = torch.sqrt(fyy**2 + fxx**2) + + z_total = zyx_shape[0] + 2 * z_padding + z_position_list = torch.fft.ifftshift( + (torch.arange(z_total) - z_total // 2) * z_pixel_size + ) + + # Custom pupil + det_pupil = torch.zeros(radial_frequencies.shape, dtype=torch.complex64) + cutoff = numerical_aperture_detection / wavelength_emission + det_pupil[radial_frequencies < cutoff] = 1 + # det_pupil[((fxx) ** 2 + (fy)**2) ** 0.5 > cutoff] = 0 # add cutoff lune here + det_pupil *= np.exp( + coma_strength + * 1j + * ((3 * (radial_frequencies / cutoff) ** 3) - (2 * (radial_frequencies / cutoff))) + * torch.div(fxx + 1e-15, radial_frequencies + 1e-15) + ) # coma + + # v.add_image(torch.real(det_pupil).numpy()) + # v.add_image(torch.imag(det_pupil).numpy()) + + propagation_kernel = optics.generate_propagation_kernel( + radial_frequencies, + det_pupil, + wavelength_emission / index_of_refraction_media, + z_position_list, + ) + + point_spread_function = torch.abs(torch.fft.ifft2(propagation_kernel, dim=(1, 2))) ** 2 + optical_transfer_function = torch.fft.fftn(point_spread_function, dim=(0, 1, 2)) + optical_transfer_function /= torch.max(torch.abs(optical_transfer_function)) # normalize + + return optical_transfer_function + + +def generate_psf(numerical_aperture_detection, ls_angle_deg, coma_strength): + # detection parameters + wavelength_emission = 0.550 # um + index_of_refraction_media = 1.404 + + # internal simulation parameters + px_to_scan_ratio = stc_scale[1] / stc_scale[0] + ct = np.cos(ls_angle_deg * np.pi / 180) + st = np.sin(ls_angle_deg * np.pi / 180) + deskew_matrix = _deskew_matrix(px_to_scan_ratio, ct) + skew_matrix = np.linalg.inv(deskew_matrix) + + zyx_scale = np.array([st * stc_scale[0], stc_scale[1], stc_scale[2]]) + detection_otf_zyx = calculate_transfer_function( + stc_shape, + zyx_scale[1], + zyx_scale[0], + wavelength_emission, + 0, + index_of_refraction_media, + numerical_aperture_detection, + coma_strength, + ) + + detection_psf_zyx = np.array( + torch.real(torch.fft.ifftshift(torch.fft.ifftn(detection_otf_zyx, dim=(0, 1, 2)))) + ) + + simulated_psf = _apply_centered_affine(detection_psf_zyx, skew_matrix) + simulated_psf /= np.max(simulated_psf) + return simulated_psf, zyx_scale, deskew_matrix + + +# Define grid search +na_det_list = np.array([0.95, 1.15, 1.35]) +ls_angle_deg_list = np.array([30]) +coma_strength_list = np.array([-0.2, -0.1, 0, 0.1, 0.2]) +params = 
np.stack( + np.meshgrid(na_det_list, ls_angle_deg_list, coma_strength_list, indexing="ij"), axis=-1 +) + +pzyx_array = np.zeros(params.shape[:-1] + stc_shape) +pzyx_deskewed_array = np.zeros(params.shape[:-1] + stc_shape) + +for i in np.ndindex(params.shape[:-1]): + print(f"Simulating PSF with params: {params[i]}") + pzyx_array[i], zyx_scale, deskew_matrix = generate_psf(*params[i]) + pzyx_deskewed_array[i] = _apply_centered_affine(pzyx_array[i], deskew_matrix) + +print("Visualizing") +v = napari.Viewer() +v.add_image(average_psf, scale=stc_scale) +v.add_image(pzyx_array, scale=stc_scale) + +v.dims.axis_labels = ["NA", "", "COMA", "Z", "Y", "X"] + +# v.add_image(_apply_centered_affine(average_psf, deskew_matrix), scale=zyx_scale) +# v.add_image(pzyx_deskewed_array, scale=zyx_scale) + +# Optimize match +diff = np.sum((pzyx_array - average_psf) ** 2, axis=(-3, -2, -1)) +min_idx = np.unravel_index(np.argmin(diff), diff.shape) +print(min_idx) +print(params[min_idx]) + + +# %% Use PSF fit to deconvolve From 97c79686c8864d96ee0ad8db7d8ac553cb23e502 Mon Sep 17 00:00:00 2001 From: Talon Chandler Date: Tue, 7 May 2024 12:03:13 -0700 Subject: [PATCH 20/57] simple_psf iteration --- mantis/analysis/scripts/simple_psf.py | 114 ++++++++++++++++++++++++++ 1 file changed, 114 insertions(+) create mode 100644 mantis/analysis/scripts/simple_psf.py diff --git a/mantis/analysis/scripts/simple_psf.py b/mantis/analysis/scripts/simple_psf.py new file mode 100644 index 00000000..9b6d5c4d --- /dev/null +++ b/mantis/analysis/scripts/simple_psf.py @@ -0,0 +1,114 @@ +# %% +import warnings +import napari +import torch +import numpy as np +from waveorder import optics +from mantis.analysis.analyze_psf import analyze_psf, extract_beads, detect_peaks + + +# %% Generate simulated PSF library +def calculate_transfer_function( + zyx_shape, + yx_pixel_size, + z_pixel_size, + wavelength_emission, + z_padding, + index_of_refraction_media, + numerical_aperture_detection, +): + # Modified from waveorder + fy = torch.fft.fftfreq(zyx_shape[1], yx_pixel_size) + fx = torch.fft.fftfreq(zyx_shape[2], yx_pixel_size) + fyy, fxx = torch.meshgrid(fy, fx, indexing="ij") + radial_frequencies = torch.sqrt(fyy**2 + fxx**2) + + z_total = zyx_shape[0] + 2 * z_padding + z_position_list = torch.fft.ifftshift( + (torch.arange(z_total) - z_total // 2) * z_pixel_size + ) + + # Custom pupil + det_pupil = torch.zeros(radial_frequencies.shape, dtype=torch.complex64) + cutoff = numerical_aperture_detection / wavelength_emission + det_pupil[radial_frequencies < cutoff] = 1 + + propagation_kernel = optics.generate_propagation_kernel( + radial_frequencies, + det_pupil, + wavelength_emission / index_of_refraction_media, + z_position_list, + ) + + point_spread_function = torch.abs(torch.fft.ifft2(propagation_kernel, dim=(1, 2))) ** 2 + optical_transfer_function = torch.fft.fftn(point_spread_function, dim=(0, 1, 2)) + optical_transfer_function /= torch.max(torch.abs(optical_transfer_function)) # normalize + + return optical_transfer_function + + +def generate_psf(numerical_aperture_detection, zyx_shape, zyx_scale): + # detection parameters + wavelength_emission = 0.550 # um + index_of_refraction_media = 1.404 + + # internal simulation parameters + detection_otf_zyx = calculate_transfer_function( + zyx_shape, + zyx_scale[1], + zyx_scale[0], + wavelength_emission, + 0, + index_of_refraction_media, + numerical_aperture_detection, + ) + + simulated_psf = np.array( + torch.real(torch.fft.ifftshift(torch.fft.ifftn(detection_otf_zyx, dim=(0, 1, 2)))) + ) + 
simulated_psf *= 1e7 + return simulated_psf + + +numerical_apertures = [0.9, 1.1, 1.35] +zyx_shape = np.array([151, 151, 151]) +zyx_scale = (0.1, 0.0616, 0.0616) + +v = napari.Viewer() +for numerical_aperture in numerical_apertures: + print(f"Generating NA={numerical_aperture}") + + zyx_data = generate_psf(numerical_aperture, zyx_shape, zyx_scale) + v.add_image(zyx_data, name=f"{numerical_aperture}", scale=zyx_scale) + + epi_bead_detection_settings = { + "block_size": (8, 8, 8), + "blur_kernel_size": 3, + "min_distance": 0, + "threshold_abs": 100.0, + "max_num_peaks": 1, + "exclude_border": (0, 0, 0), + "device": "cuda" if torch.cuda.is_available() else "cpu", + } + + peaks = detect_peaks(zyx_data, **epi_bead_detection_settings, verbose=True) + + beads, offsets = extract_beads( + zyx_data=zyx_data, + points=peaks, + scale=zyx_scale, + ) + + print(f"Fitting NA={numerical_aperture}") + with warnings.catch_warnings(): + warnings.simplefilter("ignore") + df_gaussian_fit, df_1d_peak_width = analyze_psf( + zyx_patches=beads, + bead_offsets=offsets, + scale=zyx_scale, + ) + + print(df_gaussian_fit) + print(df_1d_peak_width) + +# %% From 15d2f590fb4903c01e9de7ceee80b98c6da839d3 Mon Sep 17 00:00:00 2001 From: Talon Chandler Date: Tue, 7 May 2024 12:04:14 -0700 Subject: [PATCH 21/57] deconvolution prototype --- mantis/analysis/scripts/fit_psf_to_beads.py | 36 +++++++++++++++++++-- 1 file changed, 33 insertions(+), 3 deletions(-) diff --git a/mantis/analysis/scripts/fit_psf_to_beads.py b/mantis/analysis/scripts/fit_psf_to_beads.py index 5c2f5a28..002c4aa1 100644 --- a/mantis/analysis/scripts/fit_psf_to_beads.py +++ b/mantis/analysis/scripts/fit_psf_to_beads.py @@ -10,6 +10,7 @@ from mantis.analysis.scripts.simulate_psf import _apply_centered_affine from iohub import read_micromanager from waveorder import optics +from waveorder.models.isotropic_fluorescent_thick_3d import apply_inverse_transfer_function # %% Load beads (from ndtiff for now) data_dir = ( @@ -132,7 +133,7 @@ def generate_psf(numerical_aperture_detection, ls_angle_deg, coma_strength): deskew_matrix = _deskew_matrix(px_to_scan_ratio, ct) skew_matrix = np.linalg.inv(deskew_matrix) - zyx_scale = np.array([st * stc_scale[0], stc_scale[1], stc_scale[2]]) + zyx_scale = np.array([st * stc_scale[1], stc_scale[1], stc_scale[1]]) detection_otf_zyx = calculate_transfer_function( stc_shape, zyx_scale[1], @@ -154,7 +155,7 @@ def generate_psf(numerical_aperture_detection, ls_angle_deg, coma_strength): # Define grid search -na_det_list = np.array([0.95, 1.15, 1.35]) +na_det_list = np.array([0.95, 1.05, 1.15, 1.25, 1.35]) ls_angle_deg_list = np.array([30]) coma_strength_list = np.array([-0.2, -0.1, 0, 0.1, 0.2]) params = np.stack( @@ -186,4 +187,33 @@ def generate_psf(numerical_aperture_detection, ls_angle_deg, coma_strength): print(params[min_idx]) -# %% Use PSF fit to deconvolve +# %% Crop data for prototyping deconvolution +stc_data = stc_data[:200, :200, :500] + +# %% + +# Simple background subtraction and normalization +average_psf -= np.min(average_psf) +average_psf /= np.max(average_psf) + +# %% +zyx_padding = np.array(stc_data.shape) - np.array(average_psf.shape) +pad_width = [(x // 2, x // 2) if x % 2 == 0 else (x // 2, x // 2 + 1) for x in zyx_padding] +padded_average_psf = np.pad(average_psf, pad_width=pad_width, mode="constant", constant_values=0) +transfer_function = np.abs(np.fft.fftn(padded_average_psf)) +transfer_function /= np.max(transfer_function) +print(transfer_function.shape) + +# %% + +# %% +stc_data_deconvolved = 
apply_inverse_transfer_function(torch.tensor(stc_data), torch.tensor(transfer_function), 0, regularization_strength=1e-3) + +v = napari.Viewer() +v.add_image(padded_average_psf) +v.add_image(stc_data) +v.add_image(stc_data_deconvolved.numpy()) + + + +# %% From fb4cc720855ad3770cacd769116d7a642302d675 Mon Sep 17 00:00:00 2001 From: Talon Chandler Date: Tue, 7 May 2024 14:30:48 -0700 Subject: [PATCH 22/57] settings --- mantis/analysis/AnalysisSettings.py | 10 ++++++++++ 1 file changed, 10 insertions(+) diff --git a/mantis/analysis/AnalysisSettings.py b/mantis/analysis/AnalysisSettings.py index a71428f5..edc63ce3 100644 --- a/mantis/analysis/AnalysisSettings.py +++ b/mantis/analysis/AnalysisSettings.py @@ -67,3 +67,13 @@ def check_affine_transform(cls, v): raise ValueError("The array must contain valid numerical values.") return v + + +class PsfFromBeadsSettings(MyBaseModel): + axis0_patch_size: PositiveInt = 101 + axis1_patch_size: PositiveInt = 101 + axis2_patch_size: PositiveInt = 101 + + +class DeconvolveSettings(MyBaseModel): + regularization_strength: PositiveFloat = 0.001 From ea6acdbd43d9a1949f621e582ec30def9e3ff01b Mon Sep 17 00:00:00 2001 From: Talon Chandler Date: Tue, 7 May 2024 14:31:10 -0700 Subject: [PATCH 23/57] cutsom patch size --- mantis/analysis/analyze_psf.py | 15 ++++++++++----- 1 file changed, 10 insertions(+), 5 deletions(-) diff --git a/mantis/analysis/analyze_psf.py b/mantis/analysis/analyze_psf.py index 0b28a289..378733d3 100644 --- a/mantis/analysis/analyze_psf.py +++ b/mantis/analysis/analyze_psf.py @@ -107,8 +107,12 @@ def generate_report( fwhm_pc_mean = [ df_gaussian_fit[col].mean() for col in ('zyx_pc3_fwhm', 'zyx_pc2_fwhm', 'zyx_pc1_fwhm') ] - fwhm_1d_mean = [df_1d_peak_width[col].mean() for col in ('1d_z_fwhm', '1d_y_fwhm', '1d_x_fwhm')] - fwhm_1d_std = [df_1d_peak_width[col].std() for col in ('1d_z_fwhm', '1d_y_fwhm', '1d_x_fwhm')] + fwhm_1d_mean = [ + df_1d_peak_width[col].mean() for col in ('1d_z_fwhm', '1d_y_fwhm', '1d_x_fwhm') + ] + fwhm_1d_std = [ + df_1d_peak_width[col].std() for col in ('1d_z_fwhm', '1d_y_fwhm', '1d_x_fwhm') + ] # generate html report html_report = _generate_html( @@ -143,9 +147,10 @@ def generate_report( webbrowser.open('file://' + str(html_file_path)) -def extract_beads(zyx_data: ArrayLike, points: ArrayLike, scale: tuple): - patch_size = (scale[0] * 90, scale[1] * 90, scale[2] * 90) - print(patch_size) +def extract_beads( + zyx_data: ArrayLike, points: ArrayLike, scale: tuple, patch_size_voxels: tuple = None +): + patch_size = (scale[0] * patch_size_voxels[0], scale[1] * patch_size_voxels[1], scale[2] * patch_size_voxels[2]) # extract bead patches bead_extractor = BeadExtractor( From 2b84fb9ba9b0ccc66f0ef2bc3e09d256b44c9e76 Mon Sep 17 00:00:00 2001 From: Talon Chandler Date: Tue, 7 May 2024 14:31:23 -0700 Subject: [PATCH 24/57] deconvolve CLI --- mantis/cli/deconvolve.py | 118 +++++++++++++++++++++++++++++++++++++++ 1 file changed, 118 insertions(+) create mode 100644 mantis/cli/deconvolve.py diff --git a/mantis/cli/deconvolve.py b/mantis/cli/deconvolve.py new file mode 100644 index 00000000..eb05b0a4 --- /dev/null +++ b/mantis/cli/deconvolve.py @@ -0,0 +1,118 @@ +import click +import numpy as np +import torch + +from typing import List +from mantis.cli.parsing import ( + input_position_dirpaths, + config_filepath, + output_dirpath, + _str_to_path, +) +from mantis.analysis.AnalysisSettings import DeconvolveSettings +from mantis.cli.utils import yaml_to_model, create_empty_hcs_zarr +from iohub import open_ome_zarr +from pathlib 
import Path +from waveorder.models.isotropic_fluorescent_thick_3d import apply_inverse_transfer_function + + +def apply_deconvolve_single_position( + input_position_dirpath: str, psf_dirpath: str, config_filepath: str, output_dirpath: Path +): + """ + Apply deconvolution to a single position + """ + # Load the data + with open_ome_zarr(input_position_dirpath, mode="r") as input_dataset: + T, C, Z, Y, X = input_dataset.data.shape + zyx_data = input_dataset["0"][0, 0] + zyx_scale = input_dataset.scale[-3:] + + # Read settings + settings = yaml_to_model(Path(config_filepath), DeconvolveSettings) + + # Load the PSF + with open_ome_zarr(psf_dirpath, mode="r") as psf_dataset: + position = psf_dataset["0/0/0"] + psf_data = position["0"][0, 0] + psf_scale = position.scale[-3:] + + # Check if scales match + if psf_scale != zyx_scale: + click.echo( + f"Warning: PSF scale {psf_scale} does not match data scale {zyx_scale}. " + "Consider resampling the PSF." + ) + + # Apply deconvolution + click.echo("Padding PSF...") + zyx_padding = np.array(zyx_data.shape) - np.array(psf_data.shape) + pad_width = [(x // 2, x // 2) if x % 2 == 0 else (x // 2, x // 2 + 1) for x in zyx_padding] + padded_average_psf = np.pad( + psf_data, pad_width=pad_width, mode="constant", constant_values=0 + ) + + click.echo("Calculating transfer function...") + transfer_function = torch.abs(torch.fft.fftn(torch.tensor(padded_average_psf))) + transfer_function /= torch.max(transfer_function) + + click.echo("Deconvolving...") + zyx_data_deconvolved = apply_inverse_transfer_function( + torch.tensor(zyx_data), + torch.tensor(transfer_function), + 0, + regularization_strength=settings.regularization_strength, + ) + + # Save to output dirpath + click.echo("Saving to output...") + with open_ome_zarr(output_dirpath, mode="a") as output_dataset: + output_dataset["0"][0, 0] = zyx_data_deconvolved.numpy() + + +@click.command() +@input_position_dirpaths() +@click.option( + "--psf-dirpath", + "-p", + required=True, + type=click.Path(exists=True, file_okay=False, dir_okay=True), + callback=_str_to_path, + help="Path to psf.zarr", +) +@config_filepath() +@output_dirpath() +def deconvolve( + input_position_dirpaths: List[str], + psf_dirpath: str, + config_filepath: str, + output_dirpath: str, +): + """ + Deconvolve across T and C axes using a PSF and a configuration file + + >> mantis deconvolve -i ./input.zarr/*/*/* -p ./psf.zarr -c ./deconvolve_params.yml -o ./output.zarr + """ + # Convert string paths to Path objects + output_dirpath = Path(output_dirpath) + config_filepath = Path(config_filepath) + + # Create output zarr store + click.echo(f"Creating empty output zarr...") + with open_ome_zarr(str(input_position_dirpaths[0]), mode="r") as input_dataset: + create_empty_hcs_zarr( + store_path=output_dirpath, + position_keys=[p.parts[-3:] for p in input_position_dirpaths], + channel_names=input_dataset.channel_names, + shape=input_dataset.data.shape, + scale=input_dataset.scale, + ) + + # Loop through positions + for input_position_dirpath in input_position_dirpaths: + apply_deconvolve_single_position( + input_position_dirpath, + psf_dirpath, + config_filepath, + output_dirpath / Path(*input_position_dirpath.parts[-3:]), + ) From 74024e077b6ef8bdc4428b8cc898b31516df56ce Mon Sep 17 00:00:00 2001 From: Talon Chandler Date: Tue, 7 May 2024 14:31:36 -0700 Subject: [PATCH 25/57] psf-from-beads CLI --- mantis/cli/psf_from_beads.py | 107 +++++++++++++++++++++++++++++++++++ 1 file changed, 107 insertions(+) create mode 100644 mantis/cli/psf_from_beads.py 
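Note on how the two new commands compose: `mantis psf_from_beads` (this patch)
averages detected bead patches into an experimental PSF saved as psf.zarr, and
`mantis deconvolve` (PATCH 24) pads that PSF to the data shape, takes the
magnitude of its FFT as the transfer function, and passes it to waveorder's
apply_inverse_transfer_function with the regularization_strength from
DeconvolveSettings. The matching deconvolve_params.yml is a single field:

    regularization_strength: 0.001

The core step amounts to a Wiener-style regularized inverse filter. A minimal
self-contained sketch of that step (wiener_deconvolve is an illustrative name,
not part of the mantis CLI, and waveorder's exact implementation may differ):

    import torch

    def wiener_deconvolve(zyx_data: torch.Tensor, psf: torch.Tensor, reg: float = 1e-3) -> torch.Tensor:
        # psf is assumed padded to the data shape and centered, as done with
        # np.pad in apply_deconvolve_single_position above
        otf = torch.fft.fftn(torch.fft.ifftshift(psf))
        data_hat = torch.fft.fftn(zyx_data)
        # regularized inverse: conj(H) / (|H|^2 + reg) suppresses frequencies
        # where the transfer function is weak
        recon_hat = data_hat * torch.conj(otf) / (torch.abs(otf) ** 2 + reg)
        return torch.real(torch.fft.ifftn(recon_hat))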
diff --git a/mantis/cli/psf_from_beads.py b/mantis/cli/psf_from_beads.py new file mode 100644 index 00000000..a611789b --- /dev/null +++ b/mantis/cli/psf_from_beads.py @@ -0,0 +1,107 @@ +import gc +import click +import numpy as np +import torch +import time + +from iohub.ngff import open_ome_zarr +from iohub.ngff_meta import TransformationMeta +from pathlib import Path +from typing import List +from mantis.analysis.AnalysisSettings import PsfFromBeadsSettings +from mantis.analysis.analyze_psf import detect_peaks, extract_beads +from mantis.cli.parsing import input_position_dirpaths, output_dirpath, config_filepath +from mantis.cli.utils import yaml_to_model + + +@click.command() +@input_position_dirpaths() +@config_filepath() +@output_dirpath() +def psf_from_beads( + input_position_dirpaths: List[str], + config_filepath: str, + output_dirpath: str, +): + """ + Estimate the point spread function (PSF) from bead images + + >> mantis psf_from_beads -i ./beads.zarr/*/*/* -c ./psf_params.yml -o ./psf.zarr + """ + # Convert string paths to Path objects + output_dirpath = Path(output_dirpath) + config_filepath = Path(config_filepath) + + # Load the first position (TODO: consider averaging over positions) + click.echo(f"Loading data...") + with open_ome_zarr(str(input_position_dirpaths[0]), mode="r") as input_dataset: + T, C, Z, Y, X = input_dataset.data.shape + zyx_data = input_dataset["0"][0, 0] + zyx_scale = input_dataset.scale[-3:] + + # Read settings + settings = yaml_to_model(config_filepath, PsfFromBeadsSettings) + patch_size = ( + settings.axis0_patch_size, + settings.axis1_patch_size, + settings.axis2_patch_size, + ) + + # Some of these settings can be moved to PsfFromBeadsSettings as needed + bead_detection_settings = { + "block_size": (64, 64, 32), + "blur_kernel_size": 3, + "nms_distance": 32, + "min_distance": 50, + "threshold_abs": 200.0, + "max_num_peaks": 2000, + "exclude_border": (5, 10, 5), + "device": "cuda" if torch.cuda.is_available() else "cpu", + } + + # Detect and extract bead patches + click.echo(f"Detecting beads...") + t1 = time.time() + peaks = detect_peaks( + zyx_data, + **bead_detection_settings, + verbose=True, + ) + gc.collect() + + torch.cuda.empty_cache() + t2 = time.time() + click.echo(f'Time to detect peaks: {t2-t1}') + + beads, _ = extract_beads( + zyx_data=zyx_data, + points=peaks, + scale=zyx_scale, + patch_size_voxels=patch_size, + ) + + # Filter PSFs with non-standard shapes + filtered_beads = [x for x in beads if x.shape == beads[0].shape] + bzyx_data = np.stack(filtered_beads) + normalized_bzyx_data = ( + bzyx_data / np.max(bzyx_data, axis=(-3, -2, -1))[:, None, None, None] + ) + average_psf = np.mean(normalized_bzyx_data, axis=0) + + # Simple background subtraction and normalization + average_psf -= np.min(average_psf) + average_psf /= np.max(average_psf) + + # Save + with open_ome_zarr( + output_dirpath, layout="hcs", mode="w", channel_names=["PSF"] + ) as output_dataset: + pos = output_dataset.create_position("0", "0", "0") + array = pos.create_zeros( + name="0", + shape=2 * (1,) + average_psf.shape, + chunks=2 * (1,) + average_psf.shape, + dtype=np.float32, + transform=[TransformationMeta(type="scale", scale=2 * (1,) + tuple(zyx_scale))], + ) + array[0, 0] = average_psf From 8a0377f521a224358bb27cc4b2f56b3b5aab2e09 Mon Sep 17 00:00:00 2001 From: Talon Chandler Date: Tue, 7 May 2024 14:31:44 -0700 Subject: [PATCH 26/57] cli boilerplate --- mantis/cli/main.py | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/mantis/cli/main.py b/mantis/cli/main.py 
index e1af5110..3d0a736e 100644 --- a/mantis/cli/main.py +++ b/mantis/cli/main.py @@ -8,6 +8,8 @@ from mantis.cli.optimize_affine import optimize_affine from mantis.cli.run_acquisition import run_acquisition from mantis.cli.update_scale_metadata import update_scale_metadata +from mantis.cli.psf_from_beads import psf_from_beads +from mantis.cli.deconvolve import deconvolve CONTEXT = {"help_option_names": ["-h", "--help"]} @@ -31,3 +33,5 @@ def cli(): cli.add_command(optimize_affine) cli.add_command(apply_affine) cli.add_command(update_scale_metadata) +cli.add_command(psf_from_beads) +cli.add_command(deconvolve) From ec20b03fc98c0e6b57a5d0a600780e8ba2f958f3 Mon Sep 17 00:00:00 2001 From: Ivan Ivanov Date: Wed, 15 May 2024 11:36:37 -0700 Subject: [PATCH 27/57] update psf measurement scripts --- mantis/acquisition/scripts/measure_psf.py | 65 +++++++++++++++-------- mantis/analysis/analyze_psf.py | 5 +- 2 files changed, 46 insertions(+), 24 deletions(-) diff --git a/mantis/acquisition/scripts/measure_psf.py b/mantis/acquisition/scripts/measure_psf.py index a88387a7..ee4f5d7e 100644 --- a/mantis/acquisition/scripts/measure_psf.py +++ b/mantis/acquisition/scripts/measure_psf.py @@ -73,10 +73,20 @@ def check_acquisition_directory(root_dir: Path, acq_name: str, suffix='', idx=1) mmc = Core() +# mmc.set_property('Prime BSI Express', 'ExposeOutMode', 'Rolling Shutter') +# mmc.set_property('Oryx2', 'Line Selector', 'Line5') +# mmc.update_system_state_cache() +# mmc.set_property('Oryx2', 'Line Mode', 'Output') +# mmc.set_property('Oryx2', 'Line Source', 'ExposureActive') + # %% -data_dir = Path(r'D:\2024_03_18_mantis_alignment') -dataset = '2024_03_18_RR_Straight_O3_scan_Blackfly_smaller_z_step' -# dataset = '2024_03_18_epi_O1_benchmark' +data_dir = Path(r'E:\2024_05_10_A594_CAAX_DRAQ5') +date = '2024_05_07' +# dataset = f'{date}_RR_Straight_O3_scan' +# dataset = f'{date}_epi_O1_benchmark' +# dataset = f'{date}_LS_Oryx_epi_illum' +# dataset = f'{date}_LS_Oryx_LS_illum' +dataset = f'{date}_LS_benchmark' # epi settings # z_stage = 'PiezoStage:Q:35' @@ -94,29 +104,35 @@ def check_acquisition_directory(root_dir: Path, acq_name: str, suffix='', idx=1) # axis_labels = ("SCAN", "TILT", "COVERSLIP") # epi illumination rr detection settings -# z_stage = 'AP Galvo' +z_stage = 'AP Galvo' # z_step = 0.205 # in um -# z_range = (-100, 85) # in um +# z_range = (-85, 85) # in um +z_step = 0.1 # in um, reduced range and smaller step size +z_range = (-31, 49) # in um # pixel_size = 0.116 # in um -# axis_labels = ("SCAN", "TILT", "COVERSLIP") +pixel_size = 6.5 / 40 / 1.4 # in um, no binning +axis_labels = ("SCAN", "TILT", "COVERSLIP") # ls straight settings -z_stage = setup_kim101_stage('74000291') -step_per_um = 35 # matches ~30 nm per step quoted in PIA13 specs -z_start = 0 / step_per_um # in um -z_end = 1000 / step_per_um -z_step = 5 / step_per_um -z_range = np.arange(z_start, z_end + z_step, z_step) # in um -z_step /= 1.4 # count in 1.4x remote volume magnification -pixel_size = 3.45 / 40 / 1.4 # in um, counting the 1.4x remote volume magnification -axis_labels = ("Z", "Y", "X") +# z_stage = setup_kim101_stage('74000291') +# step_per_um = 35 # matches ~30 nm per step quoted in PIA13 specs +# z_start = 0 / step_per_um # in um +# z_end = 1000 / step_per_um +# z_step = 5 / step_per_um +# # z_end = 500 / step_per_um +# # z_step = 20 / step_per_um +# z_range = np.arange(z_start, z_end + z_step, z_step) # in um +# z_step /= 1.4 # count in 1.4x remote volume magnification +# pixel_size = 3.45 / 40 / 1.4 # in um, counting 
the 1.4x remote volume magnification +# axis_labels = ("Z", "Y", "X") deskew = True -view = True +view = False scale = (z_step, pixel_size, pixel_size) data_path = data_dir / dataset +camera = mmc.get_camera_device() if isinstance(z_stage, str): mmc.set_property('Core', 'Focus', z_stage) z_pos = mmc.get_position(z_stage) @@ -126,9 +142,7 @@ def check_acquisition_directory(root_dir: Path, acq_name: str, suffix='', idx=1) z_step=z_step, ) - camera = mmc.get_camera_device() - if camera == 'Prime BSI Express' and z_stage == 'AP Galvo': - mmc.set_property('Prime BSI Express', 'ExposeOutMode', 'Rolling Shutter') + if camera in ('Prime BSI Express', 'Oryx2') and z_stage == 'AP Galvo': mmc.set_property('TS2_TTL1-8', 'Blanking', 'On') mmc.set_property('TS2_DAC03', 'Sequence', 'On') @@ -144,8 +158,9 @@ def check_acquisition_directory(root_dir: Path, acq_name: str, suffix='', idx=1) mmc.set_auto_shutter(True) mmc.set_position(z_stage, z_pos) - if camera == 'Prime BSI Express' and z_stage == 'AP Galvo': + if camera in ('Prime BSI Express', 'Oryx2') and z_stage == 'AP Galvo': mmc.set_property('TS2_TTL1-8', 'Blanking', 'Off') + mmc.set_property('TS2_DAC03', 'Sequence', 'Off') ds = acq.get_dataset() zyx_data = np.asarray(ds.as_array()) @@ -189,7 +204,8 @@ def check_acquisition_directory(root_dir: Path, acq_name: str, suffix='', idx=1) t1 = time.time() peaks = detect_peaks( zyx_data, - **epi_bead_detection_settings, + # **epi_bead_detection_settings, + **ls_bead_detection_settings, verbose=True, ) gc.collect() @@ -209,10 +225,15 @@ def check_acquisition_directory(root_dir: Path, acq_name: str, suffix='', idx=1) # %% Extract and analyze bead patches t1 = time.time() +if raw: + patch_size = (scale[0] * 30, scale[1] * 36, scale[2] * 18) +else: + patch_size = (scale[0] * 15, scale[1] * 18, scale[2] * 18) beads, offsets = extract_beads( zyx_data=zyx_data, points=peaks, scale=scale, + patch_size=patch_size, ) with warnings.catch_warnings(): @@ -244,7 +265,7 @@ def check_acquisition_directory(root_dir: Path, acq_name: str, suffix='', idx=1) if raw and deskew: # deskew - num_chunks = 2 + num_chunks = 4 chunked_data = np.split(zyx_data, num_chunks, axis=-1) chunk_shape = chunked_data[0].shape diff --git a/mantis/analysis/analyze_psf.py b/mantis/analysis/analyze_psf.py index 367fa99f..97b159e0 100644 --- a/mantis/analysis/analyze_psf.py +++ b/mantis/analysis/analyze_psf.py @@ -143,8 +143,9 @@ def generate_report( webbrowser.open('file://' + str(html_file_path)) -def extract_beads(zyx_data: ArrayLike, points: ArrayLike, scale: tuple): - patch_size = (scale[0] * 15, scale[1] * 18, scale[2] * 18) +def extract_beads(zyx_data: ArrayLike, points: ArrayLike, scale: tuple, patch_size: tuple = None): + if patch_size is None: + patch_size = (scale[0] * 15, scale[1] * 18, scale[2] * 18) # extract bead patches bead_extractor = BeadExtractor( From be0c63795626297714dd63412e991b19a491c850 Mon Sep 17 00:00:00 2001 From: Talon Chandler Date: Fri, 17 May 2024 11:53:50 -0700 Subject: [PATCH 28/57] first-pass characterize --- mantis/analysis/AnalysisSettings.py | 16 ++++++ mantis/analysis/analyze_psf.py | 9 +++- mantis/cli/characterize.py | 79 +++++++++++++++++++++++++++++ mantis/cli/main.py | 2 + 4 files changed, 105 insertions(+), 1 deletion(-) create mode 100644 mantis/cli/characterize.py diff --git a/mantis/analysis/AnalysisSettings.py b/mantis/analysis/AnalysisSettings.py index edc63ce3..e8833ad2 100644 --- a/mantis/analysis/AnalysisSettings.py +++ b/mantis/analysis/AnalysisSettings.py @@ -1,6 +1,7 @@ from typing import Literal, 
Optional, Union import numpy as np +import torch from pydantic import BaseModel, Extra, NonNegativeInt, PositiveFloat, PositiveInt, validator @@ -77,3 +78,18 @@ class PsfFromBeadsSettings(MyBaseModel): class DeconvolveSettings(MyBaseModel): regularization_strength: PositiveFloat = 0.001 + + +class CharacterizeSettings(MyBaseModel): + block_size: list[NonNegativeInt] = (64, 64, 32) + blur_kernel_size: NonNegativeInt = 3 + nms_distance: NonNegativeInt = 32 + min_distance: NonNegativeInt = 50 + threshold_abs: PositiveFloat = 200.0 + max_num_peaks: NonNegativeInt = 2000 + exclude_border: list[NonNegativeInt] = (5, 10, 5) + device: str = "cuda" + + @validator("device") + def check_device(cls, v): + return "cuda" if torch.cuda.is_available() else "cpu" diff --git a/mantis/analysis/analyze_psf.py b/mantis/analysis/analyze_psf.py index 378733d3..c354248e 100644 --- a/mantis/analysis/analyze_psf.py +++ b/mantis/analysis/analyze_psf.py @@ -150,7 +150,14 @@ def generate_report( def extract_beads( zyx_data: ArrayLike, points: ArrayLike, scale: tuple, patch_size_voxels: tuple = None ): - patch_size = (scale[0] * patch_size_voxels[0], scale[1] * patch_size_voxels[1], scale[2] * patch_size_voxels[2]) + if patch_size_voxels is None: + patch_size = (scale[0] * 15, scale[1] * 18, scale[2] * 18) + else: + patch_size = ( + scale[0] * patch_size_voxels[0], + scale[1] * patch_size_voxels[1], + scale[2] * patch_size_voxels[2], + ) # extract bead patches bead_extractor = BeadExtractor( diff --git a/mantis/cli/characterize.py b/mantis/cli/characterize.py new file mode 100644 index 00000000..4d80b4cf --- /dev/null +++ b/mantis/cli/characterize.py @@ -0,0 +1,79 @@ +import click +import time +import gc +import torch +import warnings + +from iohub.ngff import open_ome_zarr +from typing import List +from mantis.analysis.AnalysisSettings import CharacterizeSettings +from mantis.cli.parsing import input_position_dirpaths, output_dirpath, config_filepath +from mantis.cli.utils import yaml_to_model +from mantis.analysis.analyze_psf import detect_peaks, extract_beads, analyze_psf, generate_report + + +@click.command() +@input_position_dirpaths() +@config_filepath() +@output_dirpath() +def characterize( + input_position_dirpaths: List[str], + config_filepath: str, + output_dirpath: str, +): + """ + Characterize the point spread function (PSF) from bead images in an html report + + >> mantis characterize -i ./beads.zarr/*/*/* -c ./characterize_params.yml -o ./ + """ + click.echo(f"Loading data...") + with open_ome_zarr(str(input_position_dirpaths[0]), mode="r") as input_dataset: + T, C, Z, Y, X = input_dataset.data.shape + zyx_data = input_dataset["0"][0, 0] + zyx_scale = input_dataset.scale[-3:] + + # Read settings + settings = yaml_to_model(config_filepath, CharacterizeSettings) + + click.echo(f"Detecting peaks...") + t1 = time.time() + peaks = detect_peaks( + zyx_data, + **settings.dict(), + verbose=True, + ) + gc.collect() + torch.cuda.empty_cache() + t2 = time.time() + click.echo(f'Time to detect peaks: {t2-t1}') + + t1 = time.time() + beads, offsets = extract_beads( + zyx_data=zyx_data, + points=peaks, + scale=zyx_scale, + ) + + click.echo(f"Analyzing PSFs...") + with warnings.catch_warnings(): + warnings.simplefilter("ignore") + df_gaussian_fit, df_1d_peak_width = analyze_psf( + zyx_patches=beads, + bead_offsets=offsets, + scale=zyx_scale, + ) + t2 = time.time() + click.echo(f'Time to analyze PSFs: {t2-t1}') + + # Generate HTML report + generate_report( + output_dirpath, + input_position_dirpaths[0], + "", + beads, + 
peaks, + df_gaussian_fit, + df_1d_peak_width, + zyx_scale, + ["AXIS 0", "AXIS 1", "AXIS 2"], + ) \ No newline at end of file diff --git a/mantis/cli/main.py b/mantis/cli/main.py index 3d0a736e..290f5e9e 100644 --- a/mantis/cli/main.py +++ b/mantis/cli/main.py @@ -10,6 +10,7 @@ from mantis.cli.update_scale_metadata import update_scale_metadata from mantis.cli.psf_from_beads import psf_from_beads from mantis.cli.deconvolve import deconvolve +from mantis.cli.characterize import characterize CONTEXT = {"help_option_names": ["-h", "--help"]} @@ -35,3 +36,4 @@ def cli(): cli.add_command(update_scale_metadata) cli.add_command(psf_from_beads) cli.add_command(deconvolve) +cli.add_command(characterize) From 6af909299d1195062cea8eda1a44103de61f6eb1 Mon Sep 17 00:00:00 2001 From: Talon Chandler Date: Fri, 17 May 2024 12:04:21 -0700 Subject: [PATCH 29/57] fix css issue --- mantis/analysis/analyze_psf.py | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/mantis/analysis/analyze_psf.py b/mantis/analysis/analyze_psf.py index c354248e..178f57a6 100644 --- a/mantis/analysis/analyze_psf.py +++ b/mantis/analysis/analyze_psf.py @@ -12,6 +12,8 @@ import pandas as pd import torch import torch.nn.functional as F +import mantis.acquisition.scripts +import importlib.resources as pkg_resources from napari_psf_analysis.psf_analysis.extract.BeadExtractor import BeadExtractor from napari_psf_analysis.psf_analysis.image import Calibrated3DImage @@ -138,7 +140,8 @@ def generate_report( df_gaussian_fit.to_csv(output_path / 'psf_gaussian_fit.csv', index=False) df_1d_peak_width.to_csv(output_path / 'psf_1d_peak_width.csv', index=False) - shutil.copy('github-markdown.css', output_path) + with pkg_resources.path(mantis.acquisition.scripts, 'github-markdown.css') as css_path: + shutil.copy(css_path, output_path) html_file_path = output_path / ('psf_analysis_report.html') with open(html_file_path, 'w') as file: file.write(html_report) From 892337f5d2979d96a74bf52767f8b10961fe16ed Mon Sep 17 00:00:00 2001 From: Talon Chandler Date: Tue, 28 May 2024 17:27:44 -0700 Subject: [PATCH 30/57] napari-psf-analysis is breaking fresh installations --- pyproject.toml | 1 - 1 file changed, 1 deletion(-) diff --git a/pyproject.toml b/pyproject.toml index d95b2bf2..42e54cdc 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -29,7 +29,6 @@ dependencies = [ "natsort", "ndtiff>=2.0", "nidaqmx", - "napari-psf-analyis", "numpy", "markdown", "pycromanager==0.28.1", From 9cf4992c0f65776bb0ca5bdba87ba4f6bb5488ce Mon Sep 17 00:00:00 2001 From: Talon Chandler Date: Tue, 28 May 2024 17:28:12 -0700 Subject: [PATCH 31/57] auto-open webbrowser --- mantis/analysis/analyze_psf.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/mantis/analysis/analyze_psf.py b/mantis/analysis/analyze_psf.py index 178f57a6..14b36a66 100644 --- a/mantis/analysis/analyze_psf.py +++ b/mantis/analysis/analyze_psf.py @@ -147,7 +147,8 @@ def generate_report( file.write(html_report) # display html report - webbrowser.open('file://' + str(html_file_path)) + html_file_path = Path(html_file_path).absolute() + webbrowser.open(html_file_path.as_uri()) def extract_beads( From b85b9a82e8e90713fdcfac9e2d174c3a1418e552 Mon Sep 17 00:00:00 2001 From: Talon Chandler Date: Tue, 28 May 2024 17:29:01 -0700 Subject: [PATCH 32/57] handle negative with ints and clips --- mantis/analysis/analyze_psf.py | 11 ++++++++--- 1 file changed, 8 insertions(+), 3 deletions(-) diff --git a/mantis/analysis/analyze_psf.py b/mantis/analysis/analyze_psf.py index 
14b36a66..0093b767 100644 --- a/mantis/analysis/analyze_psf.py +++ b/mantis/analysis/analyze_psf.py @@ -165,7 +165,7 @@ def extract_beads( # extract bead patches bead_extractor = BeadExtractor( - image=Calibrated3DImage(data=zyx_data.astype(np.uint16), spacing=scale), + image=Calibrated3DImage(data=zyx_data.astype(np.int32), spacing=scale), patch_size=patch_size, ) beads = bead_extractor.extract_beads(points=points) @@ -180,7 +180,8 @@ def extract_beads( def analyze_psf(zyx_patches: List[ArrayLike], bead_offsets: List[tuple], scale: tuple): results = [] for patch, offset in zip(zyx_patches, bead_offsets): - bead = Calibrated3DImage(data=patch.astype(np.uint16), spacing=scale, offset=offset) + patch = np.clip(patch, 0, None) + bead = Calibrated3DImage(data=patch.astype(np.int32), spacing=scale, offset=offset) psf = PSF(image=bead) try: psf.analyze() @@ -256,7 +257,11 @@ def plot_psf_slices( fig, ax = plt.subplots(1, num_beads) for _ax, bead, bead_number in zip(ax, beads, bead_numbers): _ax.imshow( - bead[shape_Z // 2, :, :], cmap=cmap, origin='lower', aspect=scale_Y / scale_X + bead[shape_Z // 2, :, :], + cmap=cmap, + origin='lower', + aspect=scale_Y / scale_X, + vmin=0, ) _ax.set_xlabel(axis_labels[-1]) _ax.set_ylabel(axis_labels[-2]) From af1d36ac00a0ce56fa41080d7e2eeda0545f31e7 Mon Sep 17 00:00:00 2001 From: Talon Chandler Date: Tue, 28 May 2024 17:29:28 -0700 Subject: [PATCH 33/57] pass in axis labels --- mantis/analysis/AnalysisSettings.py | 1 + mantis/cli/characterize.py | 17 ++++++++++++----- 2 files changed, 13 insertions(+), 5 deletions(-) diff --git a/mantis/analysis/AnalysisSettings.py b/mantis/analysis/AnalysisSettings.py index e8833ad2..aeca8dcf 100644 --- a/mantis/analysis/AnalysisSettings.py +++ b/mantis/analysis/AnalysisSettings.py @@ -89,6 +89,7 @@ class CharacterizeSettings(MyBaseModel): max_num_peaks: NonNegativeInt = 2000 exclude_border: list[NonNegativeInt] = (5, 10, 5) device: str = "cuda" + axis_labels: list[str] = ["AXIS0", "AXIS1", "AXIS2"] @validator("device") def check_device(cls, v): diff --git a/mantis/cli/characterize.py b/mantis/cli/characterize.py index 4d80b4cf..54b6051b 100644 --- a/mantis/cli/characterize.py +++ b/mantis/cli/characterize.py @@ -9,7 +9,12 @@ from mantis.analysis.AnalysisSettings import CharacterizeSettings from mantis.cli.parsing import input_position_dirpaths, output_dirpath, config_filepath from mantis.cli.utils import yaml_to_model -from mantis.analysis.analyze_psf import detect_peaks, extract_beads, analyze_psf, generate_report +from mantis.analysis.analyze_psf import ( + detect_peaks, + extract_beads, + analyze_psf, + generate_report, +) @click.command() @@ -34,12 +39,14 @@ def characterize( # Read settings settings = yaml_to_model(config_filepath, CharacterizeSettings) - + settings_dict = settings.dict() + axis_labels = settings_dict.pop("axis_labels") + click.echo(f"Detecting peaks...") t1 = time.time() peaks = detect_peaks( zyx_data, - **settings.dict(), + **settings_dict, verbose=True, ) gc.collect() @@ -75,5 +82,5 @@ def characterize( df_gaussian_fit, df_1d_peak_width, zyx_scale, - ["AXIS 0", "AXIS 1", "AXIS 2"], - ) \ No newline at end of file + axis_labels, + ) From 952d222b173786de14494f418f69d83c61de78c6 Mon Sep 17 00:00:00 2001 From: Talon Chandler Date: Thu, 30 May 2024 12:04:41 -0700 Subject: [PATCH 34/57] average beads over multiple FOVs --- mantis/cli/psf_from_beads.py | 66 ++++++++++++++++++++++-------------- 1 file changed, 40 insertions(+), 26 deletions(-) diff --git a/mantis/cli/psf_from_beads.py 
b/mantis/cli/psf_from_beads.py index a611789b..4937edeb 100644 --- a/mantis/cli/psf_from_beads.py +++ b/mantis/cli/psf_from_beads.py @@ -32,12 +32,19 @@ def psf_from_beads( output_dirpath = Path(output_dirpath) config_filepath = Path(config_filepath) - # Load the first position (TODO: consider averaging over positions) + # Load the first position click.echo(f"Loading data...") - with open_ome_zarr(str(input_position_dirpaths[0]), mode="r") as input_dataset: - T, C, Z, Y, X = input_dataset.data.shape - zyx_data = input_dataset["0"][0, 0] - zyx_scale = input_dataset.scale[-3:] + pzyx_data = [] + for input_position_dirpath in input_position_dirpaths: + with open_ome_zarr(str(input_position_dirpath), mode="r") as input_dataset: + T, C, Z, Y, X = input_dataset.data.shape + pzyx_data.append(input_dataset["0"][0, 0]) + zyx_scale = input_dataset.scale[-3:] + + try: + pzyx_data = np.array(pzyx_data) + except: + raise "Concatenating position arrays failed." # Read settings settings = yaml_to_model(config_filepath, PsfFromBeadsSettings) @@ -59,30 +66,37 @@ def psf_from_beads( "device": "cuda" if torch.cuda.is_available() else "cpu", } - # Detect and extract bead patches - click.echo(f"Detecting beads...") - t1 = time.time() - peaks = detect_peaks( - zyx_data, - **bead_detection_settings, - verbose=True, - ) - gc.collect() + pbzyx_data = [] + for zyx_data in pzyx_data: + # Detect and extract bead patches + click.echo(f"Detecting beads...") + t1 = time.time() + peaks = detect_peaks( + zyx_data, + **bead_detection_settings, + verbose=True, + ) + gc.collect() - torch.cuda.empty_cache() - t2 = time.time() - click.echo(f'Time to detect peaks: {t2-t1}') + torch.cuda.empty_cache() + t2 = time.time() + click.echo(f'Time to detect peaks: {t2-t1}') - beads, _ = extract_beads( - zyx_data=zyx_data, - points=peaks, - scale=zyx_scale, - patch_size_voxels=patch_size, - ) + beads, _ = extract_beads( + zyx_data=zyx_data, + points=peaks, + scale=zyx_scale, + patch_size_voxels=patch_size, + ) + + # Filter PSFs with non-standard shapes + filtered_beads = [x for x in beads if x.shape == beads[0].shape] + bzyx_data = np.stack(filtered_beads) + pbzyx_data.append(bzyx_data) + + bzyx_data = np.concatenate(pbzyx_data) + click.echo(f"Total beads: {bzyx_data.shape[0]}") - # Filter PSFs with non-standard shapes - filtered_beads = [x for x in beads if x.shape == beads[0].shape] - bzyx_data = np.stack(filtered_beads) normalized_bzyx_data = ( bzyx_data / np.max(bzyx_data, axis=(-3, -2, -1))[:, None, None, None] ) From 1e42da7fc5225438d129b7e9302db210606fd9f8 Mon Sep 17 00:00:00 2001 From: Talon Chandler Date: Tue, 2 Jul 2024 14:26:55 -0700 Subject: [PATCH 35/57] adding WIP changes, used for most recent deconvolutions --- mantis/cli/deconvolve.py | 57 ++++++++++++++++++++++------------------ 1 file changed, 31 insertions(+), 26 deletions(-) diff --git a/mantis/cli/deconvolve.py b/mantis/cli/deconvolve.py index eb05b0a4..df40cde5 100644 --- a/mantis/cli/deconvolve.py +++ b/mantis/cli/deconvolve.py @@ -22,31 +22,22 @@ def apply_deconvolve_single_position( """ Apply deconvolution to a single position """ - # Load the data - with open_ome_zarr(input_position_dirpath, mode="r") as input_dataset: - T, C, Z, Y, X = input_dataset.data.shape - zyx_data = input_dataset["0"][0, 0] - zyx_scale = input_dataset.scale[-3:] - # Read settings settings = yaml_to_model(Path(config_filepath), DeconvolveSettings) + # Load the data + input_dataset = open_ome_zarr(input_position_dirpath, mode="r") + output_dataset = open_ome_zarr(output_dirpath, mode="a") 
+ T, C, Z, Y, X = input_dataset.data.shape + # Load the PSF with open_ome_zarr(psf_dirpath, mode="r") as psf_dataset: position = psf_dataset["0/0/0"] psf_data = position["0"][0, 0] psf_scale = position.scale[-3:] - # Check if scales match - if psf_scale != zyx_scale: - click.echo( - f"Warning: PSF scale {psf_scale} does not match data scale {zyx_scale}. " - "Consider resampling the PSF." - ) - - # Apply deconvolution click.echo("Padding PSF...") - zyx_padding = np.array(zyx_data.shape) - np.array(psf_data.shape) + zyx_padding = np.array((Z, Y, X)) - np.array(psf_data.shape) pad_width = [(x // 2, x // 2) if x % 2 == 0 else (x // 2, x // 2 + 1) for x in zyx_padding] padded_average_psf = np.pad( psf_data, pad_width=pad_width, mode="constant", constant_values=0 @@ -56,18 +47,32 @@ def apply_deconvolve_single_position( transfer_function = torch.abs(torch.fft.fftn(torch.tensor(padded_average_psf))) transfer_function /= torch.max(transfer_function) - click.echo("Deconvolving...") - zyx_data_deconvolved = apply_inverse_transfer_function( - torch.tensor(zyx_data), - torch.tensor(transfer_function), - 0, - regularization_strength=settings.regularization_strength, - ) + zyx_scale = input_dataset.scale[-3:] + + # Check if scales match + if psf_scale != zyx_scale: + click.echo( + f"Warning: PSF scale {psf_scale} does not match data scale {zyx_scale}. " + "Consider resampling the PSF." + ) + + for t in range(1):#T): + for c in range(C): + zyx_data = input_dataset["0"][t, c] + + # Apply deconvolution + click.echo(f"Deconvolving channel {c}/{C-1}, time {t}/{T-1}") + zyx_data_deconvolved = apply_inverse_transfer_function( + torch.tensor(zyx_data), + torch.tensor(transfer_function), + 0, + regularization_strength=settings.regularization_strength, + ) + click.echo("Saving to output...") + output_dataset["0"][t, c] = zyx_data_deconvolved.numpy() - # Save to output dirpath - click.echo("Saving to output...") - with open_ome_zarr(output_dirpath, mode="a") as output_dataset: - output_dataset["0"][0, 0] = zyx_data_deconvolved.numpy() + input_dataset.close() + output_dataset.close() @click.command() From a87f41b2af7d9c256bc7380b8478f161bcf9459a Mon Sep 17 00:00:00 2001 From: Ivan Ivanov Date: Tue, 30 Jul 2024 21:37:34 -0400 Subject: [PATCH 36/57] style --- .flake8 | 1 + .pre-commit-config.yaml | 2 +- mantis/acquisition/scripts/measure_psf.py | 18 ++++++++-------- mantis/analysis/scripts/fit_psf_to_beads.py | 23 ++++++++++++--------- mantis/analysis/scripts/simple_psf.py | 7 +++++-- mantis/analysis/scripts/simulate_psf.py | 10 +++++---- mantis/cli/characterize.py | 23 ++++++++++++--------- mantis/cli/deconvolve.py | 22 +++++++++++--------- mantis/cli/main.py | 6 +++--- 9 files changed, 63 insertions(+), 49 deletions(-) diff --git a/.flake8 b/.flake8 index adaa55bb..2378f3cc 100644 --- a/.flake8 +++ b/.flake8 @@ -37,3 +37,4 @@ exclude = ignore, legacy, examples, + scripts diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 79ddf030..7fb92b48 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -1,4 +1,4 @@ -exclude: ^(docs|notebooks|ignore|/tests/artifacts|examples)/ +exclude: ^(docs|notebooks|ignore|/tests/artifacts|examples|scripts)/ repos: - repo: https://github.com/pre-commit/pre-commit-hooks rev: v4.1.0 diff --git a/mantis/acquisition/scripts/measure_psf.py b/mantis/acquisition/scripts/measure_psf.py index ee4f5d7e..29d7e0cc 100644 --- a/mantis/acquisition/scripts/measure_psf.py +++ b/mantis/acquisition/scripts/measure_psf.py @@ -1,6 +1,7 @@ # %% import gc import time +import 
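
The deconvolution path above is a regularized inverse filter: the measured PSF is zero-padded up to the data shape, its normalized FFT magnitude serves as the transfer function, and waveorder's `apply_inverse_transfer_function` is applied per timepoint and channel. The same pipeline condensed into a self-contained sketch (synthetic volume and toy PSF; the regularization value is illustrative):

import numpy as np
import torch
from waveorder.models.isotropic_fluorescent_thick_3d import apply_inverse_transfer_function

zyx_data = np.random.rand(32, 64, 64).astype(np.float32)  # synthetic volume
psf = np.zeros((8, 16, 16), dtype=np.float32)
psf[4, 8, 8] = 1.0  # toy point-like PSF

# Zero-pad the PSF to the data shape, splitting any odd remainder.
padding = np.array(zyx_data.shape) - np.array(psf.shape)
pad_width = [(p // 2, p // 2) if p % 2 == 0 else (p // 2, p // 2 + 1) for p in padding]
padded_psf = np.pad(psf, pad_width=pad_width, mode="constant", constant_values=0)

# Transfer function = normalized FFT magnitude of the padded PSF.
transfer_function = torch.abs(torch.fft.fftn(torch.tensor(padded_psf)))
transfer_function /= torch.max(transfer_function)

deconvolved = apply_inverse_transfer_function(
    torch.tensor(zyx_data),
    transfer_function,
    0,
    regularization_strength=1e-3,
)
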
warnings from pathlib import Path @@ -8,18 +9,13 @@ import napari import numpy as np import torch -import warnings from cupyx.scipy.ndimage import affine_transform from iohub.ngff_meta import TransformationMeta -from iohub.reader import open_ome_zarr, read_micromanager -from pycromanager import Acquisition, multi_d_acquisition_events, Core - -from mantis.acquisition.microscope_operations import ( - acquire_defocus_stack, - setup_kim101_stage -) +from iohub.reader import open_ome_zarr +from pycromanager import Acquisition, Core, multi_d_acquisition_events +from mantis.acquisition.microscope_operations import acquire_defocus_stack from mantis.analysis.AnalysisSettings import DeskewSettings from mantis.analysis.analyze_psf import ( analyze_psf, @@ -65,12 +61,14 @@ "device": "cuda" if torch.cuda.is_available() else "cpu", } + def check_acquisition_directory(root_dir: Path, acq_name: str, suffix='', idx=1) -> Path: acq_dir = root_dir / f'{acq_name}_{idx}{suffix}' if acq_dir.exists(): return check_acquisition_directory(root_dir, acq_name, suffix, idx + 1) return acq_dir + mmc = Core() # mmc.set_property('Prime BSI Express', 'ExposeOutMode', 'Rolling Shutter') @@ -86,7 +84,7 @@ def check_acquisition_directory(root_dir: Path, acq_name: str, suffix='', idx=1) # dataset = f'{date}_epi_O1_benchmark' # dataset = f'{date}_LS_Oryx_epi_illum' # dataset = f'{date}_LS_Oryx_LS_illum' -dataset = f'{date}_LS_benchmark' +dataset = f'{date}_LS_benchmark' # epi settings # z_stage = 'PiezoStage:Q:35' @@ -112,8 +110,10 @@ def check_acquisition_directory(root_dir: Path, acq_name: str, suffix='', idx=1) # pixel_size = 0.116 # in um pixel_size = 6.5 / 40 / 1.4 # in um, no binning axis_labels = ("SCAN", "TILT", "COVERSLIP") +step_per_um = None # ls straight settings +# from mantis.acquisition.microscope_operations import setup_kim101_stage # z_stage = setup_kim101_stage('74000291') # step_per_um = 35 # matches ~30 nm per step quoted in PIA13 specs # z_start = 0 / step_per_um # in um diff --git a/mantis/analysis/scripts/fit_psf_to_beads.py b/mantis/analysis/scripts/fit_psf_to_beads.py index 002c4aa1..4466c9bf 100644 --- a/mantis/analysis/scripts/fit_psf_to_beads.py +++ b/mantis/analysis/scripts/fit_psf_to_beads.py @@ -1,17 +1,19 @@ # %% import gc +import time + import napari import numpy as np -import time import torch -from mantis.analysis.analyze_psf import detect_peaks, extract_beads -from mantis.analysis.deskew import _deskew_matrix -from mantis.analysis.scripts.simulate_psf import _apply_centered_affine from iohub import read_micromanager from waveorder import optics from waveorder.models.isotropic_fluorescent_thick_3d import apply_inverse_transfer_function +from mantis.analysis.analyze_psf import detect_peaks, extract_beads +from mantis.analysis.deskew import _deskew_matrix +from mantis.analysis.scripts.simulate_psf import _apply_centered_affine + # %% Load beads (from ndtiff for now) data_dir = ( "/hpc/instruments/cm.mantis/2024_04_23_mantis_alignment/2024_05_05_LS_Oryx_LS_illum_8/" @@ -63,8 +65,6 @@ average_psf = np.mean(normalized_bzyx_data, axis=0) # %% View PSFs -import napari - v = napari.Viewer() v.add_image(normalized_bzyx_data) v.add_image(average_psf) @@ -190,7 +190,7 @@ def generate_psf(numerical_aperture_detection, ls_angle_deg, coma_strength): # %% Crop data for prototyping deconvolution stc_data = stc_data[:200, :200, :500] -# %% +# %% # Simple background subtraction and normalization average_psf -= np.min(average_psf) @@ -199,7 +199,9 @@ def generate_psf(numerical_aperture_detection, ls_angle_deg, 
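
The import reshuffling in this style pass follows the standard isort/PEP 8 grouping, presumably enforced via the repo's pre-commit hooks: standard library, third-party, then first-party `mantis` modules, sorted within each blank-line-separated group. In miniature:

# standard library
import gc
import time
from pathlib import Path

# third-party
import numpy as np
import torch

# first-party
from mantis.analysis.analyze_psf import detect_peaks, extract_beads
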
coma_strength): # %% zyx_padding = np.array(stc_data.shape) - np.array(average_psf.shape) pad_width = [(x // 2, x // 2) if x % 2 == 0 else (x // 2, x // 2 + 1) for x in zyx_padding] -padded_average_psf = np.pad(average_psf, pad_width=pad_width, mode="constant", constant_values=0) +padded_average_psf = np.pad( + average_psf, pad_width=pad_width, mode="constant", constant_values=0 +) transfer_function = np.abs(np.fft.fftn(padded_average_psf)) transfer_function /= np.max(transfer_function) print(transfer_function.shape) @@ -207,7 +209,9 @@ def generate_psf(numerical_aperture_detection, ls_angle_deg, coma_strength): # %% # %% -stc_data_deconvolved = apply_inverse_transfer_function(torch.tensor(stc_data), torch.tensor(transfer_function), 0, regularization_strength=1e-3) +stc_data_deconvolved = apply_inverse_transfer_function( + torch.tensor(stc_data), torch.tensor(transfer_function), 0, regularization_strength=1e-3 +) v = napari.Viewer() v.add_image(padded_average_psf) @@ -215,5 +219,4 @@ def generate_psf(numerical_aperture_detection, ls_angle_deg, coma_strength): v.add_image(stc_data_deconvolved.numpy()) - # %% diff --git a/mantis/analysis/scripts/simple_psf.py b/mantis/analysis/scripts/simple_psf.py index 9b6d5c4d..0701ae38 100644 --- a/mantis/analysis/scripts/simple_psf.py +++ b/mantis/analysis/scripts/simple_psf.py @@ -1,10 +1,13 @@ # %% import warnings + import napari -import torch import numpy as np +import torch + from waveorder import optics -from mantis.analysis.analyze_psf import analyze_psf, extract_beads, detect_peaks + +from mantis.analysis.analyze_psf import analyze_psf, detect_peaks, extract_beads # %% Generate simulated PSF library diff --git a/mantis/analysis/scripts/simulate_psf.py b/mantis/analysis/scripts/simulate_psf.py index 489cfad1..f17d89a9 100644 --- a/mantis/analysis/scripts/simulate_psf.py +++ b/mantis/analysis/scripts/simulate_psf.py @@ -3,12 +3,14 @@ # otf = optical transfer function # psf = point spread function -from waveorder.models.isotropic_fluorescent_thick_3d import calculate_transfer_function -from mantis.analysis.deskew import _deskew_matrix -import scipy +import napari import numpy as np +import scipy import torch -import napari + +from waveorder.models.isotropic_fluorescent_thick_3d import calculate_transfer_function + +from mantis.analysis.deskew import _deskew_matrix def _apply_centered_affine(zyx_array, M): diff --git a/mantis/cli/characterize.py b/mantis/cli/characterize.py index 54b6051b..0d857997 100644 --- a/mantis/cli/characterize.py +++ b/mantis/cli/characterize.py @@ -1,20 +1,23 @@ -import click -import time import gc -import torch +import time import warnings -from iohub.ngff import open_ome_zarr from typing import List + +import click +import torch + +from iohub.ngff import open_ome_zarr + from mantis.analysis.AnalysisSettings import CharacterizeSettings -from mantis.cli.parsing import input_position_dirpaths, output_dirpath, config_filepath -from mantis.cli.utils import yaml_to_model from mantis.analysis.analyze_psf import ( + analyze_psf, detect_peaks, extract_beads, - analyze_psf, generate_report, ) +from mantis.cli.parsing import config_filepath, input_position_dirpaths, output_dirpath +from mantis.cli.utils import yaml_to_model @click.command() @@ -31,7 +34,7 @@ def characterize( >> mantis characterize -i ./beads.zarr/*/*/* -c ./characterize_params.yml -o ./ """ - click.echo(f"Loading data...") + click.echo("Loading data...") with open_ome_zarr(str(input_position_dirpaths[0]), mode="r") as input_dataset: T, C, Z, Y, X = 
input_dataset.data.shape zyx_data = input_dataset["0"][0, 0] @@ -42,7 +45,7 @@ def characterize( settings_dict = settings.dict() axis_labels = settings_dict.pop("axis_labels") - click.echo(f"Detecting peaks...") + click.echo("Detecting peaks...") t1 = time.time() peaks = detect_peaks( zyx_data, @@ -61,7 +64,7 @@ def characterize( scale=zyx_scale, ) - click.echo(f"Analyzing PSFs...") + click.echo("Analyzing PSFs...") with warnings.catch_warnings(): warnings.simplefilter("ignore") df_gaussian_fit, df_1d_peak_width = analyze_psf( diff --git a/mantis/cli/deconvolve.py b/mantis/cli/deconvolve.py index df40cde5..aa5a0c3d 100644 --- a/mantis/cli/deconvolve.py +++ b/mantis/cli/deconvolve.py @@ -1,19 +1,21 @@ +from pathlib import Path +from typing import List + import click import numpy as np import torch -from typing import List +from iohub import open_ome_zarr +from waveorder.models.isotropic_fluorescent_thick_3d import apply_inverse_transfer_function + +from mantis.analysis.AnalysisSettings import DeconvolveSettings from mantis.cli.parsing import ( - input_position_dirpaths, + _str_to_path, config_filepath, + input_position_dirpaths, output_dirpath, - _str_to_path, ) -from mantis.analysis.AnalysisSettings import DeconvolveSettings -from mantis.cli.utils import yaml_to_model, create_empty_hcs_zarr -from iohub import open_ome_zarr -from pathlib import Path -from waveorder.models.isotropic_fluorescent_thick_3d import apply_inverse_transfer_function +from mantis.cli.utils import create_empty_hcs_zarr, yaml_to_model def apply_deconvolve_single_position( @@ -56,7 +58,7 @@ def apply_deconvolve_single_position( "Consider resampling the PSF." ) - for t in range(1):#T): + for t in range(1): # T): for c in range(C): zyx_data = input_dataset["0"][t, c] @@ -103,7 +105,7 @@ def deconvolve( config_filepath = Path(config_filepath) # Create output zarr store - click.echo(f"Creating empty output zarr...") + click.echo("Creating empty output zarr...") with open_ome_zarr(str(input_position_dirpaths[0]), mode="r") as input_dataset: create_empty_hcs_zarr( store_path=output_dirpath, diff --git a/mantis/cli/main.py b/mantis/cli/main.py index 290f5e9e..5ba8e0a5 100644 --- a/mantis/cli/main.py +++ b/mantis/cli/main.py @@ -1,16 +1,16 @@ import click from mantis.cli.apply_affine import apply_affine +from mantis.cli.characterize import characterize +from mantis.cli.deconvolve import deconvolve from mantis.cli.deskew import deskew from mantis.cli.estimate_affine import estimate_affine from mantis.cli.estimate_bleaching import estimate_bleaching from mantis.cli.estimate_deskew import estimate_deskew from mantis.cli.optimize_affine import optimize_affine +from mantis.cli.psf_from_beads import psf_from_beads from mantis.cli.run_acquisition import run_acquisition from mantis.cli.update_scale_metadata import update_scale_metadata -from mantis.cli.psf_from_beads import psf_from_beads -from mantis.cli.deconvolve import deconvolve -from mantis.cli.characterize import characterize CONTEXT = {"help_option_names": ["-h", "--help"]} From b599c6b84fd5cf5cf9bbb4694b603361de2c3623 Mon Sep 17 00:00:00 2001 From: Ivan Ivanov Date: Wed, 31 Jul 2024 17:39:37 -0400 Subject: [PATCH 37/57] add napari-psf-analysis dependency --- pyproject.toml | 1 + 1 file changed, 1 insertion(+) diff --git a/pyproject.toml b/pyproject.toml index 2e140f6a..f94a53ff 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -44,6 +44,7 @@ dependencies = [ "largestinteriorrectangle", "antspyx", "pystackreg", + "napari-psf-analysis", ] From 
638245a53df38c571bfab002bd5b84918604585a Mon Sep 17 00:00:00 2001 From: Ivan Ivanov Date: Thu, 1 Aug 2024 13:24:42 -0700 Subject: [PATCH 38/57] refactor characterize cli --- mantis/cli/characterize.py | 66 +++++++++++++++++++++++++------------- mantis/cli/main.py | 4 +-- 2 files changed, 45 insertions(+), 25 deletions(-) diff --git a/mantis/cli/characterize.py b/mantis/cli/characterize.py index 0d857997..44aa44cc 100644 --- a/mantis/cli/characterize.py +++ b/mantis/cli/characterize.py @@ -2,9 +2,11 @@ import time import warnings +from pathlib import Path from typing import List import click +import numpy as np import torch from iohub.ngff import open_ome_zarr @@ -20,28 +22,14 @@ from mantis.cli.utils import yaml_to_model -@click.command() -@input_position_dirpaths() -@config_filepath() -@output_dirpath() def characterize( - input_position_dirpaths: List[str], - config_filepath: str, - output_dirpath: str, + zyx_data: np.ndarray, + zyx_scale: tuple[float, float, float], + settings: CharacterizeSettings, + output_report_path: str, + input_dataset_path: str, + input_dataset_name: str, ): - """ - Characterize the point spread function (PSF) from bead images in an html report - - >> mantis characterize -i ./beads.zarr/*/*/* -c ./characterize_params.yml -o ./ - """ - click.echo("Loading data...") - with open_ome_zarr(str(input_position_dirpaths[0]), mode="r") as input_dataset: - T, C, Z, Y, X = input_dataset.data.shape - zyx_data = input_dataset["0"][0, 0] - zyx_scale = input_dataset.scale[-3:] - - # Read settings - settings = yaml_to_model(config_filepath, CharacterizeSettings) settings_dict = settings.dict() axis_labels = settings_dict.pop("axis_labels") @@ -77,9 +65,9 @@ def characterize( # Generate HTML report generate_report( - output_dirpath, - input_position_dirpaths[0], - "", + output_report_path, + input_dataset_path, + input_dataset_name, beads, peaks, df_gaussian_fit, @@ -87,3 +75,35 @@ def characterize( zyx_scale, axis_labels, ) + + +@click.command() +@input_position_dirpaths() +@config_filepath() +@output_dirpath() +def characterize_cli( + input_position_dirpaths: List[str], + config_filepath: str, + output_dirpath: str, +): + """ + Characterize the point spread function (PSF) from bead images and output an html report + + >> mantis characterize -i ./beads.zarr/*/*/* -c ./characterize_params.yml -o ./ + """ + if len(input_position_dirpaths) > 1: + warnings.warn("Only the first position will be characterized.") + + click.echo("Loading data...") + with open_ome_zarr(str(input_position_dirpaths[0]), mode="r") as input_dataset: + T, C, Z, Y, X = input_dataset.data.shape + zyx_data = input_dataset["0"][0, 0] + zyx_scale = input_dataset.scale[-3:] + + # Read settings + settings = yaml_to_model(config_filepath, CharacterizeSettings) + dataset_name = Path(input_position_dirpaths[0])[-4] + + characterize( + zyx_data, zyx_scale, settings, output_dirpath, input_position_dirpaths[0], dataset_name + ) diff --git a/mantis/cli/main.py b/mantis/cli/main.py index 34f7bc4e..341139cc 100644 --- a/mantis/cli/main.py +++ b/mantis/cli/main.py @@ -1,6 +1,6 @@ import click -from mantis.cli.characterize import characterize +from mantis.cli.characterize import characterize_cli from mantis.cli.concatenate import concatenate from mantis.cli.deconvolve import deconvolve from mantis.cli.deskew import deskew @@ -46,4 +46,4 @@ def cli(): cli.add_command(stabilize) cli.add_command(psf_from_beads) cli.add_command(deconvolve) -cli.add_command(characterize) +cli.add_command(characterize_cli, name='characterize') From 
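
One wrinkle in the new `characterize_cli` wrapper above: `Path(input_position_dirpaths[0])[-4]` raises `TypeError` at runtime, because `pathlib.Path` objects are not subscriptable. Indexing the path components presumably wants `.parts`, which for an HCS-style layout recovers the store name (example path hypothetical):

from pathlib import Path

# Hypothetical position path with the plate.zarr/row/column/fov layout.
pos_path = Path("beads.zarr/0/0/0")

# Path has no __getitem__; index its components via .parts instead.
dataset_name = pos_path.parts[-4]  # -> "beads.zarr"
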
ebde1cda16a2c908cd158703eb9a0017aed6fbd4 Mon Sep 17 00:00:00 2001 From: Ivan Ivanov Date: Thu, 1 Aug 2024 14:36:24 -0700 Subject: [PATCH 39/57] more characterize refactor --- mantis/cli/characterize.py | 6 ++++-- mantis/cli/main.py | 4 ++-- 2 files changed, 6 insertions(+), 4 deletions(-) diff --git a/mantis/cli/characterize.py b/mantis/cli/characterize.py index 44aa44cc..4e0ab8f8 100644 --- a/mantis/cli/characterize.py +++ b/mantis/cli/characterize.py @@ -22,7 +22,7 @@ from mantis.cli.utils import yaml_to_model -def characterize( +def characterize_peaks( zyx_data: np.ndarray, zyx_scale: tuple[float, float, float], settings: CharacterizeSettings, @@ -76,6 +76,8 @@ def characterize( axis_labels, ) + return peaks + @click.command() @input_position_dirpaths() @@ -104,6 +106,6 @@ def characterize_cli( settings = yaml_to_model(config_filepath, CharacterizeSettings) dataset_name = Path(input_position_dirpaths[0])[-4] - characterize( + _ = characterize_peaks( zyx_data, zyx_scale, settings, output_dirpath, input_position_dirpaths[0], dataset_name ) diff --git a/mantis/cli/main.py b/mantis/cli/main.py index 341139cc..34f7bc4e 100644 --- a/mantis/cli/main.py +++ b/mantis/cli/main.py @@ -1,6 +1,6 @@ import click -from mantis.cli.characterize import characterize_cli +from mantis.cli.characterize import characterize from mantis.cli.concatenate import concatenate from mantis.cli.deconvolve import deconvolve from mantis.cli.deskew import deskew @@ -46,4 +46,4 @@ def cli(): cli.add_command(stabilize) cli.add_command(psf_from_beads) cli.add_command(deconvolve) -cli.add_command(characterize_cli, name='characterize') +cli.add_command(characterize) From 7b291711837407ee0f011a09c6f5ccd866b714e4 Mon Sep 17 00:00:00 2001 From: Ivan Ivanov Date: Thu, 1 Aug 2024 14:53:58 -0700 Subject: [PATCH 40/57] use characterize_peaks in measure_psf --- mantis/acquisition/scripts/measure_psf.py | 130 ++++++---------------- mantis/analysis/AnalysisSettings.py | 1 + mantis/cli/characterize.py | 2 + 3 files changed, 34 insertions(+), 99 deletions(-) diff --git a/mantis/acquisition/scripts/measure_psf.py b/mantis/acquisition/scripts/measure_psf.py index 29d7e0cc..6e79be94 100644 --- a/mantis/acquisition/scripts/measure_psf.py +++ b/mantis/acquisition/scripts/measure_psf.py @@ -1,7 +1,5 @@ # %% -import gc import time -import warnings from pathlib import Path @@ -16,18 +14,13 @@ from pycromanager import Acquisition, Core, multi_d_acquisition_events from mantis.acquisition.microscope_operations import acquire_defocus_stack -from mantis.analysis.AnalysisSettings import DeskewSettings -from mantis.analysis.analyze_psf import ( - analyze_psf, - detect_peaks, - extract_beads, - generate_report, -) +from mantis.analysis.AnalysisSettings import CharacterizeSettings, DeskewSettings from mantis.analysis.deskew import ( _average_n_slices, _get_transform_matrix, get_deskewed_data_shape, ) +from mantis.cli.characterize import characterize_peaks epi_bead_detection_settings = { "block_size": (8, 8, 8), @@ -196,22 +189,23 @@ def check_acquisition_directory(root_dir: Path, acq_name: str, suffix='', idx=1) z_stage.close() raw = False +patch_size = (scale[0] * 15, scale[1] * 18, scale[2] * 18) if axis_labels == ("SCAN", "TILT", "COVERSLIP"): raw = True + patch_size = (scale[0] * 30, scale[1] * 36, scale[2] * 18) -# %% Detect peaks +# %% Characterize peaks -t1 = time.time() -peaks = detect_peaks( - zyx_data, - # **epi_bead_detection_settings, - **ls_bead_detection_settings, - verbose=True, +peaks = characterize_peaks( + zyx_data=zyx_data, + 
zyx_scale=scale, + settings=CharacterizeSettings( + **ls_bead_detection_settings, axis_labels=axis_labels, patch_size=patch_size + ), + output_report_path=data_dir / (dataset + '_psf_analysis'), + input_dataset_path=data_dir, + input_dataset_name=dataset, ) -gc.collect() -torch.cuda.empty_cache() -t2 = time.time() -print(f'Time to detect peaks: {t2-t1}') # %% Visualize in napari @@ -222,46 +216,8 @@ def check_acquisition_directory(root_dir: Path, acq_name: str, suffix='', idx=1) peaks, name='peaks local max', size=12, symbol='ring', edge_color='yellow' ) -# %% Extract and analyze bead patches - -t1 = time.time() -if raw: - patch_size = (scale[0] * 30, scale[1] * 36, scale[2] * 18) -else: - patch_size = (scale[0] * 15, scale[1] * 18, scale[2] * 18) -beads, offsets = extract_beads( - zyx_data=zyx_data, - points=peaks, - scale=scale, - patch_size=patch_size, -) - -with warnings.catch_warnings(): - warnings.simplefilter("ignore") - df_gaussian_fit, df_1d_peak_width = analyze_psf( - zyx_patches=beads, - bead_offsets=offsets, - scale=scale, - ) -t2 = time.time() -print(f'Time to analyze PSFs: {t2-t1}') - -# Generate HTML report - -psf_analysis_path = data_dir / (dataset + '_psf_analysis') -generate_report( - psf_analysis_path, - data_dir, - dataset, - beads, - peaks, - df_gaussian_fit, - df_1d_peak_width, - scale, - axis_labels, -) - # %% Deskew data and analyze +output_zarr_path = data_dir / (dataset + '_deskewed.zarr') if raw and deskew: # deskew @@ -325,17 +281,18 @@ def check_acquisition_directory(root_dir: Path, acq_name: str, suffix='', idx=1) t2 = time.time() print(f'Time to deskew: {t2-t1: .2f} seconds') - # detect peaks again :( - t1 = time.time() - deskewed_peaks = detect_peaks( - averaged_deskewed_data, - **deskew_bead_detection_settings, - verbose=True, + # Characterize deskewed peaks + deskewed_peaks = characterize_peaks( + zyx_data=averaged_deskewed_data, + zyx_scale=voxel_size, + settings=CharacterizeSettings( + **deskew_bead_detection_settings, + axis_labels=('Z', 'Y', 'X'), + ), + output_report_path=data_dir / (dataset + '_deskewed_psf_analysis'), + input_dataset_path=output_zarr_path, + input_dataset_name=dataset, ) - gc.collect() - torch.cuda.empty_cache() - t2 = time.time() - print(f'Time to detect deskewed peaks: {t2-t1: .2f} seconds') if view: viewer2 = napari.Viewer() @@ -344,36 +301,11 @@ def check_acquisition_directory(root_dir: Path, acq_name: str, suffix='', idx=1) deskewed_peaks, name='peaks local max', size=12, symbol='ring', edge_color='yellow' ) - deskewed_beads, deskewed_offsets = extract_beads( - zyx_data=averaged_deskewed_data, - points=deskewed_peaks, - scale=scale, - ) - - t1 = time.time() - with warnings.catch_warnings(): - warnings.simplefilter("ignore") - df_deskewed_gaussian_fit, df_deskewed_1d_peak_width = analyze_psf( - zyx_patches=deskewed_beads, - bead_offsets=deskewed_offsets, - scale=voxel_size, - ) - t2 = time.time() - print(f'Time to analyze deskewed PSFs: {t2-t1: .2f} seconds') - - output_zarr_path = data_dir / (dataset + '_deskewed.zarr') - report_path = data_dir / (dataset + '_deskewed_psf_analysis') - generate_report( - report_path, - output_zarr_path, - dataset, - deskewed_beads, - deskewed_peaks, - df_deskewed_gaussian_fit, - df_deskewed_1d_peak_width, - voxel_size, - ('Z', 'Y', 'X'), - ) + # deskewed_beads, deskewed_offsets = extract_beads( + # zyx_data=averaged_deskewed_data, + # points=deskewed_peaks, + # scale=scale, ## Looks like a bug + # ) # Save to zarr store transform = TransformationMeta( diff --git 
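
The zarr-writing block above records the physical voxel size as an NGFF scale transform, with `2 * (1,) + voxel_size` padding the 3-element ZYX scale out to the five TCZYX axes. A condensed sketch of that pattern (store path, channel name, and shapes illustrative):

import numpy as np
from iohub.ngff_meta import TransformationMeta
from iohub.reader import open_ome_zarr

deskewed_data = np.zeros((64, 256, 256), dtype=np.float32)
voxel_size = (0.155, 0.116, 0.116)  # ZYX, in um

# Unit scale for T and C, physical scale for Z, Y, X.
transform = TransformationMeta(type="scale", scale=2 * (1,) + voxel_size)

with open_ome_zarr("deskewed.zarr", layout="hcs", mode="w", channel_names=["GFP"]) as ds:
    pos = ds.create_position("0", "0", "0")
    pos.create_image(name="0", data=deskewed_data[None, None], transform=[transform])
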
a/mantis/analysis/AnalysisSettings.py b/mantis/analysis/AnalysisSettings.py index 99353bdd..4e1fad6b 100644 --- a/mantis/analysis/AnalysisSettings.py +++ b/mantis/analysis/AnalysisSettings.py @@ -96,6 +96,7 @@ class CharacterizeSettings(MyBaseModel): max_num_peaks: NonNegativeInt = 2000 exclude_border: list[NonNegativeInt] = (5, 10, 5) device: str = "cuda" + patch_size: tuple[PositiveFloat, PositiveFloat, PositiveFloat] | None = None axis_labels: list[str] = ["AXIS0", "AXIS1", "AXIS2"] @validator("device") diff --git a/mantis/cli/characterize.py b/mantis/cli/characterize.py index 4e0ab8f8..24217be7 100644 --- a/mantis/cli/characterize.py +++ b/mantis/cli/characterize.py @@ -31,6 +31,7 @@ def characterize_peaks( input_dataset_name: str, ): settings_dict = settings.dict() + patch_size = settings_dict.pop("patch_size") axis_labels = settings_dict.pop("axis_labels") click.echo("Detecting peaks...") @@ -50,6 +51,7 @@ def characterize_peaks( zyx_data=zyx_data, points=peaks, scale=zyx_scale, + patch_size=patch_size, ) click.echo("Analyzing PSFs...") From 5e3c7512acd4a65b9130dbff0594f42767d1c144 Mon Sep 17 00:00:00 2001 From: Ivan Ivanov Date: Thu, 1 Aug 2024 15:17:02 -0700 Subject: [PATCH 41/57] clean up measure_psf analysis portion --- mantis/acquisition/scripts/measure_psf.py | 107 +++++++--------------- mantis/analysis/deskew.py | 2 +- 2 files changed, 36 insertions(+), 73 deletions(-) diff --git a/mantis/acquisition/scripts/measure_psf.py b/mantis/acquisition/scripts/measure_psf.py index 6e79be94..8f40efb0 100644 --- a/mantis/acquisition/scripts/measure_psf.py +++ b/mantis/acquisition/scripts/measure_psf.py @@ -1,27 +1,20 @@ # %% -import time - from pathlib import Path -import cupy as cp import napari import numpy as np import torch -from cupyx.scipy.ndimage import affine_transform from iohub.ngff_meta import TransformationMeta from iohub.reader import open_ome_zarr from pycromanager import Acquisition, Core, multi_d_acquisition_events from mantis.acquisition.microscope_operations import acquire_defocus_stack -from mantis.analysis.AnalysisSettings import CharacterizeSettings, DeskewSettings -from mantis.analysis.deskew import ( - _average_n_slices, - _get_transform_matrix, - get_deskewed_data_shape, -) +from mantis.analysis.AnalysisSettings import CharacterizeSettings +from mantis.analysis.deskew import deskew_data, get_deskewed_data_shape from mantis.cli.characterize import characterize_peaks +device = "cuda" if torch.cuda.is_available() else "cpu" epi_bead_detection_settings = { "block_size": (8, 8, 8), "blur_kernel_size": 3, @@ -29,7 +22,7 @@ "threshold_abs": 200.0, "max_num_peaks": 500, "exclude_border": (5, 5, 5), - "device": "cuda" if torch.cuda.is_available() else "cpu", + "device": device, } ls_bead_detection_settings = { @@ -40,7 +33,7 @@ "threshold_abs": 200.0, "max_num_peaks": 2000, "exclude_border": (5, 10, 5), - "device": "cuda" if torch.cuda.is_available() else "cpu", + "device": device, } deskew_bead_detection_settings = { @@ -51,7 +44,7 @@ "threshold_abs": 200.0, "max_num_peaks": 500, "exclude_border": (5, 5, 5), - "device": "cuda" if torch.cuda.is_available() else "cpu", + "device": device, } @@ -220,71 +213,41 @@ def check_acquisition_directory(root_dir: Path, acq_name: str, suffix='', idx=1) output_zarr_path = data_dir / (dataset + '_deskewed.zarr') if raw and deskew: - # deskew + # chunk data so that it fits in the GPU memory + # should not be necessary on the mantis GPU num_chunks = 4 chunked_data = np.split(zyx_data, num_chunks, axis=-1) - chunk_shape = 
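
With `patch_size` promoted into `CharacterizeSettings` above, the bead-extraction window (ZYX, in um) travels through the same pydantic/YAML path as the detection parameters, and `None` keeps the extractor's default window. Constructed directly in code (field values illustrative):

from mantis.analysis.AnalysisSettings import CharacterizeSettings

settings = CharacterizeSettings(
    axis_labels=["Z", "Y", "X"],
    patch_size=(2.0, 1.0, 1.0),  # ZYX extent in um; omit for the default window
    device="cpu",
)

# Mirrors how the characterize code consumes the model:
settings_dict = settings.dict()
patch_size = settings_dict.pop("patch_size")
axis_labels = settings_dict.pop("axis_labels")
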
chunked_data[0].shape - - settings = DeskewSettings( - pixel_size_um=scale[-1], - ls_angle_deg=30, - scan_step_um=scale[-3], - keep_overhang=True, - average_n_slices=3, - ) - t1 = time.time() - deskewed_shape, _ = get_deskewed_data_shape( - chunk_shape, - settings.ls_angle_deg, - settings.px_to_scan_ratio, - settings.keep_overhang, - ) + deskew_settings = { + "ls_angle_deg": 30, + "px_to_scan_ratio": round(scale[-1] / scale[-3], 3), + "keep_overhang": True, + "average_n_slices": 3, + } - matrix = _get_transform_matrix( - chunk_shape, - settings.ls_angle_deg, - settings.px_to_scan_ratio, - settings.keep_overhang, + deskewed_shape, deskewed_voxel_size = get_deskewed_data_shape( + raw_data_shape=zyx_data.shape, + pixel_size_um=scale[-1], + **deskew_settings, ) - matrix_gpu = cp.asarray(matrix) deskewed_chunks = [] for chunk in chunked_data: - deskewed_data_gpu = affine_transform( - cp.asarray(chunk), - matrix_gpu, - output_shape=deskewed_shape, - order=1, - cval=80, + deskewed_chunks.append( + deskew_data( + chunk, + device=device, + **deskew_settings, + ) ) - deskewed_chunks.append(cp.asnumpy(deskewed_data_gpu)) - del deskewed_data_gpu - cp._default_memory_pool.free_all_blocks() # concatenate arrays in reverse order - # identical to cpu deskew using ndi.affine_transform deskewed_data = np.concatenate(deskewed_chunks[::-1], axis=-2) - averaged_deskewed_data = _average_n_slices( - deskewed_data, average_window_width=settings.average_n_slices - ) - - deskewed_shape, voxel_size = get_deskewed_data_shape( - zyx_data.shape, - settings.ls_angle_deg, - settings.px_to_scan_ratio, - settings.keep_overhang, - settings.average_n_slices, - settings.pixel_size_um, - ) - t2 = time.time() - print(f'Time to deskew: {t2-t1: .2f} seconds') - # Characterize deskewed peaks deskewed_peaks = characterize_peaks( - zyx_data=averaged_deskewed_data, - zyx_scale=voxel_size, + zyx_data=deskewed_data, + zyx_scale=deskewed_voxel_size, settings=CharacterizeSettings( **deskew_bead_detection_settings, axis_labels=('Z', 'Y', 'X'), @@ -294,23 +257,23 @@ def check_acquisition_directory(root_dir: Path, acq_name: str, suffix='', idx=1) input_dataset_name=dataset, ) + # deskewed_beads, deskewed_offsets = extract_beads( + # zyx_data=deskewed_data, + # points=deskewed_peaks, + # scale=scale, ## Looks like there was a bug with the scale here, patch size may need retuning + # ) + if view: viewer2 = napari.Viewer() - viewer2.add_image(averaged_deskewed_data) + viewer2.add_image(deskewed_data) viewer2.add_points( deskewed_peaks, name='peaks local max', size=12, symbol='ring', edge_color='yellow' ) - # deskewed_beads, deskewed_offsets = extract_beads( - # zyx_data=averaged_deskewed_data, - # points=deskewed_peaks, - # scale=scale, ## Looks like a bug - # ) - # Save to zarr store transform = TransformationMeta( type="scale", - scale=2 * (1,) + voxel_size, + scale=2 * (1,) + deskewed_voxel_size, ) with open_ome_zarr( @@ -319,7 +282,7 @@ def check_acquisition_directory(root_dir: Path, acq_name: str, suffix='', idx=1) pos = output_dataset.create_position('0', '0', '0') pos.create_image( name="0", - data=averaged_deskewed_data[None, None, ...], + data=deskewed_data[None, None, ...], chunks=(1, 1, 50) + deskewed_shape[1:], # may be bigger than 500 MB transform=[transform], ) diff --git a/mantis/analysis/deskew.py b/mantis/analysis/deskew.py index ad3398b3..30d43736 100644 --- a/mantis/analysis/deskew.py +++ b/mantis/analysis/deskew.py @@ -187,7 +187,7 @@ def deskew_data( keep_overhang: bool, average_n_slices: int = 1, device='cpu', -): +) -> 
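
Splitting the raw volume along its last axis and deskewing chunk-by-chunk bounds the GPU memory footprint; the per-chunk results come back flipped along deskewed Y, so they are stitched in reverse order along axis -2, matching the "concatenate arrays in reverse order" comment above. Schematically (with a pass-through stand-in for the real deskew call):

import numpy as np

def deskew_chunk(chunk):
    return chunk  # stand-in for mantis' deskew_data(chunk, **deskew_settings)

zyx_data = np.zeros((128, 256, 1024), dtype=np.float32)

num_chunks = 4  # tune so one chunk fits in GPU memory
chunks = np.split(zyx_data, num_chunks, axis=-1)  # split along raw X
deskewed_chunks = [deskew_chunk(c) for c in chunks]

# Re-assemble in reverse along deskewed Y (axis -2).
deskewed = np.concatenate(deskewed_chunks[::-1], axis=-2)
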
np.ndarray: """Deskews fluorescence data from the mantis microscope Parameters ---------- From 2c2bf89a5547c3ecda2beb53bca565599be2ec33 Mon Sep 17 00:00:00 2001 From: Ivan Ivanov Date: Thu, 1 Aug 2024 19:20:08 -0400 Subject: [PATCH 42/57] bugfix --- mantis/cli/characterize.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/mantis/cli/characterize.py b/mantis/cli/characterize.py index 24217be7..c76d7f79 100644 --- a/mantis/cli/characterize.py +++ b/mantis/cli/characterize.py @@ -85,7 +85,7 @@ def characterize_peaks( @input_position_dirpaths() @config_filepath() @output_dirpath() -def characterize_cli( +def characterize( input_position_dirpaths: List[str], config_filepath: str, output_dirpath: str, From 7058227d19cce13deed35624a5f5c768193978ae Mon Sep 17 00:00:00 2001 From: Talon Chandler Date: Mon, 5 Aug 2024 13:52:13 -0700 Subject: [PATCH 43/57] delete unused scripts --- mantis/analysis/scripts/fit_psf_to_beads.py | 222 -------------------- mantis/analysis/scripts/simple_psf.py | 117 ----------- mantis/analysis/scripts/simulate_psf.py | 136 ------------ 3 files changed, 475 deletions(-) delete mode 100644 mantis/analysis/scripts/fit_psf_to_beads.py delete mode 100644 mantis/analysis/scripts/simple_psf.py delete mode 100644 mantis/analysis/scripts/simulate_psf.py diff --git a/mantis/analysis/scripts/fit_psf_to_beads.py b/mantis/analysis/scripts/fit_psf_to_beads.py deleted file mode 100644 index 4466c9bf..00000000 --- a/mantis/analysis/scripts/fit_psf_to_beads.py +++ /dev/null @@ -1,222 +0,0 @@ -# %% -import gc -import time - -import napari -import numpy as np -import torch - -from iohub import read_micromanager -from waveorder import optics -from waveorder.models.isotropic_fluorescent_thick_3d import apply_inverse_transfer_function - -from mantis.analysis.analyze_psf import detect_peaks, extract_beads -from mantis.analysis.deskew import _deskew_matrix -from mantis.analysis.scripts.simulate_psf import _apply_centered_affine - -# %% Load beads (from ndtiff for now) -data_dir = ( - "/hpc/instruments/cm.mantis/2024_04_23_mantis_alignment/2024_05_05_LS_Oryx_LS_illum_8/" -) -input_dataset = read_micromanager(data_dir, data_type="ndtiff") -stc_data = input_dataset.get_array(position="0")[0, 0] - -# manual...pull from zarr later -s_step = 5 / 35 / 1.4 -tc_size = 3.45 / 40 / 1.4 -stc_scale = (s_step, tc_size, tc_size) - - -# %% Detect peaks and find an "average PSF" -ls_bead_detection_settings = { - "block_size": (64, 64, 32), - "blur_kernel_size": 3, - "nms_distance": 32, - "min_distance": 50, - "threshold_abs": 200.0, - "max_num_peaks": 2000, - "exclude_border": (5, 10, 5), - "device": "cuda" if torch.cuda.is_available() else "cpu", -} - -t1 = time.time() -peaks = detect_peaks( - stc_data, - **ls_bead_detection_settings, - verbose=True, -) -gc.collect() -torch.cuda.empty_cache() -t2 = time.time() -print(f'Time to detect peaks: {t2-t1}') - -# %% Extract beads -beads, offsets = extract_beads( - zyx_data=stc_data, - points=peaks, - scale=stc_scale, -) -stc_shape = beads[0].shape - -# Filter PSFs with different shapes -filtered_beads = [x for x in beads if x.shape == stc_shape] -bzyx_data = np.stack(filtered_beads) -normalized_bzyx_data = bzyx_data / np.max(bzyx_data, axis=(-3, -2, -1))[:, None, None, None] -average_psf = np.mean(normalized_bzyx_data, axis=0) - -# %% View PSFs -v = napari.Viewer() -v.add_image(normalized_bzyx_data) -v.add_image(average_psf) - - -# %% Generate simulated PSF library -def calculate_transfer_function( - zyx_shape, - yx_pixel_size, - z_pixel_size, - 
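
The prototype deleted above distills to one reusable recipe for building an experimental PSF: keep only bead patches with the majority shape (patches clipped at the border come out smaller), normalize each to unit peak so bright beads do not dominate, then average. Restated compactly (synthetic patches stand in for `extract_beads` output):

import numpy as np

# Hypothetical bead patches, e.g. the output of extract_beads().
beads = [np.random.rand(16, 16, 16) for _ in range(50)]

# Drop patches whose shape differs from the first (e.g. clipped at borders).
filtered = [b for b in beads if b.shape == beads[0].shape]
bzyx_data = np.stack(filtered)

# Peak-normalize each bead, then average into a single PSF estimate.
bzyx_data = bzyx_data / bzyx_data.max(axis=(-3, -2, -1), keepdims=True)
average_psf = bzyx_data.mean(axis=0)
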
wavelength_emission, - z_padding, - index_of_refraction_media, - numerical_aperture_detection, - coma_strength, -): - # Modified from waveorder - fy = torch.fft.fftfreq(zyx_shape[1], yx_pixel_size) - fx = torch.fft.fftfreq(zyx_shape[2], yx_pixel_size) - fyy, fxx = torch.meshgrid(fy, fx, indexing="ij") - radial_frequencies = torch.sqrt(fyy**2 + fxx**2) - - z_total = zyx_shape[0] + 2 * z_padding - z_position_list = torch.fft.ifftshift( - (torch.arange(z_total) - z_total // 2) * z_pixel_size - ) - - # Custom pupil - det_pupil = torch.zeros(radial_frequencies.shape, dtype=torch.complex64) - cutoff = numerical_aperture_detection / wavelength_emission - det_pupil[radial_frequencies < cutoff] = 1 - # det_pupil[((fxx) ** 2 + (fy)**2) ** 0.5 > cutoff] = 0 # add cutoff lune here - det_pupil *= np.exp( - coma_strength - * 1j - * ((3 * (radial_frequencies / cutoff) ** 3) - (2 * (radial_frequencies / cutoff))) - * torch.div(fxx + 1e-15, radial_frequencies + 1e-15) - ) # coma - - # v.add_image(torch.real(det_pupil).numpy()) - # v.add_image(torch.imag(det_pupil).numpy()) - - propagation_kernel = optics.generate_propagation_kernel( - radial_frequencies, - det_pupil, - wavelength_emission / index_of_refraction_media, - z_position_list, - ) - - point_spread_function = torch.abs(torch.fft.ifft2(propagation_kernel, dim=(1, 2))) ** 2 - optical_transfer_function = torch.fft.fftn(point_spread_function, dim=(0, 1, 2)) - optical_transfer_function /= torch.max(torch.abs(optical_transfer_function)) # normalize - - return optical_transfer_function - - -def generate_psf(numerical_aperture_detection, ls_angle_deg, coma_strength): - # detection parameters - wavelength_emission = 0.550 # um - index_of_refraction_media = 1.404 - - # internal simulation parameters - px_to_scan_ratio = stc_scale[1] / stc_scale[0] - ct = np.cos(ls_angle_deg * np.pi / 180) - st = np.sin(ls_angle_deg * np.pi / 180) - deskew_matrix = _deskew_matrix(px_to_scan_ratio, ct) - skew_matrix = np.linalg.inv(deskew_matrix) - - zyx_scale = np.array([st * stc_scale[1], stc_scale[1], stc_scale[1]]) - detection_otf_zyx = calculate_transfer_function( - stc_shape, - zyx_scale[1], - zyx_scale[0], - wavelength_emission, - 0, - index_of_refraction_media, - numerical_aperture_detection, - coma_strength, - ) - - detection_psf_zyx = np.array( - torch.real(torch.fft.ifftshift(torch.fft.ifftn(detection_otf_zyx, dim=(0, 1, 2)))) - ) - - simulated_psf = _apply_centered_affine(detection_psf_zyx, skew_matrix) - simulated_psf /= np.max(simulated_psf) - return simulated_psf, zyx_scale, deskew_matrix - - -# Define grid search -na_det_list = np.array([0.95, 1.05, 1.15, 1.25, 1.35]) -ls_angle_deg_list = np.array([30]) -coma_strength_list = np.array([-0.2, -0.1, 0, 0.1, 0.2]) -params = np.stack( - np.meshgrid(na_det_list, ls_angle_deg_list, coma_strength_list, indexing="ij"), axis=-1 -) - -pzyx_array = np.zeros(params.shape[:-1] + stc_shape) -pzyx_deskewed_array = np.zeros(params.shape[:-1] + stc_shape) - -for i in np.ndindex(params.shape[:-1]): - print(f"Simulating PSF with params: {params[i]}") - pzyx_array[i], zyx_scale, deskew_matrix = generate_psf(*params[i]) - pzyx_deskewed_array[i] = _apply_centered_affine(pzyx_array[i], deskew_matrix) - -print("Visualizing") -v = napari.Viewer() -v.add_image(average_psf, scale=stc_scale) -v.add_image(pzyx_array, scale=stc_scale) - -v.dims.axis_labels = ["NA", "", "COMA", "Z", "Y", "X"] - -# v.add_image(_apply_centered_affine(average_psf, deskew_matrix), scale=zyx_scale) -# v.add_image(pzyx_deskewed_array, scale=zyx_scale) - -# Optimize 
match -diff = np.sum((pzyx_array - average_psf) ** 2, axis=(-3, -2, -1)) -min_idx = np.unravel_index(np.argmin(diff), diff.shape) -print(min_idx) -print(params[min_idx]) - - -# %% Crop data for prototyping deconvolution -stc_data = stc_data[:200, :200, :500] - -# %% - -# Simple background subtraction and normalization -average_psf -= np.min(average_psf) -average_psf /= np.max(average_psf) - -# %% -zyx_padding = np.array(stc_data.shape) - np.array(average_psf.shape) -pad_width = [(x // 2, x // 2) if x % 2 == 0 else (x // 2, x // 2 + 1) for x in zyx_padding] -padded_average_psf = np.pad( - average_psf, pad_width=pad_width, mode="constant", constant_values=0 -) -transfer_function = np.abs(np.fft.fftn(padded_average_psf)) -transfer_function /= np.max(transfer_function) -print(transfer_function.shape) - -# %% - -# %% -stc_data_deconvolved = apply_inverse_transfer_function( - torch.tensor(stc_data), torch.tensor(transfer_function), 0, regularization_strength=1e-3 -) - -v = napari.Viewer() -v.add_image(padded_average_psf) -v.add_image(stc_data) -v.add_image(stc_data_deconvolved.numpy()) - - -# %% diff --git a/mantis/analysis/scripts/simple_psf.py b/mantis/analysis/scripts/simple_psf.py deleted file mode 100644 index 0701ae38..00000000 --- a/mantis/analysis/scripts/simple_psf.py +++ /dev/null @@ -1,117 +0,0 @@ -# %% -import warnings - -import napari -import numpy as np -import torch - -from waveorder import optics - -from mantis.analysis.analyze_psf import analyze_psf, detect_peaks, extract_beads - - -# %% Generate simulated PSF library -def calculate_transfer_function( - zyx_shape, - yx_pixel_size, - z_pixel_size, - wavelength_emission, - z_padding, - index_of_refraction_media, - numerical_aperture_detection, -): - # Modified from waveorder - fy = torch.fft.fftfreq(zyx_shape[1], yx_pixel_size) - fx = torch.fft.fftfreq(zyx_shape[2], yx_pixel_size) - fyy, fxx = torch.meshgrid(fy, fx, indexing="ij") - radial_frequencies = torch.sqrt(fyy**2 + fxx**2) - - z_total = zyx_shape[0] + 2 * z_padding - z_position_list = torch.fft.ifftshift( - (torch.arange(z_total) - z_total // 2) * z_pixel_size - ) - - # Custom pupil - det_pupil = torch.zeros(radial_frequencies.shape, dtype=torch.complex64) - cutoff = numerical_aperture_detection / wavelength_emission - det_pupil[radial_frequencies < cutoff] = 1 - - propagation_kernel = optics.generate_propagation_kernel( - radial_frequencies, - det_pupil, - wavelength_emission / index_of_refraction_media, - z_position_list, - ) - - point_spread_function = torch.abs(torch.fft.ifft2(propagation_kernel, dim=(1, 2))) ** 2 - optical_transfer_function = torch.fft.fftn(point_spread_function, dim=(0, 1, 2)) - optical_transfer_function /= torch.max(torch.abs(optical_transfer_function)) # normalize - - return optical_transfer_function - - -def generate_psf(numerical_aperture_detection, zyx_shape, zyx_scale): - # detection parameters - wavelength_emission = 0.550 # um - index_of_refraction_media = 1.404 - - # internal simulation parameters - detection_otf_zyx = calculate_transfer_function( - zyx_shape, - zyx_scale[1], - zyx_scale[0], - wavelength_emission, - 0, - index_of_refraction_media, - numerical_aperture_detection, - ) - - simulated_psf = np.array( - torch.real(torch.fft.ifftshift(torch.fft.ifftn(detection_otf_zyx, dim=(0, 1, 2)))) - ) - simulated_psf *= 1e7 - return simulated_psf - - -numerical_apertures = [0.9, 1.1, 1.35] -zyx_shape = np.array([151, 151, 151]) -zyx_scale = (0.1, 0.0616, 0.0616) - -v = napari.Viewer() -for numerical_aperture in numerical_apertures: - 
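
The parameter search above ends with an easy-to-fumble NumPy idiom: `np.argmin` over a multi-dimensional residual returns a *flat* index, and `np.unravel_index` converts it back into grid coordinates that can index the parameter array. In isolation (toy residual):

import numpy as np

# Residual between each simulated PSF and the measured average PSF,
# summed over the spatial axes; shape matches the parameter grid.
diff = np.random.rand(5, 1, 5)  # e.g. (n_NA, n_angle, n_coma)

flat_idx = np.argmin(diff)                         # index into diff.ravel()
grid_idx = np.unravel_index(flat_idx, diff.shape)  # back to grid coordinates
print(grid_idx)                                    # e.g. (3, 0, 1) -> best NA/angle/coma
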
print(f"Generating NA={numerical_aperture}") - - zyx_data = generate_psf(numerical_aperture, zyx_shape, zyx_scale) - v.add_image(zyx_data, name=f"{numerical_aperture}", scale=zyx_scale) - - epi_bead_detection_settings = { - "block_size": (8, 8, 8), - "blur_kernel_size": 3, - "min_distance": 0, - "threshold_abs": 100.0, - "max_num_peaks": 1, - "exclude_border": (0, 0, 0), - "device": "cuda" if torch.cuda.is_available() else "cpu", - } - - peaks = detect_peaks(zyx_data, **epi_bead_detection_settings, verbose=True) - - beads, offsets = extract_beads( - zyx_data=zyx_data, - points=peaks, - scale=zyx_scale, - ) - - print(f"Fitting NA={numerical_aperture}") - with warnings.catch_warnings(): - warnings.simplefilter("ignore") - df_gaussian_fit, df_1d_peak_width = analyze_psf( - zyx_patches=beads, - bead_offsets=offsets, - scale=zyx_scale, - ) - - print(df_gaussian_fit) - print(df_1d_peak_width) - -# %% diff --git a/mantis/analysis/scripts/simulate_psf.py b/mantis/analysis/scripts/simulate_psf.py deleted file mode 100644 index f17d89a9..00000000 --- a/mantis/analysis/scripts/simulate_psf.py +++ /dev/null @@ -1,136 +0,0 @@ -# Variable abbreviations -# stc = scan, tilt, coverslip --- raw data coordinates -# otf = optical transfer function -# psf = point spread function - -import napari -import numpy as np -import scipy -import torch - -from waveorder.models.isotropic_fluorescent_thick_3d import calculate_transfer_function - -from mantis.analysis.deskew import _deskew_matrix - - -def _apply_centered_affine(zyx_array, M): - """Applies a translation-free affine transformation to a 3D array while - maintaining the center coordinate at (zyx_array.shape // 2) - - For mantis - useful for moving PSFs between skewed and deskewed spaces. - - Parameters - ---------- - zyx_array : NDArray with ndim == 3 - 3D input array - M : NDArray with shape = (3, 3) - 3x3 transformation matrix, the translation-free part of an affine matrix - Can model reflection, scaling, rotation, and shear. 
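
The centering logic this docstring describes follows directly from `scipy.ndimage.affine_transform` semantics: output voxel `o` samples the input at `M @ o + offset`. Requiring the center `c = shape // 2` to map to itself gives `c = M @ c + offset`, i.e. `offset = (I - M) @ c`, which is exactly the expression used in the function body. A quick numerical check (toy transform):

import numpy as np

M = np.diag([2.0, 1.0, 0.5])   # toy translation-free transform
c = np.array([16, 16, 16])     # center voxel, zyx_array.shape // 2

offset = (np.eye(3) - M) @ c

# The center maps to itself under o -> M @ o + offset.
assert np.allclose(M @ c + offset, c)
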
- - Returns - ------- - NDArray with ndim == 3 - transformed matrix with shape matched to input - """ - - # keep (zyx_array.shape // 2) centered - offset = np.dot(np.eye(3) - M, np.array(zyx_array.shape) // 2) - - return scipy.ndimage.affine_transform( - zyx_array, - M, - offset=offset, - output_shape=zyx_array.shape, - order=1, - cval=0, - ) - - -v = napari.Viewer() - -# ---- psf input parameters (to be refactored) - -# sampling parameters -psf_stc_shape = 3 * (30,) -psf_stc_scale = 3 * (0.116,) # um -supersample_factor = 5 - -# illumination and detection parameters -ls_angle_deg = 30 - -# illumination parameters -ls_scan_waist_fwhm = 1.0 # um - -# detection parameters -wavelength_emission = 0.550 # um -numerical_aperture_detection = 1.35 -index_of_refraction_media = 1.404 - -# ---- - -# internal simulation parameters -px_to_scan_ratio = psf_stc_scale[1] / psf_stc_scale[0] -ct = np.cos(ls_angle_deg * np.pi / 180) -st = np.sin(ls_angle_deg * np.pi / 180) -deskew_matrix = _deskew_matrix(px_to_scan_ratio, ct) -skew_matrix = np.linalg.inv(deskew_matrix) - -psf_stc_ss_shape = np.array(psf_stc_shape) * supersample_factor -psf_stc_ss_scale = np.array(psf_stc_scale) / supersample_factor -psf_zyx_ss_scale = np.array( - [st * psf_stc_ss_scale[0], psf_stc_ss_scale[1], psf_stc_ss_scale[2]] -) - -# calculate illumination psf -ls_scan_waist_std = ls_scan_waist_fwhm / (2 * np.sqrt(2 * np.log(2))) -scan_positions = psf_stc_ss_scale[0] * ( - np.arange(psf_stc_ss_shape[0]) - (psf_stc_ss_shape[0] / 2) -) -illumination_psf_scan = np.exp(-(scan_positions**2) / (2 * ls_scan_waist_std**2)) - -# calculate detection psf in zyx coordinates using waveorder -detection_otf_zyx = calculate_transfer_function( - psf_stc_ss_shape, - psf_zyx_ss_scale[1], - psf_zyx_ss_scale[0], - wavelength_emission, - 0, - index_of_refraction_media, - numerical_aperture_detection, -) - -detection_psf_zyx = np.array( - torch.real(torch.fft.ifftshift(torch.fft.ifftn(detection_otf_zyx, dim=(0, 1, 2)))) -) - -detection_psf_stc = _apply_centered_affine(detection_psf_zyx, skew_matrix) -psf_stc = np.einsum('i,ijk->ijk', illumination_psf_scan, detection_psf_stc) - -# this dense illumination_psf is not necessary, but it's useful for debugging -illumination_psf_stc = np.einsum( - 'i,ijk->ijk', illumination_psf_scan, np.ones_like(detection_psf_stc) -) - -# prepare viewer -v.scale_bar.visible = True -v.scale_bar.unit = "um" - -v.add_image(illumination_psf_stc, name="raw illumination", scale=psf_stc_ss_scale) -v.add_image(detection_psf_stc, name="raw detection", scale=psf_stc_ss_scale) -v.add_image(psf_stc, name="raw total", scale=psf_stc_ss_scale) - -v.add_image( - _apply_centered_affine(illumination_psf_stc, deskew_matrix), - name="deskewed illumination", - scale=psf_stc_ss_scale, -) -v.add_image( - _apply_centered_affine(detection_psf_stc, deskew_matrix), - name="deskewed detection", - scale=psf_stc_ss_scale, -) -v.add_image( - _apply_centered_affine(psf_stc, deskew_matrix), - name="deskewed total", - scale=psf_stc_ss_scale, -) From 01e8ddbcbbb436a2570f8eb1324c3fb23dfd10b5 Mon Sep 17 00:00:00 2001 From: Talon Chandler Date: Mon, 5 Aug 2024 14:04:58 -0700 Subject: [PATCH 44/57] remove unused _deskew_matrix --- mantis/analysis/deskew.py | 27 --------------------------- 1 file changed, 27 deletions(-) diff --git a/mantis/analysis/deskew.py b/mantis/analysis/deskew.py index 30d43736..cbea4727 100644 --- a/mantis/analysis/deskew.py +++ b/mantis/analysis/deskew.py @@ -4,33 +4,6 @@ from monai.transforms.spatial.array import Affine -def 
_deskew_matrix(px_to_scan_ratio, ct): - """3x3 deskew matrix, relating sampling coordinates to deskewed coordinates - - Parameters - ---------- - px_to_scan_ratio : float - Ratio of the pixel size to light sheet scan step - ct : float - cos(theta), where theta is the light-sheet tilt angle - - Returns - ------- - 3x3 array - """ - return np.array( - [ - [ - -px_to_scan_ratio * ct, - 0, - px_to_scan_ratio, - ], - [-1, 0, 0], - [0, -1, 0], - ] - ) - - def _average_n_slices(data, average_window_width=1): """Average an array over its first axis From 8ccfe65b57ab9a9000a582fc173913dbadd3b443 Mon Sep 17 00:00:00 2001 From: Talon Chandler Date: Mon, 5 Aug 2024 14:27:09 -0700 Subject: [PATCH 45/57] test for incorrectlyt shifted deskew --- mantis/tests/test_analysis/test_deskew.py | 1 + 1 file changed, 1 insertion(+) diff --git a/mantis/tests/test_analysis/test_deskew.py b/mantis/tests/test_analysis/test_deskew.py index 28997685..a50b5c1c 100644 --- a/mantis/tests/test_analysis/test_deskew.py +++ b/mantis/tests/test_analysis/test_deskew.py @@ -36,6 +36,7 @@ def test_deskew_data(): raw_data, ls_angle_deg, px_to_scan_ratio, keep_overhang, average_n_slices ) assert deskewed_data.shape[1] == 4 + assert deskewed_data[0, 0, 0] != 1 # indicates incorrect shifting assert ( deskewed_data.shape From 40ec608a6b3d5509329802681e231322134f8a94 Mon Sep 17 00:00:00 2001 From: Talon Chandler Date: Mon, 5 Aug 2024 14:41:40 -0700 Subject: [PATCH 46/57] correct typo in test --- mantis/tests/test_analysis/test_deskew.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/mantis/tests/test_analysis/test_deskew.py b/mantis/tests/test_analysis/test_deskew.py index a50b5c1c..6d588374 100644 --- a/mantis/tests/test_analysis/test_deskew.py +++ b/mantis/tests/test_analysis/test_deskew.py @@ -36,7 +36,7 @@ def test_deskew_data(): raw_data, ls_angle_deg, px_to_scan_ratio, keep_overhang, average_n_slices ) assert deskewed_data.shape[1] == 4 - assert deskewed_data[0, 0, 0] != 1 # indicates incorrect shifting + assert deskewed_data[0, 0, 0] != 0 # indicates incorrect shifting assert ( deskewed_data.shape From b2010d766cf2706566e315bad35be5dfdfe9bc89 Mon Sep 17 00:00:00 2001 From: Talon Chandler Date: Mon, 5 Aug 2024 17:04:58 -0700 Subject: [PATCH 47/57] fix deskew regression --- mantis/analysis/deskew.py | 18 +++++++----------- 1 file changed, 7 insertions(+), 11 deletions(-) diff --git a/mantis/analysis/deskew.py b/mantis/analysis/deskew.py index cbea4727..6d35391c 100644 --- a/mantis/analysis/deskew.py +++ b/mantis/analysis/deskew.py @@ -56,14 +56,13 @@ def _get_averaged_shape(deskewed_data_shape: tuple, average_window_width: int) - def _get_transform_matrix( - data_shape: tuple, ls_angle_deg: float, px_to_scan_ratio: float, keep_overhang: bool + ls_angle_deg: float, px_to_scan_ratio: float ): """ Compute affine transformation matrix used to deskew data. Parameters ---------- - data_shape : tuple ls_angle_deg : float px_to_scan_ratio : float keep_overhang : bool @@ -73,12 +72,7 @@ def _get_transform_matrix( matrix : np.array Affine transformation matrix. 
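
One constant from the simulation script deleted above is worth recording: the light-sheet waist is specified as a FWHM and converted to a Gaussian standard deviation via FWHM = 2*sqrt(2 ln 2) * sigma (about 2.355 * sigma), which comes from solving exp(-x^2 / 2 sigma^2) = 1/2. As a sketch:

import numpy as np

fwhm = 1.0  # light-sheet scan waist FWHM, in um
sigma = fwhm / (2 * np.sqrt(2 * np.log(2)))  # ~= 0.4247 * fwhm

# Sanity check: the Gaussian falls to half its peak at +/- FWHM / 2.
assert np.isclose(np.exp(-((fwhm / 2) ** 2) / (2 * sigma**2)), 0.5)
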
""" - Z, Y, X = data_shape - ct = np.cos(ls_angle_deg * np.pi / 180) - Z_shift = 0 - if not keep_overhang: - Z_shift = int(np.floor(Y * ct * px_to_scan_ratio)) matrix = np.array( [ @@ -86,10 +80,12 @@ def _get_transform_matrix( -px_to_scan_ratio * ct, 0, px_to_scan_ratio, - Z_shift, + 0, ], - [-1, 0, 0, Y - 1], - [0, -1, 0, X - 1], + [-1, 0, 0, 0], + [0, -1, 0, 0], + [0, 0, 0, 1], + ] ) @@ -192,7 +188,7 @@ def deskew_data( """ # Prepare transforms matrix = _get_transform_matrix( - raw_data.shape, ls_angle_deg, px_to_scan_ratio, keep_overhang + ls_angle_deg, px_to_scan_ratio, ) output_shape, _ = get_deskewed_data_shape( From df51f2ca33b50cdb02063515eb4be19c7fdc3010 Mon Sep 17 00:00:00 2001 From: Talon Chandler Date: Mon, 5 Aug 2024 17:19:46 -0700 Subject: [PATCH 48/57] style --- mantis/analysis/deskew.py | 8 +++----- 1 file changed, 3 insertions(+), 5 deletions(-) diff --git a/mantis/analysis/deskew.py b/mantis/analysis/deskew.py index 6d35391c..e4e6494e 100644 --- a/mantis/analysis/deskew.py +++ b/mantis/analysis/deskew.py @@ -55,9 +55,7 @@ def _get_averaged_shape(deskewed_data_shape: tuple, average_window_width: int) - return averaged_shape -def _get_transform_matrix( - ls_angle_deg: float, px_to_scan_ratio: float -): +def _get_transform_matrix(ls_angle_deg: float, px_to_scan_ratio: float): """ Compute affine transformation matrix used to deskew data. @@ -85,7 +83,6 @@ def _get_transform_matrix( [-1, 0, 0, 0], [0, -1, 0, 0], [0, 0, 0, 1], - ] ) @@ -188,7 +185,8 @@ def deskew_data( """ # Prepare transforms matrix = _get_transform_matrix( - ls_angle_deg, px_to_scan_ratio, + ls_angle_deg, + px_to_scan_ratio, ) output_shape, _ = get_deskewed_data_shape( From 3b27fde75f7f0007f9213c135ba6a03db0cf3241 Mon Sep 17 00:00:00 2001 From: Ivan Ivanov Date: Wed, 7 Aug 2024 13:53:48 -0700 Subject: [PATCH 49/57] extend O3 scan range --- mantis/acquisition/acq_engine.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/mantis/acquisition/acq_engine.py b/mantis/acquisition/acq_engine.py index 822fda24..ad9a53d0 100644 --- a/mantis/acquisition/acq_engine.py +++ b/mantis/acquisition/acq_engine.py @@ -807,8 +807,8 @@ def refocus_ls_path(self): # Define relative travel limits, in steps o3_z_stage = self.ls_acq.o3_stage target_z_position = o3_z_stage.true_position + o3_z_range - max_z_position = 500 # O3 is allowed to travel ~10 um towards O2 - min_z_position = -1000 # O3 is allowed to travel ~20 um away from O2 + max_z_position = 750 # O3 is allowed to travel ~15 um towards O2 + min_z_position = -1500 # O3 is allowed to travel ~30 um away from O2 if np.any(target_z_position > max_z_position) or np.any( target_z_position < min_z_position ): From 6126d2cbe09bd82b0d720134aefb4a54494d04e0 Mon Sep 17 00:00:00 2001 From: Ivan Ivanov Date: Wed, 7 Aug 2024 14:02:57 -0700 Subject: [PATCH 50/57] Update measure_psf.py --- mantis/acquisition/scripts/measure_psf.py | 41 ++++++++++++----------- 1 file changed, 21 insertions(+), 20 deletions(-) diff --git a/mantis/acquisition/scripts/measure_psf.py b/mantis/acquisition/scripts/measure_psf.py index 8f40efb0..79f75051 100644 --- a/mantis/acquisition/scripts/measure_psf.py +++ b/mantis/acquisition/scripts/measure_psf.py @@ -64,21 +64,22 @@ def check_acquisition_directory(root_dir: Path, acq_name: str, suffix='', idx=1) # mmc.set_property('Oryx2', 'Line Source', 'ExposureActive') # %% -data_dir = Path(r'E:\2024_05_10_A594_CAAX_DRAQ5') -date = '2024_05_07' +data_dir = Path(r'E:\2024_08_06_A549_TOMM20_SEC61') +date = '2024_08_06' # dataset = 
f'{date}_RR_Straight_O3_scan' -# dataset = f'{date}_epi_O1_benchmark' +dataset = f'{date}_epi_O1_benchmark' # dataset = f'{date}_LS_Oryx_epi_illum' # dataset = f'{date}_LS_Oryx_LS_illum' -dataset = f'{date}_LS_benchmark' +# dataset = f'{date}_LS_benchmark' # epi settings -# z_stage = 'PiezoStage:Q:35' -# z_step = 0.2 # in um -# z_range = (-2, 50) # in um -# pixel_size = 2 * 3.45 / 100 # in um -# # pixel_size = 3.45 / 55.7 # in um -# axis_labels = ("Z", "Y", "X") +z_stage = 'PiezoStage:Q:35' +z_step = 0.2 # in um +z_range = (-2, 50) # in um +pixel_size = 2 * 3.45 / 100 # in um +# pixel_size = 3.45 / 55.7 # in um +axis_labels = ("Z", "Y", "X") +step_per_um = None # ls settings # z_stage = 'AP Galvo' @@ -88,15 +89,15 @@ def check_acquisition_directory(root_dir: Path, acq_name: str, suffix='', idx=1) # axis_labels = ("SCAN", "TILT", "COVERSLIP") # epi illumination rr detection settings -z_stage = 'AP Galvo' -# z_step = 0.205 # in um -# z_range = (-85, 85) # in um -z_step = 0.1 # in um, reduced range and smaller step size -z_range = (-31, 49) # in um -# pixel_size = 0.116 # in um -pixel_size = 6.5 / 40 / 1.4 # in um, no binning -axis_labels = ("SCAN", "TILT", "COVERSLIP") -step_per_um = None +# z_stage = 'AP Galvo' +# # z_step = 0.205 # in um +# # z_range = (-85, 85) # in um +# z_step = 0.1 # in um, reduced range and smaller step size +# z_range = (-31, 49) # in um +# # pixel_size = 0.116 # in um +# pixel_size = 6.5 / 40 / 1.4 # in um, no binning +# axis_labels = ("SCAN", "TILT", "COVERSLIP") +# step_per_um = None # ls straight settings # from mantis.acquisition.microscope_operations import setup_kim101_stage @@ -193,7 +194,7 @@ def check_acquisition_directory(root_dir: Path, acq_name: str, suffix='', idx=1) zyx_data=zyx_data, zyx_scale=scale, settings=CharacterizeSettings( - **ls_bead_detection_settings, axis_labels=axis_labels, patch_size=patch_size + **epi_bead_detection_settings, axis_labels=axis_labels, patch_size=patch_size ), output_report_path=data_dir / (dataset + '_psf_analysis'), input_dataset_path=data_dir, From 7a7da9574b67fc63e072715ffc066e8e2d326f96 Mon Sep 17 00:00:00 2001 From: Ivan Ivanov Date: Wed, 7 Aug 2024 14:48:35 -0700 Subject: [PATCH 51/57] fix overflow bug --- mantis/analysis/analyze_psf.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/mantis/analysis/analyze_psf.py b/mantis/analysis/analyze_psf.py index 192f5151..1d1641c7 100644 --- a/mantis/analysis/analyze_psf.py +++ b/mantis/analysis/analyze_psf.py @@ -501,7 +501,7 @@ def detect_peaks( """ zyx_shape = zyx_data.shape[-3:] - zyx_image = torch.from_numpy(zyx_data.astype(np.float16)[None, None]) + zyx_image = torch.from_numpy(zyx_data.astype(np.float32)[None, None]) if device != "cpu": zyx_image = zyx_image.to(device) From b99eafdfe4ab82b96f05cb58d56edc266968d04f Mon Sep 17 00:00:00 2001 From: Ivan Ivanov Date: Wed, 7 Aug 2024 14:49:48 -0700 Subject: [PATCH 52/57] Update measure_psf.py --- mantis/acquisition/scripts/measure_psf.py | 9 ++++++--- 1 file changed, 6 insertions(+), 3 deletions(-) diff --git a/mantis/acquisition/scripts/measure_psf.py b/mantis/acquisition/scripts/measure_psf.py index 79f75051..4e9c5644 100644 --- a/mantis/acquisition/scripts/measure_psf.py +++ b/mantis/acquisition/scripts/measure_psf.py @@ -4,6 +4,7 @@ import napari import numpy as np import torch +import time from iohub.ngff_meta import TransformationMeta from iohub.reader import open_ome_zarr @@ -56,6 +57,7 @@ def check_acquisition_directory(root_dir: Path, acq_name: str, suffix='', idx=1) mmc = Core() +step_per_um = 

From b99eafdfe4ab82b96f05cb58d56edc266968d04f Mon Sep 17 00:00:00 2001
From: Ivan Ivanov
Date: Wed, 7 Aug 2024 14:49:48 -0700
Subject: [PATCH 52/57] Update measure_psf.py

---
 mantis/acquisition/scripts/measure_psf.py | 9 ++++++---
 1 file changed, 6 insertions(+), 3 deletions(-)

diff --git a/mantis/acquisition/scripts/measure_psf.py b/mantis/acquisition/scripts/measure_psf.py
index 79f75051..4e9c5644 100644
--- a/mantis/acquisition/scripts/measure_psf.py
+++ b/mantis/acquisition/scripts/measure_psf.py
@@ -4,6 +4,7 @@
 import napari
 import numpy as np
 import torch
+import time

 from iohub.ngff_meta import TransformationMeta
 from iohub.reader import open_ome_zarr
@@ -56,6 +57,7 @@ def check_acquisition_directory(root_dir: Path, acq_name: str, suffix='', idx=1)

 mmc = Core()
+step_per_um = None

 # mmc.set_property('Prime BSI Express', 'ExposeOutMode', 'Rolling Shutter')
 # mmc.set_property('Oryx2', 'Line Selector', 'Line5')
@@ -65,7 +67,7 @@ def check_acquisition_directory(root_dir: Path, acq_name: str, suffix='', idx=1)
 # %%
 data_dir = Path(r'E:\2024_08_06_A549_TOMM20_SEC61')
-date = '2024_08_06'
+date = '2024_08_07'
 # dataset = f'{date}_RR_Straight_O3_scan'
 dataset = f'{date}_epi_O1_benchmark'
 # dataset = f'{date}_LS_Oryx_epi_illum'
@@ -79,7 +81,6 @@ def check_acquisition_directory(root_dir: Path, acq_name: str, suffix='', idx=1)
 pixel_size = 2 * 3.45 / 100  # in um
 # pixel_size = 3.45 / 55.7  # in um
 axis_labels = ("Z", "Y", "X")
-step_per_um = None

 # ls settings
 # z_stage = 'AP Galvo'
@@ -97,7 +98,6 @@ def check_acquisition_directory(root_dir: Path, acq_name: str, suffix='', idx=1)
 # # pixel_size = 0.116  # in um
 # pixel_size = 6.5 / 40 / 1.4  # in um, no binning
 # axis_labels = ("SCAN", "TILT", "COVERSLIP")
-# step_per_um = None

 # ls straight settings
 # from mantis.acquisition.microscope_operations import setup_kim101_stage
@@ -232,6 +232,8 @@ def check_acquisition_directory(root_dir: Path, acq_name: str, suffix='', idx=1)
         **deskew_settings,
     )

+    print('Deskewing data...')
+    t1 = time.time()
     deskewed_chunks = []
     for chunk in chunked_data:
         deskewed_chunks.append(
@@ -244,6 +246,7 @@ def check_acquisition_directory(root_dir: Path, acq_name: str, suffix='', idx=1)

     # concatenate arrays in reverse order
     deskewed_data = np.concatenate(deskewed_chunks[::-1], axis=-2)
+    print(f'Time to deskew data: {time.time() - t1}')

     # Characterize deskewed peaks
     deskewed_peaks = characterize_peaks(

From 9a7a4efd97b65ad74320060e5aeeccfd1b3ce7d1 Mon Sep 17 00:00:00 2001
From: Ivan Ivanov
Date: Wed, 7 Aug 2024 14:50:09 -0700
Subject: [PATCH 53/57] style

---
 mantis/acquisition/scripts/measure_psf.py | 3 ++-
 1 file changed, 2 insertions(+), 1 deletion(-)

diff --git a/mantis/acquisition/scripts/measure_psf.py b/mantis/acquisition/scripts/measure_psf.py
index 4e9c5644..2f953ede 100644
--- a/mantis/acquisition/scripts/measure_psf.py
+++ b/mantis/acquisition/scripts/measure_psf.py
@@ -1,10 +1,11 @@
 # %%
+import time
+
 from pathlib import Path

 import napari
 import numpy as np
 import torch
-import time

 from iohub.ngff_meta import TransformationMeta
 from iohub.reader import open_ome_zarr

From 9e7533ec9741cc9ed8317e46ccf2b3f5e0473dab Mon Sep 17 00:00:00 2001
From: Ivan Ivanov
Date: Wed, 7 Aug 2024 16:01:12 -0700
Subject: [PATCH 54/57] cleanup

---
 mantis/acquisition/scripts/measure_psf.py | 6 ------
 1 file changed, 6 deletions(-)

diff --git a/mantis/acquisition/scripts/measure_psf.py b/mantis/acquisition/scripts/measure_psf.py
index 2f953ede..9af1b24e 100644
--- a/mantis/acquisition/scripts/measure_psf.py
+++ b/mantis/acquisition/scripts/measure_psf.py
@@ -262,12 +262,6 @@ def check_acquisition_directory(root_dir: Path, acq_name: str, suffix='', idx=1)
         input_dataset_name=dataset,
     )

-    # deskewed_beads, deskewed_offsets = extract_beads(
-    #     zyx_data=deskewed_data,
-    #     points=deskewed_peaks,
-    #     scale=scale,  ## Looks like there was a bug with the scale here, patch size may need retuning
-    # )
-
     if view:
         viewer2 = napari.Viewer()
         viewer2.add_image(deskewed_data)
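
[Editor's note: patch 52 above times a chunked deskew and stitches the chunks
back together in reverse order along Y (axis=-2). Below is a standalone sketch
of that pattern with stand-in data and a simple Y-flip standing in for
deskew_data; it assumes the reverse-order concatenation compensates for a flip
applied inside the per-chunk transform.]

import time

import numpy as np

t1 = time.time()
zyx = np.random.rand(32, 1024, 256).astype(np.float32)  # toy (Z, Y, X) stack
chunked_data = np.array_split(zyx, 4, axis=-2)  # split along Y to bound memory use

deskewed_chunks = []
for chunk in chunked_data:
    deskewed_chunks.append(chunk[:, ::-1, :])  # stand-in for deskew_data(chunk, ...)

# concatenate in reverse order, mirroring the script above
deskewed_data = np.concatenate(deskewed_chunks[::-1], axis=-2)
print(f'Time to deskew data: {time.time() - t1:.2f} s')

assert np.array_equal(deskewed_data, zyx[:, ::-1, :])  # whole-volume flip recovered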

From d4d35accdfd66028ca3d209d766865855cd00f2c Mon Sep 17 00:00:00 2001
From: Ivan Ivanov
Date: Wed, 7 Aug 2024 18:42:17 -0700
Subject: [PATCH 55/57] rename psf functions

---
 mantis/acquisition/scripts/measure_psf.py           | 6 +++---
 mantis/cli/{characterize.py => characterize_psf.py} | 6 +++---
 mantis/cli/{psf_from_beads.py => estimate_psf.py}   | 2 +-
 mantis/cli/main.py                                  | 8 ++++----
 4 files changed, 11 insertions(+), 11 deletions(-)
 rename mantis/cli/{characterize.py => characterize_psf.py} (97%)
 rename mantis/cli/{psf_from_beads.py => estimate_psf.py} (99%)

diff --git a/mantis/acquisition/scripts/measure_psf.py b/mantis/acquisition/scripts/measure_psf.py
index 9af1b24e..5584f5be 100644
--- a/mantis/acquisition/scripts/measure_psf.py
+++ b/mantis/acquisition/scripts/measure_psf.py
@@ -14,7 +14,7 @@
 from mantis.acquisition.microscope_operations import acquire_defocus_stack
 from mantis.analysis.AnalysisSettings import CharacterizeSettings
 from mantis.analysis.deskew import deskew_data, get_deskewed_data_shape
-from mantis.cli.characterize import characterize_peaks
+from mantis.cli.characterize_psf import _characterize_psf

 device = "cuda" if torch.cuda.is_available() else "cpu"
 epi_bead_detection_settings = {
@@ -191,7 +191,7 @@ def check_acquisition_directory(root_dir: Path, acq_name: str, suffix='', idx=1)

 # %% Characterize peaks

-peaks = characterize_peaks(
+peaks = _characterize_psf(
     zyx_data=zyx_data,
     zyx_scale=scale,
     settings=CharacterizeSettings(
@@ -250,7 +250,7 @@ def check_acquisition_directory(root_dir: Path, acq_name: str, suffix='', idx=1)
     print(f'Time to deskew data: {time.time() - t1}')

     # Characterize deskewed peaks
-    deskewed_peaks = characterize_peaks(
+    deskewed_peaks = _characterize_psf(
         zyx_data=deskewed_data,
         zyx_scale=deskewed_voxel_size,
         settings=CharacterizeSettings(
diff --git a/mantis/cli/characterize.py b/mantis/cli/characterize_psf.py
similarity index 97%
rename from mantis/cli/characterize.py
rename to mantis/cli/characterize_psf.py
index c76d7f79..ade508f3 100644
--- a/mantis/cli/characterize.py
+++ b/mantis/cli/characterize_psf.py
@@ -22,7 +22,7 @@
 from mantis.cli.utils import yaml_to_model


-def characterize_peaks(
+def _characterize_psf(
     zyx_data: np.ndarray,
     zyx_scale: tuple[float, float, float],
     settings: CharacterizeSettings,
@@ -85,7 +85,7 @@
 @input_position_dirpaths()
 @config_filepath()
 @output_dirpath()
-def characterize(
+def characterize_psf(
     input_position_dirpaths: List[str],
     config_filepath: str,
     output_dirpath: str,
@@ -108,6 +108,6 @@
     settings = yaml_to_model(config_filepath, CharacterizeSettings)
     dataset_name = Path(input_position_dirpaths[0])[-4]

-    _ = characterize_peaks(
+    _ = _characterize_psf(
         zyx_data, zyx_scale, settings, output_dirpath, input_position_dirpaths[0], dataset_name
     )
diff --git a/mantis/cli/psf_from_beads.py b/mantis/cli/estimate_psf.py
similarity index 99%
rename from mantis/cli/psf_from_beads.py
rename to mantis/cli/estimate_psf.py
index 746e128f..469f1be7 100644
--- a/mantis/cli/psf_from_beads.py
+++ b/mantis/cli/estimate_psf.py
@@ -21,7 +21,7 @@
 @input_position_dirpaths()
 @config_filepath()
 @output_dirpath()
-def psf_from_beads(
+def estimate_psf(
     input_position_dirpaths: List[str],
     config_filepath: str,
     output_dirpath: str,
diff --git a/mantis/cli/main.py b/mantis/cli/main.py
index 34f7bc4e..71d8991c 100644
--- a/mantis/cli/main.py
+++ b/mantis/cli/main.py
@@ -1,16 +1,16 @@
 import click

-from mantis.cli.characterize import characterize
+from mantis.cli.characterize_psf import characterize_psf
 from mantis.cli.concatenate import concatenate
 from mantis.cli.deconvolve import deconvolve
 from mantis.cli.deskew import deskew
 from mantis.cli.estimate_bleaching import estimate_bleaching
 from mantis.cli.estimate_deskew import estimate_deskew
+from mantis.cli.estimate_psf import estimate_psf
 from mantis.cli.estimate_registration import estimate_registration
 from mantis.cli.estimate_stabilization import estimate_stabilization
 from mantis.cli.estimate_stitch import estimate_stitch
 from mantis.cli.optimize_registration import optimize_registration
-from mantis.cli.psf_from_beads import psf_from_beads
 from mantis.cli.register import register
 from mantis.cli.run_acquisition import run_acquisition
 from mantis.cli.stabilize import stabilize
@@ -44,6 +44,6 @@ def cli():
 cli.add_command(concatenate)
 cli.add_command(estimate_stabilization)
 cli.add_command(stabilize)
-cli.add_command(psf_from_beads)
+cli.add_command(estimate_psf)
 cli.add_command(deconvolve)
-cli.add_command(characterize)
+cli.add_command(characterize_psf)
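
[Editor's note: the rename above separates a private _characterize_psf helper
from the click command that wraps it. A minimal sketch of that pattern follows;
the bodies are stubs, and only the names are taken from the patch.]

import click


def _characterize_psf(zyx_data, zyx_scale, settings):
    # importable worker function; the peak detection and PSF fitting live here
    ...


@click.command()
def characterize_psf():
    """Characterize the point spread function (PSF) from bead images."""
    # click (7.0+) derives the dashed command name 'characterize-psf' from
    # the function name, which is why patches 56-57 update the docstrings
    ...


cli = click.Group()
cli.add_command(characterize_psf)  # invoked as `mantis characterize-psf`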

From 2be66007018b830bf2446ea0e26e22b0a7e91887 Mon Sep 17 00:00:00 2001
From: Ivan Ivanov
Date: Mon, 12 Aug 2024 18:32:13 -0700
Subject: [PATCH 56/57] Update mantis/cli/characterize_psf.py

Co-authored-by: Talon Chandler
---
 mantis/cli/characterize_psf.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/mantis/cli/characterize_psf.py b/mantis/cli/characterize_psf.py
index ade508f3..091415d3 100644
--- a/mantis/cli/characterize_psf.py
+++ b/mantis/cli/characterize_psf.py
@@ -93,7 +93,7 @@ def characterize_psf(
     """
     Characterize the point spread function (PSF) from bead images and output an html report

-    >> mantis characterize -i ./beads.zarr/*/*/* -c ./characterize_params.yml -o ./
+    >> mantis characterize-psf -i ./beads.zarr/*/*/* -c ./characterize_params.yml -o ./
     """
     if len(input_position_dirpaths) > 1:
         warnings.warn("Only the first position will be characterized.")

From cd0a8af88be41b9f811c3907ebf8abb7a513ab2d Mon Sep 17 00:00:00 2001
From: Ivan Ivanov
Date: Mon, 12 Aug 2024 18:32:20 -0700
Subject: [PATCH 57/57] Update mantis/cli/estimate_psf.py

Co-authored-by: Talon Chandler
---
 mantis/cli/estimate_psf.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/mantis/cli/estimate_psf.py b/mantis/cli/estimate_psf.py
index 469f1be7..7c99b441 100644
--- a/mantis/cli/estimate_psf.py
+++ b/mantis/cli/estimate_psf.py
@@ -29,7 +29,7 @@ def estimate_psf(
     """
     Estimate the point spread function (PSF) from bead images

-    >> mantis psf_from_beads -i ./beads.zarr/*/*/* -c ./psf_params.yml -o ./psf.zarr
+    >> mantis estimate-psf -i ./beads.zarr/*/*/* -c ./psf_params.yml -o ./psf.zarr
     """
     # Convert string paths to Path objects
    output_dirpath = Path(output_dirpath)
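
[Editor's note: with the renames in patches 55-57 applied, the commands can be
smoke-tested without a shell using click's test runner. This sketch assumes
the cli group defined in mantis/cli/main.py above.]

from click.testing import CliRunner

from mantis.cli.main import cli

runner = CliRunner()
result = runner.invoke(cli, ['estimate-psf', '--help'])
assert 'estimate-psf' in result.output  # dashed name replaces psf_from_beads
print(result.output)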