diff --git a/WF_NTP/WF_NTP/WF_NTP_script.py b/WF_NTP/WF_NTP/WF_NTP_script.py
index 1792ab0..bde0536 100755
--- a/WF_NTP/WF_NTP/WF_NTP_script.py
+++ b/WF_NTP/WF_NTP/WF_NTP_script.py
@@ -1,5 +1,4 @@
-"""
-Copyright (C) 2019 Quentin Peter
+"""Copyright (C) 2019 Quentin Peter.
 
 This file is part of WF_NTP.
 
@@ -7,28 +6,30 @@ recieved a copy of the licence along with WF_NTP. If not, see
 https://creativecommons.org/licenses/by-nc-sa/4.0/.
 """
 
-import numpy as np
-import matplotlib.pyplot as plt
-import matplotlib.cm as cm
-from scipy import interpolate, ndimage
-import cv2
+import functools
+import json
 import os
+import pickle
+import sys
 import time
+import traceback
+import warnings
+from collections import Counter, defaultdict
+
+import cv2
 import mahotas as mh
+import matplotlib.cm as cm
+import matplotlib.path as mplPath
+import matplotlib.pyplot as plt
+import numpy as np
 import pandas as pd
-import trackpy as tp
-from skimage import measure, morphology, io
 import skimage.draw
-import pickle
-import warnings
-import matplotlib.path as mplPath
-from collections import defaultdict, Counter
-from skimage.transform import resize
-import traceback
+import trackpy as tp
+from PIL import Image
+from scipy import interpolate, ndimage
 from scipy.signal import savgol_filter
+from skimage import io, measure, morphology
+from skimage.transform import resize
 
 
 class StdoutRedirector(object):
@@ -37,7 +47,7 @@ class StdoutRedirector(object):
     def __init__(self, queue, prefix=None):
         self.queue = queue
         if not prefix:
-            prefix = ''
+            prefix = ""
         self.prefix = prefix
 
     def write(self, string):
@@ -52,20 +62,17 @@ def flush(self):
 def save_settings(settings):
     # Make output directory
     try:
-        os.mkdir(settings['save_as'])
+        os.mkdir(settings["save_as"])
     except OSError:
-        print(
-            'Warning: job folder "%s" already created, overwriting.' %
-            settings['save_as'])
+        print('Warning: job folder "%s" already created, overwriting.' % settings["save_as"])
 
-    settingsfilename = os.path.join(settings['save_as'], 'settings.json')
-    with open(settingsfilename, 'w') as f:
+    settingsfilename = os.path.join(settings["save_as"], "settings.json")
+    with open(settingsfilename, "w") as f:
        json.dump(settings, f, indent=4)
 
 
 def run_tracker(settings, stdout_queue=None):
-    """
-    Run the tracker with the given settings.
+    """Run the tracker with the given settings.
 
     stdout_queue can be used to redirect stdout.
    """
@@ -77,14 +84,14 @@
 
     # Do some adjustments
     settings = settings.copy()
-    settings["frames_to_estimate_velocity"] = min([
-        settings["frames_to_estimate_velocity"],
-        settings["min_track_length"]])
-    settings["bend_threshold"] /= 100.
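+    # Velocity estimation must not look across more frames than the shortest
+    # track that is kept; bend_threshold is rescaled from a 0-100 input.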
+ settings["frames_to_estimate_velocity"] = min( + [settings["frames_to_estimate_velocity"], settings["min_track_length"]] + ) + settings["bend_threshold"] /= 100.0 video = Video(settings, grey=True) - print('Video shape:', video[0].shape) + print("Video shape:", video[0].shape) regions = settings["regions"] try: @@ -96,15 +103,15 @@ def run_tracker(settings, stdout_queue=None): all_regions = im > 0.1 else: all_regions = np.zeros_like(video[0]) + for key, d in list(regions.items()): im = np.zeros_like(video[0]) - rr, cc = skimage.draw.polygon(np.array(d['y']), np.array(d['x'])) + rr, cc = skimage.draw.polygon(np.array(d["y"]), np.array(d["x"])) try: im[rr, cc] = 1 except IndexError: - print('Region "', key, '" cannot be applied to video', - settings["video_filename"]) - print('Input image sizes do not match.') + print('Region "', key, '" cannot be applied to video', settings["video_filename"]) + print("Input image sizes do not match.") return None, None all_regions += im all_regions = all_regions > 0.1 @@ -113,7 +120,7 @@ def run_tracker(settings, stdout_queue=None): t0 = time.time() save_folder = settings["save_as"] - ims_folder = os.path.join(save_folder, 'imgs') + ims_folder = os.path.join(save_folder, "imgs") if not os.path.exists(ims_folder): os.mkdir(ims_folder) @@ -123,18 +130,19 @@ def run_tracker(settings, stdout_queue=None): if settings["stop_after_example_output"]: return print_data, None track = form_trajectories(locations, settings) + print("this is what track looks like") + print(track) results = extract_data(track, settings) - if not check_for_worms(results["particle_dataframe"].index, - settings): - print('No worms detected. Stopping!') + if not check_for_worms(results["particle_dataframe"].index, settings): + print("No worms detected. Stopping!") return print_data, None # Output write_results_file(results, settings) - print('Done (in %.1f minutes).' % ((time.time() - t0) / 60.)) + print("Done (in %.1f minutes)." 
% ((time.time() - t0) / 60.0)) video.release() - return print_data, results['particle_dataframe'].loc[:, "bends"] + return print_data, results["particle_dataframe"].loc[:, "bends"] class Video: @@ -148,12 +156,10 @@ def __init__(self, settings, grey=False): raise RuntimeError(f"{video_filename} does not exist.") self.cap = cv2.VideoCapture(video_filename) - self.len = (self.cap.get(cv2.CAP_PROP_FRAME_COUNT) - - settings["start_frame"]) + self.len = self.cap.get(cv2.CAP_PROP_FRAME_COUNT) - settings["start_frame"] self.start_frame = settings["start_frame"] limit_images_to = settings["limit_images_to"] - if (limit_images_to and limit_images_to < ( - self.len - self.start_frame)): + if limit_images_to and limit_images_to < (self.len - self.start_frame): self.len = limit_images_to self.grey = grey if grey: @@ -202,6 +208,8 @@ def release(self): def track_all_locations(video, settings, stdout_queue): """Track and get all locations.""" + + def get_Z_brightness(zi): if settings["keep_paralyzed_method"]: return find_Z_with_paralyzed(video, settings, *zi) @@ -209,19 +217,18 @@ def get_Z_brightness(zi): return find_Z(video, settings, *zi) apply_indeces = list( - map(int, list(np.linspace(0, len(video), - len(video) // settings["use_images"] + 2)))) + map(int, list(np.linspace(0, len(video), len(video) // settings["use_images"] + 2))) + ) apply_indeces = list(zip(apply_indeces[:-1], apply_indeces[1:])) - Z_indeces = [(max([0, i - settings["use_around"]]), - min(j + settings["use_around"], len(video))) - for i, j in apply_indeces] + Z_indeces = [ + (max([0, i - settings["use_around"]]), min(j + settings["use_around"], len(video))) + for i, j in apply_indeces + ] - # Get frames0 print material Z, mean_brightness = get_Z_brightness(Z_indeces[0]) - print_data = process_frame(settings, Z, mean_brightness, - len(video), - args=(0, video[0]), - return_plot=True) + print_data = process_frame( + settings, Z, mean_brightness, len(video), args=(0, video[0]), return_plot=True + ) if settings["stop_after_example_output"]: return print_data, None @@ -232,8 +239,7 @@ def get_Z_brightness(zi): def locate(args): i, zi = args Z, mean_brightness = get_Z_brightness(zi) - return process_frames(video, settings, *i, Z=Z, - mean_brightness=mean_brightness) + return process_frames(video, settings, *i, Z=Z, mean_brightness=mean_brightness) split_results = list(map(locate, args)) locations = [] @@ -242,12 +248,13 @@ def locate(args): return print_data, locations -def process_frame(settings, Z, mean_brightness, nframes, - args=None, return_plot=False): +def process_frame(settings, Z, mean_brightness, nframes, args=None, return_plot=False): """Locate worms in a given frame.""" i, frameorig = args - print(' : Locating in frame %i/%i' % (i + 1 + settings["start_frame"], - nframes + settings["start_frame"])) + print( + " : Locating in frame %i/%i" + % (i + 1 + settings["start_frame"], nframes + settings["start_frame"]) + ) if mean_brightness: frame = frameorig * mean_brightness / np.mean(frameorig) @@ -255,77 +262,94 @@ def process_frame(settings, Z, mean_brightness, nframes, frame = np.array(frameorig, dtype=np.float64) frame = np.abs(frame - Z) * settings["all_regions"] if (frame > 1.1).any(): - frame /= 255. + frame /= 255.00 - thresholded = frame > (settings["threshold"] / 255.) 
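+    # The background-subtracted frame is normalised to [0, 1], while the
+    # user-facing threshold is on the 0-255 pixel scale, hence the division.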
+    thresholded = frame > (settings["threshold"] / 255.00)
 
     opening = settings["opening"]
     closing = settings["closing"]
     save_folder = settings["save_as"]
     if opening > 0:
         frame_after_open = ndimage.binary_opening(
-            thresholded,
-            structure=np.ones((opening, opening))).astype(np.int)
+            thresholded, structure=np.ones((opening, opening))
+        ).astype(int)
     else:
         frame_after_open = thresholded
 
     if closing > 0:
         frame_after_close = ndimage.binary_closing(
-            frame_after_open,
-            structure=np.ones((closing, closing))).astype(np.int)
+            frame_after_open, structure=np.ones((closing, closing))
+        ).astype(int)
     else:
         frame_after_close = frame_after_open
 
-    labeled, _ = mh.label(frame_after_close, np.ones(
-        (3, 3), bool))
+    labeled, _ = mh.label(frame_after_close, np.ones((3, 3), bool))
     sizes = mh.labeled.labeled_size(labeled)
 
-    remove = np.where(np.logical_or(sizes < settings["min_size"],
-                      sizes > settings["max_size"]))
+    remove = np.where(np.logical_or(sizes < settings["min_size"], sizes > settings["max_size"]))
     labeled_removed = mh.labeled.remove_regions(labeled, remove)
     labeled_removed, n_left = mh.labeled.relabel(labeled_removed)
 
     props = measure.regionprops(labeled_removed)
-    prop_list = [{"area": props[j].area, "centroid":props[j].centroid,
-                  "eccentricity":props[j].eccentricity,
-                  "area_eccentricity":props[j].eccentricity,
-                  "minor_axis_length":props[j].minor_axis_length /
-                  (props[j].major_axis_length + 0.001)}
-                 for j in range(len(props))]
+    frame_width = frame_after_close.shape[1]
+    # Keep coords and frame_width with each region so the activity index
+    # can be computed per particle later on.
+    prop_list = [
+        {
+            "area": props[j].area,
+            "centroid": props[j].centroid,
+            "eccentricity": props[j].eccentricity,
+            "area_eccentricity": props[j].eccentricity,
+            "minor_axis_length": props[j].minor_axis_length / (props[j].major_axis_length + 0.001),
+            "coords": props[j].coords,
+            "frame_width": frame_width,
+        }
+        for j in range(len(props))
+    ]
+
     if settings["skeletonize"]:
         skeletonized_frame = morphology.skeletonize(frame_after_close)
-        skeletonized_frame = prune(skeletonized_frame,
-                                   settings["prune_size"])
+        skeletonized_frame = prune(skeletonized_frame, settings["prune_size"])
         skel_labeled = labeled_removed * skeletonized_frame
         if settings["do_full_prune"]:
             skel_labeled = prune_fully(skel_labeled)
 
-        skel_props = measure.regionprops(skel_labeled)
+        skel_props = measure.regionprops(skel_labeled, coordinates="xy")
         for j in range(len(skel_props)):
             prop_list[j]["length"] = skel_props[j].area
             prop_list[j]["eccentricity"] = skel_props[j].eccentricity
-            prop_list[j]["minor_axis_length"] = \
-                skel_props[j].minor_axis_length\
-                / (skel_props[j].major_axis_length + 0.001)
+            prop_list[j]["minor_axis_length"] = skel_props[j].minor_axis_length / (
+                skel_props[j].major_axis_length + 0.001
+            )
 
     if return_plot:
-        return (sizes, save_folder, frameorig, Z, frame, thresholded,
-                frame_after_open, frame_after_close, labeled, labeled_removed,
-                (skel_labeled if settings["skeletonize"] else None))
+        return (
+            sizes,
+            save_folder,
+            frameorig,
+            Z,
+            frame,
+            thresholded,
+            frame_after_open,
+            frame_after_close,
+            labeled,
+            labeled_removed,
+            (skel_labeled if settings["skeletonize"] else None),
+        )
 
     output_overlayed_images = settings["output_overlayed_images"]
     if i < output_overlayed_images or output_overlayed_images is None:
-        io.imsave(os.path.join(save_folder, "imgs", '%05d.jpg' % (i)),
-                  np.array(255 * (labeled_removed == 0), dtype=np.uint8),
-                  check_contrast=False)
+        io.imsave(
+            os.path.join(save_folder, "imgs", "%05d.jpg" % (i)),
+            np.array(255 * (labeled_removed == 0), dtype=np.uint8),
+            check_contrast=False,
+        )
 
     return prop_list
 
 
 def process_frames(video, settings, i0, i1, Z, mean_brightness):
     """Process frames from i0 to i1."""
-    func = functools.partial(
-        process_frame, settings, Z, mean_brightness, len(video))
+    func = functools.partial(process_frame, settings, Z, mean_brightness, len(video))
 
     def args():
         for i in range(i0, i1):
@@ -334,58 +358,93 @@ def args():
     return map(func, args())
 
 
+def coords_to_one_d(array, frame_width):
+    """Flatten (row, col) pixel coordinates into unique 1-D indices."""
+    one_d = []
+    for arr in array:
+        # row * frame_width + col is injective because col < frame_width.
+        one_d.append(arr[0] * frame_width + arr[1])
+    return one_d
+
+
+def activity_index(data):
+    """Compute activity indices from the pixel coverage of bend pairs.
+
+    For every second bend, the union of pixels covered during the bend
+    interval is compared with the average single-frame area: the further
+    a worm moves, the larger that union grows relative to its body area.
+    """
+    activity_indices = []
+    frame_width = data["frame_width"].iloc[0]
+    data["coords"] = data["coords"].apply(coords_to_one_d, frame_width=frame_width)
+    last_bend = data["bends"].max()
+    bend = 0
+    while bend <= last_bend:
+        if bend % 2:
+            window = data[["frame", "coords", "bends"]][data["bends"].between(bend - 1, bend)].copy()
+            union_pixels = set(window["coords"].sum())
+            total_area = len(union_pixels)
+            window["area"] = window["coords"].apply(len)
+            average_area = window["area"].sum() / len(window["area"])
+            activity_indices.append((total_area - average_area) / average_area)
+        bend += 1
+    return activity_indices
+
+
 def form_trajectories(loc, settings):
     """Form worm trajectories."""
-    print('Forming worm trajectories...', end=' ')
-    data = {'x': [], 'y': [], 'frame': [],
-            'eccentricity': [], 'area': [],
-            'minor_axis_length': [],
-            'area_eccentricity': []}
+    print("Forming worm trajectories...", end=" ")
+    data = {
+        "x": [],
+        "y": [],
+        "frame": [],
+        "eccentricity": [],
+        "area": [],
+        "minor_axis_length": [],
+        "area_eccentricity": [],
+        "frame_width": [],
+        "coords": [],
+    }
     for t, l in enumerate(loc):
-        data['x'] += [d['centroid'][0] for d in l]
-        data['y'] += [d['centroid'][1] for d in l]
-        data['eccentricity'] += [d['eccentricity'] for d in l]
-        data['area_eccentricity'] += [d['area_eccentricity'] for d in l]
-        data['minor_axis_length'] += [d['minor_axis_length'] for d in l]
-        data['area'] += [d['area'] for d in l]
-        data['frame'] += [t] * len(l)
+        data["x"] += [d["centroid"][0] for d in l]
+        data["y"] += [d["centroid"][1] for d in l]
+        data["eccentricity"] += [d["eccentricity"] for d in l]
+        data["area_eccentricity"] += [d["area_eccentricity"] for d in l]
+        data["minor_axis_length"] += [d["minor_axis_length"] for d in l]
+        data["area"] += [d["area"] for d in l]
+        data["frame"] += [t] * len(l)
+        data["frame_width"] += [d["frame_width"] for d in l]
+        data["coords"] += [d["coords"] for d in l]
     data = pd.DataFrame(data)
     try:
-        track = tp.link_df(data, search_range=settings["max_dist_move"],
-                           memory=settings["memory"])
+        track = tp.link_df(data, search_range=settings["max_dist_move"], memory=settings["memory"])
     except tp.linking.SubnetOversizeException:
         raise RuntimeError(
-            'Linking problem too complex.'
-            ' Reduce maximum move distance or memory.')
-    track = tp.filter_stubs(track, min([settings["min_track_length"],
-                                        len(loc)]))
+            "Linking problem too complex. Reduce maximum move distance or memory."
+ ) + track = tp.filter_stubs(track, min([settings["min_track_length"], len(loc)])) try: - with open(os.path.join(settings["save_as"], 'track.p'), - 'bw') as trackfile: + with open(os.path.join(settings["save_as"], "track.p"), "bw") as trackfile: pickle.dump(track, trackfile) except Exception: traceback.print_exc() - print('Warning: no track file saved. Track too long.') - print(' plot_path.py will not work on this file.') + print("Warning: no track file saved. Track too long.") + print(" plot_path.py will not work on this file.") return track def extract_data(track, settings): """Extract data from track and return a pandas DataFrame.""" - P = track['particle'] - columns_dtype = { - "bends": object - } + P = track["particle"] + columns_dtype = {"bends": object} # Use particle as index - particle_dataframe = pd.DataFrame(index=P.unique(), - columns=columns_dtype.keys()) + particle_dataframe = pd.DataFrame(index=P.unique(), columns=columns_dtype.keys()) # Set non float dtype correctly particle_dataframe = particle_dataframe.astype(columns_dtype) - T = track['frame'] - X = track['x'] - Y = track['y'] + T = track["frame"] + X = track["x"] + Y = track["y"] regions = settings["regions"] if len(regions) > 1: @@ -395,10 +454,10 @@ def extract_data(track, settings): for p in particle_dataframe.index: # Define signals t = T[P == p] - ecc = track['eccentricity'][P == p] - area_ecc = track['area_eccentricity'][P == p] + ecc = track["eccentricity"][P == p] + area_ecc = track["area_eccentricity"][P == p] # mal = track['minor_axis_length'][P == p] - area = track['area'][P == p] + area = track["area"][P == p] window_size = 7 @@ -420,16 +479,14 @@ def extract_data(track, settings): idx = area_ecc > settings["minimum_ecc"] if sum(idx) > 0: smooth_y = np.interp(x, x[idx], smooth_y[idx]) - particle_dataframe.at[p, "Round ratio"] = ( - 1.0 - float(sum(idx)) / float(len(idx))) + particle_dataframe.at[p, "Round ratio"] = 1.0 - float(sum(idx)) / float(len(idx)) else: # 0.001,0.991,0.992 are dummy variables specifically picked # to deal with coilers, see protocol. 
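+            # np.arange over [0.991, 0.992) in steps of 0.001/len(idx) yields
+            # len(idx) near-identical samples, which register as ~zero bends.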
             lengthX = 0.001 / len(idx)
             smooth_y = np.arange(0.991, 0.992, lengthX)
             np.random.shuffle(smooth_y)
-            particle_dataframe.at[p, "Round ratio"] = (
-                1.0 - float(sum(idx)) / float(len(idx)))
+            particle_dataframe.at[p, "Round ratio"] = 1.0 - float(sum(idx)) / float(len(idx))
 
         # Bends
         bend_times = extract_bends(x, smooth_y, settings)
@@ -438,9 +495,20 @@
             continue
         bl = form_bend_array(bend_times, T[P == p])
         if len(bl) > 0:
-            bl = (np.asarray(bl, float))
+            bl = np.asarray(bl, float)
         else:
-            bl = (np.array([0.0] * len(T[P == p])))
+            bl = np.array([0.0] * len(T[P == p]))
+
+        coords = track[["coords", "frame_width"]][P == p]
+        coords = coords.reset_index()
+        coords["bends"] = bl
+        activity_indices = activity_index(coords)
+
+        if len(activity_indices):
+            particle_dataframe.at[p, "activity_index"] = np.median(activity_indices)
+        else:
+            particle_dataframe.at[p, "activity_index"] = 0
 
         px_to_mm = settings["px_to_mm"]
         # Area
@@ -454,16 +522,22 @@
 
         # Velocity
         particle_dataframe.at[p, "Speed"] = extract_velocity(
-            T[P == p], X[P == p], Y[P == p], settings)
+            T[P == p], X[P == p], Y[P == p], settings
+        )
 
         # Max velocity: 90th percentile to avoid skewed results due to tracking
         # inefficiency
         particle_dataframe.at[p, "Max speed"] = extract_max_speed(
-            T[P == p], X[P == p], Y[P == p], settings)
+            T[P == p], X[P == p], Y[P == p], settings
+        )
 
         # Move per bend
         particle_dataframe.at[p, "Dist per bend"] = extract_move_per_bend(
-            bl, T[P == p], X[P == p], Y[P == p], px_to_mm)
+            bl, T[P == p], X[P == p], Y[P == p], px_to_mm
+        )
 
         particle_dataframe.at[p, "bends"] = bl
 
@@ -480,13 +554,18 @@
         with warnings.catch_warnings():
             # Ignore ptp warnings as this is a numpy bug
             warnings.simplefilter("ignore")
-            particle_dataframe.at[index, "BPM"] = (
-                last_bend / np.ptp(T[P == index]) * 60 * fps)
-            x = (settings["limit_images_to"] / fps)
+            particle_dataframe.at[index, "BPM"] = last_bend / np.ptp(T[P == index]) * 60 * fps
+            x = settings["limit_images_to"] / fps
             particle_dataframe.at[index, "bends_in_movie"] = (
-                last_bend / np.ptp(T[P == index]) * x * fps)
+                last_bend / np.ptp(T[P == index]) * x * fps
+            )
         particle_dataframe.at[index, "Appears in frames"] = len(
-            particle_dataframe.at[index, "bends"])
+            particle_dataframe.at[index, "bends"]
+        )
 
     # Cut off-tool for skewed statistics
     if settings["cutoff_filter"]:
@@ -502,8 +581,9 @@
             frames = np.array(frames)
 
             if settings["use_average"]:
-                cut_off = int(np.sum(list_number) / len(list_number)) + \
-                    (np.sum(list_number) % len(list_number) > 0)
+                cut_off = int(np.sum(list_number) / len(list_number)) + (
+                    np.sum(list_number) % len(list_number) > 0
+                )
             else:
                 cut_off = max(list_number)
 
@@ -518,16 +598,16 @@
                 frames=frames,
                 original_particles=original_particles,
                 removed_particles_cutoff=removed_particles_cutoff,
-                )
+            )
         else:
             cutoff_filter_data = None
 
     # Cut off-tool for boundaries (spurious worms)
     if settings["extra_filter"]:
-        mask = (
-            (particle_dataframe.loc[:, "BPM"] > settings["Bends_max"]) &
-            (particle_dataframe.loc[:, "Speed"] < settings["Speed_max"]))
+        mask = (particle_dataframe.loc[:, "BPM"] > settings["Bends_max"]) & (
+            particle_dataframe.loc[:, "Speed"] < settings["Speed_max"]
+        )
         extra_filter_spurious_worms = mask.sum()
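+        # Worms beating implausibly fast while barely moving are assumed to be
+        # tracking artefacts and are dropped below.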
         particle_dataframe = particle_dataframe.loc[~mask]
     else:
@@ -541,14 +621,16 @@
             if not this_reg:
                 continue
         else:
-            this_reg = ['all']
+            this_reg = ["all"]
         particle_dataframe.at[index, "Region"] = str(this_reg)
         for reg in this_reg:
             region_particles[reg].append(index)
 
     particle_dataframe.loc[:, "Moving"] = np.logical_or(
         particle_dataframe.loc[:, "BPM"] > settings["maximum_bpm"],
-        particle_dataframe.loc[:, "Speed"] > settings["maximum_velocity"])
+        particle_dataframe.loc[:, "Speed"] > settings["maximum_velocity"],
+    )
 
     return dict(
         cutoff_filter_data=cutoff_filter_data,
@@ -563,13 +645,15 @@
 # --- Utilities Functions ---
 # =============================================================================
 
+
 def find_Z(video, settings, i0, i1):
     """Get thresholded image."""
     # Adjust brightness:
     frame = video[(i0 + i1) // 2]
     mean_brightness = np.mean(frame)
     if mean_brightness > 1:
-        mean_brightness /= 255.
+        mean_brightness /= 255.00
     Z = np.zeros_like(frame, dtype=np.float64)
     if settings["darkfield"]:
         minv = np.zeros_like(frame, dtype=np.float64) + 256
@@ -591,16 +675,20 @@
 def find_Z_with_paralyzed(video, settings, i0, i1):
     """Get thresholded image with paralyzed worms."""
     frame = video[(i0 + i1) // 2]
-    Y, X = np.meshgrid(np.arange(frame.shape[1]),
-                       np.arange(frame.shape[0]))
+    Y, X = np.meshgrid(np.arange(frame.shape[1]), np.arange(frame.shape[0]))
     thres = cv2.adaptiveThreshold(
-        frame, 1, cv2.ADAPTIVE_THRESH_GAUSSIAN_C,
-        cv2.THRESH_BINARY, 2 * (settings["std_px"] // 2) + 1, 0)
+        frame,
+        1,
+        cv2.ADAPTIVE_THRESH_GAUSSIAN_C,
+        cv2.THRESH_BINARY,
+        2 * (settings["std_px"] // 2) + 1,
+        0,
+    )
     mask = thres > 0.5
     vals = frame[mask]
     x = X[mask]
     y = Y[mask]
-    Z = interpolate.griddata((x, y), vals, (X, Y), method='nearest')
+    Z = interpolate.griddata((x, y), vals, (X, Y), method="nearest")
     return Z, False
 
@@ -614,7 +702,9 @@
         np.array([[2, 1, 2], [0, 1, 0], [0, 0, 0]]),
         np.array([[1, 2, 0], [2, 1, 0], [0, 0, 0]]),
         np.array([[2, 0, 0], [1, 1, 0], [2, 0, 0]]),
-        np.array([[0, 0, 0], [2, 1, 0], [1, 2, 0]])]
+        np.array([[0, 0, 0], [2, 1, 0], [1, 2, 0]]),
+    ]
 
     ep = 0
     for skel_endpoint in skel_endpoints:
@@ -638,8 +728,7 @@
         idx = np.argwhere(endpoints)
         reg = skel_labeled[idx[:, 0], idx[:, 1]]
         count = Counter(reg)
-        idx = np.array([idx[i, :] for i in range(len(reg))
-                        if count[reg[i]] > 2])
+        idx = np.array([idx[i, :] for i in range(len(reg)) if count[reg[i]] > 2])
         if len(idx) == 0:
             break
         endpoints[:] = 1
@@ -651,11 +740,11 @@
 def check_for_worms(particles, settings):
     """Check if any worms have been detected."""
     if len(particles) == 0:
-        with open(os.path.join(settings["save_as"], 'results.txt'), 'w') as f:
-            f.write('---------------------------------\n')
-            f.write(' Results for %s \n' % settings["video_filename"])
-            f.write('---------------------------------\n\n')
-            f.write('No worms detected. Check your settings.\n\n')
+        with open(os.path.join(settings["save_as"], "results.txt"), "w") as f:
+            f.write("---------------------------------\n")
+            f.write(" Results for %s \n" % settings["video_filename"])
+            f.write("---------------------------------\n\n")
+            f.write("No worms detected. 
Check your settings.\n\n") return False return True @@ -664,7 +753,8 @@ def make_region_paths(regions): reg_paths = {} for key, d in list(regions.items()): reg_paths[key] = mplPath.Path( - np.array(list(zip(d['x'] + [d['x'][0]], d['y'] + [d['y'][0]])))) + np.array(list(zip(d["x"] + [d["x"][0]], d["y"] + [d["y"][0]]))) + ) return reg_paths @@ -679,7 +769,7 @@ def identify_region(xs, ys, reg_paths): def extract_bends(x, smooth_y, settings): # Find extrema - ex = (np.diff(np.sign(np.diff(smooth_y))).nonzero()[0] + 1) + ex = np.diff(np.sign(np.diff(smooth_y))).nonzero()[0] + 1 if len(ex) >= 2 and ex[0] == 0: ex = ex[1:] bend_times = x[ex] @@ -722,8 +812,9 @@ def extract_velocity(tt, xx, yy, settings): dtt = -(np.roll(tt, ftev) - tt)[ftev:] dxx = (np.roll(xx, ftev) - xx)[ftev:] dyy = (np.roll(yy, ftev) - yy)[ftev:] - velocity = (settings["px_to_mm"] * settings["fps"] - * np.median(np.sqrt(dxx**2 + dyy**2) / dtt)) + velocity = ( + settings["px_to_mm"] * settings["fps"] * np.median(np.sqrt(dxx**2 + dyy**2) / dtt) + ) return velocity @@ -735,8 +826,10 @@ def extract_max_speed(tt, xx, yy, settings): dxx = (np.roll(xx, ftev) - xx)[ftev:] dyy = (np.roll(yy, ftev) - yy)[ftev:] percentile = ( - settings["px_to_mm"] * settings["fps"] * - np.percentile((np.sqrt(dxx**2 + dyy**2) / dtt), 90)) + settings["px_to_mm"] + * settings["fps"] + * np.percentile((np.sqrt(dxx**2 + dyy**2) / dtt), 90) + ) return percentile @@ -751,7 +844,7 @@ def extract_move_per_bend(bl, tt, xx, yy, px_to_mm): yi = np.interp(i, tt, yy) yj = np.interp(j, tt, yy) - dist = px_to_mm * np.sqrt((xj - xi)**2 + (yj - yi)**2) + dist = px_to_mm * np.sqrt((xj - xi) ** 2 + (yj - yi) ** 2) dists.append(dist) bend_i += 1 j = i @@ -762,140 +855,127 @@ def extract_move_per_bend(bl, tt, xx, yy, px_to_mm): return np.nan -def write_stats(settings, results, f, paralyzed_stats=True, prepend='', - mask=None): +def write_stats(settings, results, f, paralyzed_stats=True, prepend="", mask=None): stats = statistics(results, settings, mask) - f.write(f'\n-------------------------------\n{prepend}\n') + f.write(f"\n-------------------------------\n{prepend}\n") if settings["cutoff_filter"]: if mask is None: # Meaningless if mask != None - f.write('Total particles: %i\n' % - results['cutoff_filter_data']['original_particles']) + f.write("Total particles: %i\n" % results["cutoff_filter_data"]["original_particles"]) else: - f.write('Total particles: Not saved for regions\n') + f.write("Total particles: Not saved for regions\n") else: - f.write('Total particles: %i\n' % - stats['count']) + f.write("Total particles: %i\n" % stats["count"]) if paralyzed_stats and mask is None: # filters stats are only meaningful if mask == None - f.write('\nCUT-OFF tool/filters\n') + f.write("\nCUT-OFF tool/filters\n") # Not saved for cutoff_filter - f.write('Max particles present at same time: %i\n' - % stats['max_number_worms_present']) - f.write('\n') + f.write("Max particles present at same time: %i\n" % stats["max_number_worms_present"]) + f.write("\n") if settings["cutoff_filter"]: # Meaningless if mask != None - f.write('Frame number: ') - for item in results['cutoff_filter_data']["frames"]: - f.write('%i, ' % item) + f.write("Frame number: ") + for item in results["cutoff_filter_data"]["frames"]: + f.write("%i, " % item) - f.write('\n# of particles: ') - for item in results['cutoff_filter_data']["list_number"]: - f.write('%i, ' % item) + f.write("\n# of particles: ") + for item in results["cutoff_filter_data"]["list_number"]: + f.write("%i, " % item) - f.write('\nCut-off 
tool: Yes\n') + f.write("\nCut-off tool: Yes\n") if settings["use_average"]: - f.write('Method: averaging\n') + f.write("Method: averaging\n") else: - f.write('Method: maximum\n') + f.write("Method: maximum\n") f.write( - 'Removed particles: %i\n' % - results['cutoff_filter_data']['removed_particles_cutoff']) + "Removed particles: %i\n" + % results["cutoff_filter_data"]["removed_particles_cutoff"] + ) else: - f.write('Cut-off tool: No\n') + f.write("Cut-off tool: No\n") if settings["extra_filter"]: - f.write('Extra filter: Yes\n') + f.write("Extra filter: Yes\n") f.write( - 'Settings: remove when bpm > %.5f and velocity < %.5f\n' % - (settings["Bends_max"], settings["Speed_max"])) - f.write('Removed particles: %i' % - results['extra_filter_spurious_worms']) + "Settings: remove when bpm > %.5f and velocity < %.5f\n" + % (settings["Bends_max"], settings["Speed_max"]) + ) + f.write("Removed particles: %i" % results["extra_filter_spurious_worms"]) else: - f.write('Extra filter: No\n') - - f.write('\n-------------------------------\n\n') - - f.write(prepend + 'BPM Mean: %.5f\n' % stats['bpm_mean']) - f.write(prepend + 'BPM Standard deviation: %.5f\n' % stats['bpm_std']) - f.write(prepend + 'BPM Error on Mean: %.5f\n' % stats['bpm_mean_std']) - f.write(prepend + 'BPM Median: %.5f\n' % stats['bpm_median']) - - f.write(prepend + 'Bends in movie Mean: %.5f\n' % - stats['bends_in_movie_mean']) - f.write(prepend + 'Bends in movie Standard deviation: %.5f\n' % - stats['bends_in_movie_std']) - f.write(prepend + 'Bends in movie Error on Mean: %.5f\n' % - stats['bends_in_movie_mean_std']) - f.write( - prepend + - 'Bends in movie Median: %.5f\n' % - stats['bends_in_movie_median']) - - f.write(prepend + 'Speed Mean: %.6f\n' % stats['vel_mean']) - f.write(prepend + 'Speed Standard deviation: %.6f\n' % stats['vel_std']) - f.write(prepend + 'Speed Error on Mean: %.6f\n' % stats['vel_mean_std']) - f.write(prepend + 'Speed Median: %.6f\n' % stats['vel_median']) - - f.write( - prepend + - '90th Percentile speed Mean: %.6f\n' % - stats['max_speed_mean']) - f.write(prepend + '90th Percentile speed Standard deviation: %.6f\n' % - stats['max_speed_std']) - f.write(prepend + '90th Percentile speed Error on mean: %.6f\n' % - stats['max_speed_mean_std']) - if np.isnan(stats['move_per_bend_mean']): - f.write(prepend + 'Dist per bend Mean: nan\n') - f.write(prepend + 'Dist per bend Standard deviation: nan\n') - f.write(prepend + 'Dist per bend Error on Mean: nan\n') + f.write("Extra filter: No\n") + + f.write("\n-------------------------------\n\n") + + f.write(prepend + "BPM Mean: %.5f\n" % stats["bpm_mean"]) + f.write(prepend + "BPM Standard deviation: %.5f\n" % stats["bpm_std"]) + f.write(prepend + "BPM Error on Mean: %.5f\n" % stats["bpm_mean_std"]) + f.write(prepend + "BPM Median: %.5f\n" % stats["bpm_median"]) + + f.write(prepend + "Activity index Mean: %.5f\n" % stats["activity_index_mean"]) + f.write(prepend + "Activity index Standard deviation: %.5f\n" % stats["activity_index_std"]) + f.write(prepend + "Activity index Error on Mean: %.5f\n" % stats["activity_index_mean_std"]) + f.write(prepend + "Activity index Median: %.5f\n" % stats["activity_index_median"]) + + f.write(prepend + "Bends in movie Mean: %.5f\n" % stats["bends_in_movie_mean"]) + f.write(prepend + "Bends in movie Standard deviation: %.5f\n" % stats["bends_in_movie_std"]) + f.write(prepend + "Bends in movie Error on Mean: %.5f\n" % stats["bends_in_movie_mean_std"]) + f.write(prepend + "Bends in movie Median: %.5f\n" % 
stats["bends_in_movie_median"]) + + f.write(prepend + "Speed Mean: %.6f\n" % stats["vel_mean"]) + f.write(prepend + "Speed Standard deviation: %.6f\n" % stats["vel_std"]) + f.write(prepend + "Speed Error on Mean: %.6f\n" % stats["vel_mean_std"]) + f.write(prepend + "Speed Median: %.6f\n" % stats["vel_median"]) + + f.write(prepend + "90th Percentile speed Mean: %.6f\n" % stats["max_speed_mean"]) + f.write(prepend + "90th Percentile speed Standard deviation: %.6f\n" % stats["max_speed_std"]) + f.write(prepend + "90th Percentile speed Error on mean: %.6f\n" % stats["max_speed_mean_std"]) + if np.isnan(stats["move_per_bend_mean"]): + f.write(prepend + "Dist per bend Mean: nan\n") + f.write(prepend + "Dist per bend Standard deviation: nan\n") + f.write(prepend + "Dist per bend Error on Mean: nan\n") else: - f.write( - prepend + - 'Dist per bend Mean: %.6f\n' % - stats['move_per_bend_mean']) - f.write(prepend + 'Dist per bend Standard deviation: %.6f\n' % - stats['move_per_bend_std']) - f.write(prepend + 'Dist per bend Error on Mean: %.6f\n' % - stats['move_per_bend_mean_std']) + f.write(prepend + "Dist per bend Mean: %.6f\n" % stats["move_per_bend_mean"]) + f.write(prepend + "Dist per bend Standard deviation: %.6f\n" % stats["move_per_bend_std"]) + f.write(prepend + "Dist per bend Error on Mean: %.6f\n" % stats["move_per_bend_mean_std"]) if paralyzed_stats: - f.write(prepend + 'Moving worms: %i\n' % stats['n_moving']) - f.write(prepend + 'Paralyzed worms: %i\n' % stats['n_paralyzed']) - f.write(prepend + 'Total worms: %i\n' % - stats['max_number_worms_present']) - f.write(prepend + 'Moving ratio: %.6f\n' % - (float(stats['n_moving']) / stats['count'])) - f.write(prepend + 'Paralyzed ratio: %.6f\n' % - (float(stats['n_paralyzed']) / stats['count'])) - if stats['n_paralyzed'] > 0: - f.write(prepend + 'Moving-to-paralyzed ratio: %.6f\n' % (float( - stats['n_moving']) / stats['n_paralyzed'])) + f.write(prepend + "Moving worms: %i\n" % stats["n_moving"]) + f.write(prepend + "Paralyzed worms: %i\n" % stats["n_paralyzed"]) + f.write(prepend + "Total worms: %i\n" % stats["max_number_worms_present"]) + f.write(prepend + "Moving ratio: %.6f\n" % (float(stats["n_moving"]) / stats["count"])) + f.write( + prepend + "Paralyzed ratio: %.6f\n" % (float(stats["n_paralyzed"]) / stats["count"]) + ) + if stats["n_paralyzed"] > 0: + f.write( + prepend + + "Moving-to-paralyzed ratio: %.6f\n" + % (float(stats["n_moving"]) / stats["n_paralyzed"]) + ) else: - f.write(prepend + 'Moving-to-paralyzed ratio: inf\n') - if stats['n_moving'] > 0: - f.write(prepend + 'Paralyzed-to-moving ratio: %.6f\n' % (float( - stats['n_paralyzed']) / stats['n_moving'])) + f.write(prepend + "Moving-to-paralyzed ratio: inf\n") + if stats["n_moving"] > 0: + f.write( + prepend + + "Paralyzed-to-moving ratio: %.6f\n" + % (float(stats["n_paralyzed"]) / stats["n_moving"]) + ) else: - f.write(prepend + 'Paralyzed-to-moving ratio: inf\n') - f.write(prepend + 'Area Mean: %.6f\n' % stats['area_mean']) - f.write(prepend + 'Area Standard Deviation: %.6f\n' % stats['area_std']) - f.write(prepend + 'Area Error on Mean: %.6f\n' % stats['area_mean_std']) + f.write(prepend + "Paralyzed-to-moving ratio: inf\n") + f.write(prepend + "Area Mean: %.6f\n" % stats["area_mean"]) + f.write(prepend + "Area Standard Deviation: %.6f\n" % stats["area_std"]) + f.write(prepend + "Area Error on Mean: %.6f\n" % stats["area_mean_std"]) - f.write(prepend + 'Round ratio Mean: %.6f\n' % stats['round_ratio_mean']) - f.write(prepend + 'Round ratio Standard deviation: %.6f\n' % - 
stats['round_ratio_std']) - f.write(prepend + 'Round ratio Error on mean: %.6f\n' % - stats['round_ratio_mean_std']) + f.write(prepend + "Round ratio Mean: %.6f\n" % stats["round_ratio_mean"]) + f.write(prepend + "Round ratio Standard deviation: %.6f\n" % stats["round_ratio_std"]) + f.write(prepend + "Round ratio Error on mean: %.6f\n" % stats["round_ratio_mean_std"]) - f.write(prepend + 'Eccentricity Mean: %.6f\n' % stats['eccentricity_mean']) - f.write(prepend + 'Eccentricity Standard deviation: %.6f\n' % - stats['eccentricity_std']) - f.write(prepend + 'Eccentricity Error on mean: %.6f\n' % - stats['eccentricity_mean_std']) + f.write(prepend + "Eccentricity Mean: %.6f\n" % stats["eccentricity_mean"]) + f.write(prepend + "Eccentricity Standard deviation: %.6f\n" % stats["eccentricity_std"]) + f.write(prepend + "Eccentricity Error on mean: %.6f\n" % stats["eccentricity_mean_std"]) def mean_std(x, appears_in): @@ -912,15 +992,15 @@ def statistics(results, settings, mask=None): df = df.loc[mask, :] - P = results["track"]['particle'] - T = results["track"]['frame'] + P = results["track"]["particle"] + T = results["track"]["frame"] if settings["cutoff_filter"]: max_number_worms_present = len(df) else: max_number_worms_present = max( - [len([1 for p in set(P[T == t]) if p in df.index]) - for t in set(T)]) + [len([1 for p in set(P[T == t]) if p in df.index]) for t in set(T)] + ) count = len(df) n_moving = np.sum(df.loc[:, "Moving"]) n_paralyzed = len(df) - n_moving @@ -930,11 +1010,13 @@ def statistics(results, settings, mask=None): bpm_median = np.median(df.loc[:, "BPM"]) bpm_mean_std = bpm_std / np.sqrt(max_number_worms_present) - bends_in_movie_mean, bends_in_movie_std = mean_std( - df.loc[:, "bends_in_movie"], appears_in) + activity_index_mean, activity_index_std = mean_std(df.loc[:, "activity_index"], appears_in) + activity_index_median = np.median(df.loc[:, "activity_index"]) + activity_index_mean_std = activity_index_std / np.sqrt(max_number_worms_present) + + bends_in_movie_mean, bends_in_movie_std = mean_std(df.loc[:, "bends_in_movie"], appears_in) bends_in_movie_median = np.median(df.loc[:, "bends_in_movie"]) - bends_in_movie_mean_std = bends_in_movie_std / \ - np.sqrt(max_number_worms_present) + bends_in_movie_mean_std = bends_in_movie_std / np.sqrt(max_number_worms_present) vel_mean, vel_std = mean_std(df.loc[:, "Speed"], appears_in) vel_mean_std = vel_std / np.sqrt(max_number_worms_present) @@ -943,94 +1025,115 @@ def statistics(results, settings, mask=None): area_mean, area_std = mean_std(df.loc[:, "Area"], appears_in) area_mean_std = area_std / np.sqrt(max_number_worms_present) - max_speed_mean, max_speed_std = mean_std( - df.loc[:, "Max speed"], appears_in) + max_speed_mean, max_speed_std = mean_std(df.loc[:, "Max speed"], appears_in) max_speed_mean_std = max_speed_std / np.sqrt(max_number_worms_present) - round_ratio_mean, round_ratio_std = mean_std( - df.loc[:, "Round ratio"], appears_in) + round_ratio_mean, round_ratio_std = mean_std(df.loc[:, "Round ratio"], appears_in) round_ratio_mean_std = round_ratio_std / np.sqrt(max_number_worms_present) - eccentricity_mean, eccentricity_std = mean_std( - df.loc[:, "eccentricity"], appears_in) - eccentricity_mean_std = eccentricity_std / \ - np.sqrt(max_number_worms_present) + eccentricity_mean, eccentricity_std = mean_std(df.loc[:, "eccentricity"], appears_in) + eccentricity_mean_std = eccentricity_std / np.sqrt(max_number_worms_present) # Ignore nan particles for move_per_bend mask_appear = np.logical_not(np.isnan(df.loc[:, "Dist per 
bend"])) if np.any(mask_appear): move_per_bend_mean, move_per_bend_std = mean_std( - df.loc[mask_appear, "Dist per bend"], - df.loc[mask_appear, "Appears in frames"]) - move_per_bend_mean_std = move_per_bend_std / \ - np.sqrt(max([np.sum(mask_appear), max_number_worms_present])) + df.loc[mask_appear, "Dist per bend"], df.loc[mask_appear, "Appears in frames"] + ) + move_per_bend_mean_std = move_per_bend_std / np.sqrt( + max([np.sum(mask_appear), max_number_worms_present]) + ) else: move_per_bend_mean = np.nan move_per_bend_std = np.nan move_per_bend_mean_std = np.nan stats = { - 'max_number_worms_present': max_number_worms_present, - 'n_paralyzed': n_paralyzed, - 'n_moving': n_moving, - 'bpm_mean': bpm_mean, - 'bpm_std': bpm_std, - 'bpm_median': bpm_median, - 'bpm_mean_std': bpm_mean_std, - 'bends_in_movie_mean': bends_in_movie_mean, - 'bends_in_movie_std': bends_in_movie_std, - 'bends_in_movie_mean_std': bends_in_movie_mean_std, - 'bends_in_movie_median': bends_in_movie_median, - 'vel_mean': vel_mean, - 'vel_std': vel_std, - 'vel_mean_std': vel_mean_std, - 'vel_median': vel_median, - 'area_mean': area_mean, - 'area_std': area_std, - 'area_mean_std': area_mean_std, - 'max_speed_mean': max_speed_mean, - 'max_speed_std': max_speed_std, - 'max_speed_mean_std': max_speed_mean_std, - 'move_per_bend_mean': move_per_bend_mean, - 'move_per_bend_std': move_per_bend_std, - 'move_per_bend_mean_std': move_per_bend_mean_std, - 'count': count, - 'round_ratio_mean': round_ratio_mean, - 'round_ratio_std': round_ratio_std, - 'round_ratio_mean_std': round_ratio_mean_std, - 'eccentricity_mean': eccentricity_mean, - 'eccentricity_std': eccentricity_std, - 'eccentricity_mean_std': eccentricity_mean_std} + "max_number_worms_present": max_number_worms_present, + "n_paralyzed": n_paralyzed, + "n_moving": n_moving, + "bpm_mean": bpm_mean, + "bpm_std": bpm_std, + "bpm_median": bpm_median, + "bpm_mean_std": bpm_mean_std, + "activity_index_mean": activity_index_mean, + "activity_index_std": activity_index_std, + "activity_index_median": activity_index_median, + "activity_index_mean_std": activity_index_mean_std, + "bends_in_movie_mean": bends_in_movie_mean, + "bends_in_movie_std": bends_in_movie_std, + "bends_in_movie_mean_std": bends_in_movie_mean_std, + "bends_in_movie_median": bends_in_movie_median, + "vel_mean": vel_mean, + "vel_std": vel_std, + "vel_mean_std": vel_mean_std, + "vel_median": vel_median, + "area_mean": area_mean, + "area_std": area_std, + "area_mean_std": area_mean_std, + "max_speed_mean": max_speed_mean, + "max_speed_std": max_speed_std, + "max_speed_mean_std": max_speed_mean_std, + "move_per_bend_mean": move_per_bend_mean, + "move_per_bend_std": move_per_bend_std, + "move_per_bend_mean_std": move_per_bend_mean_std, + "count": count, + "round_ratio_mean": round_ratio_mean, + "round_ratio_std": round_ratio_std, + "round_ratio_mean_std": round_ratio_mean_std, + "eccentricity_mean": eccentricity_mean, + "eccentricity_std": eccentricity_std, + "eccentricity_mean_std": eccentricity_mean_std, + } return stats def write_particles(settings, particles_dataframe, filename): - """Write particles dataframe to csv""" - df = particles_dataframe.loc[:, [ - "BPM", "bends_in_movie", "Speed", "Max speed", "Dist per bend", - "Area", "Appears in frames", "Moving", "Region", "Round ratio", - "eccentricity"]] - - x = (settings["limit_images_to"] / settings["fps"]) + """Write particles dataframe to csv.""" + df = particles_dataframe.loc[ + :, + [ + "BPM", + "bends_in_movie", + "Speed", + "Max speed", + "Dist per bend", + 
"Area", + "Appears in frames", + "Moving", + "Region", + "Round ratio", + "eccentricity", + ], + ] + + x = settings["limit_images_to"] / settings["fps"] df.columns = [ - 'BPM', f'Bends per {x:.2f} s', 'Speed', 'Max speed', 'Dist per bend', - 'Area', 'Appears in frames', 'Moving (non-paralyzed)', 'Region', - 'Round ratio', 'Eccentricity'] + "BPM", + f"Bends per {x:.2f} s", + "Speed", + "Max speed", + "Dist per bend", + "Area", + "Appears in frames", + "Moving (non-paralyzed)", + "Region", + "Round ratio", + "Eccentricity", + ] df.to_csv(filename) def write_results_file(results, settings): df = results["particle_dataframe"] - write_particles(settings, - df, - os.path.join(settings["save_as"], 'particles.csv')) + write_particles(settings, df, os.path.join(settings["save_as"], "particles.csv")) - with open(os.path.join(settings["save_as"], 'results.txt'), 'w') as f: - f.write('---------------------------------\n') - f.write(' Results for %s \n' % settings["video_filename"]) - f.write('---------------------------------\n\n') + with open(os.path.join(settings["save_as"], "results.txt"), "w") as f: + f.write("---------------------------------\n") + f.write(" Results for %s \n" % settings["video_filename"]) + f.write("---------------------------------\n\n") # Stats for all worms write_stats(settings, results, f, paralyzed_stats=True) @@ -1038,19 +1141,20 @@ def write_results_file(results, settings): # Stats for moving worms moving_mask = df.loc[:, "Moving"] - write_stats(settings, results, f, paralyzed_stats=False, - prepend='Moving ', mask=moving_mask) + write_stats( + settings, results, f, paralyzed_stats=False, prepend="Moving ", mask=moving_mask + ) # Raw stats - f.write('---------------------------------\n\n') + f.write("---------------------------------\n\n") regions = settings["regions"] # Per region stats if len(regions) > 1: for reg in regions: - f.write('---------------------------------\n') - f.write('Stats for region: %s\n' % reg) - f.write('---------------------------------\n\n') + f.write("---------------------------------\n") + f.write("Stats for region: %s\n" % reg) + f.write("---------------------------------\n\n") # Worms of this region try: @@ -1058,64 +1162,60 @@ def write_results_file(results, settings): except TypeError: pars = [int(results["region_particles"][reg])] if len(pars) == 0: - f.write('Nothing found in region.\n\n') + f.write("Nothing found in region.\n\n") continue indices = [idx for idx in pars if idx in df.index] # All worms - write_stats(settings, results, f, paralyzed_stats=True, - mask=indices) + write_stats(settings, results, f, paralyzed_stats=True, mask=indices) - f.write('\n\n') - f.write('\n') + f.write("\n\n") + f.write("\n") - print('results.txt file produced.') + print("results.txt file produced.") # ============================================================================= # --- Matplotlib code--- # ============================================================================= def print_frame(settings, t, P, T, bends, track): - font = {'size': settings["font_size"]} - print('Printing frame', t + 1) - image_filename = os.path.join( - settings["save_as"], 'imgs', '%05d.jpg' % (int(t))) - frame = (255 - io.imread(image_filename)) + font = {"size": settings["font_size"]} + print("Printing frame", t + 1) + image_filename = os.path.join(settings["save_as"], "imgs", "%05d.jpg" % (int(t))) + frame = 255 - io.imread(image_filename) os.remove(image_filename) small_imshow(settings, frame, cmap=cm.binary, vmax=300) for p in bends.index: pp = P == p l = 
np.logical_and(pp, T == t)
         if np.sum(l) > 0:
-            x = track['x'][l].iloc[0]
-            y = track['y'][l].iloc[0]
+            x = track["x"][l].iloc[0]
+            y = track["y"][l].iloc[0]
             b = bends[p][np.sum(T[pp] < t)]
-            plt.text(y + 3, x + 3, 'p=%i\n%.1f' %
-                     (p, b), font, color=[1, 0.3, 0.2])
+            plt.text(y + 3, x + 3, "p=%i\n%.1f" % (p, b), font, color=[1, 0.3, 0.2])
     m, n = frame.shape
     plt.plot(
-        [n - (5 + settings["scale_bar_size"] / float(settings["px_to_mm"])),
-         n - 5],
+        [n - (5 + settings["scale_bar_size"] / float(settings["px_to_mm"])), n - 5],
         [m - 5, m - 5],
-        linewidth=settings["scale_bar_thickness"], c=[0.5, 0.5, 0.5])
-    plt.axis('off')
-    plt.axis('tight')
-    plt.savefig(os.path.join(settings["save_as"], 'imgs', '%05d.jpg' % (t)))
+        linewidth=settings["scale_bar_thickness"],
+        c=[0.5, 0.5, 0.5],
+    )
+    plt.axis("off")
+    plt.axis("tight")
+    plt.savefig(os.path.join(settings["save_as"], "imgs", "%05d.jpg" % (t)))
 
 
 def print_images(settings, bends):
     plt.gcf().set_size_inches(20, 20)
     plt.clf()
-    with open(os.path.join(settings["save_as"], 'track.p'),
-              'br') as trackfile:
+    with open(os.path.join(settings["save_as"], "track.p"), "br") as trackfile:
         track = pickle.load(trackfile)
-    P = track['particle']
-    T = track['frame']
+    P = track["particle"]
+    T = track["frame"]
 
     output_overlayed_images = settings["output_overlayed_images"]
     if output_overlayed_images != 0:
-        up_to = (len(set(T)) if output_overlayed_images is None
-                 else output_overlayed_images)
+        up_to = len(set(T)) if output_overlayed_images is None else output_overlayed_images
         for t in range(up_to):
             print_frame(settings, t, P, T, bends, track)
             plt.clf()
@@ -1131,58 +1231,87 @@ def small_imshow(settings, img, *args, **kwargs):
         img = resize(
             np.asarray(img, float),
             (int(img.shape[0] * factor), int(img.shape[1] * factor)),
-            preserve_range=True)
+            preserve_range=True,
+        )
 
     plt.clf()
-    plt.imshow(img, *args, extent=[0, original_shape[1],
-                                   original_shape[0], 0], **kwargs)
+    plt.imshow(img, *args, extent=[0, original_shape[1], original_shape[0], 0], **kwargs)
 
 
 def output_processing_frames(
-        settings, save_folder, frameorig, Z, frame, thresholded,
-        frame_after_open, frame_after_close, labeled,
-        labeled_removed, skel_labeled=None):
+    settings,
+    save_folder,
+    frameorig,
+    Z,
+    frame,
+    thresholded,
+    frame_after_open,
+    frame_after_close,
+    labeled,
+    labeled_removed,
+    skel_labeled=None,
+):
     plt.gcf().set_size_inches(20, 20)
     plt.clf()
 
     small_imshow(settings, frameorig, cmap=cm.gray)
-    plt.savefig(os.path.join(save_folder, '0frameorig.jpg'))
+    plt.savefig(os.path.join(save_folder, "0frameorig.jpg"))
 
     small_imshow(settings, Z, cmap=cm.gray)
-    plt.savefig(os.path.join(save_folder, '0z.jpg'))
+    plt.savefig(os.path.join(save_folder, "0z.jpg"))
 
     small_imshow(settings, frame, cmap=cm.gray)
-    plt.savefig(os.path.join(save_folder, '1framesubtract.jpg'))
+    plt.savefig(os.path.join(save_folder, "1framesubtract.jpg"))
 
     small_imshow(settings, thresholded, cmap=cm.binary)
-    plt.savefig(os.path.join(save_folder, '2thresholded.jpg'))
+    plt.savefig(os.path.join(save_folder, "2thresholded.jpg"))
 
     small_imshow(settings, frame_after_open, cmap=cm.binary)
-    plt.savefig(os.path.join(save_folder, '3opened.jpg'))
+    plt.savefig(os.path.join(save_folder, "3opened.jpg"))
 
     small_imshow(settings, frame_after_close, cmap=cm.binary)
-    plt.savefig(os.path.join(save_folder, '4closed.jpg'))
+    plt.savefig(os.path.join(save_folder, "4closed.jpg"))
 
     small_imshow(settings, labeled, cmap=cm.binary)
-    plt.savefig(os.path.join(save_folder, '5labelled.jpg'))
+    plt.savefig(os.path.join(save_folder, 
"5labelled.jpg")) small_imshow(settings, labeled_removed, cmap=cm.binary) - plt.savefig(os.path.join(save_folder, '6removed.jpg')) + plt.savefig(os.path.join(save_folder, "6removed.jpg")) if skel_labeled is not None: small_imshow(settings, skel_labeled, cmap=cm.binary) - plt.savefig(os.path.join(save_folder, '7skeletonized.jpg')) + plt.savefig(os.path.join(save_folder, "7skeletonized.jpg")) plt.clf() def print_example_frame( - settings, sizes, save_folder, frameorig, Z, frame, thresholded, - frame_after_open, frame_after_close, labeled, labeled_removed, - skel_labeled): - print('Sizes:') + settings, + sizes, + save_folder, + frameorig, + Z, + frame, + thresholded, + frame_after_open, + frame_after_close, + labeled, + labeled_removed, + skel_labeled, +): + print("Sizes:") print(sizes) output_processing_frames( - settings, save_folder, frameorig, Z, frame, thresholded, - frame_after_open, frame_after_close, labeled, labeled_removed, - (skel_labeled if settings["skeletonize"] else None)) - print('Example frame outputted!') + settings, + save_folder, + frameorig, + Z, + frame, + thresholded, + frame_after_open, + frame_after_close, + labeled, + labeled_removed, + (skel_labeled if settings["skeletonize"] else None), + ) + print("Example frame outputted!") diff --git a/audit.csv b/audit.csv new file mode 100644 index 0000000..8d28524 --- /dev/null +++ b/audit.csv @@ -0,0 +1,19 @@ +video_name,expected_frames,number_of_actual_frames,expected_interval,average_interval,stdev_interval,actual_length_seconds,avg_fps +220427_BF_RC7_30ms_11.3x-crawl-ACR125_50ms_1,300.0,300.0,50.0,264.76,165.13202096451826,79.428,3.777005589968273 +220427_BF_RC7_30ms_11.3x-crawl-ACR125_50ms_2_1,300.0,300.0,50.0,283.46666666666664,199.99634333335013,85.04,3.527751646284101 +220427_BF_RC7_30ms_11.3x-crawl-ACR125_burst_1,100.0,100.0,1.0,36.308,11.82934222067049,3.6308,27.54213947339429 +220427_BF_RC7_30ms_11.3x-crawl-ACR125_burst_3_1,100.0,100.0,1.0,37.5768,13.263488798619749,3.75768,26.612164952843244 +220427_BF_RC7_30ms_11.3x-swim-acr125_burst_1,100.0,100.0,1.0,37.6839,17.963933726759127,3.76839,26.53653151611166 +220427_BF_RC7_30ms_11.3x-swim-acr125_burst_2,100.0,107.0,1.0,34.70451612903226,9.860474293660149,3.22752,28.814693634741225 +220427_BF_RC7_30ms_11.3x-swim-acr125_burst_3,100.0,100.0,1.0,35.348600000000005,10.821602508338788,3.53486,28.28966352274206 +220427_BF_RC7_30ms_11.3x-swim-acr125_burst_4_2,100.0,100.0,1.0,34.4214,13.222972665063253,3.44214,29.051694585345164 +raw-220503_TPS_ACR085-crawling_zoom113_10ms_100img,100.0,100.0,10.0,33.0665,7.184647404941543,3.3066500000000003,30.24208791374956 +raw-220503_TPS_ACR085-trashing_zoom113_10ms_100img,100.0,100.0,10.0,33.1473,7.744858816718934,3.31473,30.1683696711346 +raw-220503_TPS_ACR125-crawling_zoom113_10ms_100img,100.0,100.0,10.0,32.8245,8.158093010358304,3.28245,30.465048972566223 +raw-220503_TPS_ACR125-thrashing_zoom113_10ms_100img,100.0,100.0,10.0,33.0078,6.778871450426971,3.30078,30.295869461157665 +raw-220503_TPS_N2-zoom113_20ms_2,500.0,747.0,20.0,250.1225296442688,159.04675365295503,63.281,3.998040486085871 +raw-220503_TPS_N2-zoom197_20ms_1,500.0,781.0,20.0,261.75342465753425,141.24246647006768,57.324,3.820389365710697 +raw-220503_TPS_N2-zoom197_20ms_2,,,,,,, +raw-dt100ms_11.3x_RC8_BF_expo33ms,200.0,200.0,100.0,214.67,25.872146020353487,42.934,4.658312759118648 +raw-dt10ms_11.3x_RC8_BF_expo33ms,200.0,200.0,10.0,33.1244,12.810651686121032,6.62488,30.18922606900049 
+raw-dt50ms_11.3x_RC8_BF_expo33ms,100.0,100.0,50.0,188.82,31.321848314206857,18.882,5.2960491473360864
diff --git a/auditor/__main__.py b/auditor/__main__.py
new file mode 100644
index 0000000..e0b3077
--- /dev/null
+++ b/auditor/__main__.py
@@ -0,0 +1,34 @@
+from auditor.auditor import path_checker
+from auditor.parser import audit_images, load_metadata, parser
+
+# ########################################################################## #
+#                                 FUNCTIONS                                  #
+# ########################################################################## #
+
+
+def main():
+    # Parse the command-line argument(s)
+    args = parser()
+    dir_path = args.path
+
+    # Validate the input directory
+    path_checker(dir_path)
+
+    # Load the JSON metadata file
+    metadata = load_metadata(dir_path)
+
+    # Audit the TIFF frames against the metadata and report frame statistics
+    audit_images(metadata, dir_path)
+
+
+# ########################################################################## #
+#                                    MAIN                                    #
+# ########################################################################## #
+
+if __name__ == "__main__":
+    main()
diff --git a/auditor/auditor.py b/auditor/auditor.py
new file mode 100644
index 0000000..0daf67f
--- /dev/null
+++ b/auditor/auditor.py
@@ -0,0 +1,46 @@
+import os
+
+NB_LIMIT = 10
+
+
+# Checks on the path given as argument.
+def path_checker(path: str):
+    """Check the existence and access rights of a directory.
+
+    Arguments:
+        path (str): path to the input directory (containing the images).
+
+    Raises:
+        NotADirectoryError: directory doesn't exist or is not a directory.
+        PermissionError: user doesn't have access to the directory.
+    """
+    if not os.path.isdir(path):
+        raise NotADirectoryError(path + " is not a directory.")
+    if not os.access(path, os.R_OK | os.W_OK):
+        raise PermissionError("Permission denied to " + path)
+
+
+def path_inside_checker(dir_path: str):
+    """Check that the directory holds only .tif, .json and metadata.txt files,
+    that metadata.txt exists, and that at least NB_LIMIT .tif files are present.
+
+    Arguments:
+        dir_path: path to the directory.
+
+    Raises:
+        FileNotFoundError: there is no metadata.txt file.
+        Exception: there are fewer than NB_LIMIT .tif files.
+        Exception: a file other than .tif, .json or metadata.txt was found.
+    """
+    number_tif_files = 0
+    metadata_file = False
+    for file in os.listdir(dir_path):
+        if file[-4:] == ".tif":
+            number_tif_files += 1
+        elif file == "metadata.txt":
+            metadata_file = True
+        elif file[-5:] == ".json":
+            continue
+        else:
+            raise Exception("File other than .tif or .json or metadata.txt found")
+    if metadata_file is False:
+        raise FileNotFoundError("No metadata file found")
+    if number_tif_files < NB_LIMIT:
+        raise Exception(f"Fewer than {NB_LIMIT} .tif files found")
diff --git a/auditor/parser.py b/auditor/parser.py
new file mode 100644
index 0000000..2dd19ac
--- /dev/null
+++ b/auditor/parser.py
@@ -0,0 +1,175 @@
+import argparse
+import json
+from json import JSONDecodeError
+from os import F_OK, R_OK, access, path
+from os.path import exists as file_exists
+
+import cv2 as cv
+import pandas as pd
+
+
+# ########################################################################### #
+#                    Parsing of the inputs of the auditor                     #
+# ########################################################################### #
+# Parser related to the arguments of the auditor program
+def parser() -> argparse.Namespace:
+    """Parse arguments to get the path of the input directory.
+
+    Return:
+        Namespace(path='') holding the path to the directory that
+        contains the images and their metadata.
+    """
+    parser = argparse.ArgumentParser()
+    parser.add_argument(
+        "--path", type=str, required=True, help="path of the directory to search for sources."
+    )
+    return parser.parse_args()
+
+
+# ########################################################################### #
+#                      Loading of metadata and audit data                     #
+# ########################################################################### #
+
+
+def load_metadata(directoryPath: str) -> dict:
+    """Load the metadata file into a Python dictionary.
+
+    Args:
+        directoryPath (str): directory containing the metadata.txt file to load.
+
+    Raises:
+        FileNotFoundError: file [directoryPath]/metadata.txt does not exist.
+        Exception: [directoryPath]/metadata.txt is not readable by the user.
+        JSONDecodeError: issue when loading the metadata from file.
+
+    Returns:
+        dict: the loaded metadata.
+    """
+    metadata_file = directoryPath + "/metadata.txt"
+    if not access(metadata_file, F_OK):
+        raise FileNotFoundError(f"File {metadata_file} does not exist.")
+    if not access(metadata_file, R_OK):
+        raise Exception(f"File {metadata_file} is not readable by the user.")
+    with open(file=metadata_file, mode="r") as file:
+        metadata = json.load(file)
+    return metadata
+
+
+def load_dataframe(file_path):
+    """Load the audit dataframe from a .csv file, creating an empty one if none exists.
+
+    Args:
+        file_path (str): path to the audit .csv file.
+
+    Returns:
+        pd.DataFrame: the loaded, or newly created, audit dataframe.
+    """
+    if file_exists(file_path):
+        df = pd.read_csv(file_path, index_col=False)
+    else:
+        df = pd.DataFrame(
+            columns=[
+                "video_name",
+                "expected_frames",
+                "number_of_actual_frames",
+                "expected_interval",
+                "average_interval",
+                "stdev_interval",
+                "actual_length_seconds",
+                "avg_fps",
+            ]
+        )
+    return df
+
+
+def audit_images(metadata: dict, directoryPath: str) -> dict:
+    """Audit the video frames and save the results to a .csv file.
+
+    Args:
+        metadata (dict): the metadata resulting from load_metadata.
+        directoryPath (str): the directory where the images are located.
+
+    Returns:
+        dict: a dictionary with the audited metadata.
+    """
+    video_name = directoryPath.rsplit("/", 1)[-1]
+    total_time_ms = 0
+    expect_frame_no = 0
+    expected_frames = metadata["Summary"]["Frames"]
+    theoretical_interval = metadata["Summary"]["Interval_ms"]
+    filenames_list = []
+    intervals_list = []
+    missing_frames = 0
+    tmp_total_time_ms = 0
+    # Safe default in case frame 0 itself is missing from disk
+    time_to_first_image = 0
+    audit_out_file = "./audit.csv"
+    # Loop over each frame object in the metadata file
+    for obj in metadata:
+        if obj.startswith("Metadata-Default"):
+            filename = obj.rsplit("/", 1)[-1]
+            if filename != "Summary":
+                cv_img = cv.imread(directoryPath + "/" + filename)
+                if cv_img is None:
+                    missing_frames += 1
+                    expect_frame_no += 1
+                    continue
+                filenames_list.append(filename)
+
+                # Check the actual image shape against the expected shape
+                actual_height, actual_width = cv_img.shape[0], cv_img.shape[1]
+                expected_width = metadata[obj]["Width"]
+                expected_height = metadata[obj]["Height"]
+                if (actual_height != expected_height) or (actual_width != expected_width):
+                    raise Exception(f"Mismatched image size: frame: {directoryPath}/{filename}")
+
+                currentFrame = metadata[obj]["Frame"]
+                if currentFrame == 0:
+                    time_to_first_image = metadata[obj]["ElapsedTime-ms"]
+
+                # Check for skipped frames
+                if currentFrame != expect_frame_no:
+                    missing_frames += 1
+
+                # Build the list of intervals between two consecutive frames
+                total_time_ms = metadata[obj]["ElapsedTime-ms"] - time_to_first_image
+                intervals_list.append(total_time_ms - tmp_total_time_ms)
+                tmp_total_time_ms = total_time_ms
+                expect_frame_no += 1
+
+    # Account for frames missing from the end of the recording
+    if expect_frame_no != expected_frames:
+        missing_frames += expected_frames - expect_frame_no
+    df = pd.DataFrame(intervals_list, columns=["intervals"])
+    df2 = load_dataframe(audit_out_file)
+    data = [
+        video_name,
+        expected_frames,
+        expected_frames - missing_frames,
+        theoretical_interval,
+        df["intervals"].mean(),
+        df["intervals"].std(),
+        total_time_ms / 1000,
+        expect_frame_no / (total_time_ms / 1000),
+    ]
+    # If the video name is not yet present, create a new entry; else update its data
+    if video_name not in df2.values:
+        df2.loc[len(df2.index)] = data
+    else:
+        df2.loc[df2["video_name"] == video_name] = data
+    df2.reset_index(drop=True, inplace=True)
+
+    print(df2)
+    df2.to_csv(audit_out_file, index=False)
+    return {
+        "number_of_expected_frames": expected_frames,
+        "number_of_actual_frames": expected_frames - missing_frames,
+        "expected_interval": theoretical_interval,
+        "average_interval": df["intervals"].mean(),
+        "stdev_interval": df["intervals"].std(),
+        "actual_length_seconds": total_time_ms / 1000,
+        "avg_fps": expect_frame_no / (total_time_ms / 1000),
+    }
diff --git a/experiments/activity_index/CeleST_vs_WFNTP_activity_index_comparison.csv b/experiments/activity_index/CeleST_vs_WFNTP_activity_index_comparison.csv
new file mode 100644
index 0000000..68b026a
--- /dev/null
+++ b/experiments/activity_index/CeleST_vs_WFNTP_activity_index_comparison.csv
@@ -0,0 +1,9 @@
+video_src,CeleST_activity_index_10_normalized,Celest_activity_index_median,Celest_activity_index_median_normalized,WF_NTP_activity_index_normalized
+sample05,-1.30947885393232,140.453948,-0.735046359658527,-1.28607487396111
+sample05,0.366508953118392,170.811533,0.207815446789699,-0.248306412784322
+sample05,1.06576611343826,206.966828,1.33074557302894,0.99869915010498
+sample05,-0.12279621262433,138.249455,-0.803514660160113,0.535682136640454
+sample01,-0.636284514236627,55.244184,-0.703842871953718,-0.428269899834041
+sample01,-0.230149081290255,78.927102,-0.0532987065501446,1.11493457446069
+sample01,1.71021625284489,132.961965,1.43098058931329,-0.566481289642593
+sample01,-0.060401910118327,56.336468,-0.673839010809424,-1.11966300038972
diff --git a/experiments/activity_index/bucket_path.txt b/experiments/activity_index/bucket_path.txt
new file mode 100644
index 0000000..56ab650
--- /dev/null
+++ b/experiments/activity_index/bucket_path.txt
@@ -0,0 +1,19 @@
+#s3://lab-nematode/raw/220503_TPS_ACR085/crawling_zoom113_10ms_100img/Default/
+#s3://lab-nematode/raw/220503_TPS_ACR085/trashing_zoom113_10ms_100img/Default/
+#s3://lab-nematode/raw/220503_TPS_ACR125/crawling_zoom113_10ms_100img/Default/
+#s3://lab-nematode/raw/220503_TPS_ACR125/thrashing_zoom113_10ms_100img/Default/
+#s3://lab-nematode/raw/220503_TPS_N2/zoom113_20ms_2/Default/
+#s3://lab-nematode/raw/220503_TPS_N2/zoom197_20ms_1/Default/
+#s3://lab-nematode/raw/220503_TPS_N2/zoom197_20ms_2/Default/
+#s3://lab-nematode/220427_BF_RC7_30ms_11.3x/crawl/ACR125_50ms_1/Default/
+#s3://lab-nematode/220427_BF_RC7_30ms_11.3x/crawl/ACR125_50ms_2_1/Default/
+#s3://lab-nematode/220427_BF_RC7_30ms_11.3x/crawl/ACR125_burst_1/Default/
+#s3://lab-nematode/220427_BF_RC7_30ms_11.3x/crawl/ACR125_burst_3_1/Default/
+#s3://lab-nematode/220427_BF_RC7_30ms_11.3x/swim/acr125_burst_1/Default/
+#s3://lab-nematode/220427_BF_RC7_30ms_11.3x/swim/acr125_burst_2/Default/
+#s3://lab-nematode/220427_BF_RC7_30ms_11.3x/swim/acr125_burst_3/Default/
+#s3://lab-nematode/220427_BF_RC7_30ms_11.3x/swim/acr125_burst_4_2/Default/
+#s3://lab-nematode/raw/dt100ms_11.3x_RC8_BF_expo33ms/Default/
+#s3://lab-nematode/raw/dt10ms_11.3x_RC8_BF_expo33ms/Default/
+s3://lab-nematode/raw/dt50ms_11.3x_RC8_BF_expo33ms/Default/
+s3://lab-nematode/raw/stream_11.3x_RC8_BF_expo_33ms/Default/
diff --git a/experiments/activity_index/dl_and_make_videos.py b/experiments/activity_index/dl_and_make_videos.py
new file mode 100644
index 0000000..15c7e0a
--- /dev/null
+++ b/experiments/activity_index/dl_and_make_videos.py
@@ -0,0 +1,104 @@
+import json
+import os
+
+
+def extract_bucket_names(abs_file_path: str) -> list:
+    """Extract the AWS bucket paths from a text file into a list.
+
+    Lines commented out with a hashtag are ignored.
+
+    Args:
+        abs_file_path (str): text file containing the list of bucket paths.
+
+    Returns:
+        list: list of bucket paths.
+    """
+    bucket_list = []
+    with open(abs_file_path, "r") as f:
+        for bucket_name in f:
+            if "#" not in bucket_name:
+                bucket_list.append(bucket_name.strip())
+    return bucket_list
+
+
+def load_metadata(directoryPath: str) -> dict:
+    """Load the metadata file into a Python dictionary.
+
+    Args:
+        directoryPath (str): path to the directory containing the metadata file.
+
+    Raises:
+        FileNotFoundError: file [directoryPath]/metadata.txt does not exist.
+        Exception: [directoryPath]/metadata.txt is not readable by the user.
+        JSONDecodeError: issue when loading the metadata from the file.
+
+    Returns:
+        dict: the loaded metadata.
+ """ + metadata_file = directoryPath + "/metadata.txt" + if not os.access(metadata_file, os.F_OK): + raise FileNotFoundError(f"File {metadata_file} does not exists.") + if not os.access(metadata_file, os.R_OK): + raise Exception(f"File {metadata_file} is not readable for the user.") + with open(file=metadata_file, mode="r") as file: + metadata = json.load(file) + return metadata + + +def calculate_expected_fps(metadata_path=".") -> float: + """Use the metadata file provided to retrieve a theoretical fps. + + Args: + metadata_path (str, optional): path to the metadata file. Defaults to '.'. + + Returns: + float: the number of theoretical fps retrieved from metadata + """ + metadata = load_metadata(metadata_path) + theoretical_interval = metadata["Summary"]["Interval_ms"] + expected_fps = 1000 / theoretical_interval + return expected_fps + + +def makeVideo(expected_fps: float, dir_name: str) -> None: + """Use ffmpeg to make a video from available tif images in the provided dir. + + Args: + expected_fps (float): fps used to make the video + dir_name (str): name of dir containing tif images to convert + """ + ffmpeg_cmd = "ffmpeg -loglevel 1 -framerate " + ffmpeg_cmd += f"{expected_fps}" + ffmpeg_cmd += f" -i img_channel000_position000_time%09d_z000.tif {dir_name}.mp4" + print(ffmpeg_cmd) + os.system(ffmpeg_cmd) + + +if __name__ == "__main__": + working_dir = os.path.dirname(__file__) + bucket_names_file = os.path.join(working_dir, "bucket_path.txt") + bucket_list = extract_bucket_names(bucket_names_file) + # Loop over each bucket to download images and convert them into a video + for bucket_path in bucket_list: + # Set up new dir and cd to it + dir_name = ( + bucket_path.replace("/Default/", "") + .replace("s3://lab-nematode/", "") + .replace("/", "-") + .replace(" ", "") + ) + video_dir = os.path.join(working_dir, dir_name) + if not os.path.exists(video_dir): + os.mkdir(video_dir) + os.chdir(video_dir) + # Download all tiff images from an aws bucket + aws_cmd = f"aws s3 cp {bucket_path} . --recursive" + os.system(aws_cmd) + else: + raise FileExistsError(f"File {video_dir} already exists") + # Create a video from previously downloaded images + expected_fps = calculate_expected_fps() + makeVideo(expected_fps, dir_name) + os.chdir("../../../") diff --git a/experiments/activity_index/report_overview/report_overview.md b/experiments/activity_index/report_overview/report_overview.md new file mode 100644 index 0000000..bf54863 --- /dev/null +++ b/experiments/activity_index/report_overview/report_overview.md @@ -0,0 +1,7 @@ +# Description + + +# Methodology + + +# Results diff --git a/loop.sh b/loop.sh new file mode 100644 index 0000000..d5d658c --- /dev/null +++ b/loop.sh @@ -0,0 +1,9 @@ +#!/bin/bash + +abs_path=${PWD} +echo $abs_path +for dir in experiments/activity_index/* # list img directories + dir=${abs_path}'/'${dir%*/} # remove the trailing "/" + echo "${dir} audit done." # print everything after the final "/" + python3 -m auditor --path=${dir} +done